//! This module contains code to equate the input/output types appearing
//! in the MIR with the expected input/output types from the function
//! signature. This requires a bit of processing, as the expected types
//! are supplied to us before normalization and may contain opaque
//! `impl Trait` instances. In contrast, the input/output types found in
//! the MIR (specifically, in the special local variables for the
//! `RETURN_PLACE` and the MIR arguments) are always fully normalized (and
//! contain revealed `impl Trait` values).
use std::assert_matches::assert_matches;

use itertools::Itertools;
use rustc_hir as hir;
use rustc_infer::infer::{BoundRegionConversionTime, RegionVariableOrigin};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty};
use rustc_span::Span;
use tracing::{debug, instrument};

use super::{Locations, TypeChecker};
use crate::renumber::RegionCtxt;
use crate::universal_regions::DefiningTy;
2324impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
25/// Check explicit closure signature annotation,
26 /// e.g., `|x: FxIndexMap<_, &'static u32>| ...`.
27#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("check_signature_annotation",
"rustc_borrowck::type_check::input_output",
::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_borrowck/src/type_check/input_output.rs"),
::tracing_core::__macro_support::Option::Some(27u32),
::tracing_core::__macro_support::Option::Some("rustc_borrowck::type_check::input_output"),
::tracing_core::field::FieldSet::new(&[],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::DEBUG <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{ meta.fields().value_set(&[]) })
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return: () = loop {};
return __tracing_attr_fake_return;
}
{
let mir_def_id = self.body.source.def_id().expect_local();
if !self.tcx().is_closure_like(mir_def_id.to_def_id()) { return; }
if self.body.tainted_by_errors.is_some() { return; }
let user_provided_poly_sig =
self.tcx().closure_user_provided_sig(mir_def_id);
let user_provided_sig =
self.instantiate_canonical(self.body.span,
&user_provided_poly_sig);
let mut user_provided_sig =
self.infcx.instantiate_binder_with_fresh_vars(self.body.span,
BoundRegionConversionTime::FnCall, user_provided_sig);
if let DefiningTy::CoroutineClosure(_, args) =
self.universal_regions.defining_ty {
{
match self.tcx().coroutine_kind(self.tcx().coroutine_for_closure(mir_def_id))
{
Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async
| hir::CoroutineDesugaring::Gen,
hir::CoroutineSource::Closure)) => {}
ref left_val => {
::core::panicking::assert_matches_failed(left_val,
"Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async |\nhir::CoroutineDesugaring::Gen, hir::CoroutineSource::Closure))",
::core::option::Option::Some(format_args!("this needs to be modified if we\'re lowering non-async closures")));
}
}
};
let args = args.as_coroutine_closure();
let tupled_upvars_ty =
ty::CoroutineClosureSignature::tupled_upvars_by_closure_kind(self.tcx(),
args.kind(),
Ty::new_tup(self.tcx(), user_provided_sig.inputs()),
args.tupled_upvars_ty(),
args.coroutine_captures_by_ref_ty(),
self.infcx.next_region_var(RegionVariableOrigin::Misc(self.body.span),
|| { RegionCtxt::Unknown }));
let next_ty_var = || self.infcx.next_ty_var(self.body.span);
let output_ty =
Ty::new_coroutine(self.tcx(),
self.tcx().coroutine_for_closure(mir_def_id),
ty::CoroutineArgs::new(self.tcx(),
ty::CoroutineArgsParts {
parent_args: args.parent_args(),
kind_ty: Ty::from_coroutine_closure_kind(self.tcx(),
args.kind()),
return_ty: user_provided_sig.output(),
tupled_upvars_ty,
resume_ty: next_ty_var(),
yield_ty: next_ty_var(),
}).args);
user_provided_sig =
self.tcx().mk_fn_sig(user_provided_sig.inputs().iter().copied(),
output_ty, user_provided_sig.fn_sig_kind);
}
let is_coroutine_with_implicit_resume_ty =
self.tcx().is_coroutine(mir_def_id.to_def_id()) &&
user_provided_sig.inputs().is_empty();
for (&user_ty, arg_decl) in
user_provided_sig.inputs().iter().zip_eq(self.body.args_iter().skip(1
+
if is_coroutine_with_implicit_resume_ty {
1
} else { 0 }).map(|local| &self.body.local_decls[local])) {
self.ascribe_user_type_skip_wf(arg_decl.ty,
ty::UserType::new(ty::UserTypeKind::Ty(user_ty)),
arg_decl.source_info.span);
}
let output_decl = &self.body.local_decls[RETURN_PLACE];
self.ascribe_user_type_skip_wf(output_decl.ty,
ty::UserType::new(ty::UserTypeKind::Ty(user_provided_sig.output())),
output_decl.source_info.span);
}
}
}#[instrument(skip(self), level = "debug")]28pub(super) fn check_signature_annotation(&mut self) {
29let mir_def_id = self.body.source.def_id().expect_local();
3031if !self.tcx().is_closure_like(mir_def_id.to_def_id()) {
32return;
33 }
3435// If the MIR body was constructed via `construct_error` (because an
36 // earlier pass like match checking failed), its args may not match
37 // the user-provided signature (e.g. a coroutine with too many
38 // parameters). Bail out as this can cause panic,
39 // see <https://github.com/rust-lang/rust/issues/139570>.
40if self.body.tainted_by_errors.is_some() {
41return;
42 }
4344let user_provided_poly_sig = self.tcx().closure_user_provided_sig(mir_def_id);
4546// Instantiate the canonicalized variables from user-provided signature
47 // (e.g., the `_` in the code above) with fresh variables.
48 // Then replace the bound items in the fn sig with fresh variables,
49 // so that they represent the view from "inside" the closure.
50let user_provided_sig = self.instantiate_canonical(self.body.span, &user_provided_poly_sig);
51let mut user_provided_sig = self.infcx.instantiate_binder_with_fresh_vars(
52self.body.span,
53 BoundRegionConversionTime::FnCall,
54 user_provided_sig,
55 );
5657// FIXME(async_closures): It's kind of wacky that we must apply this
58 // transformation here, since we do the same thing in HIR typeck.
59 // Maybe we could just fix up the canonicalized signature during HIR typeck?
60if let DefiningTy::CoroutineClosure(_, args) = self.universal_regions.defining_ty {
61assert_matches!(
62self.tcx().coroutine_kind(self.tcx().coroutine_for_closure(mir_def_id)),
63Some(hir::CoroutineKind::Desugared(
64 hir::CoroutineDesugaring::Async | hir::CoroutineDesugaring::Gen,
65 hir::CoroutineSource::Closure
66 )),
67"this needs to be modified if we're lowering non-async closures"
68);
69// Make sure to use the args from `DefiningTy` so the right NLL region vids are
70 // prepopulated into the type.
71let args = args.as_coroutine_closure();
72let tupled_upvars_ty = ty::CoroutineClosureSignature::tupled_upvars_by_closure_kind(
73self.tcx(),
74 args.kind(),
75 Ty::new_tup(self.tcx(), user_provided_sig.inputs()),
76 args.tupled_upvars_ty(),
77 args.coroutine_captures_by_ref_ty(),
78self.infcx.next_region_var(RegionVariableOrigin::Misc(self.body.span), || {
79 RegionCtxt::Unknown
80 }),
81 );
8283let next_ty_var = || self.infcx.next_ty_var(self.body.span);
84let output_ty = Ty::new_coroutine(
85self.tcx(),
86self.tcx().coroutine_for_closure(mir_def_id),
87 ty::CoroutineArgs::new(
88self.tcx(),
89 ty::CoroutineArgsParts {
90 parent_args: args.parent_args(),
91 kind_ty: Ty::from_coroutine_closure_kind(self.tcx(), args.kind()),
92 return_ty: user_provided_sig.output(),
93 tupled_upvars_ty,
94// For async closures, none of these can be annotated, so just fill
95 // them with fresh ty vars.
96resume_ty: next_ty_var(),
97 yield_ty: next_ty_var(),
98 },
99 )
100 .args,
101 );
102103 user_provided_sig = self.tcx().mk_fn_sig(
104 user_provided_sig.inputs().iter().copied(),
105 output_ty,
106 user_provided_sig.fn_sig_kind,
107 );
108 }
109110let is_coroutine_with_implicit_resume_ty = self.tcx().is_coroutine(mir_def_id.to_def_id())
111 && user_provided_sig.inputs().is_empty();
112113for (&user_ty, arg_decl) in user_provided_sig.inputs().iter().zip_eq(
114// In MIR, closure args begin with an implicit `self`.
115 // Also, coroutines have a resume type which may be implicitly `()`.
116self.body
117 .args_iter()
118 .skip(1 + if is_coroutine_with_implicit_resume_ty { 1 } else { 0 })
119 .map(|local| &self.body.local_decls[local]),
120 ) {
121self.ascribe_user_type_skip_wf(
122 arg_decl.ty,
123 ty::UserType::new(ty::UserTypeKind::Ty(user_ty)),
124 arg_decl.source_info.span,
125 );
126 }
127128// If the user explicitly annotated the output type, enforce it.
129let output_decl = &self.body.local_decls[RETURN_PLACE];
130self.ascribe_user_type_skip_wf(
131 output_decl.ty,
132 ty::UserType::new(ty::UserTypeKind::Ty(user_provided_sig.output())),
133 output_decl.source_info.span,
134 );
135 }
136137// FIXME(BoxyUwU): This should probably be part of a larger borrowck dev-guide chapter
138 //
139/// Enforce that the types of the locals corresponding to the inputs and output of
140 /// the body are equal to those of the (normalized) signature.
141 ///
142 /// This is necessary for two reasons:
143 /// - Locals in the MIR all start out with `'erased` regions and then are replaced
144 /// with unconstrained nll vars. If we have a function returning `&'a u32` then
145 /// the local `_0: &'?10 u32` needs to have its region var equated with the nll
146 /// var representing `'a`. i.e. borrow check must uphold that `'?10 = 'a`.
147 /// - When computing the normalized signature we may introduce new unconstrained nll
148 /// vars due to higher ranked where clauses ([#136547]). We then wind up with implied
149 /// bounds involving these vars.
150 ///
151 /// For this reason it is important that we equate with the *normalized* signature
152 /// which was produced when computing implied bounds. If we do not do so then we will
153 /// wind up with implied bounds on nll vars which cannot actually be used as the nll
154 /// var never gets related to anything.
155 ///
156 /// For 'closure-like' bodies this function effectively relates the *inferred* signature
157 /// of the closure against the locals corresponding to the closure's inputs/output. It *does
158 /// not* relate the user provided types for the signature to the locals, this is handled
159 /// separately by: [`TypeChecker::check_signature_annotation`].
160 ///
161 /// [#136547]: <https://www.github.com/rust-lang/rust/issues/136547>
162#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("equate_inputs_and_outputs",
"rustc_borrowck::type_check::input_output",
::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_borrowck/src/type_check/input_output.rs"),
::tracing_core::__macro_support::Option::Some(162u32),
::tracing_core::__macro_support::Option::Some("rustc_borrowck::type_check::input_output"),
::tracing_core::field::FieldSet::new(&["normalized_inputs_and_output"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::DEBUG <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&normalized_inputs_and_output)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return: () = loop {};
return __tracing_attr_fake_return;
}
{
let (&normalized_output_ty, normalized_input_tys) =
normalized_inputs_and_output.split_last().unwrap();
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_borrowck/src/type_check/input_output.rs:167",
"rustc_borrowck::type_check::input_output",
::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_borrowck/src/type_check/input_output.rs"),
::tracing_core::__macro_support::Option::Some(167u32),
::tracing_core::__macro_support::Option::Some("rustc_borrowck::type_check::input_output"),
::tracing_core::field::FieldSet::new(&["normalized_output_ty"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&normalized_output_ty)
as &dyn Value))])
});
} else { ; }
};
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_borrowck/src/type_check/input_output.rs:168",
"rustc_borrowck::type_check::input_output",
::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_borrowck/src/type_check/input_output.rs"),
::tracing_core::__macro_support::Option::Some(168u32),
::tracing_core::__macro_support::Option::Some("rustc_borrowck::type_check::input_output"),
::tracing_core::field::FieldSet::new(&["normalized_input_tys"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&normalized_input_tys)
as &dyn Value))])
});
} else { ; }
};
for (argument_index, &normalized_input_ty) in
normalized_input_tys.iter().enumerate() {
if argument_index + 1 >= self.body.local_decls.len() {
self.tcx().dcx().span_bug(self.body.span,
"found more normalized_input_ty than local_decls");
}
let local = Local::from_usize(argument_index + 1);
let mir_input_ty = self.body.local_decls[local].ty;
let mir_input_span =
self.body.local_decls[local].source_info.span;
self.equate_normalized_input_or_output(normalized_input_ty,
mir_input_ty, mir_input_span);
}
if let Some(mir_yield_ty) = self.body.yield_ty() {
let yield_span =
self.body.local_decls[RETURN_PLACE].source_info.span;
self.equate_normalized_input_or_output(self.universal_regions.yield_ty.unwrap(),
mir_yield_ty, yield_span);
}
if let Some(mir_resume_ty) = self.body.resume_ty() {
let yield_span =
self.body.local_decls[RETURN_PLACE].source_info.span;
self.equate_normalized_input_or_output(self.universal_regions.resume_ty.unwrap(),
mir_resume_ty, yield_span);
}
let mir_output_ty = self.body.return_ty();
let output_span =
self.body.local_decls[RETURN_PLACE].source_info.span;
self.equate_normalized_input_or_output(normalized_output_ty,
mir_output_ty, output_span);
}
}
}#[instrument(skip(self), level = "debug")]163pub(super) fn equate_inputs_and_outputs(&mut self, normalized_inputs_and_output: &[Ty<'tcx>]) {
164let (&normalized_output_ty, normalized_input_tys) =
165 normalized_inputs_and_output.split_last().unwrap();
166167debug!(?normalized_output_ty);
168debug!(?normalized_input_tys);
169170// Equate expected input tys with those in the MIR.
171for (argument_index, &normalized_input_ty) in normalized_input_tys.iter().enumerate() {
172if argument_index + 1 >= self.body.local_decls.len() {
173self.tcx()
174 .dcx()
175 .span_bug(self.body.span, "found more normalized_input_ty than local_decls");
176 }
177178// In MIR, argument N is stored in local N+1.
179let local = Local::from_usize(argument_index + 1);
180181let mir_input_ty = self.body.local_decls[local].ty;
182183let mir_input_span = self.body.local_decls[local].source_info.span;
184self.equate_normalized_input_or_output(
185 normalized_input_ty,
186 mir_input_ty,
187 mir_input_span,
188 );
189 }
190191if let Some(mir_yield_ty) = self.body.yield_ty() {
192let yield_span = self.body.local_decls[RETURN_PLACE].source_info.span;
193self.equate_normalized_input_or_output(
194self.universal_regions.yield_ty.unwrap(),
195 mir_yield_ty,
196 yield_span,
197 );
198 }
199200if let Some(mir_resume_ty) = self.body.resume_ty() {
201let yield_span = self.body.local_decls[RETURN_PLACE].source_info.span;
202self.equate_normalized_input_or_output(
203self.universal_regions.resume_ty.unwrap(),
204 mir_resume_ty,
205 yield_span,
206 );
207 }
208209// Equate expected output ty with the type of the RETURN_PLACE in MIR
210let mir_output_ty = self.body.return_ty();
211let output_span = self.body.local_decls[RETURN_PLACE].source_info.span;
212self.equate_normalized_input_or_output(normalized_output_ty, mir_output_ty, output_span);
213 }
214215#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("equate_normalized_input_or_output",
"rustc_borrowck::type_check::input_output",
::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_borrowck/src/type_check/input_output.rs"),
::tracing_core::__macro_support::Option::Some(215u32),
::tracing_core::__macro_support::Option::Some("rustc_borrowck::type_check::input_output"),
::tracing_core::field::FieldSet::new(&["a", "b", "span"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::DEBUG <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&a)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&b)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&span)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return: () = loop {};
return __tracing_attr_fake_return;
}
{
if self.infcx.next_trait_solver() {
return self.eq_types(a, b, Locations::All(span),
ConstraintCategory::BoringNoLocation).unwrap_or_else(|terr|
{
{
crate::type_check::mirbug(self.tcx(), self.last_span,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("broken MIR in {0:?} ({1:?}): {2}",
self.body().source.def_id(), Location::START,
format_args!("equate_normalized_input_or_output: `{0:?}=={1:?}` failed with `{2:?}`",
a, b, terr)))
}))
};
});
}
if let Err(_) =
self.eq_types(a, b, Locations::All(span),
ConstraintCategory::BoringNoLocation) {
let b = self.normalize(b, Locations::All(span));
self.eq_types(a, b, Locations::All(span),
ConstraintCategory::BoringNoLocation).unwrap_or_else(|terr|
{
{
crate::type_check::mirbug(self.tcx(), self.last_span,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("broken MIR in {0:?} ({1:?}): {2}",
self.body().source.def_id(), Location::START,
format_args!("equate_normalized_input_or_output: `{0:?}=={1:?}` failed with `{2:?}`",
a, b, terr)))
}))
};
});
};
}
}
}#[instrument(skip(self), level = "debug")]216fn equate_normalized_input_or_output(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, span: Span) {
217if self.infcx.next_trait_solver() {
218return self
219.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
220 .unwrap_or_else(|terr| {
221span_mirbug!(
222self,
223 Location::START,
224"equate_normalized_input_or_output: `{a:?}=={b:?}` failed with `{terr:?}`",
225 );
226 });
227 }
228229// This is a hack. `body.local_decls` are not necessarily normalized in the old
230 // solver due to not deeply normalizing in writeback. So we must re-normalize here.
231 //
232 // However, in most cases normalizing is unnecessary so we only do so if it may be
233 // necessary for type equality to hold. This leads to some (very minor) performance
234 // wins.
235if let Err(_) =
236self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
237 {
238let b = self.normalize(b, Locations::All(span));
239self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
240 .unwrap_or_else(|terr| {
241span_mirbug!(
242self,
243 Location::START,
244"equate_normalized_input_or_output: `{a:?}=={b:?}` failed with `{terr:?}`",
245 );
246 });
247 };
248 }
249}