//! Manages calling a concrete function (with known MIR body) with argument passing,
//! and returning the return value to the caller.

use std::assert_matches::assert_matches;
use std::borrow::Cow;

use either::{Left, Right};
use rustc_abi::{self as abi, ExternAbi, FieldIdx, Integer, VariantIdx};
use rustc_hir::def_id::DefId;
use rustc_hir::{LangItem, find_attr};
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, AdtDef, Instance, Ty, Unnormalized, VariantDef};
use rustc_middle::{bug, mir, span_bug};
use rustc_target::callconv::{ArgAbi, FnAbi};
use tracing::field::Empty;
use tracing::{info, instrument, trace};

use super::{
    CtfeProvenance, EnteredTraceSpan, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine,
    OpTy, PlaceTy, Projectable, Provenance, RetagMode, ReturnAction, ReturnContinuation, Scalar,
    interp_ok, throw_ub, throw_ub_format,
};
use crate::enter_trace_span;

/// An argument passed to a function.
#[derive(Clone, Debug)]
pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> {
    /// Pass a copy of the given operand.
    Copy(OpTy<'tcx, Prov>),
    /// Allow for the argument to be passed in-place: destroy the value originally stored at that
    /// place and make the place inaccessible for the duration of the function call. This *must* be
    /// an in-memory place so that we can do the proper alias checks.
    InPlace(MPlaceTy<'tcx, Prov>),
}

impl<'tcx, Prov: Provenance> FnArg<'tcx, Prov> {
    pub fn layout(&self) -> &TyAndLayout<'tcx> {
        match self {
            FnArg::Copy(op) => &op.layout,
            FnArg::InPlace(mplace) => &mplace.layout,
        }
    }

    /// Make a copy of the given fn_arg. Any `InPlace` is degenerated to a copy; no protection of
    /// the original memory occurs.
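    ///
    /// A hedged sketch of the intended use (interpreter-internal types, so not a runnable
    /// doctest; `fn_arg` stands for some `FnArg` value):
    ///
    /// ```ignore (illustrative)
    /// let by_copy: OpTy<'_, Prov> = fn_arg.copy_fn_arg();
    /// // An `InPlace(mplace)` simply becomes `mplace.into()`: the callee can no longer
    /// // tell (except via pointer identity) that in-place passing was requested.
    /// ```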
    pub fn copy_fn_arg(&self) -> OpTy<'tcx, Prov> {
        match self {
            FnArg::Copy(op) => op.clone(),
            FnArg::InPlace(mplace) => mplace.clone().into(),
        }
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Make a copy of the given fn_args. Any `InPlace` is degenerated to a copy; no protection of
    /// the original memory occurs.
    pub fn copy_fn_args(args: &[FnArg<'tcx, M::Provenance>]) -> Vec<OpTy<'tcx, M::Provenance>> {
        args.iter().map(|fn_arg| fn_arg.copy_fn_arg()).collect()
    }

    /// Helper function for argument untupling.
    fn fn_arg_project_field(
        &self,
        arg: &FnArg<'tcx, M::Provenance>,
        field: FieldIdx,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match arg {
            FnArg::Copy(op) => FnArg::Copy(self.project_field(op, field)?),
            FnArg::InPlace(mplace) => FnArg::InPlace(self.project_field(mplace, field)?),
        })
    }

    /// Find the wrapped inner type of a transparent wrapper.
    /// Must not be called on 1-ZSTs (as they don't have a uniquely defined "wrapped field").
    ///
    /// We work with `TyAndLayout` here since that makes it much easier to iterate over all fields.
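    ///
    /// A hedged illustration (`Wrapper` is a hypothetical type, not something from this module):
    ///
    /// ```ignore (illustrative; uses interpreter-internal layout queries)
    /// #[repr(transparent)]
    /// struct Wrapper(u32, std::marker::PhantomData<String>);
    /// // `PhantomData` is a 1-ZST, so `u32` is the unique non-1-ZST field:
    /// // unfolding the layout of `Wrapper` yields the layout of `u32`.
    /// ```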
    fn unfold_transparent(
        &self,
        layout: TyAndLayout<'tcx>,
        may_unfold: impl Fn(AdtDef<'tcx>) -> bool,
    ) -> TyAndLayout<'tcx> {
        match layout.ty.kind() {
            ty::Adt(adt_def, _) if adt_def.repr().transparent() && may_unfold(*adt_def) => {
                assert!(!adt_def.is_enum());
                // Find the non-1-ZST field, and recurse.
                let (_, field) = layout.non_1zst_field(self).unwrap();
                self.unfold_transparent(field, may_unfold)
            }
            ty::Pat(base, _) => self.layout_of(*base).expect(
                "if the layout of a pattern type could be computed, so can the layout of its base",
            ),
            // Not a transparent type, no further unfolding.
            _ => layout,
        }
    }

    /// Unwrap types that are guaranteed the null-pointer optimization (NPO).
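    ///
    /// For illustration, the core NPO guarantee at the type level (a documented layout
    /// guarantee, shown as a plain doctest independent of this module):
    ///
    /// ```
    /// use std::mem::size_of;
    /// // `Option<&T>` is guaranteed to have the same size (and ABI) as `&T` ...
    /// assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
    /// // ... and likewise for function pointers.
    /// assert_eq!(size_of::<Option<fn()>>(), size_of::<fn()>());
    /// ```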
    fn unfold_npo(&self, layout: TyAndLayout<'tcx>) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        // Check if this is an option-like type wrapping some type.
        let ty::Adt(def, args) = layout.ty.kind() else {
            // Not an ADT, so definitely no NPO.
            return interp_ok(layout);
        };
        if def.variants().len() != 2 {
            // Not a 2-variant enum, so no NPO.
            return interp_ok(layout);
        }
        assert!(def.is_enum());

        let all_fields_1zst = |variant: &VariantDef| -> InterpResult<'tcx, _> {
            for field in &variant.fields {
                let ty = field.ty(*self.tcx, args);
                let layout = self.layout_of(ty)?;
                if !layout.is_1zst() {
                    return interp_ok(false);
                }
            }
            interp_ok(true)
        };

        // If one variant consists entirely of 1-ZSTs, then the other variant
        // is the only "relevant" one for this check.
        let var0 = VariantIdx::from_u32(0);
        let var1 = VariantIdx::from_u32(1);
        let relevant_variant = if all_fields_1zst(def.variant(var0))? {
            def.variant(var1)
        } else if all_fields_1zst(def.variant(var1))? {
            def.variant(var0)
        } else {
            // No variant is all-1-ZST, so no NPO.
            return interp_ok(layout);
        };
        // The "relevant" variant must have exactly one field, and its type is the "inner" type.
        if relevant_variant.fields.len() != 1 {
            return interp_ok(layout);
        }
        let inner = relevant_variant.fields[FieldIdx::from_u32(0)].ty(*self.tcx, args);
        let inner = self.layout_of(inner)?;

        // Check if the inner type is one of the NPO-guaranteed ones.
        // For that we first unpeel transparent *structs* (but not unions).
        let is_npo =
            |def: AdtDef<'tcx>| find_attr!(self.tcx, def.did(), RustcNonnullOptimizationGuaranteed);
        let inner = self.unfold_transparent(inner, /* may_unfold */ |def| {
            // Stop at NPO types so that we don't miss that attribute in the check below!
            def.is_struct() && !is_npo(def)
        });
        interp_ok(match inner.ty.kind() {
            ty::Ref(..) | ty::FnPtr(..) => {
                // Option<&T> behaves like &T, and same for fn()
                inner
            }
            ty::Adt(def, _) if is_npo(*def) => {
                // Once we found a `nonnull_optimization_guaranteed` type, further strip off
                // newtype structs from it to find the underlying ABI type.
                self.unfold_transparent(inner, /* may_unfold */ |def| def.is_struct())
            }
            _ => {
                // Everything else we do not unfold.
                layout
            }
        })
    }

    /// Check if these two layouts look like they are fn-ABI-compatible.
    /// (We also compare the `PassMode`, so this doesn't have to check everything. But it turns out
    /// that only checking the `PassMode` is insufficient.)
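    ///
    /// As a hedged illustration of the guarantee this check approximates: per the documented
    /// ABI-compatibility rules, `&T` and `*const T` are ABI-compatible, so a function pointer
    /// may be transmuted between such signatures and then called:
    ///
    /// ```
    /// fn takes_ref(x: &u8) -> u8 { *x }
    /// // `fn(&u8) -> u8` and `fn(*const u8) -> u8` have ABI-compatible signatures.
    /// let f: fn(*const u8) -> u8 =
    ///     unsafe { std::mem::transmute(takes_ref as fn(&u8) -> u8) };
    /// assert_eq!(f(&42u8 as *const u8), 42);
    /// ```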
    fn layout_compat(
        &self,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, bool> {
        // Fast path: equal types are definitely compatible.
        if caller.ty == callee.ty {
            return interp_ok(true);
        }
        // 1-ZSTs are compatible with all 1-ZSTs (and with nothing else).
        if caller.is_1zst() || callee.is_1zst() {
            return interp_ok(caller.is_1zst() && callee.is_1zst());
        }
        // Unfold newtypes and NPO optimizations.
        let unfold = |layout: TyAndLayout<'tcx>| {
            self.unfold_npo(self.unfold_transparent(layout, /* may_unfold */ |_def| true))
        };
        let caller = unfold(caller)?;
        let callee = unfold(callee)?;
        // Now see if these inner types are compatible.

        // Compatible pointer types. For thin pointers, we have to accept even non-`repr(transparent)`
        // things as compatible due to `DispatchFromDyn`. For instance, `Rc<i32>` and `*mut i32`
        // must be compatible. So we just accept everything with Pointer ABI as compatible,
        // even if this will accept some code that is not stably guaranteed to work.
        // This also handles function pointers.
        let thin_pointer = |layout: TyAndLayout<'tcx>| match layout.backend_repr {
            abi::BackendRepr::Scalar(s) => match s.primitive() {
                abi::Primitive::Pointer(addr_space) => Some(addr_space),
                _ => None,
            },
            _ => None,
        };
        if let (Some(caller), Some(callee)) = (thin_pointer(caller), thin_pointer(callee)) {
            return interp_ok(caller == callee);
        }
        // For wide pointers we have to get the pointee type.
        let pointee_ty = |ty: Ty<'tcx>| -> InterpResult<'tcx, Option<Ty<'tcx>>> {
            // We cannot use `builtin_deref` here since we need to reject `Box<T, MyAlloc>`.
            interp_ok(Some(match ty.kind() {
                ty::Ref(_, ty, _) => *ty,
                ty::RawPtr(ty, _) => *ty,
                // We only accept `Box` with the default allocator.
                _ if ty.is_box_global(*self.tcx) => ty.expect_boxed_ty(),
                _ => return interp_ok(None),
            }))
        };
        if let (Some(caller), Some(callee)) = (pointee_ty(caller.ty)?, pointee_ty(callee.ty)?) {
            // This is okay if they have the same metadata type.
            let meta_ty = |ty: Ty<'tcx>| {
                // Even if `ty` is normalized, the search for the unsized tail will project
                // to fields, which can yield non-normalized types. So we need to provide a
                // normalization function.
                let normalize = |ty| {
                    self.tcx.normalize_erasing_regions(self.typing_env, Unnormalized::new_wip(ty))
                };
                ty.ptr_metadata_ty(*self.tcx, normalize)
            };
            return interp_ok(meta_ty(caller) == meta_ty(callee));
        }

        // Compatible integer types (in particular, usize vs ptr-sized-u32/u64).
        // `char` counts as `u32`.
        let int_ty = |ty: Ty<'tcx>| {
            Some(match ty.kind() {
                ty::Int(ity) => (Integer::from_int_ty(&self.tcx, *ity), /* signed */ true),
                ty::Uint(uty) => (Integer::from_uint_ty(&self.tcx, *uty), /* signed */ false),
                ty::Char => (Integer::I32, /* signed */ false),
                _ => return None,
            })
        };
        if let (Some(caller), Some(callee)) = (int_ty(caller.ty), int_ty(callee.ty)) {
            // This is okay if they are the same integer type.
            return interp_ok(caller == callee);
        }

        // Fall back to exact equality.
        interp_ok(caller == callee)
    }

    /// Returns a `bool` saying whether the two arguments are ABI-compatible.
    pub fn check_argument_compat(
        &self,
        caller_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
    ) -> InterpResult<'tcx, bool> {
        // We do not want to accept things as ABI-compatible that just "happen to be" compatible on the current target,
        // so we implement a type-based check that reflects the guaranteed rules for ABI compatibility.
        if self.layout_compat(caller_abi.layout, callee_abi.layout)? {
            // Ensure that our checks imply actual ABI compatibility for this concrete call.
            // (This can fail e.g. if `#[rustc_nonnull_optimization_guaranteed]` is used incorrectly.)
            assert!(caller_abi.eq_abi(callee_abi));
            interp_ok(true)
        } else {
            trace!(
                "check_argument_compat: incompatible ABIs:\ncaller: {:?}\ncallee: {:?}",
                caller_abi, callee_abi
            );
            interp_ok(false)
        }
    }

    /// Initialize a single callee argument, checking the types for compatibility.
    fn pass_argument<'x, 'y>(
        &mut self,
        caller_args: &mut impl Iterator<
            Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
        >,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_arg_idx: usize,
        callee_arg: &mir::Place<'tcx>,
        callee_ty: Ty<'tcx>,
        already_live: bool,
        is_drop_in_place: bool,
    ) -> InterpResult<'tcx>
    where
        'tcx: 'x,
        'tcx: 'y,
    {
        assert_eq!(callee_ty, callee_abi.layout.ty);
        if callee_abi.is_ignore() {
            // This one is skipped. Still must be made live though!
            if !already_live {
                self.storage_live(callee_arg.as_local().unwrap())?;
            }
            return interp_ok(());
        }
        // Find next caller arg.
        let Some((caller_arg, caller_abi)) = caller_args.next() else {
            throw_ub_format!("calling a function with fewer arguments than it requires");
        };
        assert_eq!(caller_arg.layout().layout, caller_abi.layout.layout);
        // Sadly we cannot assert that `caller_arg.layout().ty` and `caller_abi.layout.ty` are
        // equal; in closures the types sometimes differ. We just hope that `caller_abi` is the
        // right type to print to the user.

        // Check compatibility
        if !self.check_argument_compat(caller_abi, callee_abi)? {
            throw_ub!(AbiMismatchArgument {
                arg_idx: callee_arg_idx,
                caller_ty: caller_abi.layout.ty,
                callee_ty: callee_abi.layout.ty
            });
        }
        // We work with a copy of the argument for now; if this is in-place argument passing, we
        // will later protect the source it comes from. This means the callee cannot observe
        // whether we did in-place or by-copy argument passing, except for pointer equality tests.
        let caller_arg_copy = caller_arg.copy_fn_arg();
        if !already_live {
            let local = callee_arg.as_local().unwrap();
            let meta = caller_arg_copy.meta();
            // `check_argument_compat` ensures that if metadata is needed, both have the same type,
            // so we know they will use the metadata the same way.
            assert!(!meta.has_meta() || caller_arg_copy.layout.ty == callee_ty);

            self.storage_live_dyn(local, meta)?;
        }
        // Now we can finally actually evaluate the callee place.
        let mut callee_arg = self.eval_place(*callee_arg)?;
        // drop_in_place has a signature which says that the first argument is `*mut T`
        // but really it's `&mut T`. This is where we handle that terrible hack in
        // the MIR semantics.
        // FIXME(#154274): remove this hack.
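        // (Hedged illustration: for `drop_in_place::<T>` the MIR signature declares the
        // argument as `ptr: *mut T`, but the transmute below makes us treat it as `&mut T`,
        // so retagging and protection behave as if a mutable reference had been passed.)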
        if is_drop_in_place && callee_arg_idx == 0 {
            let pointee_ty = callee_arg.layout.ty.builtin_deref(true).unwrap();
            let mutref_ty = Ty::new_mut_ref(*self.tcx, self.tcx.lifetimes.re_erased, pointee_ty);
            callee_arg = callee_arg.transmute(self.layout_of(mutref_ty)?, self)?;
        }
        // We allow some transmutes here.
        // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
        // is true for all `copy_op`, but there are a lot of special cases for argument passing
        // specifically.)
        self.copy_op_allow_transmute(&caller_arg_copy, &callee_arg)?;
        // If this was an in-place pass, protect the place it comes from for the duration of the call.
        if let FnArg::InPlace(mplace) = caller_arg {
            M::protect_in_place_function_argument(self, mplace)?;
        }
        interp_ok(())
    }

    /// The main entry point for creating a new stack frame: performs ABI checks and initializes
    /// the arguments.
    #[instrument(skip(self), level = "trace")]
    pub fn init_stack_frame(
        &mut self,
        instance: Instance<'tcx>,
        body: &'tcx mir::Body<'tcx>,
        caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        mut cont: ReturnContinuation,
    ) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(M, step::init_stack_frame, %instance, tracing_separate_thread = Empty);

        // The first order of business is to figure out the callee signature.
        // However, that requires the list of variadic arguments.
        // We use the *caller* information to determine where to split the list of arguments,
        // and then later check that the callee indeed has the same number of fixed arguments.
        let extra_tys = if caller_fn_abi.c_variadic {
            let fixed_count = usize::try_from(caller_fn_abi.fixed_count).unwrap();
            let extra_tys = args[fixed_count..].iter().map(|arg| arg.layout().ty);
            self.tcx.mk_type_list_from_iter(extra_tys)
        } else {
            ty::List::empty()
        };
        let callee_fn_abi = self.fn_abi_of_instance_no_deduced_attrs(instance, extra_tys)?;

        if caller_fn_abi.conv != callee_fn_abi.conv {
            throw_ub_format!(
                "calling a function with calling convention \"{callee_conv}\" using calling convention \"{caller_conv}\"",
                callee_conv = callee_fn_abi.conv,
                caller_conv = caller_fn_abi.conv,
            )
        }

        if caller_fn_abi.c_variadic != callee_fn_abi.c_variadic {
            throw_ub!(CVariadicMismatch {
                caller_is_c_variadic: caller_fn_abi.c_variadic,
                callee_is_c_variadic: callee_fn_abi.c_variadic,
            });
        }
        if caller_fn_abi.c_variadic && caller_fn_abi.fixed_count != callee_fn_abi.fixed_count {
            throw_ub!(CVariadicFixedCountMismatch {
                caller: caller_fn_abi.fixed_count,
                callee: callee_fn_abi.fixed_count,
            });
        }

        // Check that all target features required by the callee (i.e., from
        // the attribute `#[target_feature(enable = ...)]`) are enabled at
        // compile time.
        M::check_fn_target_features(self, instance)?;

        if !callee_fn_abi.can_unwind {
            // The callee cannot unwind, so force the `Unreachable` unwind handling.
            match &mut cont {
                ReturnContinuation::Stop { .. } => {}
                ReturnContinuation::Goto { unwind, .. } => {
                    *unwind = mir::UnwindAction::Unreachable;
                }
            }
        }

        // *Before* pushing the new frame, determine whether the return destination is in memory.
        // Need to use `place_to_op` to be *sure* we get the mplace if there is one.
        let destination_mplace = self.place_to_op(destination)?.as_mplace_or_imm().left();

        // Push the "raw" frame -- this leaves locals uninitialized.
        self.push_stack_frame_raw(instance, body, destination, cont)?;
        let preamble_span = self.frame().loc.unwrap_right(); // the span used for preamble errors

        trace!(
            "caller ABI: {:#?}, args: {:#?}",
            caller_fn_abi,
            args.iter()
                .map(|arg| (
                    arg.layout().ty,
                    match arg {
                        FnArg::Copy(op) => format!("copy({op:?})"),
                        FnArg::InPlace(mplace) => format!("in-place({mplace:?})"),
                    }
                ))
                .collect::<Vec<_>>()
        );
        trace!(
            "spread_arg: {:?}, locals: {:#?}",
            body.spread_arg,
            body.args_iter()
                .map(|local| (local, self.layout_of_local(self.frame(), local, None).unwrap().ty))
                .collect::<Vec<_>>()
        );

        // In principle, we have two iterators: Where the arguments come from, and where
        // they go to.

        // The "where they come from" part is easy, we expect the caller to do any special handling
        // that might be required here (e.g. for untupling).
        // If `with_caller_location` is set we pretend there is an extra argument (that
        // we will not pass; our `caller_location` intrinsic implementation walks the stack instead).
        assert_eq!(
            args.len() + if with_caller_location { 1 } else { 0 },
            caller_fn_abi.args.len(),
            "mismatch between caller ABI and caller arguments",
        );
        let mut caller_args = args
            .iter()
            .zip(caller_fn_abi.args.iter())
            .filter(|arg_and_abi| !arg_and_abi.1.is_ignore());

        // Now we have to spread them out across the callee's locals,
        // taking into account the `spread_arg`. If we could write
        // this as a single iterator (that handles `spread_arg`), then
        // `pass_argument` would be the loop body. It takes care to
        // not advance `caller_iter` for ignored arguments.
        let mut callee_args_abis = callee_fn_abi.args.iter().enumerate();
        // Determine whether there is a special VaList argument. This is always the
        // last argument, and since arguments start at index 1 that's `arg_count`.
        let va_list_arg = callee_fn_abi.c_variadic.then(|| mir::Local::from_usize(body.arg_count));
        // Part of the hack for #154274, see `pass_argument`.
        let is_drop_in_place = {
            let def_id = body.source.def_id();
            self.tcx.is_lang_item(def_id, LangItem::DropInPlace)
                || self.tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace)
        };

        // During argument passing, we want retagging with protectors.
        M::with_retag_mode(self, RetagMode::FnEntry, |ecx| {
            for local in body.args_iter() {
                // Update the span that we show in case of an error to point to this argument.
                ecx.frame_mut().loc = Right(body.local_decls[local].source_info.span);
                // Construct the destination place for this argument. At this point all
                // locals are still dead, so we cannot construct a `PlaceTy`.
                let dest = mir::Place::from(local);
                // `layout_of_local` does more than just the instantiation we need to get the
                // type, but the result gets cached so this avoids calling the instantiation
                // query *again* the next time this local is accessed.
                let ty = ecx.layout_of_local(ecx.frame(), local, None)?.ty;
                if Some(local) == va_list_arg {
                    // This is the last callee-side argument of a variadic function.
                    // This argument is a VaList holding the remaining caller-side arguments.
                    ecx.storage_live(local)?;

                    let place = ecx.eval_place(dest)?;
                    let mplace = ecx.force_allocation(&place)?;

                    // Consume the remaining arguments by putting them into the variable argument
                    // list.
                    let varargs = ecx.allocate_varargs(
                        &mut caller_args,
                        // "Ignored" arguments aren't actually passed, so the callee should also
                        // ignore them. (`pass_argument` does this for regular arguments.)
                        (&mut callee_args_abis).filter(|(_, abi)| !abi.is_ignore()),
                    )?;
                    // When the frame is dropped, these variable arguments are deallocated.
                    ecx.frame_mut().va_list = varargs.clone();
                    let key = ecx.va_list_ptr(varargs.into());

                    // Zero the VaList, so it is fully initialized.
                    ecx.write_bytes_ptr(
                        mplace.ptr(),
                        (0..mplace.layout.size.bytes()).map(|_| 0u8),
                    )?;

                    // Store the "key" pointer in the right field.
                    let key_mplace = ecx.va_list_key_field(&mplace)?;
                    ecx.write_pointer(key, &key_mplace)?;
                } else if Some(local) == body.spread_arg {
                    // Make the local live once, then fill in the value field by field.
                    ecx.storage_live(local)?;
                    // Must be a tuple.
                    let ty::Tuple(fields) = ty.kind() else {
                        span_bug!(ecx.cur_span(), "non-tuple type for `spread_arg`: {ty}")
                    };
                    for (i, field_ty) in fields.iter().enumerate() {
                        let dest = dest.project_deeper(
                            &[mir::ProjectionElem::Field(FieldIdx::from_usize(i), field_ty)],
                            *ecx.tcx,
                        );
                        let (idx, callee_abi) = callee_args_abis.next().unwrap();
                        ecx.pass_argument(
                            &mut caller_args,
                            callee_abi,
                            idx,
                            &dest,
                            field_ty,
                            /* already_live */ true,
                            is_drop_in_place,
                        )?;
                    }
                } else {
                    // Normal argument. Cannot mark it as live yet, it might be unsized!
                    let (idx, callee_abi) = callee_args_abis.next().unwrap();
                    ecx.pass_argument(
                        &mut caller_args,
                        callee_abi,
                        idx,
                        &dest,
                        ty,
                        /* already_live */ false,
                        is_drop_in_place,
                    )?;
                }
            }
            interp_ok(())
        })?;

        // Don't forget to check the return type!
        self.frame_mut().loc = Right(body.local_decls[mir::RETURN_PLACE].source_info.span);
        if !self.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? {
            throw_ub!(AbiMismatchReturn {
                caller_ty: caller_fn_abi.ret.layout.ty,
                callee_ty: callee_fn_abi.ret.layout.ty
            });
        }
        // Protect return place for in-place return value passing.
        // We only need to protect anything if this is actually an in-memory place.
        if let Some(mplace) = destination_mplace {
            M::protect_in_place_function_argument(self, &mplace)?;
        }

        // For the final checks, use same span as preamble since it is unclear what else to do.
        self.frame_mut().loc = Right(preamble_span);
        // If the callee needs a caller location, pretend we consume one more argument from the ABI.
        if instance.def.requires_caller_location(*self.tcx) {
            callee_args_abis.next().unwrap();
        }
        // Now we should have no more caller args or callee arg ABIs.
        assert!(
            callee_args_abis.next().is_none(),
            "mismatch between callee ABI and callee body arguments"
        );
        if caller_args.next().is_some() {
            throw_ub_format!("calling a function with more arguments than it expected");
        }

        // Done!
        self.push_stack_frame_done()
    }

    /// Initiate a call to this function -- pushing the stack frame and initializing the arguments.
    ///
    /// `caller_fn_abi` is used to determine if all the arguments are passed the proper way.
    /// However, we also need `caller_abi` to determine if we need to do untupling of arguments.
    ///
    /// `with_caller_location` indicates whether the caller passed a caller location. Miri
    /// implements caller locations without argument passing, but to match `FnAbi` we need to know
    /// when those arguments are present.
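    ///
    /// A hedged illustration of the untupling mentioned above (surface syntax only; this
    /// interpreter sees the corresponding MIR):
    ///
    /// ```
    /// let f = |a: u8, b: u16| u32::from(a) + u32::from(b);
    /// // A call through the `Fn` trait is lowered roughly as `Fn::call(&f, (a, b))`
    /// // with the `"rust-call"` ABI: the caller passes one tuple argument, and the
    /// // callee body receives `a` and `b` as separate locals.
    /// assert_eq!(f(1, 2), 3);
    /// ```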
    pub(super) fn init_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let _trace =
            enter_trace_span!(M, step::init_fn_call, tracing_separate_thread = Empty, ?fn_val)
                .or_if_tracing_disabled(|| trace!("init_fn_call: {:#?}", fn_val));

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(
                    self,
                    extra,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                );
            }
        };

        match instance.def {
            ty::InstanceKind::Intrinsic(def_id) => {
                assert!(self.tcx.intrinsic(def_id).is_some());
                // FIXME: Should `InPlace` arguments be reset to uninit?
                if let Some(fallback) = M::call_intrinsic(
                    self,
                    instance,
                    &Self::copy_fn_args(args),
                    destination,
                    target,
                    unwind,
                )? {
                    assert!(!self.tcx.intrinsic(fallback.def_id()).unwrap().must_be_overridden);
                    assert_matches!(fallback.def, ty::InstanceKind::Item(_));
                    return self.init_fn_call(
                        FnVal::Instance(fallback),
                        (caller_abi, caller_fn_abi),
                        args,
                        with_caller_location,
                        destination,
                        target,
                        unwind,
                    );
                } else {
                    interp_ok(())
                }
            }
            ty::InstanceKind::VTableShim(..)
            | ty::InstanceKind::ReifyShim(..)
            | ty::InstanceKind::ClosureOnceShim { .. }
            | ty::InstanceKind::ConstructCoroutineInClosureShim { .. }
            | ty::InstanceKind::FnPtrShim(..)
            | ty::InstanceKind::DropGlue(..)
            | ty::InstanceKind::CloneShim(..)
            | ty::InstanceKind::FnPtrAddrShim(..)
            | ty::InstanceKind::ThreadLocalShim(..)
            | ty::InstanceKind::AsyncDropGlueCtorShim(..)
            | ty::InstanceKind::AsyncDropGlue(..)
            | ty::InstanceKind::FutureDropPollShim(..)
            | ty::InstanceKind::Item(_) => {
                // We need MIR for this fn.
                // Note that this can be an intrinsic, if we are executing its fallback body.
                let Some((body, instance)) = M::find_mir_or_eval_fn(
                    self,
                    instance,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                )?
                else {
                    return interp_ok(());
                };
681let args: Cow<'_, [FnArg<'tcx, M::Provenance>]> =
682if caller_abi == ExternAbi::RustCall && !args.is_empty() {
683// Untuple
684let (untuple_arg, args) = args.split_last().unwrap();
685let ty::Tuple(untuple_fields) = untuple_arg.layout().ty.kind() else {
686::rustc_middle::util::bug::span_bug_fmt(self.cur_span(),
format_args!("untuple argument must be a tuple"))span_bug!(self.cur_span(), "untuple argument must be a tuple")687 };
                        trace!("init_fn_call: Will pass last argument by untupling");
                        Cow::from(
                            args.iter()
                                // The regular arguments.
                                .map(|a| interp_ok(a.clone()))
                                // The fields of the untupled argument.
                                .chain((0..untuple_fields.len()).map(|i| {
                                    self.fn_arg_project_field(untuple_arg, FieldIdx::from_usize(i))
                                }))
                                .collect::<InterpResult<'_, Vec<_>>>()?,
                        )
                    } else {
                        // Plain arg passing
                        Cow::from(args)
                    };

                self.init_stack_frame(
                    instance,
                    body,
                    caller_fn_abi,
                    &args,
                    with_caller_location,
                    destination,
                    ReturnContinuation::Goto { ret: target, unwind },
                )
            }
            // `InstanceKind::Virtual` does not have callable MIR. Calls to `Virtual` instances must be
            // codegen'd / interpreted as virtual calls through the vtable.
            ty::InstanceKind::Virtual(def_id, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "dyn-compatible receivers". So we have to go search for a
                // pointer or `dyn Trait` type, but it could be wrapped in newtypes. So recursively
                // unwrap those newtypes until we are there.
                // An `InPlace` does nothing here, we keep the original receiver intact. We can't
                // really pass the argument in-place anyway, and we are constructing a new
                // `Immediate` receiver.
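                // (Hedged illustration: for a receiver such as `Pin<&mut dyn Trait>` or
                // `Rc<dyn Trait>`, the loop below projects through the newtype layers until
                // it reaches the underlying pointer or `dyn Trait` value.)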
                let mut receiver = args[0].copy_fn_arg();
                let receiver_place = loop {
                    match receiver.layout.ty.kind() {
                        ty::Ref(..) | ty::RawPtr(..) => {
                            // We do *not* use `deref_pointer` here: we don't want to conceptually
                            // create a place that must be dereferenceable, since the receiver might
                            // be a raw pointer and (for `*const dyn Trait`) we don't need to
                            // actually access memory to resolve this method.
                            // Also see <https://github.com/rust-lang/miri/issues/2786>.
                            let val = self.read_immediate(&receiver)?;
                            break self.imm_ptr_to_mplace(&val)?;
                        }
                        ty::Dynamic(..) => break receiver.assert_mem_place(), // no immediate unsized values
                        _ => {
                            // Not there yet, search for the only non-ZST field.
                            // (The rules for `DispatchFromDyn` ensure there's exactly one such field.)
                            let (idx, _) = receiver.layout.non_1zst_field(self).expect(
                                "not exactly one non-1-ZST field in a `DispatchFromDyn` type",
                            );
                            receiver = self.project_field(&receiver, idx)?;
                        }
                    }
                };

                // Obtain the underlying trait we are working on, and the adjusted receiver argument.
                // Doesn't have to be a `dyn Trait`, but the unsized tail must be `dyn Trait`.
                // (For that reason we also cannot use `unpack_dyn_trait`.)
                let receiver_tail =
                    self.tcx.struct_tail_for_codegen(receiver_place.layout.ty, self.typing_env);
                let ty::Dynamic(receiver_trait, _) = receiver_tail.kind() else {
                    span_bug!(self.cur_span(), "dynamic call on non-`dyn` type {}", receiver_tail)
                };
                assert!(receiver_place.layout.is_unsized());

                // Get the required information from the vtable.
                let vptr = receiver_place.meta().unwrap_meta().to_pointer(self)?;
                let dyn_ty = self.get_ptr_vtable_ty(vptr, Some(receiver_trait))?;
                let adjusted_recv = receiver_place.ptr();

                // Now determine the actual method to call. Usually we use the easy way of just
                // looking up the method at index `idx`.
                let vtable_entries = self.vtable_entries(receiver_trait.principal(), dyn_ty);
                let Some(ty::VtblEntry::Method(fn_inst)) = vtable_entries.get(idx).copied() else {
                    // FIXME(fee1-dead) these could be variants of the UB info enum instead of this
                    throw_ub_format!("`dyn` call trying to call something that is not a method");
                };
                trace!("Virtual call dispatches to {fn_inst:#?}");
                // We can also do the lookup based on `def_id` and `dyn_ty`, and check that that
                // produces the same result.
                self.assert_virtual_instance_matches_concrete(dyn_ty, def_id, instance, fn_inst);

                // Adjust receiver argument. Layout can be any (thin) ptr.
                let receiver_ty = Ty::new_mut_ptr(self.tcx.tcx, dyn_ty);
                args[0] = FnArg::Copy(
                    ImmTy::from_immediate(
                        Scalar::from_maybe_pointer(adjusted_recv, self).into(),
                        self.layout_of(receiver_ty)?,
                    )
                    .into(),
                );
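                // Illustrative example (hypothetical types): a `&dyn Trait`
                // receiver arrived as a wide pointer (data, vtable); after this
                // patch the callee sees just the data half, typed as a thin
                // `*mut Concrete` to the underlying value.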
                trace!("Patched receiver operand to {:#?}", args[0]);
                // Need to also adjust the type in the ABI. Strangely, the layout there is actually
                // already fine! Just the type is bogus. This is due to what `force_thin_self_ptr`
                // does in `fn_abi_new_uncached`; supposedly, codegen relies on having the bogus
                // type, so we just patch this up locally.
                let mut caller_fn_abi = caller_fn_abi.clone();
                caller_fn_abi.args[0].layout.ty = receiver_ty;

                // recurse with concrete function
                self.init_fn_call(
                    FnVal::Instance(fn_inst),
                    (caller_abi, &caller_fn_abi),
                    &args,
                    with_caller_location,
                    destination,
                    target,
                    unwind,
                )
            }
        }
    }

    fn assert_virtual_instance_matches_concrete(
        &self,
        dyn_ty: Ty<'tcx>,
        def_id: DefId,
        virtual_instance: ty::Instance<'tcx>,
        concrete_instance: ty::Instance<'tcx>,
    ) {
        let tcx = *self.tcx;

        let trait_def_id = tcx.parent(def_id);
        let virtual_trait_ref = ty::TraitRef::from_assoc(tcx, trait_def_id, virtual_instance.args);
        let existential_trait_ref = ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref);
        let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty);
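        // Illustrative example (hypothetical `Foo` and `Trait`): if the vtable
        // points at `<Foo as Trait>::method`, then `virtual_trait_ref` is
        // `dyn Trait: Trait`, erasing the self type gives the existential
        // `Trait`, and `concrete_trait_ref` re-instantiates it as `Foo: Trait`.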

        let concrete_method = {
            let _trace = enter_trace_span!(M, resolve::expect_resolve_for_vtable, ?def_id);
            Instance::expect_resolve_for_vtable(
                tcx,
                self.typing_env,
                def_id,
                virtual_instance.args.rebase_onto(tcx, trait_def_id, concrete_trait_ref.args),
                self.cur_span(),
            )
        };
        assert_eq!(concrete_instance, concrete_method);
    }

    /// Initiate a tail call to this function -- popping the current stack frame, pushing the new
    /// stack frame and initializing the arguments.
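    ///
    /// Illustrative sketch (not code from this file): with the unstable
    /// `explicit_tail_calls` feature, a `become` expression lowers to a MIR
    /// `TailCall` terminator, which is what ends up driving this method:
    ///
    /// ```ignore (illustrative)
    /// #![feature(explicit_tail_calls)]
    /// fn count_down(n: u32) -> u32 {
    ///     if n == 0 { 0 } else { become count_down(n - 1) }
    /// }
    /// ```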
    pub(super) fn init_fn_tail_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
    ) -> InterpResult<'tcx> {
        trace!("init_fn_tail_call: {:#?}", fn_val);
        // This is the "canonical" implementation of tail calls:
        // a pop of the current stack frame, followed by a normal call
        // which pushes a new stack frame, with the return address from
        // the popped stack frame.
        //
        // Note that we cannot use `return_from_current_stack_frame`,
        // as that "executes" the goto to the return block, but we don't want that;
        // only the tail-called function should return to the current return block.

        // The arguments all need to be copied since the current stack frame will be removed
        // before the callee even starts executing.
        // FIXME(explicit_tail_calls,#144855): does this match what codegen does?
        let args = args.iter().map(|fn_arg| FnArg::Copy(fn_arg.copy_fn_arg())).collect::<Vec<_>>();
        // Remove the frame from the stack.
        let frame = self.pop_stack_frame_raw()?;
        // Remember where this frame would have returned to.
        let ReturnContinuation::Goto { ret, unwind } = frame.return_cont() else {
            bug!("can't tailcall as root of the stack");
        };
        // There's no return value to deal with! Instead, we forward the old return place
        // to the new function.
        // FIXME(explicit_tail_calls):
        // we should check if both caller & callee can/can't unwind,
        // see <https://github.com/rust-lang/rust/pull/113128#issuecomment-1614979803>

        // Now push the new stack frame.
        self.init_fn_call(
            fn_val,
            (caller_abi, caller_fn_abi),
            &*args,
            with_caller_location,
            frame.return_place(),
            ret,
            unwind,
        )?;

        // Finally, clear the local variables. Has to be done after pushing to support
        // non-scalar arguments.
        // FIXME(explicit_tail_calls,#144855): revisit this once codegen supports indirect
        // arguments, to ensure the semantics are compatible.
        let return_action = self.cleanup_stack_frame(/* unwinding */ false, frame)?;
        assert_eq!(return_action, ReturnAction::Normal);

        interp_ok(())
    }

    pub(super) fn init_drop_in_place_call(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        trace!("init_drop_in_place_call: {:?},\n instance={:?}", place, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        // We behave a bit differently from codegen here.
        // Codegen creates an `InstanceKind::Virtual` with index 0 (the slot of the drop method) and
        // then dispatches that to the normal call machinery. However, our call machinery currently
        // only supports calling `VtblEntry::Method`; it would choke on a `MetadataDropInPlace`. So
        // instead we do the virtual call stuff ourselves. It's easier here than in `eval_fn_call`
        // since we can just get a place of the underlying type and use `mplace_to_imm_ptr`.
        let place = match place.layout.ty.kind() {
            ty::Dynamic(data, _) => {
                // Dropping a trait object. Need to find actual drop fn.
                self.unpack_dyn_trait(&place, data)?
            }
            _ => {
                debug_assert_eq!(
                    instance,
                    ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
                );
                place
            }
        };
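        // Illustrative example (hypothetical pointee type): dropping the pointee
        // of a `*mut dyn Any` takes the `ty::Dynamic` arm above to recover a
        // place of the concrete type; the concrete drop instance is then
        // resolved below.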
        let instance = {
            let _trace =
                enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty);
            ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
        };
        let fn_abi = self.fn_abi_of_instance_no_deduced_attrs(instance, ty::List::empty())?;

        let arg = self.mplace_to_imm_ptr(&place, None)?;
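        // `drop_in_place` returns `()` -- its signature is roughly
        // `unsafe fn drop_in_place<T: ?Sized>(*mut T)` -- so a fake zero-sized
        // allocation suffices as the return place.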
        let ret = MPlaceTy::fake_alloc_zst(self.layout_of(self.tcx.types.unit)?);

        self.init_fn_call(
            FnVal::Instance(instance),
            (ExternAbi::Rust, fn_abi),
            &[FnArg::Copy(arg.into())],
            false,
            &ret.into(),
            Some(target),
            unwind,
        )
    }

    /// Pops the current frame from the stack, copies the return value to the caller, deallocates
    /// the memory for allocated locals, and jumps to an appropriate place.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
    #[instrument(skip(self), level = "trace")]
    pub(super) fn return_from_current_stack_frame(
        &mut self,
        unwinding: bool,
    ) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
                Right(_) => true,
            }
        );
        if unwinding && self.frame_idx() == 0 {
            throw_ub_format!("unwinding past the topmost frame of the stack");
        }

        // Get out the return value. Must happen *before* the frame is popped as we have to get the
        // local's value out.
        let return_op =
            self.local_to_op(mir::RETURN_PLACE, None).expect("return place should always be live");
        // Remove the frame from the stack.
        let frame = self.pop_stack_frame_raw()?;
        // Copy the return value and remember the return continuation.
        if !unwinding {
            self.copy_op_allow_transmute(&return_op, frame.return_place())?;
            trace!("return value: {:?}", self.dump_place(frame.return_place()));
        }
        let return_cont = frame.return_cont();
        // Finish popping the stack frame.
        let return_action = self.cleanup_stack_frame(unwinding, frame)?;
        // Jump to the next block.
        match return_action {
            ReturnAction::Normal => {}
            ReturnAction::NoJump => {
                // The hook already did everything.
                return interp_ok(());
            }
            ReturnAction::NoCleanup => {
                // If we are not doing cleanup, also skip everything else.
                assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
                assert!(!unwinding, "tried to skip cleanup during unwinding");
                // Don't jump anywhere.
                return interp_ok(());
            }
        }

        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            match return_cont {
                ReturnContinuation::Goto { unwind, .. } => {
                    // This must be the very last thing that happens, since it can in fact push a new stack frame.
                    self.unwind_to_block(unwind)
                }
                ReturnContinuation::Stop { .. } => {
                    panic!("encountered ReturnContinuation::Stop when unwinding!")
                }
            }
        } else {
            // Follow the normal return edge.
            match return_cont {
                ReturnContinuation::Goto { ret, .. } => self.return_to_block(ret),
                ReturnContinuation::Stop { .. } => {
                    assert!(
                        self.stack().is_empty(),
                        "only the bottommost frame can have ReturnContinuation::Stop"
                    );
                    interp_ok(())
                }
            }
        }
    }
}