//! This module contains the `InterpCx` methods for executing a single step of the interpreter.
//!
//! The main entry point is the `step` method.

use std::iter;

use either::Either;
use rustc_abi::{FIRST_VARIANT, FieldIdx};
use rustc_data_structures::fx::FxHashSet;
use rustc_index::IndexSlice;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::Spanned;
use rustc_target::callconv::FnAbi;
use tracing::field::Empty;
use tracing::{info, instrument, trace};

use super::{
    EnteredTraceSpan, FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine,
    MemPlaceMeta, PlaceTy, Projectable, RetagMode, interp_ok, throw_ub, throw_unsup_format,
};
use crate::{enter_trace_span, util};

struct EvaluatedCalleeAndArgs<'tcx, M: Machine<'tcx>> {
    callee: FnVal<'tcx, M::ExtraFnVal>,
    args: Vec<FnArg<'tcx, M::Provenance>>,
    fn_sig: ty::FnSig<'tcx>,
    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
    /// True if the function is marked as `#[track_caller]` ([`ty::InstanceKind::requires_caller_location`])
    with_caller_location: bool,
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Returns `true` as long as there are more things to do.
    ///
    /// This is used by [priroda](https://github.com/oli-obk/priroda)
    ///
    /// This is marked `#[inline(always)]` to work around adversarial codegen when `opt-level = 3`
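    ///
    /// A minimal driver sketch (assuming an `ecx: InterpCx<'tcx, M>` that already has a frame
    /// pushed; illustrative, not how any particular caller must use it):
    /// ```ignore (illustrative)
    /// // Keep stepping until the stack is empty or an interpreter error is raised.
    /// while ecx.step()? {}
    /// ```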
    #[inline(always)]
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        if self.stack().is_empty() {
            return interp_ok(false);
        }

        let Either::Left(loc) = self.frame().loc else {
            // We are unwinding and this fn has no cleanup code.
            // Just go on unwinding.
            trace!("unwinding: skipping frame");
            self.return_from_current_stack_frame(/* unwinding */ true)?;
            return interp_ok(true);
        };
        let basic_block = &self.body().basic_blocks[loc.block];

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            let old_frames = self.frame_idx();
            self.eval_statement(stmt)?;
            // Make sure we are not updating `statement_index` of the wrong frame.
            assert_eq!(old_frames, self.frame_idx());
            // Advance the program counter.
            self.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
            return interp_ok(true);
        }

        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Either::Left(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        interp_ok(true)
    }

    /// Runs the interpretation logic for the given `mir::Statement` at the current frame and
    /// statement counter.
    ///
    /// This does NOT move the statement counter forward; the caller has to do that!
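    ///
    /// A sketch of the caller's obligation (mirroring what `step` does above; illustrative only):
    /// ```ignore (illustrative)
    /// ecx.eval_statement(stmt)?;
    /// // The caller, not `eval_statement`, advances the program counter:
    /// ecx.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
    /// ```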
    pub fn eval_statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(
            M,
            step::eval_statement,
            stmt = ?stmt.kind,
            span = ?stmt.source_info.span,
            tracing_separate_thread = Empty,
        )
        .or_if_tracing_disabled(|| info!("{:?}", stmt.kind));

        use rustc_middle::mir::StatementKind::*;

        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

            // Mark locals as alive
            StorageLive(local) => {
                self.storage_live(*local)?;
            }

            // Mark locals as dead
            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

            // No dynamic semantics attached to `FakeRead`; MIR
            // interpreter is solely intended for borrowck'ed code.
            FakeRead(..) => {}

            Intrinsic(box intrinsic) => self.eval_nondiverging_intrinsic(intrinsic)?,

            // Evaluate the place expression, without reading from it.
            PlaceMention(box place) => {
                let _ = self.eval_place(*place)?;
            }

            // This exists purely to guide borrowck lifetime inference, and does not have
            // an operational effect.
            AscribeUserType(..) => {}

            // Currently, Miri discards Coverage statements. Coverage statements are only injected
            // via an optional compile time MIR pass and have no side effects. Since Coverage
            // statements don't exist at the source level, it is safe for Miri to ignore them, even
            // for undefined behavior (UB) checks.
            //
            // A coverage counter inside a const expression (for example, a counter injected in a
            // const function) is discarded when the const is evaluated at compile time. Whether
            // this should change, and/or how to implement a const eval counter, is a subject of the
            // following issue:
            //
            // FIXME(#73156): Handle source code coverage in const eval
            Coverage(..) => {}

            ConstEvalCounter => {
                M::increment_const_eval_counter(self)?;
            }

            // Defined to do nothing. These are added by optimization passes, to avoid changing the
            // size of MIR constantly.
            Nop => {}

            // Only used for temporary lifetime lints
            BackwardIncompatibleDropHint { .. } => {}
        }

        interp_ok(())
    }

    /// Evaluate an assignment statement.
    ///
    /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
    /// type writes its results directly into the memory specified by the place.
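    ///
    /// For example, for the MIR statement `_1 = Add(copy _2, copy _3)` this evaluates both
    /// operands and writes the sum directly into the place `_1`; no intermediate "rvalue value"
    /// is ever materialized.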
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;
        // FIXME: ensure some kind of non-aliasing between LHS and RHS?
        // Also see https://github.com/rust-lang/rust/issues/68364.

        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                let ptr = M::thread_local_static_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand, with_retag) => {
                // Avoid recomputing the layout
                let op = self.eval_operand(operand, Some(dest.layout))?;
                let mode = if with_retag.yes() { RetagMode::Default } else { RetagMode::None };
                M::with_retag_mode(self, mode, |ecx| ecx.copy_op(&op, &dest))?;
            }

            CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),

            BinaryOp(bin_op, box (ref left, ref right)) => {
                let layout = util::binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = util::binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                let result = self.binary_op(bin_op, &left, &right)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {bin_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            UnaryOp(un_op, ref operand) => {
                // The operand always has the same type as the result.
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let result = self.unary_op(un_op, &val)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {un_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            Aggregate(box ref kind, ref operands) => {
                self.write_aggregate(kind, operands, &dest)?;
            }

            Repeat(ref operand, _) => {
                self.write_repeat(operand, &dest)?;
            }

            Ref(_, borrow_kind, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                // A fresh reference was created, make sure it gets retagged with the right mode.
                let mode = if borrow_kind.is_two_phase_borrow() {
                    RetagMode::TwoPhase
                } else {
                    RetagMode::Default
                };
                M::with_retag_mode(self, mode, |ecx| {
                    // If validation is disabled, we still want to do this retag. This is because
                    // const-eval disables validation for performance reasons but wants to retag
                    // shared references. So we add a bit of a hack here to do the retag manually
                    // if the write would not incur validation.
                    if !M::enforce_validity(ecx, val.layout) {
                        if let Some(new_val) = M::retag_ptr_value(ecx, &val, val.layout.ty)? {
                            val = new_val;
                        }
                    }
                    // Now do the actual write.
                    ecx.write_immediate(*val, &dest)
                })?;
            }

            RawPtr(kind, place) => {
                // Figure out whether this is an addr_of of an already raw place.
                let place_base_raw = if place.is_indirect_first_projection() {
                    let ty = self.frame().body.local_decls[place.local].ty;
                    ty.is_raw_ptr()
                } else {
                    // Not a deref, and thus not raw.
                    false
                };
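                // (Illustrative: for `&raw const (*p).field` where `p` is already a raw pointer,
                // the base is a raw deref, so `place_base_raw` is true and the retag below is
                // skipped.)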

                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                if !place_base_raw && !kind.is_fake() {
                    // If this was not already raw, it needs retagging -- except for "fake"
                    // raw borrows whose defining property is that they do not get retagged.
                    val = M::with_retag_mode(self, RetagMode::Raw, |ecx| {
                        interp_ok(M::retag_ptr_value(ecx, &val, val.layout.ty)?.unwrap_or(val))
                    })?;
                }
                // This writes a raw pointer so it will not do any retags.
                self.write_immediate(*val, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty =
                    self.instantiate_from_current_frame_and_normalize_erasing_regions(cast_ty)?;
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let variant = self.read_discriminant(&op)?;
                let discr = self.discriminant_for_variant(op.layout.ty, variant)?;
                self.write_immediate(*discr, &dest)?;
            }

            WrapUnsafeBinder(ref op, _ty) => {
                // Constructing an unsafe binder acts like a transmute
                // since the operand's layout does not change.
                let op = self.eval_operand(op, None)?;
                self.copy_op_allow_transmute(&op, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(&dest));

        interp_ok(())
    }

    /// Writes the aggregate to the destination.
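    ///
    /// E.g. for `dest = Some(x)` this projects into the `Some` variant, copies `x` into its
    /// field, and then writes the discriminant; wide raw pointers are instead assembled
    /// directly from their (data, metadata) parts.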
    #[instrument(skip(self), level = "trace")]
    fn write_aggregate(
        &mut self,
        kind: &mir::AggregateKind<'tcx>,
        operands: &IndexSlice<FieldIdx, mir::Operand<'tcx>>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let (variant_index, variant_dest, active_field_index) = match *kind {
            mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
                let variant_dest = self.project_downcast(dest, variant_index)?;
                (variant_index, variant_dest, active_field_index)
            }
            mir::AggregateKind::RawPtr(..) => {
                // Pointers don't have "fields" in the normal sense, so the
                // projection-based code below would either fail in projection
                // or in type mismatches. Instead, build an `Immediate` from
                // the parts and write that to the destination.
                let [data, meta] = &operands.raw else {
                    bug!("{kind:?} should have 2 operands, had {operands:?}");
                };
                let data = self.eval_operand(data, None)?;
                let data = self.read_pointer(&data)?;
                let meta = self.eval_operand(meta, None)?;
                let meta = if meta.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.read_scalar(&meta)?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, self);
                let ptr = ImmTy::from_immediate(ptr_imm, dest.layout);
                self.copy_op(&ptr, dest)?;
                return interp_ok(());
            }
            _ => (FIRST_VARIANT, dest.clone(), None),
        };
        if active_field_index.is_some() {
            assert_eq!(operands.len(), 1);
        }
        for (field_index, operand) in operands.iter_enumerated() {
            let field_index = active_field_index.unwrap_or(field_index);
            let field_dest = self.project_field(&variant_dest, field_index)?;
            let op = self.eval_operand(operand, Some(field_dest.layout))?;
            // We validate manually below so we don't have to do it here.
            self.copy_op_no_validate(&op, &field_dest, /*allow_transmute*/ false)?;
        }
        self.write_discriminant(variant_index, dest)?;
        // Validate that the entire thing is valid, and reset padding that might be in between the
        // fields.
        if M::enforce_validity(self, dest.layout()) {
            self.validate_operand(
                dest,
                M::enforce_validity_recursively(self, dest.layout()),
                /*reset_provenance_and_padding*/ true,
            )?;
        }
        interp_ok(())
    }

    /// Repeats `operand` into the destination. `dest` must have array type, and that type
    /// determines how often `operand` is repeated.
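    ///
    /// E.g. `dest = [x; 100]` evaluates and writes `x` once, then copies that first element
    /// into the remaining 99 slots.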
    fn write_repeat(
        &mut self,
        operand: &mir::Operand<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let src = self.eval_operand(operand, None)?;
        assert!(src.layout.is_sized());
        let dest = self.force_allocation(&dest)?;
        let length = dest.len(self)?;

        if length == 0 {
            // Nothing to copy... but let's still make sure that `dest` as a place is valid.
            self.get_place_alloc_mut(&dest)?;
        } else {
            // Write the src to the first element.
            let first = self.project_index(&dest, 0)?;
            self.copy_op(&src, &first)?;

            // This is performance-sensitive code for big static/const arrays! So we
            // avoid writing each operand individually and instead just make many copies
            // of the first element.
            let elem_size = first.layout.size;
            let first_ptr = first.ptr();
            let rest_ptr = first_ptr.wrapping_offset(elem_size, self);
            // No alignment requirement since `copy_op` above already checked it.
            self.mem_copy_repeatedly(
                first_ptr,
                rest_ptr,
                elem_size,
                length - 1,
                /*nonoverlapping:*/ true,
            )?;
        }

        interp_ok(())
    }

    /// Evaluate one argument of a function call.
    fn eval_fn_call_argument(
        &mut self,
        op: &mir::Operand<'tcx>,
        move_definitely_disjoint: bool,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match op {
            mir::Operand::Copy(_) | mir::Operand::Constant(_) | mir::Operand::RuntimeChecks(_) => {
                // Make a regular copy.
                let op = self.eval_operand(op, None)?;
                FnArg::Copy(op)
            }
            mir::Operand::Move(place) => {
                let place = self.eval_place(*place)?;
                if move_definitely_disjoint {
                    // We still have to ensure that no *other* pointers are used to access this place,
                    // so *if* it is in memory then we have to treat it as `InPlace`.
                    // Use `place_to_op` to guarantee that we notice it being in memory.
                    let op = self.place_to_op(&place)?;
                    match op.as_mplace_or_imm() {
                        Either::Left(mplace) => FnArg::InPlace(mplace),
                        Either::Right(_imm) => FnArg::Copy(op),
                    }
                } else {
                    // We have to force this into memory to detect aliasing among `Move` arguments.
                    FnArg::InPlace(self.force_allocation(&place)?)
                }
            }
        })
    }

    /// Shared part of `Call` and `TailCall` implementation — finding and evaluating all the
    /// necessary information about callee and arguments to make a call.
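    ///
    /// Arguments beyond `fn_sig.inputs()` (e.g. the variadic tail of a call to a C-variadic
    /// function) are passed as `extra_args` to the ABI computation below.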
    fn eval_callee_and_args(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &[Spanned<mir::Operand<'tcx>>],
        dest: &mir::Place<'tcx>,
    ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> {
        let func = self.eval_operand(func, None)?;

        // Evaluating function call arguments. The tricky part here is dealing with `Move`
        // arguments: we have to ensure no two such arguments alias. This would be most easily done
        // by just forcing them all into memory and then doing the usual in-place argument
        // protection, but then we'd force *a lot* of arguments into memory. So we do some syntactic
        // pre-processing here where if all `move` arguments are syntactically distinct local
        // variables (and none is indirect), we can skip the in-memory forcing.
        // We have to include `dest` in that list so that we can detect aliasing of an in-place
        // argument with the return place.
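        // For example (illustrative): `f(move a, move b)` with distinct locals `a` and `b` can
        // skip the forcing, while `f(move a, move a)` or any indirect `move (*p).x` cannot.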
        let move_definitely_disjoint = 'move_definitely_disjoint: {
            let mut previous_locals = FxHashSet::<mir::Local>::default();
            for place in args
                .iter()
                .filter_map(|a| {
                    // We only have to care about `Move` arguments.
                    if let mir::Operand::Move(place) = &a.node { Some(place) } else { None }
                })
                .chain(iter::once(dest))
            {
                if place.is_indirect_first_projection() {
                    // An indirect in-place argument could alias with anything else...
                    break 'move_definitely_disjoint false;
                }
                if !previous_locals.insert(place.local) {
                    // This local is the base for two arguments! They might overlap.
                    break 'move_definitely_disjoint false;
                }
            }
            // We found no violation so they are all definitely disjoint.
            true
        };
        let args = args
            .iter()
            .map(|arg| self.eval_fn_call_argument(&arg.node, move_definitely_disjoint))
            .collect::<InterpResult<'tcx, Vec<_>>>()?;

        let fn_sig_binder = {
            let _trace = enter_trace_span!(M, "fn_sig", ty = ?func.layout.ty.kind());
            func.layout.ty.fn_sig(*self.tcx)
        };
        let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, fn_sig_binder);
        let extra_args = &args[fn_sig.inputs().len()..];
        let extra_args =
            self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty));

        let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() {
            ty::FnPtr(..) => {
                let fn_ptr = self.read_pointer(&func)?;
                let fn_val = self.get_ptr_fn(fn_ptr)?;
                (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false)
            }
            ty::FnDef(def_id, args) => {
                let instance = self.resolve(def_id, args)?;
                (
                    FnVal::Instance(instance),
                    self.fn_abi_of_instance_no_deduced_attrs(instance, extra_args)?,
                    instance.def.requires_caller_location(*self.tcx),
                )
            }
            _ => {
                span_bug!(terminator.source_info.span, "invalid callee of type {}", func.layout.ty)
            }
        };

        interp_ok(EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location })
    }

    fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(
            M,
            step::eval_terminator,
            terminator = ?terminator.kind,
            span = ?terminator.source_info.span,
            tracing_separate_thread = Empty,
        )
        .or_if_tracing_disabled(|| info!("{:?}", terminator.kind));

        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.return_from_current_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using MIR BinOp::Eq, to also support pointer values.
                    // (Avoiding `self.binary_op` as that does some redundant layout computation.)
                    let res = self.binary_op(
                        mir::BinOp::Eq,
                        &discr,
                        &ImmTy::from_uint(const_int, discr.layout),
                    )?;
                    if res.to_scalar().to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call {
                ref func,
                ref args,
                destination,
                target,
                unwind,
                call_source: _,
                fn_span: _,
            } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args, &destination)?;

                let destination = self.eval_place(destination)?;
                self.init_fn_call(
                    callee,
                    (fn_sig.abi(), fn_abi),
                    &args,
                    with_caller_location,
                    &destination,
                    target,
                    if fn_abi.can_unwind { unwind } else { mir::UnwindAction::Unreachable },
                )?;
                // Sanity-check that `init_fn_call` either pushed a new frame or
                // did a jump to another block. We disable the sanity check for functions that
                // can't return, since Miri sometimes does have to keep the location the same
                // for those (which is fine since execution will continue on a different thread).
                if target.is_some() && self.frame_idx() == old_stack && self.frame().loc == old_loc
                {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            TailCall { ref func, ref args, fn_span: _ } => {
                let old_frame_idx = self.frame_idx();

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args, &mir::Place::return_place())?;

                self.init_fn_tail_call(
                    callee,
                    (fn_sig.abi(), fn_abi),
                    &args,
                    with_caller_location,
                )?;

                if self.frame_idx() != old_frame_idx {
                    span_bug!(
                        terminator.source_info.span,
                        "evaluating this tail call pushed a new stack frame"
                    );
                }
            }

            Drop { place, target, unwind, replace: _, drop, async_fut } => {
                assert!(
                    async_fut.is_none() && drop.is_none(),
                    "Async Drop must be expanded or reset to sync in runtime MIR"
                );
                let place = self.eval_place(place)?;
                let instance = {
                    let _trace =
                        enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty);
                    Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
                };
                if let ty::InstanceKind::DropGlue(_, None) = instance.def {
                    // This is the branch we enter if and only if the dropped type has no drop glue
                    // whatsoever. This can happen as a result of monomorphizing a drop of a
                    // generic. In order to make sure that generic and non-generic code behaves
                    // roughly the same (and in keeping with Mir semantics) we do nothing here.
                    self.go_to_block(target);
                    return interp_ok(());
                }
                trace!("TerminatorKind::drop: {:?}, type {}", place, place.layout.ty);
                self.init_drop_in_place_call(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, unwind } => {
                let ignored =
                    M::ignore_optional_overflow_checks(self) && msg.is_optional_overflow_check();
                let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?;
                if ignored || expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, unwind)?;
                }
            }

            UnwindTerminate(reason) => {
                M::unwind_terminate(self, reason)?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame
            UnwindResume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding
                self.return_from_current_stack_frame(/* unwinding */ true)?;
                return interp_ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | CoroutineDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            InlineAsm { .. } => {
                throw_unsup_format!("inline assembly is not supported");
            }
        }

        interp_ok(())
    }
}