rustc_const_eval/const_eval/machine.rs

1use std::borrow::{Borrow, Cow};
2use std::hash::Hash;
3use std::{fmt, mem};
4
5use rustc_abi::{Align, FIRST_VARIANT, Size};
6use rustc_ast::Mutability;
7use rustc_data_structures::fx::{FxHashMap, FxIndexMap, IndexEntry};
8use rustc_hir::def_id::{DefId, LocalDefId};
9use rustc_hir::{self as hir, CRATE_HIR_ID, LangItem, find_attr};
10use rustc_middle::mir::AssertMessage;
11use rustc_middle::mir::interpret::ReportedErrorInfo;
12use rustc_middle::query::TyCtxtAt;
13use rustc_middle::ty::layout::{HasTypingEnv, TyAndLayout, ValidityRequirement};
14use rustc_middle::ty::{self, FieldInfo, Ty, TyCtxt};
15use rustc_middle::{bug, mir, span_bug};
16use rustc_span::{Span, Symbol, sym};
17use rustc_target::callconv::FnAbi;
18use tracing::debug;
19
20use super::error::*;
21use crate::errors::{LongRunning, LongRunningWarn};
22use crate::interpret::{
23    self, AllocId, AllocInit, AllocRange, ConstAllocation, CtfeProvenance, FnArg, Frame,
24    GlobalAlloc, ImmTy, InterpCx, InterpResult, OpTy, PlaceTy, Pointer, RangeSet, RetagMode,
25    Scalar, compile_time_machine, ensure_monomorphic_enough, err_inval, interp_ok, throw_exhaust,
26    throw_inval, throw_ub, throw_ub_format, throw_unsup, throw_unsup_format,
27    type_implements_dyn_trait,
28};
29
30/// When hitting this many interpreted terminators we emit a deny-by-default lint
31/// that notifies the user that their constant takes a long time to evaluate. If that's
32/// what they intended, they can just allow the lint.
33const LINT_TERMINATOR_LIMIT: usize = 2_000_000;
34/// The limit used by `-Z tiny-const-eval-limit`. This smaller limit is useful for internal
35/// tests not needing to run 30s or more to show some behaviour.
36const TINY_LINT_TERMINATOR_LIMIT: usize = 20;
37/// After this many interpreted terminators, we start emitting progress indicators at every
38/// power of two of interpreted terminators.
39const PROGRESS_INDICATOR_START: usize = 4_000_000;
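// For example, with the defaults above, evaluation emits the `long_running_const_eval` lint once
// after exactly 2_000_000 interpreted terminators, and if evaluation keeps going, progress
// warnings follow at the first powers of two past `PROGRESS_INDICATOR_START`: 4_194_304 (2^22),
// 8_388_608 (2^23), and so on (see `increment_const_eval_counter` below).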
40
41/// Extra machine state for CTFE, and the Machine instance.
42//
43// Should be public because out-of-tree rustc consumers need this
44// if they want to interact with constant values.
45pub struct CompileTimeMachine<'tcx> {
46    /// The number of terminators that have been evaluated.
47    ///
48    /// This is used to produce lints informing the user that the compiler is not stuck.
49    /// Set to `usize::MAX` to never report anything.
50    pub(super) num_evaluated_steps: usize,
51
52    /// The virtual call stack.
53    pub(super) stack: Vec<Frame<'tcx>>,
54
55    /// Pattern matching on consts with references would be unsound if those references
56    /// could point to anything mutable. Therefore, when evaluating consts and when constructing valtrees,
57    /// we ensure that only immutable global memory can be accessed.
58    pub(super) can_access_mut_global: CanAccessMutGlobal,
59
60    /// Whether to check alignment during evaluation.
61    pub(super) check_alignment: CheckAlignment,
62
63    /// If `Some`, we are evaluating the initializer of the static with the given `LocalDefId`,
64    /// storing the result in the given `AllocId`.
65    /// Used to prevent accesses to a static's base allocation, as that may allow for self-initialization loops.
66    pub(crate) static_root_ids: Option<(AllocId, LocalDefId)>,
67
68    /// A cache of "data range" computations for unions (i.e., the offsets of non-padding bytes).
69    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,
70
71    /// The current retag mode.
72    retag_mode: RetagMode,
73}
74
75#[derive(Copy, Clone)]
76pub enum CheckAlignment {
77    /// Ignore all alignment requirements.
78    /// This is mainly used in interning.
79    No,
80    /// Hard error when dereferencing a misaligned pointer.
81    Error,
82}
83
84#[derive(Copy, Clone, PartialEq)]
85pub(crate) enum CanAccessMutGlobal {
86    No,
87    Yes,
88}
89
90impl From<bool> for CanAccessMutGlobal {
91    fn from(value: bool) -> Self {
92        if value { Self::Yes } else { Self::No }
93    }
94}
95
96impl<'tcx> CompileTimeMachine<'tcx> {
97    pub(crate) fn new(
98        can_access_mut_global: CanAccessMutGlobal,
99        check_alignment: CheckAlignment,
100    ) -> Self {
101        CompileTimeMachine {
102            num_evaluated_steps: 0,
103            stack: Vec::new(),
104            can_access_mut_global,
105            check_alignment,
106            static_root_ids: None,
107            union_data_ranges: FxHashMap::default(),
108            retag_mode: RetagMode::Default,
109        }
110    }
111}
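
// Illustrative sketch of how this machine is plugged into an interpreter by the const-eval entry
// points (the exact `InterpCx::new` constructor shape is assumed here, not quoted from this file):
//
//     let machine = CompileTimeMachine::new(CanAccessMutGlobal::No, CheckAlignment::Error);
//     let ecx: CompileTimeInterpCx<'_> = InterpCx::new(tcx, root_span, typing_env, machine);
//
// `CanAccessMutGlobal::No` is the typical configuration for evaluating a `const` initializer,
// while `static` initializers typically pass `CanAccessMutGlobal::Yes` so they may read other
// (possibly mutable) globals.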
112
113impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxIndexMap<K, V> {
114    #[inline(always)]
115    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
116    where
117        K: Borrow<Q>,
118    {
119        FxIndexMap::contains_key(self, k)
120    }
121
122    #[inline(always)]
123    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
124    where
125        K: Borrow<Q>,
126    {
127        FxIndexMap::contains_key(self, k)
128    }
129
130    #[inline(always)]
131    fn insert(&mut self, k: K, v: V) -> Option<V> {
132        FxIndexMap::insert(self, k, v)
133    }
134
135    #[inline(always)]
136    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
137    where
138        K: Borrow<Q>,
139    {
140        // FIXME(#120456) - is `swap_remove` correct?
141        FxIndexMap::swap_remove(self, k)
142    }
143
144    #[inline(always)]
145    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
146        self.iter().filter_map(move |(k, v)| f(k, v)).collect()
147    }
148
149    #[inline(always)]
150    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
151        match self.get(&k) {
152            Some(v) => Ok(v),
153            None => {
154                vacant()?;
155                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
156            }
157        }
158    }
159
160    #[inline(always)]
161    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
162        match self.entry(k) {
163            IndexEntry::Occupied(e) => Ok(e.into_mut()),
164            IndexEntry::Vacant(e) => {
165                let v = vacant()?;
166                Ok(e.insert(v))
167            }
168        }
169    }
170}
171
172pub type CompileTimeInterpCx<'tcx> = InterpCx<'tcx, CompileTimeMachine<'tcx>>;
173
174#[derive(Debug, PartialEq, Eq, Copy, Clone)]
175pub enum MemoryKind {
176    Heap {
177        /// Indicates whether `make_global` was called on this allocation.
178        /// If this is `true`, the allocation must be immutable.
179        was_made_global: bool,
180    },
181}
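
// Rough user-side sketch of how `MemoryKind::Heap` allocations come to exist. The unstable
// `const_heap` intrinsic names and signatures are assumed here for illustration and may differ:
//
//     #![feature(core_intrinsics, const_heap)]
//     use core::intrinsics;
//
//     const PTR: *const u8 = unsafe {
//         // `const_allocate` creates a Heap allocation with `was_made_global: false`.
//         let p = intrinsics::const_allocate(1, 1);
//         p.write(42);
//         // `const_make_global` freezes it (`was_made_global: true`), making it immutable and
//         // allowing it to be referenced from the final value of the constant.
//         intrinsics::const_make_global(p)
//     };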
182
183impl fmt::Display for MemoryKind {
184    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
185        match self {
186            MemoryKind::Heap { was_made_global } => {
187                write!(f, "heap allocation{}", if *was_made_global { " (made global)" } else { "" })
188            }
189        }
190    }
191}
192
193impl interpret::MayLeak for MemoryKind {
194    #[inline(always)]
195    fn may_leak(self) -> bool {
196        match self {
197            MemoryKind::Heap { was_made_global } => was_made_global,
198        }
199    }
200}
201
202impl interpret::MayLeak for ! {
203    #[inline(always)]
204    fn may_leak(self) -> bool {
205        // `self` is uninhabited
206        self
207    }
208}
209
210impl<'tcx> CompileTimeInterpCx<'tcx> {
211    fn location_triple_for_span(&self, span: Span) -> (Symbol, u32, u32) {
212        let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
213        let caller = self.tcx.sess.source_map().lookup_char_pos(topmost.lo());
214
215        use rustc_span::RemapPathScopeComponents;
216        (
217            Symbol::intern(
218                &caller.file.name.display(RemapPathScopeComponents::DIAGNOSTICS).to_string_lossy(),
219            ),
220            u32::try_from(caller.line).unwrap(),
221            u32::try_from(caller.col_display).unwrap().checked_add(1).unwrap(),
222        )
223    }
224
225    /// "Intercept" a function call, because we have something special to do for it.
226    /// All `#[rustc_do_not_const_check]` functions MUST be hooked here.
227    /// If this returns `Some` function, which may be `instance` or a different function with
228    /// compatible arguments, then evaluation should continue with that function.
229    /// If this returns `None`, the function call has been handled and the function has returned.
230    fn hook_special_const_fn(
231        &mut self,
232        instance: ty::Instance<'tcx>,
233        args: &[FnArg<'tcx>],
234        _dest: &PlaceTy<'tcx>,
235        _ret: Option<mir::BasicBlock>,
236    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
237        let def_id = instance.def_id();
238
239        if self.tcx.is_lang_item(def_id, LangItem::PanicDisplay)
240            || self.tcx.is_lang_item(def_id, LangItem::BeginPanic)
241        {
242            let args = Self::copy_fn_args(args);
243            // &str or &&str
244            assert!(args.len() == 1);
245
246            let mut msg_place = self.deref_pointer(&args[0])?;
247            while msg_place.layout.ty.is_ref() {
248                msg_place = self.deref_pointer(&msg_place)?;
249            }
250
251            let msg = Symbol::intern(self.read_str(&msg_place)?);
252            let span = self.find_closest_untracked_caller_location();
253            let (file, line, col) = self.location_triple_for_span(span);
254            return Err(ConstEvalErrKind::Panic { msg, file, line, col }).into();
255        } else if self.tcx.is_lang_item(def_id, LangItem::PanicFmt) {
256            // For panic_fmt, call const_panic_fmt instead.
257            let const_def_id = self.tcx.require_lang_item(LangItem::ConstPanicFmt, self.tcx.span);
258            let new_instance = ty::Instance::expect_resolve(
259                *self.tcx,
260                self.typing_env(),
261                const_def_id,
262                instance.args,
263                self.cur_span(),
264            );
265
266            return interp_ok(Some(new_instance));
267        }
268        interp_ok(Some(instance))
269    }
270
271    /// See documentation on the `ptr_guaranteed_cmp` intrinsic.
272    /// Returns `2` if the result is unknown.
273    /// Returns `1` if the pointers are guaranteed equal.
274    /// Returns `0` if the pointers are guaranteed inequal.
275    ///
276    /// Note that this intrinsic is exposed on stable for comparison with null. In other words, any
277    /// change to this function that affects comparison with null is insta-stable!
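    ///
    /// Illustrative sketch (not a doctest; the unstable `<*const T>::guaranteed_eq` method is
    /// assumed to map the return values `1`/`0`/`2` to `Some(true)`/`Some(false)`/`None`):
    ///
    /// ```ignore (illustrative)
    /// static A: u8 = 0;
    /// static B: u8 = 0;
    /// // Two distinct non-zero-sized statics cannot overlap, so this folds to `Some(false)`.
    /// const NE: Option<bool> = (&raw const A).guaranteed_eq(&raw const B);
    /// ```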
278    fn guaranteed_cmp(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, u8> {
279        interp_ok(match (a, b) {
280            // Comparisons between integers are always known.
281            (Scalar::Int(a), Scalar::Int(b)) => (a == b) as u8,
282            // Comparing a pointer `ptr` with an integer `int` is equivalent to comparing
283            // `ptr-int` with null, so we can reduce this case to a `scalar_may_be_null` test.
284            (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
285                let int = int.to_target_usize(*self.tcx);
286                // The `wrapping_neg` here may produce a value that is not
287                // a valid target usize any more... but `wrapping_offset` handles that correctly.
288                let offset_ptr = ptr.wrapping_offset(Size::from_bytes(int.wrapping_neg()), self);
289                if !self.scalar_may_be_null(Scalar::from_pointer(offset_ptr, self))? {
290                    // `ptr.wrapping_sub(int)` is definitely not equal to `0`, so `ptr != int`
291                    0
292                } else {
293                    // `ptr.wrapping_sub(int)` could be equal to `0`, but might not be,
294                    // so we cannot know for sure if `ptr == int` or not
295                    2
296                }
297            }
298            (Scalar::Ptr(a, _), Scalar::Ptr(b, _)) => {
299                let (a_prov, a_offset) = a.prov_and_relative_offset();
300                let (b_prov, b_offset) = b.prov_and_relative_offset();
301                let a_allocid = a_prov.alloc_id();
302                let b_allocid = b_prov.alloc_id();
303                let a_info = self.get_alloc_info(a_allocid);
304                let b_info = self.get_alloc_info(b_allocid);
305
306                // Check if the pointers cannot be equal due to alignment
307                if a_info.align > Align::ONE && b_info.align > Align::ONE {
308                    let min_align = Ord::min(a_info.align.bytes(), b_info.align.bytes());
309                    let a_residue = a_offset.bytes() % min_align;
310                    let b_residue = b_offset.bytes() % min_align;
311                    if a_residue != b_residue {
312                        // If the two pointers have a different residue modulo their
313                        // common alignment, they cannot be equal.
314                        return interp_ok(0);
315                    }
316                    // The pointers have the same residue modulo their common alignment,
317                    // so they could be equal. Try the other checks.
318                }
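                // Worked example: if both allocations are 4-aligned, every address in them is
                // `base + offset` with `base % 4 == 0`, so addresses are congruent to offsets
                // modulo 4. With `a_offset % 4 == 1` and `b_offset % 4 == 3` the two addresses
                // can never coincide, and we return 0 above.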
319
320                if let (Some(GlobalAlloc::Static(a_did)), Some(GlobalAlloc::Static(b_did))) = (
321                    self.tcx.try_get_global_alloc(a_allocid),
322                    self.tcx.try_get_global_alloc(b_allocid),
323                ) {
324                    if a_allocid == b_allocid {
325                        debug_assert_eq!(
326                            a_did, b_did,
327                            "different static item DefIds had same AllocId? {a_allocid:?} == {b_allocid:?}, {a_did:?} != {b_did:?}"
328                        );
329                        // Comparing two pointers into the same static. As per
330                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro
331                        // a static cannot be duplicated, so if two pointers are into the same
332                        // static, they are equal if and only if their offsets are equal.
333                        (a_offset == b_offset) as u8
334                    } else {
335                        debug_assert_ne!(
336                            a_did, b_did,
337                            "same static item DefId had two different AllocIds? {a_allocid:?} != {b_allocid:?}, {a_did:?} == {b_did:?}"
338                        );
339                        // Comparing two pointers into two different statics.
340                        // We can never determine for sure that two pointers into different statics
341                        // are *equal*, but we can know that they are *inequal* if they are both
342                        // strictly in-bounds (i.e. in-bounds and not one-past-the-end) of
343                        // their respective static, as different non-zero-sized statics cannot
344                        // overlap or be deduplicated as per
345                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro
346                        // (non-deduplication), and
347                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
348                        // (non-overlapping).
349                        if a_offset < a_info.size && b_offset < b_info.size {
350                            0
351                        } else {
352                            // Otherwise, conservatively say we don't know.
353                            // There are some cases we could still return `0` for, e.g.
354                            // if the pointers being equal would require their statics to overlap
355                            // one or more bytes, but for simplicity we currently only check
356                            // strictly in-bounds pointers.
357                            2
358                        }
359                    }
360                } else {
361                    // All other cases we conservatively say we don't know.
362                    //
363                    // For comparing statics to non-statics, as per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
364                    // immutable statics can overlap with other kinds of allocations sometimes.
365                    //
366                    // FIXME: We could be more decisive for (non-zero-sized) mutable statics,
367                    // which cannot overlap with other kinds of allocations.
368                    //
369                    // Functions and vtables can be duplicated and deduplicated, so we
370                    // cannot be sure of runtime equality of pointers to the same one, or the
371                    // runtime inequality of pointers to different ones (see e.g. #73722),
372                    // so comparing those should return 2, whether they are the same allocation
373                    // or not.
374                    //
375                    // `GlobalAlloc::TypeId` exists mostly to prevent consteval from comparing
376                    // `TypeId`s, so comparing those should always return 2, whether they are the
377                    // same allocation or not.
378                    //
379                    // FIXME: We could revisit comparing pointers into the same
380                    // `GlobalAlloc::Memory` once https://github.com/rust-lang/rust/issues/128775
381                    // is fixed (but they can be deduplicated, so comparing pointers into different
382                    // ones should return 2).
383                    2
384                }
385            }
386        })
387    }
388}
389
390impl<'tcx> CompileTimeMachine<'tcx> {
391    #[inline(always)]
392    /// Find the first stack frame that is within the current crate, if any.
393    /// Otherwise, return the crate's `HirId`.
394    pub fn best_lint_scope(&self, tcx: TyCtxt<'tcx>) -> hir::HirId {
395        self.stack.iter().find_map(|frame| frame.lint_root(tcx)).unwrap_or(CRATE_HIR_ID)
396    }
397}
398
399impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> {
400    compile_time_machine!(<'tcx>);
401
402    const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error
403
404    #[inline(always)]
405    fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool {
406        matches!(ecx.machine.check_alignment, CheckAlignment::Error)
407    }
408
409    #[inline(always)]
410    fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool {
411        ecx.tcx.sess.opts.unstable_opts.extra_const_ub_checks || layout.is_uninhabited()
412    }
413
414    fn load_mir(
415        ecx: &InterpCx<'tcx, Self>,
416        instance: ty::InstanceKind<'tcx>,
417    ) -> &'tcx mir::Body<'tcx> {
418        match instance {
419            ty::InstanceKind::Item(def) => ecx.tcx.mir_for_ctfe(def),
420            _ => ecx.tcx.instance_mir(instance),
421        }
422    }
423
424    fn find_mir_or_eval_fn(
425        ecx: &mut InterpCx<'tcx, Self>,
426        orig_instance: ty::Instance<'tcx>,
427        _abi: &FnAbi<'tcx, Ty<'tcx>>,
428        args: &[FnArg<'tcx>],
429        dest: &PlaceTy<'tcx>,
430        ret: Option<mir::BasicBlock>,
431        _unwind: mir::UnwindAction, // unwinding is not supported in consts
432    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
433        debug!("find_mir_or_eval_fn: {:?}", orig_instance);
434
435        // Replace some functions.
436        let Some(instance) = ecx.hook_special_const_fn(orig_instance, args, dest, ret)? else {
437            // Call has already been handled.
438            return interp_ok(None);
439        };
440
441        // Only check non-glue functions
442        if let ty::InstanceKind::Item(def) = instance.def {
443            // Execution might have wandered off into other crates, so we cannot do a stability-
444            // sensitive check here. But we can at least rule out functions that are not const at
445            // all. That said, we have to allow calling functions inside a `const trait`. These
446            // *are* const-checked!
447            if !ecx.tcx.is_const_fn(def) || find_attr!(ecx.tcx, def, RustcDoNotConstCheck) {
448                // We certainly do *not* want to actually call the fn
449                // though, so be sure we return here.
450                throw_unsup_format!("calling non-const function `{}`", instance)
451            }
452        }
453
454        // This is a const fn. Call it.
455        // In case of replacement, we return the *original* instance to make backtraces work out
456        // (and we hope this does not confuse the FnAbi checks too much).
457        interp_ok(Some((ecx.load_mir(instance.def, None)?, orig_instance)))
458    }
459
460    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
461        let msg = Symbol::intern(msg);
462        let span = ecx.find_closest_untracked_caller_location();
463        let (file, line, col) = ecx.location_triple_for_span(span);
464        Err(ConstEvalErrKind::Panic { msg, file, line, col }).into()
465    }
466
467    fn call_intrinsic(
468        ecx: &mut InterpCx<'tcx, Self>,
469        instance: ty::Instance<'tcx>,
470        args: &[OpTy<'tcx>],
471        dest: &PlaceTy<'tcx, Self::Provenance>,
472        target: Option<mir::BasicBlock>,
473        _unwind: mir::UnwindAction,
474    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
475        // Shared intrinsics.
476        if ecx.eval_intrinsic(instance, args, dest, target)? {
477            return interp_ok(None);
478        }
479        let intrinsic_name = ecx.tcx.item_name(instance.def_id());
480
481        // CTFE-specific intrinsics.
482        match intrinsic_name {
483            sym::ptr_guaranteed_cmp => {
484                let a = ecx.read_scalar(&args[0])?;
485                let b = ecx.read_scalar(&args[1])?;
486                let cmp = ecx.guaranteed_cmp(a, b)?;
487                ecx.write_scalar(Scalar::from_u8(cmp), dest)?;
488            }
489            sym::const_allocate => {
490                let size = ecx.read_scalar(&args[0])?.to_target_usize(ecx)?;
491                let align = ecx.read_scalar(&args[1])?.to_target_usize(ecx)?;
492
493                let align = match Align::from_bytes(align) {
494                    Ok(a) => a,
495                    Err(err) => {
496                        throw_ub_format!("invalid align passed to `const_allocate`: {err}")
497                    }
498                };
499
500                let ptr = ecx.allocate_ptr(
501                    Size::from_bytes(size),
502                    align,
503                    interpret::MemoryKind::Machine(MemoryKind::Heap { was_made_global: false }),
504                    AllocInit::Uninit,
505                )?;
506                ecx.write_pointer(ptr, dest)?;
507            }
508            sym::const_deallocate => {
509                let ptr = ecx.read_pointer(&args[0])?;
510                let size = ecx.read_scalar(&args[1])?.to_target_usize(ecx)?;
511                let align = ecx.read_scalar(&args[2])?.to_target_usize(ecx)?;
512
513                let size = Size::from_bytes(size);
514                let align = match Align::from_bytes(align) {
515                    Ok(a) => a,
516                    Err(err) => {
517                        throw_ub_format!("invalid align passed to `const_deallocate`: {err}")
518                    }
519                };
520
521                // If an allocation was created in another const,
522                // we don't deallocate it.
523                let (alloc_id, _, _) = ecx.ptr_get_alloc_id(ptr, 0)?;
524                let is_allocated_in_another_const = matches!(
525                    ecx.tcx.try_get_global_alloc(alloc_id),
526                    Some(interpret::GlobalAlloc::Memory(_))
527                );
528
529                if !is_allocated_in_another_const {
530                    ecx.deallocate_ptr(
531                        ptr,
532                        Some((size, align)),
533                        interpret::MemoryKind::Machine(MemoryKind::Heap { was_made_global: false }),
534                    )?;
535                }
536            }
537
538            sym::const_make_global => {
539                let ptr = ecx.read_pointer(&args[0])?;
540                ecx.make_const_heap_ptr_global(ptr)?;
541                ecx.write_pointer(ptr, dest)?;
542            }
543
544            // The intrinsic represents whether the value is known to the optimizer (LLVM).
545            // We're not doing any optimizations here, so there is no optimizer that could know the value.
546            // (We know the value here in the machine of course, but this is the runtime of that code,
547            // not the optimization stage.)
548            sym::is_val_statically_known => ecx.write_scalar(Scalar::from_bool(false), dest)?,
549
550            // We handle these here since Miri does not want to have them.
551            sym::assert_inhabited
552            | sym::assert_zero_valid
553            | sym::assert_mem_uninitialized_valid => {
554                let ty = instance.args.type_at(0);
555                let requirement = ValidityRequirement::from_intrinsic(intrinsic_name).unwrap();
556
557                let should_panic = !ecx
558                    .tcx
559                    .check_validity_requirement((requirement, ecx.typing_env().as_query_input(ty)))
560                    .map_err(|_| err_inval!(TooGeneric))?;
561
562                if should_panic {
563                    let layout = ecx.layout_of(ty)?;
564
565                    let msg = match requirement {
566                        // For *all* intrinsics we first check `is_uninhabited` to give a more specific
567                        // error message.
568                        _ if layout.is_uninhabited() => format!(
569                            "aborted execution: attempted to instantiate uninhabited type `{ty}`"
570                        ),
571                        ValidityRequirement::Inhabited => bug!("handled earlier"),
572                        ValidityRequirement::Zero => format!(
573                            "aborted execution: attempted to zero-initialize type `{ty}`, which is invalid"
574                        ),
575                        ValidityRequirement::UninitMitigated0x01Fill => format!(
576                            "aborted execution: attempted to leave type `{ty}` uninitialized, which is invalid"
577                        ),
578                        ValidityRequirement::Uninit => bug!("assert_uninit_valid doesn't exist"),
579                    };
580
581                    Self::panic_nounwind(ecx, &msg)?;
582                    // Skip the `return_to_block` at the end (we panicked, we do not return).
583                    return interp_ok(None);
584                }
585            }
586
587            sym::type_id_vtable => {
588                let tp_ty = ecx.read_type_id(&args[0])?;
589                let result_ty = ecx.read_type_id(&args[1])?;
590
591                let (implements_trait, preds) = type_implements_dyn_trait(ecx, tp_ty, result_ty)?;
592
593                if implements_trait {
594                    let vtable_ptr = ecx.get_vtable_ptr(tp_ty, preds)?;
595                    // Writing a non-null pointer into an `Option<NonNull>` will automatically make it `Some`.
596                    ecx.write_pointer(vtable_ptr, dest)?;
597                } else {
598                    // Write `None`
599                    ecx.write_discriminant(FIRST_VARIANT, dest)?;
600                }
601            }
602
603            sym::type_of => {
604                let ty = ecx.read_type_id(&args[0])?;
605                ecx.write_type_info(ty, dest)?;
606            }
607
608            sym::field_offset => {
609                let frt_ty = instance.args.type_at(0);
610                ensure_monomorphic_enough(ecx.tcx.tcx, frt_ty)?;
611
612                let (ty, variant, field) = if let ty::Adt(def, args) = frt_ty.kind()
613                    && let Some(FieldInfo { base, variant_idx, field_idx, .. }) =
614                        def.field_representing_type_info(ecx.tcx.tcx, args)
615                {
616                    (base, variant_idx, field_idx)
617                } else {
618                    span_bug!(ecx.cur_span(), "expected field representing type, got {frt_ty}")
619                };
620                let layout = ecx.layout_of(ty)?;
621                let cx = ty::layout::LayoutCx::new(ecx.tcx.tcx, ecx.typing_env());
622
623                let layout = layout.for_variant(&cx, variant);
624                let offset = layout.fields.offset(field.index()).bytes();
625
626                ecx.write_scalar(Scalar::from_target_usize(offset, ecx), dest)?;
627            }
628
629            _ => {
630                // We haven't handled the intrinsic, let's see if we can use a fallback body.
631                if ecx.tcx.intrinsic(instance.def_id()).unwrap().must_be_overridden {
632                    throw_unsup_format!(
633                        "intrinsic `{intrinsic_name}` is not supported at compile-time"
634                    );
635                }
636                return interp_ok(Some(ty::Instance {
637                    def: ty::InstanceKind::Item(instance.def_id()),
638                    args: instance.args,
639                }));
640            }
641        }
642
643        // Intrinsic is done, jump to next block.
644        ecx.return_to_block(target)?;
645        interp_ok(None)
646    }
647
648    fn assert_panic(
649        ecx: &mut InterpCx<'tcx, Self>,
650        msg: &AssertMessage<'tcx>,
651        _unwind: mir::UnwindAction,
652    ) -> InterpResult<'tcx> {
653        use rustc_middle::mir::AssertKind::*;
654        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
655        let eval_to_int =
656            |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
657        let err = match msg {
658            BoundsCheck { len, index } => {
659                let len = eval_to_int(len)?;
660                let index = eval_to_int(index)?;
661                BoundsCheck { len, index }
662            }
663            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
664            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
665            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
666            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
667            ResumedAfterReturn(coroutine_kind) => ResumedAfterReturn(*coroutine_kind),
668            ResumedAfterPanic(coroutine_kind) => ResumedAfterPanic(*coroutine_kind),
669            ResumedAfterDrop(coroutine_kind) => ResumedAfterDrop(*coroutine_kind),
670            MisalignedPointerDereference { required, found } => MisalignedPointerDereference {
671                required: eval_to_int(required)?,
672                found: eval_to_int(found)?,
673            },
674            NullPointerDereference => NullPointerDereference,
675            InvalidEnumConstruction(source) => InvalidEnumConstruction(eval_to_int(source)?),
676        };
677        Err(ConstEvalErrKind::AssertFailure(err)).into()
678    }
679
680    #[inline(always)]
681    fn runtime_checks(
682        _ecx: &InterpCx<'tcx, Self>,
683        _r: mir::RuntimeChecks,
684    ) -> InterpResult<'tcx, bool> {
685        // We can't look at `tcx.sess` here as that can differ across crates, which can lead to
686        // unsound differences in evaluating the same constant at different instantiation sites.
687        interp_ok(true)
688    }
689
690    fn binary_ptr_op(
691        _ecx: &InterpCx<'tcx, Self>,
692        _bin_op: mir::BinOp,
693        _left: &ImmTy<'tcx>,
694        _right: &ImmTy<'tcx>,
695    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
696        throw_unsup_format!("pointer arithmetic or comparison is not supported at compile-time");
697    }
698
699    fn increment_const_eval_counter(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
700        // The step limit has already been hit in a previous call to `increment_const_eval_counter`.
701
702        if let Some(new_steps) = ecx.machine.num_evaluated_steps.checked_add(1) {
703            let (limit, start) = if ecx.tcx.sess.opts.unstable_opts.tiny_const_eval_limit {
704                (TINY_LINT_TERMINATOR_LIMIT, TINY_LINT_TERMINATOR_LIMIT)
705            } else {
706                (LINT_TERMINATOR_LIMIT, PROGRESS_INDICATOR_START)
707            };
708
709            ecx.machine.num_evaluated_steps = new_steps;
710            // By default, we have a *deny* lint kicking in after some time
711            // to ensure `loop {}` doesn't just go forever.
712            // In case that lint got reduced, in particular for `--cap-lint` situations, we also
713            // have a hard warning shown every now and then for really long executions.
714            if new_steps == limit {
715                // By default, we stop after a million steps, but the user can disable this lint
716                // to be able to run until the heat death of the universe or power loss, whichever
717                // comes first.
718                let hir_id = ecx.machine.best_lint_scope(*ecx.tcx);
719                let is_error = ecx
720                    .tcx
721                    .lint_level_at_node(
722                        rustc_session::lint::builtin::LONG_RUNNING_CONST_EVAL,
723                        hir_id,
724                    )
725                    .level
726                    .is_error();
727                let span = ecx.cur_span();
728                ecx.tcx.emit_node_span_lint(
729                    rustc_session::lint::builtin::LONG_RUNNING_CONST_EVAL,
730                    hir_id,
731                    span,
732                    LongRunning { item_span: ecx.tcx.span },
733                );
734                // If this was a hard error, don't bother continuing evaluation.
735                if is_error {
736                    let guard = ecx
737                        .tcx
738                        .dcx()
739                        .span_delayed_bug(span, "The deny lint should have already errored");
740                    throw_inval!(AlreadyReported(ReportedErrorInfo::allowed_in_infallible(guard)));
741                }
742            } else if new_steps > start && new_steps.is_power_of_two() {
743                // Only report after a certain number of terminators have been evaluated and the
744                // current number of evaluated terminators is a power of 2. The latter gives us a cheap
745                // way to implement exponential backoff.
746                let span = ecx.cur_span();
747                // We store a unique number in `force_duplicate` to evade `-Z deduplicate-diagnostics`.
748                // `new_steps` is guaranteed to be unique because `ecx.machine.num_evaluated_steps` is
749                // always increasing.
750                ecx.tcx.dcx().emit_warn(LongRunningWarn {
751                    span,
752                    item_span: ecx.tcx.span,
753                    force_duplicate: new_steps,
754                });
755            }
756        }
757
758        interp_ok(())
759    }
760
761    #[inline(always)]
762    fn expose_provenance(
763        _ecx: &InterpCx<'tcx, Self>,
764        _provenance: Self::Provenance,
765    ) -> InterpResult<'tcx> {
766        // This is only reachable with -Zunleash-the-miri-inside-of-you.
767        throw_unsup_format!("exposing pointers is not possible at compile-time")
768    }
769
770    #[inline(always)]
771    fn init_frame(
772        ecx: &mut InterpCx<'tcx, Self>,
773        frame: Frame<'tcx>,
774    ) -> InterpResult<'tcx, Frame<'tcx>> {
775        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
776        if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
777            throw_exhaust!(StackFrameLimitReached)
778        } else {
779            interp_ok(frame)
780        }
781    }
782
783    #[inline(always)]
784    fn stack<'a>(
785        ecx: &'a InterpCx<'tcx, Self>,
786    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
787        &ecx.machine.stack
788    }
789
790    #[inline(always)]
791    fn stack_mut<'a>(
792        ecx: &'a mut InterpCx<'tcx, Self>,
793    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
794        &mut ecx.machine.stack
795    }
796
797    fn before_access_global(
798        _tcx: TyCtxtAt<'tcx>,
799        machine: &Self,
800        alloc_id: AllocId,
801        alloc: ConstAllocation<'tcx>,
802        _static_def_id: Option<DefId>,
803        is_write: bool,
804    ) -> InterpResult<'tcx> {
805        let alloc = alloc.inner();
806        if is_write {
807            // Write access. These are never allowed, but we give a targeted error message.
808            match alloc.mutability {
809                Mutability::Not => throw_ub!(WriteToReadOnly(alloc_id)),
810                Mutability::Mut => Err(ConstEvalErrKind::ModifiedGlobal).into(),
811            }
812        } else {
813            // Read access. These are usually allowed, with some exceptions.
814            if machine.can_access_mut_global == CanAccessMutGlobal::Yes {
815                // Machine configuration allows us read from anything (e.g., `static` initializer).
816                interp_ok(())
817            } else if alloc.mutability == Mutability::Mut {
818                // Machine configuration does not allow us to read statics (e.g., `const`
819                // initializer).
820                Err(ConstEvalErrKind::ConstAccessesMutGlobal).into()
821            } else {
822                // Immutable global, this read is fine.
823                assert_eq!(alloc.mutability, Mutability::Not);
824                interp_ok(())
825            }
826        }
827    }
828
829    fn retag_ptr_value(
830        ecx: &mut InterpCx<'tcx, Self>,
831        val: &ImmTy<'tcx, CtfeProvenance>,
832        _ty: Ty<'tcx>,
833    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, CtfeProvenance>>> {
834        if matches!(ecx.machine.retag_mode, RetagMode::None | RetagMode::Raw) {
835            return interp_ok(None);
836        }
837        // If it's a frozen shared reference that's not already immutable, potentially make it immutable.
838        // (Do nothing on `None` provenance, that cannot store immutability anyway.)
839        if let ty::Ref(_, ty, mutbl) = val.layout.ty.kind()
840            && *mutbl == Mutability::Not
841            && val
842                .to_scalar_and_meta()
843                .0
844                .to_pointer(ecx)?
845                .provenance
846                .is_some_and(|p| !p.immutable())
847        {
848            // That next check is expensive, that's why we have all the guards above.
849            let is_immutable = ty.is_freeze(*ecx.tcx, ecx.typing_env());
850            let place = ecx.imm_ptr_to_mplace(val)?;
851            let new_place = if is_immutable {
852                place.map_provenance(CtfeProvenance::as_immutable)
853            } else {
854                // Even if it is not immutable, remember that it is a shared reference.
855                // This allows it to become part of the final value of the constant.
856                // (See <https://github.com/rust-lang/rust/pull/128543> for why we allow this
857                // even when there is interior mutability.)
858                place.map_provenance(CtfeProvenance::as_shared_ref)
859            };
860            interp_ok(Some(ImmTy::from_immediate(new_place.to_ref(ecx), val.layout)))
861        } else {
862            interp_ok(None)
863        }
864    }
865
866    fn with_retag_mode<T>(
867        ecx: &mut InterpCx<'tcx, Self>,
868        mode: RetagMode,
869        f: impl FnOnce(&mut InterpCx<'tcx, Self>) -> InterpResult<'tcx, T>,
870    ) -> InterpResult<'tcx, T> {
871        let old_mode = mem::replace(&mut ecx.machine.retag_mode, mode);
872        let ret = f(ecx);
873        ecx.machine.retag_mode = old_mode;
874        ret
875    }
876
877    fn before_memory_write(
878        _tcx: TyCtxtAt<'tcx>,
879        _machine: &mut Self,
880        _alloc_extra: &mut Self::AllocExtra,
881        _ptr: Pointer<Option<Self::Provenance>>,
882        (_alloc_id, immutable): (AllocId, bool),
883        range: AllocRange,
884    ) -> InterpResult<'tcx> {
885        if range.size == Size::ZERO {
886            // Nothing to check.
887            return interp_ok(());
888        }
889        // Reject writes through immutable pointers.
890        if immutable {
891            return Err(ConstEvalErrKind::WriteThroughImmutablePointer).into();
892        }
893        // Everything else is fine.
894        interp_ok(())
895    }
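
    // Illustrative sketch of what the `immutable` rejection above catches (exact surface
    // diagnostics aside): a write during const eval through a pointer whose provenance was
    // derived from a shared reference, roughly
    //
    //     const _: () = unsafe {
    //         let x = 0i32;
    //         let p = &x as *const i32 as *mut i32;
    //         *p = 1; // provenance was tagged immutable by `retag_ptr_value`
    //     };
    //
    // which is reported as `ConstEvalErrKind::WriteThroughImmutablePointer`.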
896
897    fn before_alloc_access(
898        tcx: TyCtxtAt<'tcx>,
899        machine: &Self,
900        alloc_id: AllocId,
901    ) -> InterpResult<'tcx> {
902        if machine.stack.is_empty() {
903            // Get out of the way for the final copy.
904            return interp_ok(());
905        }
906        // Check if this is the currently evaluated static.
907        if Some(alloc_id) == machine.static_root_ids.map(|(id, _)| id) {
908            return Err(ConstEvalErrKind::RecursiveStatic).into();
909        }
910        // If this is another static, make sure we fire off the query to detect cycles.
911        // But only do that when checks for static recursion are enabled.
912        if machine.static_root_ids.is_some() {
913            if let Some(GlobalAlloc::Static(def_id)) = tcx.try_get_global_alloc(alloc_id) {
914                if tcx.is_foreign_item(def_id) {
915                    throw_unsup!(ExternStatic(def_id));
916                }
917                tcx.eval_static_initializer(def_id)?;
918            }
919        }
920        interp_ok(())
921    }
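
    // Illustrative sketch of the `RecursiveStatic` case above: an initializer that reads the very
    // static it is initializing, roughly `static S: u32 = unsafe { (&raw const S).read() };`,
    // accesses its own base allocation and is rejected here instead of looping.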
922
923    fn cached_union_data_range<'e>(
924        ecx: &'e mut InterpCx<'tcx, Self>,
925        ty: Ty<'tcx>,
926        compute_range: impl FnOnce() -> RangeSet,
927    ) -> Cow<'e, RangeSet> {
928        if ecx.tcx.sess.opts.unstable_opts.extra_const_ub_checks {
929            Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
930        } else {
931            // Don't bother caching, we're only doing one validation at the end anyway.
932            Cow::Owned(compute_range())
933        }
934    }
935
936    fn get_default_alloc_params(&self) -> <Self::Bytes as mir::interpret::AllocBytes>::AllocParams {
937    }
938}
939
940// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
941// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
942// at the bottom of this file.