// rustc_const_eval/interpret/machine.rs

//! This module contains everything needed to instantiate an interpreter.
//! This separation exists to ensure that no fancy miri features like
//! interpreting common C functions leak into CTFE.

use std::borrow::{Borrow, Cow};
use std::fmt::Debug;
use std::hash::Hash;

use rustc_abi::{Align, Size};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{mir, ty};
use rustc_span::def_id::DefId;
use rustc_target::callconv::FnAbi;

use super::{
    AllocBytes, AllocId, AllocKind, AllocRange, Allocation, CTFE_ALLOC_SALT, ConstAllocation,
    CtfeProvenance, EnteredTraceSpan, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy,
    MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, RangeSet, interp_ok, throw_unsup,
};

/// Data returned by [`Machine::after_stack_pop`], and consumed by
/// [`InterpCx::return_from_current_stack_frame`] to determine what actions should be done when
/// returning from a stack frame.
// NOTE(review): the original text contained the *expanded* derive impls (Eq, PartialEq,
// Debug, Copy, Clone) fused into the attribute; restored to the plain derive list here.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum ReturnAction {
    /// Indicates that no special handling should be
    /// done - we'll either return normally or unwind
    /// based on the terminator for the function
    /// we're leaving.
    Normal,

    /// Indicates that we should *not* jump to the return/unwind address, as the callback already
    /// took care of everything.
    NoJump,

    /// Returned by [`InterpCx::pop_stack_frame_raw`] when no cleanup should be done.
    NoCleanup,
}

/// The currently active retagging mode.
// NOTE(review): the original text contained the *expanded* derive impls (Eq, PartialEq,
// Debug, Copy, Clone) fused into the attribute; restored to the plain derive list here.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum RetagMode {
    /// A regular retag.
    Default,
    /// Retag preparing for a two-phase borrow.
    TwoPhase,
    /// The initial retag of arguments when entering a function.
    FnEntry,
    /// Retagging for reference-to-raw-pointer cast.
    Raw,
    /// No retagging.
    None,
}

/// Whether this kind of memory is allowed to leak
pub trait MayLeak: Copy {
    /// Returns `true` if memory of this kind may leak without that being an error.
    fn may_leak(self) -> bool;
}

/// The functionality needed by memory to manage its allocations
pub trait AllocMap<K: Hash + Eq, V> {
    /// Tests if the map contains the given key.
    /// Deliberately takes `&mut` because that is sufficient, and some implementations
    /// can be more efficient then (using `RefCell::get_mut`).
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Callers should prefer [`AllocMap::contains_key`] when it is possible to call because it may
    /// be more efficient. This function exists for callers that only have a shared reference
    /// (which might make it slightly less efficient than `contains_key`, e.g. if
    /// the data is stored inside a `RefCell`).
    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Inserts a new entry into the map.
    fn insert(&mut self, k: K, v: V) -> Option<V>;

    /// Removes an entry from the map.
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>;

    /// Returns data based on the keys and values in the map.
    fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;

    /// Returns a reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E>;

    /// Returns a mutable reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E>;

    /// Read-only lookup.
    fn get(&self, k: K) -> Option<&V> {
        self.get_or(k, || Err(())).ok()
    }

    /// Mutable lookup.
    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        self.get_mut_or(k, || Err(())).ok()
    }
}

112/// Methods of this trait signifies a point where CTFE evaluation would fail
113/// and some use case dependent behaviour can instead be applied.
114pub trait Machine<'tcx>: Sized {
115    /// Additional memory kinds a machine wishes to distinguish from the builtin ones
116    type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;
117
118    /// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
119    type Provenance: Provenance + Eq + Hash + 'static;
120
121    /// When getting the AllocId of a pointer, some extra data is also obtained from the provenance
122    /// that is passed to memory access hooks so they can do things with it.
123    type ProvenanceExtra: Copy + 'static;
124
125    /// Machines can define extra (non-instance) things that represent values of function pointers.
126    /// For example, Miri uses this to return a function pointer from `dlsym`
127    /// that can later be called to execute the right thing.
128    type ExtraFnVal: Debug + Copy;
129
130    /// Extra data stored in every call frame.
131    type FrameExtra;
132
133    /// Extra data stored in every allocation.
134    type AllocExtra: Debug + Clone + 'tcx;
135
136    /// Type for the bytes of the allocation.
137    type Bytes: AllocBytes + 'static;
138
139    /// Memory's allocation map
140    type MemoryMap: AllocMap<
141            AllocId,
142            (
143                MemoryKind<Self::MemoryKind>,
144                Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>,
145            ),
146        > + Default
147        + Clone;
148
149    /// The memory kind to use for copied global memory (held in `tcx`) --
150    /// or None if such memory should not be mutated and thus any such attempt will cause
151    /// a `ModifiedStatic` error to be raised.
152    /// Statics are copied under two circumstances: When they are mutated, and when
153    /// `adjust_allocation` (see below) returns an owned allocation
154    /// that is added to the memory so that the work is not done twice.
155    const GLOBAL_KIND: Option<Self::MemoryKind>;
156
157    /// Should the machine panic on allocation failures?
158    const PANIC_ON_ALLOC_FAIL: bool;
159
160    /// Determines whether `eval_mir_constant` can never fail because all required consts have
161    /// already been checked before.
162    const ALL_CONSTS_ARE_PRECHECKED: bool = true;
163
164    /// Whether memory accesses should be alignment-checked.
165    fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool;
166
167    /// Gives the machine a chance to detect more misalignment than the built-in checks would catch.
168    #[inline(always)]
169    fn alignment_check(
170        _ecx: &InterpCx<'tcx, Self>,
171        _alloc_id: AllocId,
172        _alloc_align: Align,
173        _alloc_kind: AllocKind,
174        _offset: Size,
175        _align: Align,
176    ) -> Option<Misalignment> {
177        None
178    }
179
180    /// Whether to enforce the validity invariant for a specific layout.
181    fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool;
182    /// Whether to enforce the validity invariant *recursively*.
183    fn enforce_validity_recursively(
184        _ecx: &InterpCx<'tcx, Self>,
185        _layout: TyAndLayout<'tcx>,
186    ) -> bool {
187        false
188    }
189
190    /// Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually
191    /// check for overflow.
192    fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool;
193
194    /// Entry point for obtaining the MIR of anything that should get evaluated.
195    /// So not just functions and shims, but also const/static initializers, anonymous
196    /// constants, ...
197    fn load_mir(
198        ecx: &InterpCx<'tcx, Self>,
199        instance: ty::InstanceKind<'tcx>,
200    ) -> &'tcx mir::Body<'tcx> {
201        ecx.tcx.instance_mir(instance)
202    }
203
204    /// Entry point to all function calls.
205    ///
206    /// Returns either the mir to use for the call, or `None` if execution should
207    /// just proceed (which usually means this hook did all the work that the
208    /// called function should usually have done). In the latter case, it is
209    /// this hook's responsibility to advance the instruction pointer!
210    /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
211    /// nor just jump to `ret`, but instead push their own stack frame.)
212    /// Passing `dest`and `ret` in the same `Option` proved very annoying when only one of them
213    /// was used.
214    fn find_mir_or_eval_fn(
215        ecx: &mut InterpCx<'tcx, Self>,
216        instance: ty::Instance<'tcx>,
217        abi: &FnAbi<'tcx, Ty<'tcx>>,
218        args: &[FnArg<'tcx, Self::Provenance>],
219        destination: &PlaceTy<'tcx, Self::Provenance>,
220        target: Option<mir::BasicBlock>,
221        unwind: mir::UnwindAction,
222    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>>;
223
224    /// Execute `fn_val`. It is the hook's responsibility to advance the instruction
225    /// pointer as appropriate.
226    fn call_extra_fn(
227        ecx: &mut InterpCx<'tcx, Self>,
228        fn_val: Self::ExtraFnVal,
229        abi: &FnAbi<'tcx, Ty<'tcx>>,
230        args: &[FnArg<'tcx, Self::Provenance>],
231        destination: &PlaceTy<'tcx, Self::Provenance>,
232        target: Option<mir::BasicBlock>,
233        unwind: mir::UnwindAction,
234    ) -> InterpResult<'tcx>;
235
236    /// Directly process an intrinsic without pushing a stack frame. It is the hook's
237    /// responsibility to advance the instruction pointer as appropriate.
238    ///
239    /// Returns `None` if the intrinsic was fully handled.
240    /// Otherwise, returns an `Instance` of the function that implements the intrinsic.
241    fn call_intrinsic(
242        ecx: &mut InterpCx<'tcx, Self>,
243        instance: ty::Instance<'tcx>,
244        args: &[OpTy<'tcx, Self::Provenance>],
245        destination: &PlaceTy<'tcx, Self::Provenance>,
246        target: Option<mir::BasicBlock>,
247        unwind: mir::UnwindAction,
248    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>>;
249
250    /// Check whether the given function may be executed on the current machine, in terms of the
251    /// target features is requires.
252    fn check_fn_target_features(
253        _ecx: &InterpCx<'tcx, Self>,
254        _instance: ty::Instance<'tcx>,
255    ) -> InterpResult<'tcx>;
256
257    /// Called to evaluate `Assert` MIR terminators that trigger a panic.
258    fn assert_panic(
259        ecx: &mut InterpCx<'tcx, Self>,
260        msg: &mir::AssertMessage<'tcx>,
261        unwind: mir::UnwindAction,
262    ) -> InterpResult<'tcx>;
263
264    /// Called to trigger a non-unwinding panic.
265    fn panic_nounwind(_ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx>;
266
267    /// Called when unwinding reached a state where execution should be terminated.
268    fn unwind_terminate(
269        ecx: &mut InterpCx<'tcx, Self>,
270        reason: mir::UnwindTerminateReason,
271    ) -> InterpResult<'tcx>;
272
273    /// Called for all binary operations where the LHS has pointer type.
274    ///
275    /// Returns a (value, overflowed) pair if the operation succeeded
276    fn binary_ptr_op(
277        ecx: &InterpCx<'tcx, Self>,
278        bin_op: mir::BinOp,
279        left: &ImmTy<'tcx, Self::Provenance>,
280        right: &ImmTy<'tcx, Self::Provenance>,
281    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>;
282
283    /// Generate the NaN returned by a float operation, given the list of inputs.
284    /// (This is all inputs, not just NaN inputs!)
285    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
286        _ecx: &InterpCx<'tcx, Self>,
287        _inputs: &[F1],
288    ) -> F2 {
289        // By default we always return the preferred NaN.
290        F2::NAN
291    }
292
293    /// Apply non-determinism to float operations that do not return a precise result.
294    fn apply_float_nondet(
295        _ecx: &mut InterpCx<'tcx, Self>,
296        val: ImmTy<'tcx, Self::Provenance>,
297    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
298        interp_ok(val)
299    }
300
301    /// Determines the result of `min`/`max` on floats when the arguments are equal.
302    fn equal_float_min_max<F: Float>(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F {
303        // By default, we pick the left argument.
304        a
305    }
306
307    /// Determines whether the `fmuladd` intrinsics fuse the multiply-add or use separate operations.
308    fn float_fuse_mul_add(_ecx: &InterpCx<'tcx, Self>) -> bool;
309
310    /// Called before a basic block terminator is executed.
311    #[inline]
312    fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
313        interp_ok(())
314    }
315
316    /// Determines the result of a `Operand::RuntimeChecks` invocation.
317    fn runtime_checks(
318        _ecx: &InterpCx<'tcx, Self>,
319        r: mir::RuntimeChecks,
320    ) -> InterpResult<'tcx, bool>;
321
322    /// Called when the interpreter encounters a `StatementKind::ConstEvalCounter` instruction.
323    /// You can use this to detect long or endlessly running programs.
324    #[inline]
325    fn increment_const_eval_counter(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
326        interp_ok(())
327    }
328
329    /// Called before a global allocation is accessed.
330    /// `def_id` is `Some` if this is the "lazy" allocation of a static.
331    #[inline]
332    fn before_access_global(
333        _tcx: TyCtxtAt<'tcx>,
334        _machine: &Self,
335        _alloc_id: AllocId,
336        _allocation: ConstAllocation<'tcx>,
337        _static_def_id: Option<DefId>,
338        _is_write: bool,
339    ) -> InterpResult<'tcx> {
340        interp_ok(())
341    }
342
343    /// Return the `AllocId` for the given thread-local static in the current thread.
344    fn thread_local_static_pointer(
345        _ecx: &mut InterpCx<'tcx, Self>,
346        def_id: DefId,
347    ) -> InterpResult<'tcx, Pointer<Self::Provenance>> {
348        do yeet ::rustc_middle::mir::interpret::InterpErrorKind::Unsupported(::rustc_middle::mir::interpret::UnsupportedOpInfo::ThreadLocalStatic(def_id))throw_unsup!(ThreadLocalStatic(def_id))
349    }
350
351    /// Return the `AllocId` for the given `extern static`.
352    fn extern_static_pointer(
353        ecx: &InterpCx<'tcx, Self>,
354        def_id: DefId,
355    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
356
357    /// "Int-to-pointer cast"
358    fn ptr_from_addr_cast(
359        ecx: &InterpCx<'tcx, Self>,
360        addr: u64,
361    ) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;
362
363    /// Marks a pointer as exposed, allowing its provenance
364    /// to be recovered. "Pointer-to-int cast"
365    fn expose_provenance(
366        ecx: &InterpCx<'tcx, Self>,
367        provenance: Self::Provenance,
368    ) -> InterpResult<'tcx>;
369
370    /// Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
371    /// `size` says how many bytes of memory are expected at that pointer. The *sign* of `size` can
372    /// be used to disambiguate situations where a wildcard pointer sits right in between two
373    /// allocations.
374    ///
375    /// If `ptr.provenance.get_alloc_id()` is `Some(p)`, the returned `AllocId` must be `p`.
376    /// The resulting `AllocId` will just be used for that one step and the forgotten again
377    /// (i.e., we'll never turn the data returned here back into a `Pointer` that might be
378    /// stored in machine state).
379    ///
380    /// When this fails, that means the pointer does not point to a live allocation.
381    fn ptr_get_alloc(
382        ecx: &InterpCx<'tcx, Self>,
383        ptr: Pointer<Self::Provenance>,
384        size: i64,
385    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;
386
387    /// Return a "root" pointer for the given allocation: the one that is used for direct
388    /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
389    ///
390    /// Not called on `extern` or thread-local statics (those use the methods above).
391    ///
392    /// `kind` is the kind of the allocation the pointer points to; it can be `None` when
393    /// it's a global and `GLOBAL_KIND` is `None`.
394    fn adjust_alloc_root_pointer(
395        ecx: &InterpCx<'tcx, Self>,
396        ptr: Pointer,
397        kind: Option<MemoryKind<Self::MemoryKind>>,
398    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
399
400    /// Called to adjust global allocations to the Provenance and AllocExtra of this machine.
401    ///
402    /// If `alloc` contains pointers, then they are all pointing to globals.
403    ///
404    /// This should avoid copying if no work has to be done! If this returns an owned
405    /// allocation (because a copy had to be done to adjust things), machine memory will
406    /// cache the result. (This relies on `AllocMap::get_or` being able to add the
407    /// owned allocation to the map even when the map is shared.)
408    fn adjust_global_allocation<'b>(
409        ecx: &InterpCx<'tcx, Self>,
410        id: AllocId,
411        alloc: &'b Allocation,
412    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;
413
414    /// Initialize the extra state of an allocation local to this machine.
415    ///
416    /// This is guaranteed to be called exactly once on all allocations local to this machine.
417    /// It will not be called automatically for global allocations; `adjust_global_allocation`
418    /// has to do that itself if that is desired.
419    fn init_local_allocation(
420        ecx: &InterpCx<'tcx, Self>,
421        id: AllocId,
422        kind: MemoryKind<Self::MemoryKind>,
423        size: Size,
424        align: Align,
425    ) -> InterpResult<'tcx, Self::AllocExtra>;
426
427    /// Hook for performing extra checks on a memory read access.
428    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
429    /// `range`.
430    ///
431    /// This will *not* be called during validation!
432    ///
433    /// Takes read-only access to the allocation so we can keep all the memory read
434    /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
435    /// need to mutate.
436    ///
437    /// This is not invoked for ZST accesses, as no read actually happens.
438    #[inline(always)]
439    fn before_memory_read(
440        _tcx: TyCtxtAt<'tcx>,
441        _machine: &Self,
442        _alloc_extra: &Self::AllocExtra,
443        _ptr: Pointer<Option<Self::Provenance>>,
444        _prov: (AllocId, Self::ProvenanceExtra),
445        _range: AllocRange,
446    ) -> InterpResult<'tcx> {
447        interp_ok(())
448    }
449
450    /// Hook for performing extra checks on any memory read access,
451    /// that involves an allocation, even ZST reads.
452    ///
453    /// This will *not* be called during validation!
454    ///
455    /// Used to prevent statics from self-initializing by reading from their own memory
456    /// as it is being initialized.
457    fn before_alloc_access(
458        _tcx: TyCtxtAt<'tcx>,
459        _machine: &Self,
460        _alloc_id: AllocId,
461    ) -> InterpResult<'tcx> {
462        interp_ok(())
463    }
464
465    /// Hook for performing extra checks on a memory write access.
466    /// This is not invoked for ZST accesses, as no write actually happens.
467    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
468    /// `range`.
469    #[inline(always)]
470    fn before_memory_write(
471        _tcx: TyCtxtAt<'tcx>,
472        _machine: &mut Self,
473        _alloc_extra: &mut Self::AllocExtra,
474        _ptr: Pointer<Option<Self::Provenance>>,
475        _prov: (AllocId, Self::ProvenanceExtra),
476        _range: AllocRange,
477    ) -> InterpResult<'tcx> {
478        interp_ok(())
479    }
480
481    /// Hook for performing extra operations on a memory deallocation.
482    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
483    /// the allocation.
484    #[inline(always)]
485    fn before_memory_deallocation(
486        _tcx: TyCtxtAt<'tcx>,
487        _machine: &mut Self,
488        _alloc_extra: &mut Self::AllocExtra,
489        _ptr: Pointer<Option<Self::Provenance>>,
490        _prov: (AllocId, Self::ProvenanceExtra),
491        _size: Size,
492        _align: Align,
493        _kind: MemoryKind<Self::MemoryKind>,
494    ) -> InterpResult<'tcx> {
495        interp_ok(())
496    }
497
498    /// Executes a retagging operation for a single pointer.
499    /// Returns the possibly adjusted pointer. Return `None` if the pointer
500    /// was left unchanged.
501    ///
502    /// `ty` is the full type of the pointer. This is not the same as `val.layout.ty` for boxes
503    /// where `val` is just the inner raw pointer, but `ty` is the entire `Box` type.
504    #[inline]
505    fn retag_ptr_value(
506        _ecx: &mut InterpCx<'tcx, Self>,
507        _val: &ImmTy<'tcx, Self::Provenance>,
508        _ty: Ty<'tcx>,
509    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, Self::Provenance>>> {
510        interp_ok(None)
511    }
512
513    /// Invoke `f` in a state where calls to `retag_ptr_value` will use the given retag mode.
514    #[inline(always)]
515    fn with_retag_mode<T>(
516        ecx: &mut InterpCx<'tcx, Self>,
517        _mode: RetagMode,
518        f: impl FnOnce(&mut InterpCx<'tcx, Self>) -> InterpResult<'tcx, T>,
519    ) -> InterpResult<'tcx, T> {
520        f(ecx)
521    }
522
523    /// Called on places used for in-place function argument and return value handling.
524    ///
525    /// These places need to be protected to make sure the program cannot tell whether the
526    /// argument/return value was actually copied or passed in-place..
527    fn protect_in_place_function_argument(
528        ecx: &mut InterpCx<'tcx, Self>,
529        mplace: &MPlaceTy<'tcx, Self::Provenance>,
530    ) -> InterpResult<'tcx> {
531        // Without an aliasing model, all we can do is put `Uninit` into the place.
532        // Conveniently this also ensures that the place actually points to suitable memory.
533        ecx.write_uninit(mplace)
534    }
535
536    /// Called immediately before a new stack frame gets pushed.
537    fn init_frame(
538        ecx: &mut InterpCx<'tcx, Self>,
539        frame: Frame<'tcx, Self::Provenance>,
540    ) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
541
542    /// Borrow the current thread's stack.
543    fn stack<'a>(
544        ecx: &'a InterpCx<'tcx, Self>,
545    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>];
546
547    /// Mutably borrow the current thread's stack.
548    fn stack_mut<'a>(
549        ecx: &'a mut InterpCx<'tcx, Self>,
550    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
551
552    /// Called immediately after a stack frame got pushed and its locals got initialized.
553    fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
554        interp_ok(())
555    }
556
557    /// Called just before the frame is removed from the stack (followed by return value copy and
558    /// local cleanup).
559    fn before_stack_pop(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
560        interp_ok(())
561    }
562
563    /// Called immediately after a stack frame got popped, but before jumping back to the caller.
564    /// The `locals` have already been destroyed!
565    #[inline(always)]
566    fn after_stack_pop(
567        _ecx: &mut InterpCx<'tcx, Self>,
568        _frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
569        unwinding: bool,
570    ) -> InterpResult<'tcx, ReturnAction> {
571        // By default, we do not support unwinding from panics
572        if !!unwinding { ::core::panicking::panic("assertion failed: !unwinding") };assert!(!unwinding);
573        interp_ok(ReturnAction::Normal)
574    }
575
576    /// Called immediately after an "immediate" local variable is read in a given frame
577    /// (i.e., this is called for reads that do not end up accessing addressable memory).
578    #[inline(always)]
579    fn after_local_read(
580        _ecx: &InterpCx<'tcx, Self>,
581        _frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
582        _local: mir::Local,
583    ) -> InterpResult<'tcx> {
584        interp_ok(())
585    }
586
587    /// Called immediately after an "immediate" local variable is assigned a new value
588    /// (i.e., this is called for writes that do not end up in memory).
589    /// `storage_live` indicates whether this is the initial write upon `StorageLive`.
590    #[inline(always)]
591    fn after_local_write(
592        _ecx: &mut InterpCx<'tcx, Self>,
593        _local: mir::Local,
594        _storage_live: bool,
595    ) -> InterpResult<'tcx> {
596        interp_ok(())
597    }
598
599    /// Called immediately after actual memory was allocated for a local
600    /// but before the local's stack frame is updated to point to that memory.
601    #[inline(always)]
602    fn after_local_moved_to_memory(
603        _ecx: &mut InterpCx<'tcx, Self>,
604        _local: mir::Local,
605        _mplace: &MPlaceTy<'tcx, Self::Provenance>,
606    ) -> InterpResult<'tcx> {
607        interp_ok(())
608    }
609
610    /// Returns the salt to be used for a deduplicated global alloation.
611    /// If the allocation is for a function, the instance is provided as well
612    /// (this lets Miri ensure unique addresses for some functions).
613    fn get_global_alloc_salt(
614        ecx: &InterpCx<'tcx, Self>,
615        instance: Option<ty::Instance<'tcx>>,
616    ) -> usize;
617
618    fn cached_union_data_range<'e>(
619        _ecx: &'e mut InterpCx<'tcx, Self>,
620        _ty: Ty<'tcx>,
621        compute_range: impl FnOnce() -> RangeSet,
622    ) -> Cow<'e, RangeSet> {
623        // Default to no caching.
624        Cow::Owned(compute_range())
625    }
626
627    /// Compute the value passed to the constructors of the `AllocBytes` type for
628    /// abstract machine allocations.
629    fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams;
630
631    /// Allows enabling/disabling tracing calls from within `rustc_const_eval` at compile time, by
632    /// delegating the entering of [tracing::Span]s to implementors of the [Machine] trait. The
633    /// default implementation corresponds to tracing being disabled, meaning the tracing calls will
634    /// supposedly be optimized out completely. To enable tracing, override this trait method and
635    /// return `span.entered()`. Also see [crate::enter_trace_span].
636    #[must_use]
637    #[inline(always)]
638    fn enter_trace_span(_span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
639        ()
640    }
641}
642
643/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
644/// (CTFE and ConstProp) use the same instance. Here, we share that code.
645pub macro compile_time_machine(<$tcx: lifetime>) {
646    type Provenance = CtfeProvenance;
647    type ProvenanceExtra = bool; // the "immutable" flag
648
649    type ExtraFnVal = !;
650
651    type MemoryKind = $crate::const_eval::MemoryKind;
652    type MemoryMap =
653        rustc_data_structures::fx::FxIndexMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
654    const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory
655
656    type AllocExtra = ();
657    type FrameExtra = ();
658    type Bytes = Box<[u8]>;
659
660    #[inline(always)]
661    fn ignore_optional_overflow_checks(_ecx: &InterpCx<$tcx, Self>) -> bool {
662        false
663    }
664
665    #[inline(always)]
666    fn unwind_terminate(
667        _ecx: &mut InterpCx<$tcx, Self>,
668        _reason: mir::UnwindTerminateReason,
669    ) -> InterpResult<$tcx> {
670        unreachable!("unwinding cannot happen during compile-time evaluation")
671    }
672
673    #[inline(always)]
674    fn check_fn_target_features(
675        _ecx: &InterpCx<$tcx, Self>,
676        _instance: ty::Instance<$tcx>,
677    ) -> InterpResult<$tcx> {
678        // For now we don't do any checking here. We can't use `tcx.sess` because that can differ
679        // between crates, and we need to ensure that const-eval always behaves the same.
680        interp_ok(())
681    }
682
683    #[inline(always)]
684    fn call_extra_fn(
685        _ecx: &mut InterpCx<$tcx, Self>,
686        fn_val: !,
687        _abi: &FnAbi<$tcx, Ty<$tcx>>,
688        _args: &[FnArg<$tcx>],
689        _destination: &PlaceTy<$tcx, Self::Provenance>,
690        _target: Option<mir::BasicBlock>,
691        _unwind: mir::UnwindAction,
692    ) -> InterpResult<$tcx> {
693        match fn_val {}
694    }
695
696    #[inline(always)]
697    fn float_fuse_mul_add(_ecx: &InterpCx<$tcx, Self>) -> bool {
698        true
699    }
700
701    #[inline(always)]
702    fn adjust_global_allocation<'b>(
703        _ecx: &InterpCx<$tcx, Self>,
704        _id: AllocId,
705        alloc: &'b Allocation,
706    ) -> InterpResult<$tcx, Cow<'b, Allocation<Self::Provenance>>> {
707        // Overwrite default implementation: no need to adjust anything.
708        interp_ok(Cow::Borrowed(alloc))
709    }
710
711    fn init_local_allocation(
712        _ecx: &InterpCx<$tcx, Self>,
713        _id: AllocId,
714        _kind: MemoryKind<Self::MemoryKind>,
715        _size: Size,
716        _align: Align,
717    ) -> InterpResult<$tcx, Self::AllocExtra> {
718        interp_ok(())
719    }
720
721    fn extern_static_pointer(
722        ecx: &InterpCx<$tcx, Self>,
723        def_id: DefId,
724    ) -> InterpResult<$tcx, Pointer> {
725        // Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
726        interp_ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id).into(), Size::ZERO))
727    }
728
729    #[inline(always)]
730    fn adjust_alloc_root_pointer(
731        _ecx: &InterpCx<$tcx, Self>,
732        ptr: Pointer<CtfeProvenance>,
733        _kind: Option<MemoryKind<Self::MemoryKind>>,
734    ) -> InterpResult<$tcx, Pointer<CtfeProvenance>> {
735        interp_ok(ptr)
736    }
737
738    #[inline(always)]
739    fn ptr_from_addr_cast(
740        _ecx: &InterpCx<$tcx, Self>,
741        addr: u64,
742    ) -> InterpResult<$tcx, Pointer<Option<CtfeProvenance>>> {
743        // Allow these casts, but make the pointer not dereferenceable.
744        // (I.e., they behave like transmutation.)
745        // This is correct because no pointers can ever be exposed in compile-time evaluation.
746        interp_ok(Pointer::without_provenance(addr))
747    }
748
749    #[inline(always)]
750    fn ptr_get_alloc(
751        _ecx: &InterpCx<$tcx, Self>,
752        ptr: Pointer<CtfeProvenance>,
753        _size: i64,
754    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
755        let (prov, offset) = ptr.prov_and_relative_offset();
756        Some((prov.alloc_id(), offset, prov.immutable()))
757    }
758
759    #[inline(always)]
760    fn get_global_alloc_salt(
761        _ecx: &InterpCx<$tcx, Self>,
762        _instance: Option<ty::Instance<$tcx>>,
763    ) -> usize {
764        CTFE_ALLOC_SALT
765    }
766}