//! This module contains everything needed to instantiate an interpreter.
//! This separation exists to ensure that no fancy miri features like
//! interpreting common C functions leak into CTFE.
use std::borrow::{Borrow, Cow};
use std::fmt::Debug;
use std::hash::Hash;

use rustc_abi::{Align, Size};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{mir, ty};
use rustc_span::def_id::DefId;
use rustc_target::callconv::FnAbi;

use super::{
    AllocBytes, AllocId, AllocKind, AllocRange, Allocation, CTFE_ALLOC_SALT, ConstAllocation,
    CtfeProvenance, EnteredTraceSpan, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy,
    MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, RangeSet, interp_ok, throw_unsup,
};
/// Data returned by [`Machine::after_stack_pop`], and consumed by
/// [`InterpCx::return_from_current_stack_frame`] to determine what actions should be done when
/// returning from a stack frame.
// NOTE(review): the derives were captured in macro-expanded form; restored to the
// original derive list (the expansion covered exactly Eq, PartialEq, Debug, Copy, Clone).
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum ReturnAction {
    /// Indicates that no special handling should be
    /// done - we'll either return normally or unwind
    /// based on the terminator for the function
    /// we're leaving.
    Normal,

    /// Indicates that we should *not* jump to the return/unwind address, as the callback already
    /// took care of everything.
    NoJump,

    /// Returned by [`InterpCx::pop_stack_frame_raw`] when no cleanup should be done.
    NoCleanup,
}
/// The currently active retagging mode.
// NOTE(review): the derives were captured in macro-expanded form; restored to the
// original derive list (the expansion covered exactly Eq, PartialEq, Debug, Copy, Clone).
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum RetagMode {
    /// A regular retag.
    Default,
    /// Retag preparing for a two-phase borrow.
    TwoPhase,
    /// The initial retag of arguments when entering a function.
    FnEntry,
    /// Retagging for reference-to-raw-pointer cast.
    Raw,
    /// No retagging.
    None,
}
/// Whether this kind of memory is allowed to leak
pub trait MayLeak: Copy {
    /// Returns `true` if an allocation of this memory kind may still exist
    /// when the machine shuts down, without that being reported as a leak.
    fn may_leak(self) -> bool;
}
/// The functionality needed by memory to manage its allocations
pub trait AllocMap<K: Hash + Eq, V> {
    /// Tests if the map contains the given key.
    /// Deliberately takes `&mut` because that is sufficient, and some implementations
    /// can be more efficient then (using `RefCell::get_mut`).
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Callers should prefer [`AllocMap::contains_key`] when it is possible to call because it may
    /// be more efficient. This function exists for callers that only have a shared reference
    /// (which might make it slightly less efficient than `contains_key`, e.g. if
    /// the data is stored inside a `RefCell`).
    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Inserts a new entry into the map.
    fn insert(&mut self, k: K, v: V) -> Option<V>;

    /// Removes an entry from the map.
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>;

    /// Returns data based on the keys and values in the map.
    fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;

    /// Returns a reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E>;

    /// Returns a mutable reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E>;

    /// Read-only lookup.
    fn get(&self, k: K) -> Option<&V> {
        self.get_or(k, || Err(())).ok()
    }

    /// Mutable lookup.
    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        self.get_mut_or(k, || Err(())).ok()
    }
}
111112/// Methods of this trait signifies a point where CTFE evaluation would fail
113/// and some use case dependent behaviour can instead be applied.
114pub trait Machine<'tcx>: Sized {
115/// Additional memory kinds a machine wishes to distinguish from the builtin ones
116type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;
117118/// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
119type Provenance: Provenance + Eq + Hash + 'static;
120121/// When getting the AllocId of a pointer, some extra data is also obtained from the provenance
122 /// that is passed to memory access hooks so they can do things with it.
123type ProvenanceExtra: Copy + 'static;
124125/// Machines can define extra (non-instance) things that represent values of function pointers.
126 /// For example, Miri uses this to return a function pointer from `dlsym`
127 /// that can later be called to execute the right thing.
128type ExtraFnVal: Debug + Copy;
129130/// Extra data stored in every call frame.
131type FrameExtra;
132133/// Extra data stored in every allocation.
134type AllocExtra: Debug + Clone + 'tcx;
135136/// Type for the bytes of the allocation.
137type Bytes: AllocBytes + 'static;
138139/// Memory's allocation map
140type MemoryMap: AllocMap<
141AllocId,
142 (
143MemoryKind<Self::MemoryKind>,
144Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>,
145 ),
146 > + Default147 + Clone;
148149/// The memory kind to use for copied global memory (held in `tcx`) --
150 /// or None if such memory should not be mutated and thus any such attempt will cause
151 /// a `ModifiedStatic` error to be raised.
152 /// Statics are copied under two circumstances: When they are mutated, and when
153 /// `adjust_allocation` (see below) returns an owned allocation
154 /// that is added to the memory so that the work is not done twice.
155const GLOBAL_KIND: Option<Self::MemoryKind>;
156157/// Should the machine panic on allocation failures?
158const PANIC_ON_ALLOC_FAIL: bool;
159160/// Determines whether `eval_mir_constant` can never fail because all required consts have
161 /// already been checked before.
162const ALL_CONSTS_ARE_PRECHECKED: bool = true;
163164/// Whether memory accesses should be alignment-checked.
165fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool;
166167/// Gives the machine a chance to detect more misalignment than the built-in checks would catch.
168#[inline(always)]
169fn alignment_check(
170 _ecx: &InterpCx<'tcx, Self>,
171 _alloc_id: AllocId,
172 _alloc_align: Align,
173 _alloc_kind: AllocKind,
174 _offset: Size,
175 _align: Align,
176 ) -> Option<Misalignment> {
177None178 }
179180/// Whether to enforce the validity invariant for a specific layout.
181fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool;
182/// Whether to enforce the validity invariant *recursively*.
183fn enforce_validity_recursively(
184 _ecx: &InterpCx<'tcx, Self>,
185 _layout: TyAndLayout<'tcx>,
186 ) -> bool {
187false
188}
189190/// Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually
191 /// check for overflow.
192fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool;
193194/// Entry point for obtaining the MIR of anything that should get evaluated.
195 /// So not just functions and shims, but also const/static initializers, anonymous
196 /// constants, ...
197fn load_mir(
198 ecx: &InterpCx<'tcx, Self>,
199 instance: ty::InstanceKind<'tcx>,
200 ) -> &'tcx mir::Body<'tcx> {
201ecx.tcx.instance_mir(instance)
202 }
203204/// Entry point to all function calls.
205 ///
206 /// Returns either the mir to use for the call, or `None` if execution should
207 /// just proceed (which usually means this hook did all the work that the
208 /// called function should usually have done). In the latter case, it is
209 /// this hook's responsibility to advance the instruction pointer!
210 /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
211 /// nor just jump to `ret`, but instead push their own stack frame.)
212 /// Passing `dest`and `ret` in the same `Option` proved very annoying when only one of them
213 /// was used.
214fn find_mir_or_eval_fn(
215 ecx: &mut InterpCx<'tcx, Self>,
216 instance: ty::Instance<'tcx>,
217 abi: &FnAbi<'tcx, Ty<'tcx>>,
218 args: &[FnArg<'tcx, Self::Provenance>],
219 destination: &PlaceTy<'tcx, Self::Provenance>,
220 target: Option<mir::BasicBlock>,
221 unwind: mir::UnwindAction,
222 ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>>;
223224/// Execute `fn_val`. It is the hook's responsibility to advance the instruction
225 /// pointer as appropriate.
226fn call_extra_fn(
227 ecx: &mut InterpCx<'tcx, Self>,
228 fn_val: Self::ExtraFnVal,
229 abi: &FnAbi<'tcx, Ty<'tcx>>,
230 args: &[FnArg<'tcx, Self::Provenance>],
231 destination: &PlaceTy<'tcx, Self::Provenance>,
232 target: Option<mir::BasicBlock>,
233 unwind: mir::UnwindAction,
234 ) -> InterpResult<'tcx>;
235236/// Directly process an intrinsic without pushing a stack frame. It is the hook's
237 /// responsibility to advance the instruction pointer as appropriate.
238 ///
239 /// Returns `None` if the intrinsic was fully handled.
240 /// Otherwise, returns an `Instance` of the function that implements the intrinsic.
241fn call_intrinsic(
242 ecx: &mut InterpCx<'tcx, Self>,
243 instance: ty::Instance<'tcx>,
244 args: &[OpTy<'tcx, Self::Provenance>],
245 destination: &PlaceTy<'tcx, Self::Provenance>,
246 target: Option<mir::BasicBlock>,
247 unwind: mir::UnwindAction,
248 ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>>;
249250/// Check whether the given function may be executed on the current machine, in terms of the
251 /// target features is requires.
252fn check_fn_target_features(
253 _ecx: &InterpCx<'tcx, Self>,
254 _instance: ty::Instance<'tcx>,
255 ) -> InterpResult<'tcx>;
256257/// Called to evaluate `Assert` MIR terminators that trigger a panic.
258fn assert_panic(
259 ecx: &mut InterpCx<'tcx, Self>,
260 msg: &mir::AssertMessage<'tcx>,
261 unwind: mir::UnwindAction,
262 ) -> InterpResult<'tcx>;
263264/// Called to trigger a non-unwinding panic.
265fn panic_nounwind(_ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx>;
266267/// Called when unwinding reached a state where execution should be terminated.
268fn unwind_terminate(
269 ecx: &mut InterpCx<'tcx, Self>,
270 reason: mir::UnwindTerminateReason,
271 ) -> InterpResult<'tcx>;
272273/// Called for all binary operations where the LHS has pointer type.
274 ///
275 /// Returns a (value, overflowed) pair if the operation succeeded
276fn binary_ptr_op(
277 ecx: &InterpCx<'tcx, Self>,
278 bin_op: mir::BinOp,
279 left: &ImmTy<'tcx, Self::Provenance>,
280 right: &ImmTy<'tcx, Self::Provenance>,
281 ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>;
282283/// Generate the NaN returned by a float operation, given the list of inputs.
284 /// (This is all inputs, not just NaN inputs!)
285fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
286 _ecx: &InterpCx<'tcx, Self>,
287 _inputs: &[F1],
288 ) -> F2 {
289// By default we always return the preferred NaN.
290F2::NAN291 }
292293/// Apply non-determinism to float operations that do not return a precise result.
294fn apply_float_nondet(
295 _ecx: &mut InterpCx<'tcx, Self>,
296 val: ImmTy<'tcx, Self::Provenance>,
297 ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
298interp_ok(val)
299 }
300301/// Determines the result of `min`/`max` on floats when the arguments are equal.
302fn equal_float_min_max<F: Float>(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F {
303// By default, we pick the left argument.
304a305 }
306307/// Determines whether the `fmuladd` intrinsics fuse the multiply-add or use separate operations.
308fn float_fuse_mul_add(_ecx: &InterpCx<'tcx, Self>) -> bool;
309310/// Called before a basic block terminator is executed.
311#[inline]
312fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
313interp_ok(())
314 }
315316/// Determines the result of a `Operand::RuntimeChecks` invocation.
317fn runtime_checks(
318 _ecx: &InterpCx<'tcx, Self>,
319 r: mir::RuntimeChecks,
320 ) -> InterpResult<'tcx, bool>;
321322/// Called when the interpreter encounters a `StatementKind::ConstEvalCounter` instruction.
323 /// You can use this to detect long or endlessly running programs.
324#[inline]
325fn increment_const_eval_counter(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
326interp_ok(())
327 }
328329/// Called before a global allocation is accessed.
330 /// `def_id` is `Some` if this is the "lazy" allocation of a static.
331#[inline]
332fn before_access_global(
333 _tcx: TyCtxtAt<'tcx>,
334 _machine: &Self,
335 _alloc_id: AllocId,
336 _allocation: ConstAllocation<'tcx>,
337 _static_def_id: Option<DefId>,
338 _is_write: bool,
339 ) -> InterpResult<'tcx> {
340interp_ok(())
341 }
342343/// Return the `AllocId` for the given thread-local static in the current thread.
344fn thread_local_static_pointer(
345 _ecx: &mut InterpCx<'tcx, Self>,
346 def_id: DefId,
347 ) -> InterpResult<'tcx, Pointer<Self::Provenance>> {
348do yeet ::rustc_middle::mir::interpret::InterpErrorKind::Unsupported(::rustc_middle::mir::interpret::UnsupportedOpInfo::ThreadLocalStatic(def_id))throw_unsup!(ThreadLocalStatic(def_id))349 }
350351/// Return the `AllocId` for the given `extern static`.
352fn extern_static_pointer(
353 ecx: &InterpCx<'tcx, Self>,
354 def_id: DefId,
355 ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
356357/// "Int-to-pointer cast"
358fn ptr_from_addr_cast(
359 ecx: &InterpCx<'tcx, Self>,
360 addr: u64,
361 ) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;
362363/// Marks a pointer as exposed, allowing its provenance
364 /// to be recovered. "Pointer-to-int cast"
365fn expose_provenance(
366 ecx: &InterpCx<'tcx, Self>,
367 provenance: Self::Provenance,
368 ) -> InterpResult<'tcx>;
369370/// Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
371 /// `size` says how many bytes of memory are expected at that pointer. The *sign* of `size` can
372 /// be used to disambiguate situations where a wildcard pointer sits right in between two
373 /// allocations.
374 ///
375 /// If `ptr.provenance.get_alloc_id()` is `Some(p)`, the returned `AllocId` must be `p`.
376 /// The resulting `AllocId` will just be used for that one step and the forgotten again
377 /// (i.e., we'll never turn the data returned here back into a `Pointer` that might be
378 /// stored in machine state).
379 ///
380 /// When this fails, that means the pointer does not point to a live allocation.
381fn ptr_get_alloc(
382 ecx: &InterpCx<'tcx, Self>,
383 ptr: Pointer<Self::Provenance>,
384 size: i64,
385 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;
386387/// Return a "root" pointer for the given allocation: the one that is used for direct
388 /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
389 ///
390 /// Not called on `extern` or thread-local statics (those use the methods above).
391 ///
392 /// `kind` is the kind of the allocation the pointer points to; it can be `None` when
393 /// it's a global and `GLOBAL_KIND` is `None`.
394fn adjust_alloc_root_pointer(
395 ecx: &InterpCx<'tcx, Self>,
396 ptr: Pointer,
397 kind: Option<MemoryKind<Self::MemoryKind>>,
398 ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
399400/// Called to adjust global allocations to the Provenance and AllocExtra of this machine.
401 ///
402 /// If `alloc` contains pointers, then they are all pointing to globals.
403 ///
404 /// This should avoid copying if no work has to be done! If this returns an owned
405 /// allocation (because a copy had to be done to adjust things), machine memory will
406 /// cache the result. (This relies on `AllocMap::get_or` being able to add the
407 /// owned allocation to the map even when the map is shared.)
408fn adjust_global_allocation<'b>(
409 ecx: &InterpCx<'tcx, Self>,
410 id: AllocId,
411 alloc: &'b Allocation,
412 ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;
413414/// Initialize the extra state of an allocation local to this machine.
415 ///
416 /// This is guaranteed to be called exactly once on all allocations local to this machine.
417 /// It will not be called automatically for global allocations; `adjust_global_allocation`
418 /// has to do that itself if that is desired.
419fn init_local_allocation(
420 ecx: &InterpCx<'tcx, Self>,
421 id: AllocId,
422 kind: MemoryKind<Self::MemoryKind>,
423 size: Size,
424 align: Align,
425 ) -> InterpResult<'tcx, Self::AllocExtra>;
426427/// Hook for performing extra checks on a memory read access.
428 /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
429 /// `range`.
430 ///
431 /// This will *not* be called during validation!
432 ///
433 /// Takes read-only access to the allocation so we can keep all the memory read
434 /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
435 /// need to mutate.
436 ///
437 /// This is not invoked for ZST accesses, as no read actually happens.
438#[inline(always)]
439fn before_memory_read(
440 _tcx: TyCtxtAt<'tcx>,
441 _machine: &Self,
442 _alloc_extra: &Self::AllocExtra,
443 _ptr: Pointer<Option<Self::Provenance>>,
444 _prov: (AllocId, Self::ProvenanceExtra),
445 _range: AllocRange,
446 ) -> InterpResult<'tcx> {
447interp_ok(())
448 }
449450/// Hook for performing extra checks on any memory read access,
451 /// that involves an allocation, even ZST reads.
452 ///
453 /// This will *not* be called during validation!
454 ///
455 /// Used to prevent statics from self-initializing by reading from their own memory
456 /// as it is being initialized.
457fn before_alloc_access(
458 _tcx: TyCtxtAt<'tcx>,
459 _machine: &Self,
460 _alloc_id: AllocId,
461 ) -> InterpResult<'tcx> {
462interp_ok(())
463 }
464465/// Hook for performing extra checks on a memory write access.
466 /// This is not invoked for ZST accesses, as no write actually happens.
467 /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
468 /// `range`.
469#[inline(always)]
470fn before_memory_write(
471 _tcx: TyCtxtAt<'tcx>,
472 _machine: &mut Self,
473 _alloc_extra: &mut Self::AllocExtra,
474 _ptr: Pointer<Option<Self::Provenance>>,
475 _prov: (AllocId, Self::ProvenanceExtra),
476 _range: AllocRange,
477 ) -> InterpResult<'tcx> {
478interp_ok(())
479 }
480481/// Hook for performing extra operations on a memory deallocation.
482 /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
483 /// the allocation.
484#[inline(always)]
485fn before_memory_deallocation(
486 _tcx: TyCtxtAt<'tcx>,
487 _machine: &mut Self,
488 _alloc_extra: &mut Self::AllocExtra,
489 _ptr: Pointer<Option<Self::Provenance>>,
490 _prov: (AllocId, Self::ProvenanceExtra),
491 _size: Size,
492 _align: Align,
493 _kind: MemoryKind<Self::MemoryKind>,
494 ) -> InterpResult<'tcx> {
495interp_ok(())
496 }
497498/// Executes a retagging operation for a single pointer.
499 /// Returns the possibly adjusted pointer. Return `None` if the pointer
500 /// was left unchanged.
501 ///
502 /// `ty` is the full type of the pointer. This is not the same as `val.layout.ty` for boxes
503 /// where `val` is just the inner raw pointer, but `ty` is the entire `Box` type.
504#[inline]
505fn retag_ptr_value(
506 _ecx: &mut InterpCx<'tcx, Self>,
507 _val: &ImmTy<'tcx, Self::Provenance>,
508 _ty: Ty<'tcx>,
509 ) -> InterpResult<'tcx, Option<ImmTy<'tcx, Self::Provenance>>> {
510interp_ok(None)
511 }
512513/// Invoke `f` in a state where calls to `retag_ptr_value` will use the given retag mode.
514#[inline(always)]
515fn with_retag_mode<T>(
516 ecx: &mut InterpCx<'tcx, Self>,
517 _mode: RetagMode,
518 f: impl FnOnce(&mut InterpCx<'tcx, Self>) -> InterpResult<'tcx, T>,
519 ) -> InterpResult<'tcx, T> {
520f(ecx)
521 }
522523/// Called on places used for in-place function argument and return value handling.
524 ///
525 /// These places need to be protected to make sure the program cannot tell whether the
526 /// argument/return value was actually copied or passed in-place..
527fn protect_in_place_function_argument(
528 ecx: &mut InterpCx<'tcx, Self>,
529 mplace: &MPlaceTy<'tcx, Self::Provenance>,
530 ) -> InterpResult<'tcx> {
531// Without an aliasing model, all we can do is put `Uninit` into the place.
532 // Conveniently this also ensures that the place actually points to suitable memory.
533ecx.write_uninit(mplace)
534 }
535536/// Called immediately before a new stack frame gets pushed.
537fn init_frame(
538 ecx: &mut InterpCx<'tcx, Self>,
539 frame: Frame<'tcx, Self::Provenance>,
540 ) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
541542/// Borrow the current thread's stack.
543fn stack<'a>(
544 ecx: &'a InterpCx<'tcx, Self>,
545 ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>];
546547/// Mutably borrow the current thread's stack.
548fn stack_mut<'a>(
549 ecx: &'a mut InterpCx<'tcx, Self>,
550 ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
551552/// Called immediately after a stack frame got pushed and its locals got initialized.
553fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
554interp_ok(())
555 }
556557/// Called just before the frame is removed from the stack (followed by return value copy and
558 /// local cleanup).
559fn before_stack_pop(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
560interp_ok(())
561 }
562563/// Called immediately after a stack frame got popped, but before jumping back to the caller.
564 /// The `locals` have already been destroyed!
565#[inline(always)]
566fn after_stack_pop(
567 _ecx: &mut InterpCx<'tcx, Self>,
568 _frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
569 unwinding: bool,
570 ) -> InterpResult<'tcx, ReturnAction> {
571// By default, we do not support unwinding from panics
572if !!unwinding { ::core::panicking::panic("assertion failed: !unwinding") };assert!(!unwinding);
573interp_ok(ReturnAction::Normal)
574 }
575576/// Called immediately after an "immediate" local variable is read in a given frame
577 /// (i.e., this is called for reads that do not end up accessing addressable memory).
578#[inline(always)]
579fn after_local_read(
580 _ecx: &InterpCx<'tcx, Self>,
581 _frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
582 _local: mir::Local,
583 ) -> InterpResult<'tcx> {
584interp_ok(())
585 }
586587/// Called immediately after an "immediate" local variable is assigned a new value
588 /// (i.e., this is called for writes that do not end up in memory).
589 /// `storage_live` indicates whether this is the initial write upon `StorageLive`.
590#[inline(always)]
591fn after_local_write(
592 _ecx: &mut InterpCx<'tcx, Self>,
593 _local: mir::Local,
594 _storage_live: bool,
595 ) -> InterpResult<'tcx> {
596interp_ok(())
597 }
598599/// Called immediately after actual memory was allocated for a local
600 /// but before the local's stack frame is updated to point to that memory.
601#[inline(always)]
602fn after_local_moved_to_memory(
603 _ecx: &mut InterpCx<'tcx, Self>,
604 _local: mir::Local,
605 _mplace: &MPlaceTy<'tcx, Self::Provenance>,
606 ) -> InterpResult<'tcx> {
607interp_ok(())
608 }
609610/// Returns the salt to be used for a deduplicated global alloation.
611 /// If the allocation is for a function, the instance is provided as well
612 /// (this lets Miri ensure unique addresses for some functions).
613fn get_global_alloc_salt(
614 ecx: &InterpCx<'tcx, Self>,
615 instance: Option<ty::Instance<'tcx>>,
616 ) -> usize;
617618fn cached_union_data_range<'e>(
619 _ecx: &'e mut InterpCx<'tcx, Self>,
620 _ty: Ty<'tcx>,
621 compute_range: impl FnOnce() -> RangeSet,
622 ) -> Cow<'e, RangeSet> {
623// Default to no caching.
624Cow::Owned(compute_range())
625 }
626627/// Compute the value passed to the constructors of the `AllocBytes` type for
628 /// abstract machine allocations.
629fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams;
630631/// Allows enabling/disabling tracing calls from within `rustc_const_eval` at compile time, by
632 /// delegating the entering of [tracing::Span]s to implementors of the [Machine] trait. The
633 /// default implementation corresponds to tracing being disabled, meaning the tracing calls will
634 /// supposedly be optimized out completely. To enable tracing, override this trait method and
635 /// return `span.entered()`. Also see [crate::enter_trace_span].
636#[must_use]
637 #[inline(always)]
638fn enter_trace_span(_span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
639 ()
640 }
641}
642643/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
644/// (CTFE and ConstProp) use the same instance. Here, we share that code.
645pub macro compile_time_machine(<$tcx: lifetime>) {
646type Provenance = CtfeProvenance;
647type ProvenanceExtra = bool; // the "immutable" flag
648649type ExtraFnVal = !;
650651type MemoryKind = $crate::const_eval::MemoryKind;
652type MemoryMap =
653 rustc_data_structures::fx::FxIndexMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
654const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory
655656type AllocExtra = ();
657type FrameExtra = ();
658type Bytes = Box<[u8]>;
659660#[inline(always)]
661fn ignore_optional_overflow_checks(_ecx: &InterpCx<$tcx, Self>) -> bool {
662false
663}
664665#[inline(always)]
666fn unwind_terminate(
667 _ecx: &mut InterpCx<$tcx, Self>,
668 _reason: mir::UnwindTerminateReason,
669 ) -> InterpResult<$tcx> {
670unreachable!("unwinding cannot happen during compile-time evaluation")
671 }
672673#[inline(always)]
674fn check_fn_target_features(
675 _ecx: &InterpCx<$tcx, Self>,
676 _instance: ty::Instance<$tcx>,
677 ) -> InterpResult<$tcx> {
678// For now we don't do any checking here. We can't use `tcx.sess` because that can differ
679 // between crates, and we need to ensure that const-eval always behaves the same.
680interp_ok(())
681 }
682683#[inline(always)]
684fn call_extra_fn(
685 _ecx: &mut InterpCx<$tcx, Self>,
686 fn_val: !,
687 _abi: &FnAbi<$tcx, Ty<$tcx>>,
688 _args: &[FnArg<$tcx>],
689 _destination: &PlaceTy<$tcx, Self::Provenance>,
690 _target: Option<mir::BasicBlock>,
691 _unwind: mir::UnwindAction,
692 ) -> InterpResult<$tcx> {
693match fn_val {}
694 }
695696#[inline(always)]
697fn float_fuse_mul_add(_ecx: &InterpCx<$tcx, Self>) -> bool {
698true
699}
700701#[inline(always)]
702fn adjust_global_allocation<'b>(
703 _ecx: &InterpCx<$tcx, Self>,
704 _id: AllocId,
705 alloc: &'b Allocation,
706 ) -> InterpResult<$tcx, Cow<'b, Allocation<Self::Provenance>>> {
707// Overwrite default implementation: no need to adjust anything.
708interp_ok(Cow::Borrowed(alloc))
709 }
710711fn init_local_allocation(
712 _ecx: &InterpCx<$tcx, Self>,
713 _id: AllocId,
714 _kind: MemoryKind<Self::MemoryKind>,
715 _size: Size,
716 _align: Align,
717 ) -> InterpResult<$tcx, Self::AllocExtra> {
718 interp_ok(())
719 }
720721fn extern_static_pointer(
722 ecx: &InterpCx<$tcx, Self>,
723 def_id: DefId,
724 ) -> InterpResult<$tcx, Pointer> {
725// Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
726interp_ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id).into(), Size::ZERO))
727 }
728729#[inline(always)]
730fn adjust_alloc_root_pointer(
731 _ecx: &InterpCx<$tcx, Self>,
732 ptr: Pointer<CtfeProvenance>,
733 _kind: Option<MemoryKind<Self::MemoryKind>>,
734 ) -> InterpResult<$tcx, Pointer<CtfeProvenance>> {
735 interp_ok(ptr)
736 }
737738#[inline(always)]
739fn ptr_from_addr_cast(
740 _ecx: &InterpCx<$tcx, Self>,
741 addr: u64,
742 ) -> InterpResult<$tcx, Pointer<Option<CtfeProvenance>>> {
743// Allow these casts, but make the pointer not dereferenceable.
744 // (I.e., they behave like transmutation.)
745 // This is correct because no pointers can ever be exposed in compile-time evaluation.
746interp_ok(Pointer::without_provenance(addr))
747 }
748749#[inline(always)]
750fn ptr_get_alloc(
751 _ecx: &InterpCx<$tcx, Self>,
752 ptr: Pointer<CtfeProvenance>,
753 _size: i64,
754 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
755let (prov, offset) = ptr.prov_and_relative_offset();
756Some((prov.alloc_id(), offset, prov.immutable()))
757 }
758759#[inline(always)]
760fn get_global_alloc_salt(
761 _ecx: &InterpCx<$tcx, Self>,
762 _instance: Option<ty::Instance<$tcx>>,
763 ) -> usize {
764 CTFE_ALLOC_SALT
765 }
766}