// compiler/rustc_const_eval/src/interpret/validity.rs
1//! Check the validity invariant of a given value, and tell the user
2//! where in the value it got violated.
3//! In const context, this goes even further and tries to approximate const safety.
4//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
5//! to be const-safe.
6
7use std::borrow::Cow;
8use std::fmt::{self, Write};
9use std::hash::Hash;
10use std::mem;
11use std::num::NonZero;
12
13use either::{Left, Right};
14use hir::def::DefKind;
15use rustc_abi::{
16    BackendRepr, FieldIdx, FieldsShape, Scalar as ScalarAbi, Size, VariantIdx, Variants,
17    WrappingRange,
18};
19use rustc_ast::Mutability;
20use rustc_data_structures::fx::FxHashSet;
21use rustc_hir as hir;
22use rustc_middle::bug;
23use rustc_middle::mir::interpret::{
24    InterpErrorKind, InvalidMetaKind, Misalignment, Provenance, alloc_range, interp_ok,
25};
26use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
27use rustc_middle::ty::{self, Ty};
28use rustc_span::{Symbol, sym};
29use tracing::trace;
30
31use super::machine::AllocMap;
32use super::{
33    AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
34    Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
35    format_interp_error,
36};
37use crate::enter_trace_span;
38
39// for the validation errors
40#[rustfmt::skip]
41use super::InterpErrorKind::UndefinedBehavior as Ub;
42use super::InterpErrorKind::Unsupported as Unsup;
43use super::UndefinedBehaviorInfo::*;
44use super::UnsupportedOpInfo::*;
45
/// Construct a `ValidationError` (as an `InterpErrorKind`) for the value at path `$where`,
/// with failure message `$msg` (anything convertible into a `ValidationErrorKind`).
macro_rules! err_validation_failure {
    ($where:expr,  $msg:expr ) => {{
        let where_ = &$where;
        // Only render the projection path if it is non-empty; the root is implied by `orig_ty`.
        let path = if !where_.projs.is_empty() {
            let mut path = String::new();
            write_path(&mut path, &where_.projs);
            Some(path)
        } else {
            None
        };

        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg = ValidationErrorKind::from($msg);
        err_ub!(ValidationError {
            orig_ty: where_.orig_ty,
            path,
            ptr_bytes_warning: msg.ptr_bytes_warning(),
            msg: msg.to_string(),
        })
    }};
}
68
/// Like `err_validation_failure!`, but immediately throws the resulting error
/// (via the `do yeet` try-block mechanism).
macro_rules! throw_validation_failure {
    ($where:expr, $msg:expr ) => {
        do yeet err_validation_failure!($where, $msg)
    };
}
74
/// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(x), some_path, {
///     Foo | Bar | Baz => format!("some failure involving {x}"),
/// });
/// ```
///
/// The patterns must be of type `UndefinedBehaviorInfo`.
macro_rules! try_validation {
    ($e:expr, $where:expr,
    $( $( $p:pat_param )|+ => $msg:expr ),+ $(,)?
    ) => {{
        $e.map_err_kind(|e| {
            // We catch the error and turn it into a validation failure. We are okay with
            // allocation here as this can only slow down builds that fail anyway.
            match e {
                $(
                    $($p)|+ => {
                        err_validation_failure!(
                            $where,
                            $msg
                        )
                    }
                ),+,
                e => e,
            }
        })?
    }};
}
109
110#[derive(#[automatically_derived]
impl ::core::fmt::Debug for PtrKind {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            PtrKind::Ref(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Ref",
                    &__self_0),
            PtrKind::Box => ::core::fmt::Formatter::write_str(f, "Box"),
        }
    }
}Debug, #[automatically_derived]
impl ::core::clone::Clone for PtrKind {
    #[inline]
    fn clone(&self) -> PtrKind {
        let _: ::core::clone::AssertParamIsClone<Mutability>;
        *self
    }
}Clone, #[automatically_derived]
impl ::core::marker::Copy for PtrKind { }Copy, #[automatically_derived]
impl ::core::cmp::PartialEq for PtrKind {
    #[inline]
    fn eq(&self, other: &PtrKind) -> bool {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        let __arg1_discr = ::core::intrinsics::discriminant_value(other);
        __self_discr == __arg1_discr &&
            match (self, other) {
                (PtrKind::Ref(__self_0), PtrKind::Ref(__arg1_0)) =>
                    __self_0 == __arg1_0,
                _ => true,
            }
    }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for PtrKind {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_fields_are_eq(&self) {
        let _: ::core::cmp::AssertParamIsEq<Mutability>;
    }
}Eq)]
111enum PtrKind {
112    Ref(Mutability),
113    Box,
114}
115
116impl fmt::Display for PtrKind {
117    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
118        let str = match self {
119            PtrKind::Ref(_) => "reference",
120            PtrKind::Box => "box",
121        };
122        f.write_fmt(format_args!("{0}", str))write!(f, "{str}")
123    }
124}
125
/// What kind of value was expected at the spot where validation failed.
#[derive(Debug)]
enum ExpectedKind {
    Reference,
    Box,
    RawPtr,
    Bool,
    Char,
    Float,
    Int,
    FnPtr,
    Str,
}
138
139impl fmt::Display for ExpectedKind {
140    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
141        let str = match self {
142            ExpectedKind::Reference => "expected a reference",
143            ExpectedKind::Box => "expected a box",
144            ExpectedKind::RawPtr => "expected a raw pointer",
145            ExpectedKind::Bool => "expected a boolean",
146            ExpectedKind::Char => "expected a unicode scalar value",
147            ExpectedKind::Float => "expected a floating point number",
148            ExpectedKind::Int => "expected an integer",
149            ExpectedKind::FnPtr => "expected a function pointer",
150            ExpectedKind::Str => "expected a string",
151        };
152        f.write_fmt(format_args!("{0}", str))write!(f, "{str}")
153    }
154}
155
156impl From<PtrKind> for ExpectedKind {
157    fn from(x: PtrKind) -> ExpectedKind {
158        match x {
159            PtrKind::Box => ExpectedKind::Box,
160            PtrKind::Ref(_) => ExpectedKind::Reference,
161        }
162    }
163}
164
165/// Validation errors that can be emitted in one than one place get a variant here so that
166/// we format them consistently. Everything else uses the `String` fallback.
167#[derive(#[automatically_derived]
impl<'tcx> ::core::fmt::Debug for ValidationErrorKind<'tcx> {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            ValidationErrorKind::Uninit { expected: __self_0 } =>
                ::core::fmt::Formatter::debug_struct_field1_finish(f,
                    "Uninit", "expected", &__self_0),
            ValidationErrorKind::PointerAsInt { expected: __self_0 } =>
                ::core::fmt::Formatter::debug_struct_field1_finish(f,
                    "PointerAsInt", "expected", &__self_0),
            ValidationErrorKind::PartialPointer =>
                ::core::fmt::Formatter::write_str(f, "PartialPointer"),
            ValidationErrorKind::InvalidMetaWrongTrait {
                vtable_dyn_type: __self_0, expected_dyn_type: __self_1 } =>
                ::core::fmt::Formatter::debug_struct_field2_finish(f,
                    "InvalidMetaWrongTrait", "vtable_dyn_type", __self_0,
                    "expected_dyn_type", &__self_1),
            ValidationErrorKind::GeneralError { msg: __self_0 } =>
                ::core::fmt::Formatter::debug_struct_field1_finish(f,
                    "GeneralError", "msg", &__self_0),
        }
    }
}Debug)]
168enum ValidationErrorKind<'tcx> {
169    Uninit {
170        expected: ExpectedKind,
171    },
172    PointerAsInt {
173        expected: ExpectedKind,
174    },
175    PartialPointer,
176    InvalidMetaWrongTrait {
177        /// The vtable that was actually referenced by the wide pointer metadata.
178        vtable_dyn_type: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
179        /// The vtable that was expected at the point in MIR that it was accessed.
180        expected_dyn_type: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
181    },
182    GeneralError {
183        msg: String,
184    },
185}
186
187impl<'tcx> ValidationErrorKind<'tcx> {
188    // We don't do this via `fmt::Display` to so that we can do a move in the `GeneralError` case.
189    fn to_string(self) -> String {
190        use ValidationErrorKind::*;
191        match self {
192            Uninit { expected } => ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("encountered uninitialized memory, but {0}",
                expected))
    })format!("encountered uninitialized memory, but {expected}"),
193            PointerAsInt { expected } => ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("encountered a pointer, but {0}",
                expected))
    })format!("encountered a pointer, but {expected}"),
194            PartialPointer => ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("encountered a partial pointer or a mix of pointers"))
    })format!("encountered a partial pointer or a mix of pointers"),
195            InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type } => ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("wrong trait in wide pointer vtable: expected `{0}`, but encountered `{1}`",
                expected_dyn_type, vtable_dyn_type))
    })format!(
196                "wrong trait in wide pointer vtable: expected `{expected_dyn_type}`, but encountered `{vtable_dyn_type}`"
197            ),
198            GeneralError { msg } => msg,
199        }
200    }
201
202    fn ptr_bytes_warning(&self) -> bool {
203        use ValidationErrorKind::*;
204        #[allow(non_exhaustive_omitted_patterns)] match self {
    PointerAsInt { .. } | PartialPointer => true,
    _ => false,
}matches!(self, PointerAsInt { .. } | PartialPointer)
205    }
206}
207
208impl<'tcx> From<String> for ValidationErrorKind<'tcx> {
209    fn from(msg: String) -> Self {
210        ValidationErrorKind::GeneralError { msg }
211    }
212}
213
214fn fmt_range(r: WrappingRange, max_hi: u128) -> String {
215    let WrappingRange { start: lo, end: hi } = r;
216    if !(hi <= max_hi) {
    ::core::panicking::panic("assertion failed: hi <= max_hi")
};assert!(hi <= max_hi);
217    if lo > hi {
218        ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("less or equal to {0}, or greater or equal to {1}",
                hi, lo))
    })format!("less or equal to {hi}, or greater or equal to {lo}")
219    } else if lo == hi {
220        ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("equal to {0}", lo))
    })format!("equal to {lo}")
221    } else if lo == 0 {
222        if !(hi < max_hi) {
    {
        ::core::panicking::panic_fmt(format_args!("should not be printing if the range covers everything"));
    }
};assert!(hi < max_hi, "should not be printing if the range covers everything");
223        ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("less or equal to {0}", hi))
    })format!("less or equal to {hi}")
224    } else if hi == max_hi {
225        if !(lo > 0) {
    {
        ::core::panicking::panic_fmt(format_args!("should not be printing if the range covers everything"));
    }
};assert!(lo > 0, "should not be printing if the range covers everything");
226        ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("greater or equal to {0}", lo))
    })format!("greater or equal to {lo}")
227    } else {
228        ::alloc::__export::must_use({
        ::alloc::fmt::format(format_args!("in the range {0}..={1}", lo, hi))
    })format!("in the range {lo}..={hi}")
229    }
230}
231
232/// We want to show a nice path to the invalid field for diagnostics,
233/// but avoid string operations in the happy case where no error happens.
234/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
235/// need to later print something for the user.
236#[derive(#[automatically_derived]
impl<'tcx> ::core::marker::Copy for PathElem<'tcx> { }Copy, #[automatically_derived]
impl<'tcx> ::core::clone::Clone for PathElem<'tcx> {
    #[inline]
    fn clone(&self) -> PathElem<'tcx> {
        let _: ::core::clone::AssertParamIsClone<Symbol>;
        let _: ::core::clone::AssertParamIsClone<VariantIdx>;
        let _: ::core::clone::AssertParamIsClone<usize>;
        let _: ::core::clone::AssertParamIsClone<Ty<'tcx>>;
        *self
    }
}Clone, #[automatically_derived]
impl<'tcx> ::core::fmt::Debug for PathElem<'tcx> {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            PathElem::Field(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Field",
                    &__self_0),
            PathElem::Variant(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "Variant", &__self_0),
            PathElem::CoroutineState(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "CoroutineState", &__self_0),
            PathElem::CapturedVar(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "CapturedVar", &__self_0),
            PathElem::ArrayElem(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "ArrayElem", &__self_0),
            PathElem::TupleElem(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "TupleElem", &__self_0),
            PathElem::Deref => ::core::fmt::Formatter::write_str(f, "Deref"),
            PathElem::EnumTag =>
                ::core::fmt::Formatter::write_str(f, "EnumTag"),
            PathElem::CoroutineTag =>
                ::core::fmt::Formatter::write_str(f, "CoroutineTag"),
            PathElem::DynDowncast(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "DynDowncast", &__self_0),
            PathElem::Vtable =>
                ::core::fmt::Formatter::write_str(f, "Vtable"),
        }
    }
}Debug)]
237pub enum PathElem<'tcx> {
238    Field(Symbol),
239    Variant(Symbol),
240    CoroutineState(VariantIdx),
241    CapturedVar(Symbol),
242    ArrayElem(usize),
243    TupleElem(usize),
244    Deref,
245    EnumTag,
246    CoroutineTag,
247    DynDowncast(Ty<'tcx>),
248    Vtable,
249}
250
251#[derive(#[automatically_derived]
impl<'tcx> ::core::clone::Clone for Path<'tcx> {
    #[inline]
    fn clone(&self) -> Path<'tcx> {
        Path {
            orig_ty: ::core::clone::Clone::clone(&self.orig_ty),
            projs: ::core::clone::Clone::clone(&self.projs),
        }
    }
}Clone, #[automatically_derived]
impl<'tcx> ::core::fmt::Debug for Path<'tcx> {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "Path",
            "orig_ty", &self.orig_ty, "projs", &&self.projs)
    }
}Debug)]
252pub struct Path<'tcx> {
253    orig_ty: Ty<'tcx>,
254    projs: Vec<PathElem<'tcx>>,
255}
256
257impl<'tcx> Path<'tcx> {
258    fn new(ty: Ty<'tcx>) -> Self {
259        Self { orig_ty: ty, projs: ::alloc::vec::Vec::new()vec![] }
260    }
261}
262
263/// Extra things to check for during validation of CTFE results.
264#[derive(#[automatically_derived]
impl ::core::marker::Copy for CtfeValidationMode { }Copy, #[automatically_derived]
impl ::core::clone::Clone for CtfeValidationMode {
    #[inline]
    fn clone(&self) -> CtfeValidationMode {
        let _: ::core::clone::AssertParamIsClone<Mutability>;
        let _: ::core::clone::AssertParamIsClone<bool>;
        *self
    }
}Clone)]
265pub enum CtfeValidationMode {
266    /// Validation of a `static`
267    Static { mutbl: Mutability },
268    /// Validation of a promoted.
269    Promoted,
270    /// Validation of a `const`.
271    /// `allow_immutable_unsafe_cell` says whether we allow `UnsafeCell` in immutable memory (which is the
272    /// case for the top-level allocation of a `const`, where this is fine because the allocation will be
273    /// copied at each use site).
274    Const { allow_immutable_unsafe_cell: bool },
275}
276
277impl CtfeValidationMode {
278    fn allow_immutable_unsafe_cell(self) -> bool {
279        match self {
280            CtfeValidationMode::Static { .. } => false,
281            CtfeValidationMode::Promoted { .. } => false,
282            CtfeValidationMode::Const { allow_immutable_unsafe_cell, .. } => {
283                allow_immutable_unsafe_cell
284            }
285        }
286    }
287}
288
289/// State for tracking recursive validation of references
290pub struct RefTracking<T, PATH = ()> {
291    seen: FxHashSet<T>,
292    todo: Vec<(T, PATH)>,
293}
294
295impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH> RefTracking<T, PATH> {
296    pub fn empty() -> Self {
297        RefTracking { seen: FxHashSet::default(), todo: ::alloc::vec::Vec::new()vec![] }
298    }
299    pub fn next(&mut self) -> Option<(T, PATH)> {
300        self.todo.pop()
301    }
302
303    fn track(&mut self, val: T, path: impl FnOnce() -> PATH) {
304        if self.seen.insert(val.clone()) {
305            {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:305",
                        "rustc_const_eval::interpret::validity",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                        ::tracing_core::__macro_support::Option::Some(305u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("Recursing below ptr {0:#?}",
                                                    val) as &dyn Value))])
            });
    } else { ; }
};trace!("Recursing below ptr {:#?}", val);
306            let path = path();
307            // Remember to come back to this later.
308            self.todo.push((val, path));
309        }
310    }
311}
312
313impl<'tcx, T: Clone + Eq + Hash + std::fmt::Debug> RefTracking<T, Path<'tcx>> {
314    pub fn new(val: T, ty: Ty<'tcx>) -> Self {
315        let mut ref_tracking_for_consts =
316            RefTracking { seen: FxHashSet::default(), todo: ::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
        [(val.clone(), Path::new(ty))]))vec![(val.clone(), Path::new(ty))] };
317        ref_tracking_for_consts.seen.insert(val);
318        ref_tracking_for_consts
319    }
320}
321
322/// Format a path
323fn write_path(out: &mut String, path: &[PathElem<'_>]) {
324    use self::PathElem::*;
325
326    for elem in path.iter() {
327        match elem {
328            Field(name) => out.write_fmt(format_args!(".{0}", name))write!(out, ".{name}"),
329            EnumTag => out.write_fmt(format_args!(".<enum-tag>"))write!(out, ".<enum-tag>"),
330            Variant(name) => out.write_fmt(format_args!(".<enum-variant({0})>", name))write!(out, ".<enum-variant({name})>"),
331            CoroutineTag => out.write_fmt(format_args!(".<coroutine-tag>"))write!(out, ".<coroutine-tag>"),
332            CoroutineState(idx) => out.write_fmt(format_args!(".<coroutine-state({0})>", idx.index()))write!(out, ".<coroutine-state({})>", idx.index()),
333            CapturedVar(name) => out.write_fmt(format_args!(".<captured-var({0})>", name))write!(out, ".<captured-var({name})>"),
334            TupleElem(idx) => out.write_fmt(format_args!(".{0}", idx))write!(out, ".{idx}"),
335            ArrayElem(idx) => out.write_fmt(format_args!("[{0}]", idx))write!(out, "[{idx}]"),
336            // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
337            // some of the other items here also are not Rust syntax. Actually we can't
338            // even use the usual syntax because we are just showing the projections,
339            // not the root.
340            Deref => out.write_fmt(format_args!(".<deref>"))write!(out, ".<deref>"),
341            DynDowncast(ty) => out.write_fmt(format_args!(".<dyn-downcast({0})>", ty))write!(out, ".<dyn-downcast({ty})>"),
342            Vtable => out.write_fmt(format_args!(".<vtable>"))write!(out, ".<vtable>"),
343        }
344        .unwrap()
345    }
346}
347
348/// Represents a set of `Size` values as a sorted list of ranges.
349// These are (offset, length) pairs, and they are sorted and mutually disjoint,
350// and never adjacent (i.e. there's always a gap between two of them).
351#[derive(#[automatically_derived]
impl ::core::fmt::Debug for RangeSet {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_tuple_field1_finish(f, "RangeSet",
            &&self.0)
    }
}Debug, #[automatically_derived]
impl ::core::clone::Clone for RangeSet {
    #[inline]
    fn clone(&self) -> RangeSet {
        RangeSet(::core::clone::Clone::clone(&self.0))
    }
}Clone)]
352pub struct RangeSet(Vec<(Size, Size)>);
353
354impl RangeSet {
355    fn add_range(&mut self, offset: Size, size: Size) {
356        if size.bytes() == 0 {
357            // No need to track empty ranges.
358            return;
359        }
360        let v = &mut self.0;
361        // We scan for a partition point where the left partition is all the elements that end
362        // strictly before we start. Those are elements that are too "low" to merge with us.
363        let idx =
364            v.partition_point(|&(other_offset, other_size)| other_offset + other_size < offset);
365        // Now we want to either merge with the first element of the second partition, or insert ourselves before that.
366        if let Some(&(other_offset, other_size)) = v.get(idx)
367            && offset + size >= other_offset
368        {
369            // Their end is >= our start (otherwise it would not be in the 2nd partition) and
370            // our end is >= their start. This means we can merge the ranges.
371            let new_start = other_offset.min(offset);
372            let mut new_end = (other_offset + other_size).max(offset + size);
373            // We grew to the right, so merge with overlapping/adjacent elements.
374            // (We also may have grown to the left, but that can never make us adjacent with
375            // anything there since we selected the first such candidate via `partition_point`.)
376            let mut scan_right = 1;
377            while let Some(&(next_offset, next_size)) = v.get(idx + scan_right)
378                && new_end >= next_offset
379            {
380                // Increase our size to absorb the next element.
381                new_end = new_end.max(next_offset + next_size);
382                // Look at the next element.
383                scan_right += 1;
384            }
385            // Update the element we grew.
386            v[idx] = (new_start, new_end - new_start);
387            // Remove the elements we absorbed (if any).
388            if scan_right > 1 {
389                drop(v.drain((idx + 1)..(idx + scan_right)));
390            }
391        } else {
392            // Insert new element.
393            v.insert(idx, (offset, size));
394        }
395    }
396}
397
398struct ValidityVisitor<'rt, 'tcx, M: Machine<'tcx>> {
399    /// The `path` may be pushed to, but the part that is present when a function
400    /// starts must not be changed!  `with_elem` relies on this stack discipline.
401    path: Path<'tcx>,
402    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>>,
403    /// `None` indicates this is not validating for CTFE (but for runtime).
404    ctfe_mode: Option<CtfeValidationMode>,
405    ecx: &'rt mut InterpCx<'tcx, M>,
406    /// Whether provenance should be reset outside of pointers (emulating the effect of a typed
407    /// copy).
408    reset_provenance_and_padding: bool,
409    /// This tracks which byte ranges in this value contain data; the remaining bytes are padding.
410    /// The ideal representation here would be pointer-length pairs, but to keep things more compact
411    /// we only store a (range) set of offsets -- the base pointer is the same throughout the entire
412    /// visit, after all.
413    /// If this is `Some`, then `reset_provenance_and_padding` must be true (but not vice versa:
414    /// we might not track data vs padding bytes if the operand isn't stored in memory anyway).
415    data_bytes: Option<RangeSet>,
416    /// True if we are inside of `MaybeDangling`. This disables pointer access checks.
417    may_dangle: bool,
418}
419
420impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
421    fn aggregate_field_path_elem(
422        &mut self,
423        layout: TyAndLayout<'tcx>,
424        field: usize,
425        field_ty: Ty<'tcx>,
426    ) -> PathElem<'tcx> {
427        // First, check if we are projecting to a variant.
428        match layout.variants {
429            Variants::Multiple { tag_field, .. } => {
430                if tag_field.as_usize() == field {
431                    return match layout.ty.kind() {
432                        ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
433                        ty::Coroutine(..) => PathElem::CoroutineTag,
434                        _ => ::rustc_middle::util::bug::bug_fmt(format_args!("non-variant type {0:?}",
        layout.ty))bug!("non-variant type {:?}", layout.ty),
435                    };
436                }
437            }
438            Variants::Single { .. } | Variants::Empty => {}
439        }
440
441        // Now we know we are projecting to a field, so figure out which one.
442        match layout.ty.kind() {
443            // coroutines, closures, and coroutine-closures all have upvars that may be named.
444            ty::Closure(def_id, _) | ty::Coroutine(def_id, _) | ty::CoroutineClosure(def_id, _) => {
445                let mut name = None;
446                // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
447                // https://github.com/rust-lang/project-rfc-2229/issues/46
448                if let Some(local_def_id) = def_id.as_local() {
449                    let captures = self.ecx.tcx.closure_captures(local_def_id);
450                    if let Some(captured_place) = captures.get(field) {
451                        // Sometimes the index is beyond the number of upvars (seen
452                        // for a coroutine).
453                        let var_hir_id = captured_place.get_root_variable();
454                        let node = self.ecx.tcx.hir_node(var_hir_id);
455                        if let hir::Node::Pat(pat) = node
456                            && let hir::PatKind::Binding(_, _, ident, _) = pat.kind
457                        {
458                            name = Some(ident.name);
459                        }
460                    }
461                }
462
463                PathElem::CapturedVar(name.unwrap_or_else(|| {
464                    // Fall back to showing the field index.
465                    sym::integer(field)
466                }))
467            }
468
469            // tuples
470            ty::Tuple(_) => PathElem::TupleElem(field),
471
472            // enums
473            ty::Adt(def, ..) if def.is_enum() => {
474                // we might be projecting *to* a variant, or to a field *in* a variant.
475                match layout.variants {
476                    Variants::Single { index } => {
477                        // Inside a variant
478                        PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
479                    }
480                    Variants::Empty => {
    ::core::panicking::panic_fmt(format_args!("there is no field in Variants::Empty types"));
}panic!("there is no field in Variants::Empty types"),
481                    Variants::Multiple { .. } => ::rustc_middle::util::bug::bug_fmt(format_args!("we handled variants above"))bug!("we handled variants above"),
482                }
483            }
484
485            // other ADTs
486            ty::Adt(def, _) => {
487                PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
488            }
489
490            // arrays/slices
491            ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),
492
493            // dyn traits
494            ty::Dynamic(..) => {
495                match (&field, &0) {
    (left_val, right_val) => {
        if !(*left_val == *right_val) {
            let kind = ::core::panicking::AssertKind::Eq;
            ::core::panicking::assert_failed(kind, &*left_val, &*right_val,
                ::core::option::Option::None);
        }
    }
};assert_eq!(field, 0);
496                PathElem::DynDowncast(field_ty)
497            }
498
499            // nothing else has an aggregate layout
500            _ => ::rustc_middle::util::bug::bug_fmt(format_args!("aggregate_field_path_elem: got non-aggregate type {0:?}",
        layout.ty))bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
501        }
502    }
503
504    fn with_elem<R>(
505        &mut self,
506        elem: PathElem<'tcx>,
507        f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
508    ) -> InterpResult<'tcx, R> {
509        // Remember the old state
510        let path_len = self.path.projs.len();
511        // Record new element
512        self.path.projs.push(elem);
513        // Perform operation
514        let r = f(self)?;
515        // Undo changes
516        self.path.projs.truncate(path_len);
517        // Done
518        interp_ok(r)
519    }
520
521    fn read_immediate(
522        &self,
523        val: &PlaceTy<'tcx, M::Provenance>,
524        expected: ExpectedKind,
525    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
526        interp_ok({
    self.ecx.read_immediate(val).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidUninitBytes(_)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg = ValidationErrorKind::from(Uninit { expected });
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        Unsup(ReadPointerAsInt(_)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(PointerAsInt { expected });
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        Unsup(ReadPartialPointer(_)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg = ValidationErrorKind::from(PartialPointer);
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
}try_validation!(
527            self.ecx.read_immediate(val),
528            self.path,
529            Ub(InvalidUninitBytes(_)) =>
530                Uninit { expected },
531            // The `Unsup` cases can only occur during CTFE
532            Unsup(ReadPointerAsInt(_)) =>
533                PointerAsInt { expected },
534            Unsup(ReadPartialPointer(_)) =>
535                PartialPointer,
536        ))
537    }
538
539    fn read_scalar(
540        &self,
541        val: &PlaceTy<'tcx, M::Provenance>,
542        expected: ExpectedKind,
543    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
544        interp_ok(self.read_immediate(val, expected)?.to_scalar())
545    }
546
547    /// Given a place and a pointer loaded from that place, ensure that the place does
548    /// not store any more provenance than the pointer does. IOW, if any provenance
549    /// was discarded when loading the pointer, it will also get discarded in-memory.
550    fn reset_pointer_provenance(
551        &mut self,
552        place: &PlaceTy<'tcx, M::Provenance>,
553        ptr: &ImmTy<'tcx, M::Provenance>,
554    ) -> InterpResult<'tcx> {
555        if #[allow(non_exhaustive_omitted_patterns)] match ptr.layout.backend_repr {
    BackendRepr::Scalar(..) => true,
    _ => false,
}matches!(ptr.layout.backend_repr, BackendRepr::Scalar(..)) {
556            // A thin pointer. If it has provenance, we don't have to do anything.
557            // If it does not, ensure we clear the provenance in memory.
558            if !#[allow(non_exhaustive_omitted_patterns)] match ptr.to_scalar() {
    Scalar::Ptr(..) => true,
    _ => false,
}matches!(ptr.to_scalar(), Scalar::Ptr(..)) {
559                // The loaded pointer has no provenance. Some bytes of its representation still
560                // might have provenance, which we have to clear.
561                self.ecx.clear_provenance(place)?;
562            }
563        } else {
564            // A wide pointer. This means we have to worry both about the pointer itself and the
565            // metadata. We do the lazy thing and just write back the value we got. Just
566            // clearing provenance in a targeted manner would be more efficient, but unless this
567            // is a perf hotspot it's just not worth the effort.
568            self.ecx.write_immediate_no_validate(**ptr, place)?;
569        }
570        interp_ok(())
571    }
572
573    fn check_wide_ptr_meta(
574        &mut self,
575        meta: MemPlaceMeta<M::Provenance>,
576        pointee: TyAndLayout<'tcx>,
577    ) -> InterpResult<'tcx> {
578        let tail = self.ecx.tcx.struct_tail_for_codegen(pointee.ty, self.ecx.typing_env);
579        match tail.kind() {
580            ty::Dynamic(data, _) => {
581                let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
582                // Make sure it is a genuine vtable pointer for the right trait.
583                {
    self.ecx.get_ptr_vtable_ty(vtable,
                Some(data)).map_err_kind(|e|
                {
                    match e {
                        Ub(DanglingIntPointer { .. } | InvalidVTablePointer(..)) =>
                            {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered {0}, but expected a vtable pointer",
                                                        vtable))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type
                            }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(InvalidMetaWrongTrait {
                                            expected_dyn_type,
                                            vtable_dyn_type,
                                        });
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
584                    self.ecx.get_ptr_vtable_ty(vtable, Some(data)),
585                    self.path,
586                    Ub(DanglingIntPointer{ .. } | InvalidVTablePointer(..)) =>
587                        format!("encountered {vtable}, but expected a vtable pointer"),
588                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) =>
589                        InvalidMetaWrongTrait { expected_dyn_type, vtable_dyn_type },
590                );
591            }
592            ty::Slice(..) | ty::Str => {
593                let _len = meta.unwrap_meta().to_target_usize(self.ecx)?;
594                // We do not check that `len * elem_size <= isize::MAX`:
595                // that is only required for references, and there it falls out of the
596                // "dereferenceable" check performed by Stacked Borrows.
597            }
598            ty::Foreign(..) => {
599                // Unsized, but not wide.
600            }
601            _ => ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected unsized type tail: {0:?}",
        tail))bug!("Unexpected unsized type tail: {:?}", tail),
602        }
603
604        interp_ok(())
605    }
606
607    /// Check a reference or `Box`.
608    ///
609    /// `ty` is the actual type of `value`; for a Box, `value` will be just the inner raw pointer.
610    fn check_safe_pointer(
611        &mut self,
612        value: &PlaceTy<'tcx, M::Provenance>,
613        ty: Ty<'tcx>,
614        ptr_kind: PtrKind,
615    ) -> InterpResult<'tcx> {
616        let ptr = self.read_immediate(value, ptr_kind.into())?;
617        if self.reset_provenance_and_padding {
618            // There's no padding in a pointer.
619            self.add_data_range_place(value);
620            // Resetting provenance is done below, together with retagging, to avoid
621            // redundant writes.
622        }
623        let place = self.ecx.imm_ptr_to_mplace(&ptr)?;
624        // Handle wide pointers.
625        // Check metadata early, for better diagnostics
626        if place.layout.is_unsized() {
627            self.check_wide_ptr_meta(place.meta(), place.layout)?;
628        }
629
630        // Determine size and alignment of pointee.
631        let size_and_align = {
    self.ecx.size_and_align_of_val(&place).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidMeta(msg)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered invalid {1} metadata: {0}",
                                                        match msg {
                                                            InvalidMetaKind::SliceTooBig =>
                                                                "slice is bigger than largest supported object",
                                                            InvalidMetaKind::TooBig =>
                                                                "total size is bigger than largest supported object",
                                                        }, ptr_kind))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
}try_validation!(
632            self.ecx.size_and_align_of_val(&place),
633            self.path,
634            Ub(InvalidMeta(msg)) => format!(
635                "encountered invalid {ptr_kind} metadata: {}",
636                match msg {
637                    InvalidMetaKind::SliceTooBig => "slice is bigger than largest supported object",
638                    InvalidMetaKind::TooBig => "total size is bigger than largest supported object",
639                }
640            )
641        );
642        let (size, align) = size_and_align
643            // for the purpose of validity, consider foreign types to have
644            // alignment and size determined by the layout (size will be 0,
645            // alignment should take attributes into account).
646            .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
647
648        // If we're not allow to dangle, make sure this is dereferenceable and retag it for
649        // the aliasing model.
650        let adjusted_ptr = if !self.may_dangle {
651            {
    self.ecx.check_ptr_access(place.ptr(), size,
                CheckInAllocMsg::Dereferenceable).map_err_kind(|e|
                {
                    match e {
                        Ub(DanglingIntPointer { addr: 0, .. }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered a null {0}",
                                                        ptr_kind))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        Ub(DanglingIntPointer { addr: i, .. }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered a dangling {1} ({0} has no provenance)",
                                                        Pointer::<Option<AllocId>>::without_provenance(i),
                                                        ptr_kind))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        Ub(PointerOutOfBounds { .. }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered a dangling {0} (going beyond the bounds of its allocation)",
                                                        ptr_kind))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        Ub(PointerUseAfterFree(..)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered a dangling {0} (use-after-free)",
                                                        ptr_kind))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
652                self.ecx.check_ptr_access(
653                    place.ptr(),
654                    size,
655                    CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message
656                ),
657                self.path,
658                Ub(DanglingIntPointer { addr: 0, .. }) =>
659                    format!("encountered a null {ptr_kind}"),
660                Ub(DanglingIntPointer { addr: i, .. }) =>
661                    format!(
662                        "encountered a dangling {ptr_kind} ({ptr} has no provenance)",
663                        ptr = Pointer::<Option<AllocId>>::without_provenance(i)
664                    ),
665                Ub(PointerOutOfBounds { .. }) =>
666                    format!("encountered a dangling {ptr_kind} (going beyond the bounds of its allocation)"),
667                Ub(PointerUseAfterFree(..)) =>
668                    format!("encountered a dangling {ptr_kind} (use-after-free)"),
669            );
670            if self.reset_provenance_and_padding {
671                M::retag_ptr_value(self.ecx, &ptr, ty).map_err_kind(|e| match e {
672                    Ub(WriteToReadOnly(_)) => {
673                        {
    let where_ = &self.path;
    let path =
        if !where_.projs.is_empty() {
            let mut path = String::new();
            write_path(&mut path, &where_.projs);
            Some(path)
        } else { None };
    #[allow(unused)]
    use ValidationErrorKind::*;
    let msg =
        ValidationErrorKind::from(::alloc::__export::must_use({
                    ::alloc::fmt::format(format_args!("encountered {0} pointing to read-only memory",
                            if ptr_kind == PtrKind::Box {
                                "box"
                            } else { "mutable reference" }))
                }));
    ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
            orig_ty: where_.orig_ty,
            path,
            ptr_bytes_warning: msg.ptr_bytes_warning(),
            msg: msg.to_string(),
        })
}err_validation_failure!(
674                            self.path,
675                            format!(
676                                "encountered {} pointing to read-only memory",
677                                if ptr_kind == PtrKind::Box { "box" } else { "mutable reference" },
678                            )
679                        )
680                    }
681                    InterpErrorKind::MachineStop(mut machine_err) => {
682                        // Enhance the aliasing model error with the current path.
683                        if !self.path.projs.is_empty() {
684                            let mut path = String::new();
685                            write_path(&mut path, &self.path.projs);
686                            machine_err.with_validation_path(path);
687                        }
688                        InterpErrorKind::MachineStop(machine_err)
689                    }
690                    e => e,
691                })?
692            } else {
693                // We can't retag if we're not resetting provenance.
694                None
695            }
696        } else {
697            // Pointer remains unchanged.
698            None
699        };
700        // If the pointer needs adjusting, write back adjusted pointer. This automatically
701        // also clears any excess provenance. Otherwise, just clear the provenance.
702        if let Some(ptr) = adjusted_ptr {
703            self.ecx.write_immediate_no_validate(*ptr, value)?;
704        } else if self.reset_provenance_and_padding {
705            self.reset_pointer_provenance(value, &ptr)?;
706        }
707
708        // Check alignment after dereferenceable (if both are violated, trigger the error above).
709        {
    self.ecx.check_ptr_align(place.ptr(),
                align).map_err_kind(|e|
                {
                    match e {
                        Ub(AlignmentCheckFailed(Misalignment { required, has },
                            _msg)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered an unaligned {2} (required {0} byte alignment but found {1})",
                                                        required.bytes(), has.bytes(), ptr_kind))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
710            self.ecx.check_ptr_align(
711                place.ptr(),
712                align,
713            ),
714            self.path,
715            Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => format!(
716                "encountered an unaligned {ptr_kind} (required {required_bytes} byte alignment but found {found_bytes})",
717                required_bytes = required.bytes(),
718                found_bytes = has.bytes()
719            ),
720        );
721
722        // Make sure this is non-null. This is obviously needed when `may_dangle` is set,
723        // but even if we did check dereferenceability above that would still allow null
724        // pointers if `size` is zero.
725        let scalar = Scalar::from_maybe_pointer(place.ptr(), self.ecx);
726        if self.ecx.scalar_may_be_null(scalar)? {
727            let maybe = !M::Provenance::OFFSET_IS_ADDR && #[allow(non_exhaustive_omitted_patterns)] match scalar {
    Scalar::Ptr(..) => true,
    _ => false,
}matches!(scalar, Scalar::Ptr(..));
728            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered a {0}null {1}",
                                if maybe { "maybe-" } else { "" }, ptr_kind))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    }throw_validation_failure!(
729                self.path,
730                format!(
731                    "encountered a {maybe}null {ptr_kind}",
732                    maybe = if maybe { "maybe-" } else { "" }
733                )
734            )
735        }
736        // Do not allow references to uninhabited types.
737        if place.layout.is_uninhabited() {
738            let ty = place.layout.ty;
739            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered a {0} pointing to uninhabited type {1}",
                                ptr_kind, ty))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    }throw_validation_failure!(
740                self.path,
741                format!("encountered a {ptr_kind} pointing to uninhabited type {ty}")
742            )
743        }
744
745        // Recursive checking (but not inside `MaybeDangling` of course).
746        if let Some(ref_tracking) = self.ref_tracking.as_deref_mut()
747            && !self.may_dangle
748        {
749            // Proceed recursively even for ZST, no reason to skip them!
750            // `!` is a ZST and we want to validate it.
751            if let Some(ctfe_mode) = self.ctfe_mode {
752                let mut skip_recursive_check = false;
753                // CTFE imposes restrictions on what references can point to.
754                if let Ok((alloc_id, _offset, _prov)) =
755                    self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
756                {
757                    // Everything should be already interned.
758                    let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
759                        if self.ecx.memory.alloc_map.contains_key(&alloc_id) {
760                            // This can happen when interning didn't complete due to, e.g.
761                            // missing `make_global`. This must mean other errors are already
762                            // being reported.
763                            self.ecx.tcx.dcx().delayed_bug(
764                                "interning did not complete, there should be an error",
765                            );
766                            return interp_ok(());
767                        }
768                        // We can't have *any* references to non-existing allocations in const-eval
769                        // as the rest of rustc isn't happy with them... so we throw an error, even
770                        // though for zero-sized references this isn't really UB.
771                        // A potential future alternative would be to resurrect this as a zero-sized allocation
772                        // (which codegen will then compile to an aligned dummy pointer anyway).
773                        do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered a dangling {0} (use-after-free)",
                                ptr_kind))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    };throw_validation_failure!(
774                            self.path,
775                            format!("encountered a dangling {ptr_kind} (use-after-free)")
776                        );
777                    };
778                    let (size, _align) =
779                        global_alloc.size_and_align(*self.ecx.tcx, self.ecx.typing_env);
780                    let alloc_actual_mutbl =
781                        global_alloc.mutability(*self.ecx.tcx, self.ecx.typing_env);
782
783                    match global_alloc {
784                        GlobalAlloc::Static(did) => {
785                            let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
786                                ::rustc_middle::util::bug::bug_fmt(format_args!("impossible case reached"))bug!()
787                            };
788                            if !!self.ecx.tcx.is_thread_local_static(did) {
    ::core::panicking::panic("assertion failed: !self.ecx.tcx.is_thread_local_static(did)")
};assert!(!self.ecx.tcx.is_thread_local_static(did));
789                            if !self.ecx.tcx.is_static(did) {
    ::core::panicking::panic("assertion failed: self.ecx.tcx.is_static(did)")
};assert!(self.ecx.tcx.is_static(did));
790                            match ctfe_mode {
791                                CtfeValidationMode::Static { .. }
792                                | CtfeValidationMode::Promoted { .. } => {
793                                    // We skip recursively checking other statics. These statics must be sound by
794                                    // themselves, and the only way to get broken statics here is by using
795                                    // unsafe code.
796                                    // The reasons we don't check other statics is twofold. For one, in all
797                                    // sound cases, the static was already validated on its own, and second, we
798                                    // trigger cycle errors if we try to compute the value of the other static
799                                    // and that static refers back to us (potentially through a promoted).
800                                    // This could miss some UB, but that's fine.
801                                    // We still walk nested allocations, as they are fundamentally part of this validation run.
802                                    // This means we will also recurse into nested statics of *other*
803                                    // statics, even though we do not recurse into other statics directly.
804                                    // That's somewhat inconsistent but harmless.
805                                    skip_recursive_check = !nested;
806                                }
807                                CtfeValidationMode::Const { .. } => {
808                                    // If this is mutable memory or an `extern static`, there's no point in checking it -- we'd
809                                    // just get errors trying to read the value.
810                                    if alloc_actual_mutbl.is_mut()
811                                        || self.ecx.tcx.is_foreign_item(did)
812                                    {
813                                        skip_recursive_check = true;
814                                    }
815                                }
816                            }
817                        }
818                        _ => (),
819                    }
820
821                    // If this allocation has size zero, there is no actual mutability here.
822                    if size != Size::ZERO {
823                        // Determine whether this pointer expects to be pointing to something mutable.
824                        let ptr_expected_mutbl = match ptr_kind {
825                            PtrKind::Box => Mutability::Mut,
826                            PtrKind::Ref(mutbl) => {
827                                // We do not take into account interior mutability here since we cannot know if
828                                // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
829                                // that in the recursive descent behind this reference (controlled by
830                                // `allow_immutable_unsafe_cell`).
831                                mutbl
832                            }
833                        };
834                        // Mutable pointer to immutable memory is no good.
835                        if ptr_expected_mutbl == Mutability::Mut
836                            && alloc_actual_mutbl == Mutability::Not
837                        {
838                            // This can actually occur with transmutes.
839                            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered mutable reference or box pointing to read-only memory"))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    };throw_validation_failure!(
840                                self.path,
841                                format!(
842                                    "encountered mutable reference or box pointing to read-only memory"
843                                )
844                            );
845                        }
846                    }
847                }
848                // Potentially skip recursive check.
849                if skip_recursive_check {
850                    return interp_ok(());
851                }
852            } else {
853                // This is not CTFE, so it's Miri with recursive checking.
854                // FIXME: should we skip `UnsafeCell` behind shared references? Currently that is
855                // not needed since validation reads bypass Stacked Borrows and data race checks,
856                // but is that really coherent?
857            }
858            let path = &self.path;
859            ref_tracking.track(place, || {
860                // We need to clone the path anyway, make sure it gets created
861                // with enough space for the additional `Deref`.
862                let mut new_projs = Vec::with_capacity(path.projs.len() + 1);
863                new_projs.extend(&path.projs);
864                new_projs.push(PathElem::Deref);
865                Path { projs: new_projs, orig_ty: path.orig_ty }
866            });
867        }
868        interp_ok(())
869    }
870
871    /// Check if this is a value of primitive type, and if yes check the validity of the value
872    /// at that type. Return `true` if the type is indeed primitive.
873    ///
874    /// Note that not all of these have `FieldsShape::Primitive`, e.g. wide references.
875    fn try_visit_primitive(
876        &mut self,
877        value: &PlaceTy<'tcx, M::Provenance>,
878    ) -> InterpResult<'tcx, bool> {
879        // Go over all the primitive types
880        let ty = value.layout.ty;
881        match ty.kind() {
882            ty::Bool => {
883                let scalar = self.read_scalar(value, ExpectedKind::Bool)?;
884                {
    scalar.to_bool().map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidBool(..)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered {0:x}, but expected a boolean",
                                                        scalar))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
885                    scalar.to_bool(),
886                    self.path,
887                    Ub(InvalidBool(..)) =>
888                        format!("encountered {scalar:x}, but expected a boolean"),
889                );
890                if self.reset_provenance_and_padding {
891                    self.ecx.clear_provenance(value)?;
892                    self.add_data_range_place(value);
893                }
894                interp_ok(true)
895            }
896            ty::Char => {
897                let scalar = self.read_scalar(value, ExpectedKind::Char)?;
898                {
    scalar.to_char().map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidChar(..)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered {0:x}, but expected a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)",
                                                        scalar))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
899                    scalar.to_char(),
900                    self.path,
901                    Ub(InvalidChar(..)) =>
902                        format!("encountered {scalar:x}, but expected a valid unicode scalar value \
903                          (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)")
904                );
905                if self.reset_provenance_and_padding {
906                    self.ecx.clear_provenance(value)?;
907                    self.add_data_range_place(value);
908                }
909                interp_ok(true)
910            }
911            ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
912                // NOTE: Keep this in sync with the array optimization for int/float
913                // types below!
914                self.read_scalar(
915                    value,
916                    if #[allow(non_exhaustive_omitted_patterns)] match ty.kind() {
    ty::Float(..) => true,
    _ => false,
}matches!(ty.kind(), ty::Float(..)) {
917                        ExpectedKind::Float
918                    } else {
919                        ExpectedKind::Int
920                    },
921                )?;
922                if self.reset_provenance_and_padding {
923                    self.ecx.clear_provenance(value)?;
924                    self.add_data_range_place(value);
925                }
926                interp_ok(true)
927            }
928            ty::RawPtr(..) => {
929                let ptr = self.read_immediate(value, ExpectedKind::RawPtr)?;
930                if self.reset_provenance_and_padding {
931                    self.reset_pointer_provenance(value, &ptr)?;
932                    // There's no padding in a pointer.
933                    self.add_data_range_place(value);
934                }
935
936                let place = self.ecx.imm_ptr_to_mplace(&ptr)?;
937                if place.layout.is_unsized() {
938                    self.check_wide_ptr_meta(place.meta(), place.layout)?;
939                }
940                interp_ok(true)
941            }
942            ty::Ref(_, _ty, mutbl) => {
943                self.check_safe_pointer(value, ty, PtrKind::Ref(*mutbl))?;
944                interp_ok(true)
945            }
946            ty::FnPtr(..) => {
947                let scalar = self.read_scalar(value, ExpectedKind::FnPtr)?;
948
949                // If we check references recursively, also check that this points to a function.
950                if let Some(_) = self.ref_tracking {
951                    let ptr = scalar.to_pointer(self.ecx)?;
952                    let _fn = {
    self.ecx.get_ptr_fn(ptr).map_err_kind(|e|
                {
                    match e {
                        Ub(DanglingIntPointer { .. } | InvalidFunctionPointer(..))
                            => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered {0}, but expected a function pointer",
                                                        ptr))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
}try_validation!(
953                        self.ecx.get_ptr_fn(ptr),
954                        self.path,
955                        Ub(DanglingIntPointer{ .. } | InvalidFunctionPointer(..)) =>
956                            format!("encountered {ptr}, but expected a function pointer"),
957                    );
958                    // FIXME: Check if the signature matches
959                } else {
960                    // Otherwise (for standalone Miri and for `-Zextra-const-ub-checks`),
961                    // we have to still check it to be non-null.
962                    if self.ecx.scalar_may_be_null(scalar)? {
963                        let maybe =
964                            !M::Provenance::OFFSET_IS_ADDR && #[allow(non_exhaustive_omitted_patterns)] match scalar {
    Scalar::Ptr(..) => true,
    _ => false,
}matches!(scalar, Scalar::Ptr(..));
965                        do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered a {0}null function pointer",
                                if maybe { "maybe-" } else { "" }))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    };throw_validation_failure!(
966                            self.path,
967                            format!(
968                                "encountered a {maybe}null function pointer",
969                                maybe = if maybe { "maybe-" } else { "" }
970                            )
971                        );
972                    }
973                }
974                if self.reset_provenance_and_padding {
975                    // Make sure we do not preserve partial provenance. This matches the thin
976                    // pointer handling in `deref_pointer`.
977                    if #[allow(non_exhaustive_omitted_patterns)] match scalar {
    Scalar::Int(..) => true,
    _ => false,
}matches!(scalar, Scalar::Int(..)) {
978                        self.ecx.clear_provenance(value)?;
979                    }
980                    self.add_data_range_place(value);
981                }
982                interp_ok(true)
983            }
984            ty::Never => {
985                do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered a value of the never type `!`"))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    }throw_validation_failure!(
986                    self.path,
987                    format!("encountered a value of the never type `!`")
988                )
989            }
990            ty::Foreign(..) | ty::FnDef(..) => {
991                // Nothing to check.
992                interp_ok(true)
993            }
994            ty::UnsafeBinder(_) => {
    ::core::panicking::panic_fmt(format_args!("not yet implemented: {0}",
            format_args!("FIXME(unsafe_binder)")));
}todo!("FIXME(unsafe_binder)"),
995            // The above should be all the primitive types. The rest is compound, we
996            // check them by visiting their fields/variants.
997            ty::Adt(..)
998            | ty::Tuple(..)
999            | ty::Array(..)
1000            | ty::Slice(..)
1001            | ty::Str
1002            | ty::Dynamic(..)
1003            | ty::Closure(..)
1004            | ty::Pat(..)
1005            | ty::CoroutineClosure(..)
1006            | ty::Coroutine(..) => interp_ok(false),
1007            // Some types only occur during typechecking, they have no layout.
1008            // We should not see them here and we could not check them anyway.
1009            ty::Error(_)
1010            | ty::Infer(..)
1011            | ty::Placeholder(..)
1012            | ty::Bound(..)
1013            | ty::Param(..)
1014            | ty::Alias(..)
1015            | ty::CoroutineWitness(..) => ::rustc_middle::util::bug::bug_fmt(format_args!("Encountered invalid type {0:?}",
        ty))bug!("Encountered invalid type {:?}", ty),
1016        }
1017    }
1018
1019    fn visit_scalar(
1020        &mut self,
1021        scalar: Scalar<M::Provenance>,
1022        scalar_layout: ScalarAbi,
1023    ) -> InterpResult<'tcx> {
1024        let size = scalar_layout.size(self.ecx);
1025        let valid_range = scalar_layout.valid_range(self.ecx);
1026        let WrappingRange { start, end } = valid_range;
1027        let max_value = size.unsigned_int_max();
1028        if !(end <= max_value) {
    ::core::panicking::panic("assertion failed: end <= max_value")
};assert!(end <= max_value);
1029        let bits = match scalar.try_to_scalar_int() {
1030            Ok(int) => int.to_bits(size),
1031            Err(_) => {
1032                // So this is a pointer then, and casting to an int failed.
1033                // Can only happen during CTFE.
1034                // We support 2 kinds of ranges here: full range, and excluding zero.
1035                if start == 1 && end == max_value {
1036                    // Only null is the niche. So make sure the ptr is NOT null.
1037                    if self.ecx.scalar_may_be_null(scalar)? {
1038                        do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered a maybe-null pointer, but expected something that is definitely non-zero"))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    }throw_validation_failure!(
1039                            self.path,
1040                            format!(
1041                                "encountered a maybe-null pointer, but expected something that is definitely non-zero"
1042                            )
1043                        )
1044                    } else {
1045                        return interp_ok(());
1046                    }
1047                } else if scalar_layout.is_always_valid(self.ecx) {
1048                    // Easy. (This is reachable if `enforce_number_validity` is set.)
1049                    return interp_ok(());
1050                } else {
1051                    // Conservatively, we reject, because the pointer *could* have a bad value.
1052                    do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered a pointer with unknown absolute address, but expected something that is definitely {0}",
                                fmt_range(valid_range, max_value)))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    }throw_validation_failure!(
1053                        self.path,
1054                        format!(
1055                            "encountered a pointer with unknown absolute address, but expected something that is definitely {in_range}",
1056                            in_range = fmt_range(valid_range, max_value)
1057                        )
1058                    )
1059                }
1060            }
1061        };
1062        // Now compare.
1063        if valid_range.contains(bits) {
1064            interp_ok(())
1065        } else {
1066            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered {1}, but expected something {0}",
                                fmt_range(valid_range, max_value), bits))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    }throw_validation_failure!(
1067                self.path,
1068                format!(
1069                    "encountered {bits}, but expected something {in_range}",
1070                    in_range = fmt_range(valid_range, max_value)
1071                )
1072            )
1073        }
1074    }
1075
1076    fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
1077        if true {
    if !self.ctfe_mode.is_some() {
        ::core::panicking::panic("assertion failed: self.ctfe_mode.is_some()")
    };
};debug_assert!(self.ctfe_mode.is_some());
1078        if let Some(mplace) = val.as_mplace_or_local().left() {
1079            if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
1080                let tcx = *self.ecx.tcx;
1081                // Everything must be already interned.
1082                let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.typing_env);
1083                if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
1084                    match (&alloc.mutability, &mutbl) {
    (left_val, right_val) => {
        if !(*left_val == *right_val) {
            let kind = ::core::panicking::AssertKind::Eq;
            ::core::panicking::assert_failed(kind, &*left_val, &*right_val,
                ::core::option::Option::None);
        }
    }
};assert_eq!(alloc.mutability, mutbl);
1085                }
1086                mutbl.is_mut()
1087            } else {
1088                // No memory at all.
1089                false
1090            }
1091        } else {
1092            // A local variable -- definitely mutable.
1093            true
1094        }
1095    }
1096
1097    /// Add the given pointer-length pair to the "data" range of this visit.
1098    fn add_data_range(&mut self, ptr: Pointer<Option<M::Provenance>>, size: Size) {
1099        if let Some(data_bytes) = self.data_bytes.as_mut() {
1100            // We only have to store the offset, the rest is the same for all pointers here.
1101            // The logic is agnostic to whether the offset is relative or absolute as long as
1102            // it is consistent.
1103            let (_prov, offset) = ptr.into_raw_parts();
1104            // Add this.
1105            data_bytes.add_range(offset, size);
1106        };
1107    }
1108
1109    /// Add the entire given place to the "data" range of this visit.
1110    fn add_data_range_place(&mut self, place: &PlaceTy<'tcx, M::Provenance>) {
1111        // Only sized places can be added this way.
1112        if true {
    if !place.layout.is_sized() {
        ::core::panicking::panic("assertion failed: place.layout.is_sized()")
    };
};debug_assert!(place.layout.is_sized());
1113        if let Some(data_bytes) = self.data_bytes.as_mut() {
1114            let offset = Self::data_range_offset(self.ecx, place);
1115            data_bytes.add_range(offset, place.layout.size);
1116        }
1117    }
1118
1119    /// Convert a place into the offset it starts at, for the purpose of data_range tracking.
1120    /// Must only be called if `data_bytes` is `Some(_)`.
1121    fn data_range_offset(ecx: &InterpCx<'tcx, M>, place: &PlaceTy<'tcx, M::Provenance>) -> Size {
1122        // The presence of `data_bytes` implies that our place is in memory.
1123        let ptr = ecx
1124            .place_to_op(place)
1125            .expect("place must be in memory")
1126            .as_mplace_or_imm()
1127            .expect_left("place must be in memory")
1128            .ptr();
1129        let (_prov, offset) = ptr.into_raw_parts();
1130        offset
1131    }
1132
1133    fn reset_padding(&mut self, place: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
1134        let Some(data_bytes) = self.data_bytes.as_mut() else { return interp_ok(()) };
1135        // Our value must be in memory, otherwise we would not have set up `data_bytes`.
1136        let mplace = self.ecx.force_allocation(place)?;
1137        // Determine starting offset and size.
1138        let (_prov, start_offset) = mplace.ptr().into_raw_parts();
1139        let (size, _align) = self
1140            .ecx
1141            .size_and_align_of_val(&mplace)?
1142            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
1143        // If there is no padding at all, we can skip the rest: check for
1144        // a single data range covering the entire value.
1145        if data_bytes.0 == &[(start_offset, size)] {
1146            return interp_ok(());
1147        }
1148        // Get a handle for the allocation. Do this only once, to avoid looking up the same
1149        // allocation over and over again. (Though to be fair, iterating the value already does
1150        // exactly that.)
1151        let Some(mut alloc) = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)? else {
1152            // A ZST, no padding to clear.
1153            return interp_ok(());
1154        };
1155        // Add a "finalizer" data range at the end, so that the iteration below finds all gaps
1156        // between ranges.
1157        data_bytes.0.push((start_offset + size, Size::ZERO));
1158        // Iterate, and reset gaps.
1159        let mut padding_cleared_until = start_offset;
1160        for &(offset, size) in data_bytes.0.iter() {
1161            if !(offset >= padding_cleared_until) {
    {
        ::core::panicking::panic_fmt(format_args!("reset_padding on {0}: previous field ended at offset {1}, next field starts at {2} (and has a size of {3} bytes)",
                mplace.layout.ty,
                (padding_cleared_until - start_offset).bytes(),
                (offset - start_offset).bytes(), size.bytes()));
    }
};assert!(
1162                offset >= padding_cleared_until,
1163                "reset_padding on {}: previous field ended at offset {}, next field starts at {} (and has a size of {} bytes)",
1164                mplace.layout.ty,
1165                (padding_cleared_until - start_offset).bytes(),
1166                (offset - start_offset).bytes(),
1167                size.bytes(),
1168            );
1169            if offset > padding_cleared_until {
1170                // We found padding. Adjust the range to be relative to `alloc`, and make it uninit.
1171                let padding_start = padding_cleared_until - start_offset;
1172                let padding_size = offset - padding_cleared_until;
1173                let range = alloc_range(padding_start, padding_size);
1174                {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1174",
                        "rustc_const_eval::interpret::validity",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                        ::tracing_core::__macro_support::Option::Some(1174u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("reset_padding on {0}: resetting padding range {1:?}",
                                                    mplace.layout.ty, range) as &dyn Value))])
            });
    } else { ; }
};trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty);
1175                alloc.write_uninit(range);
1176            }
1177            padding_cleared_until = offset + size;
1178        }
1179        if !(padding_cleared_until == start_offset + size) {
    ::core::panicking::panic("assertion failed: padding_cleared_until == start_offset + size")
};assert!(padding_cleared_until == start_offset + size);
1180        interp_ok(())
1181    }
1182
1183    /// Computes the data range of this union type:
1184    /// which bytes are inside a field (i.e., not padding.)
1185    fn union_data_range<'e>(
1186        ecx: &'e mut InterpCx<'tcx, M>,
1187        layout: TyAndLayout<'tcx>,
1188    ) -> Cow<'e, RangeSet> {
1189        if !layout.ty.is_union() {
    ::core::panicking::panic("assertion failed: layout.ty.is_union()")
};assert!(layout.ty.is_union());
1190        if !layout.is_sized() {
    {
        ::core::panicking::panic_fmt(format_args!("there are no unsized unions"));
    }
};assert!(layout.is_sized(), "there are no unsized unions");
1191        let layout_cx = LayoutCx::new(*ecx.tcx, ecx.typing_env);
1192        return M::cached_union_data_range(ecx, layout.ty, || {
1193            let mut out = RangeSet(Vec::new());
1194            union_data_range_uncached(&layout_cx, layout, Size::ZERO, &mut out);
1195            out
1196        });
1197
1198        /// Helper for recursive traversal: add data ranges of the given type to `out`.
1199        fn union_data_range_uncached<'tcx>(
1200            cx: &LayoutCx<'tcx>,
1201            layout: TyAndLayout<'tcx>,
1202            base_offset: Size,
1203            out: &mut RangeSet,
1204        ) {
1205            // If this is a ZST, we don't contain any data. In particular, this helps us to quickly
1206            // skip over huge arrays of ZST.
1207            if layout.is_zst() {
1208                return;
1209            }
1210            // Just recursively add all the fields of everything to the output.
1211            match &layout.fields {
1212                FieldsShape::Primitive => {
1213                    out.add_range(base_offset, layout.size);
1214                }
1215                &FieldsShape::Union(fields) => {
1216                    // Currently, all fields start at offset 0 (relative to `base_offset`).
1217                    for field in 0..fields.get() {
1218                        let field = layout.field(cx, field);
1219                        union_data_range_uncached(cx, field, base_offset, out);
1220                    }
1221                }
1222                &FieldsShape::Array { stride, count } => {
1223                    let elem = layout.field(cx, 0);
1224
1225                    // Fast-path for large arrays of simple types that do not contain any padding.
1226                    if elem.backend_repr.is_scalar() {
1227                        out.add_range(base_offset, elem.size * count);
1228                    } else {
1229                        for idx in 0..count {
1230                            // This repeats the same computation for every array element... but the alternative
1231                            // is to allocate temporary storage for a dedicated `out` set for the array element,
1232                            // and replicating that N times. Is that better?
1233                            union_data_range_uncached(cx, elem, base_offset + idx * stride, out);
1234                        }
1235                    }
1236                }
1237                FieldsShape::Arbitrary { offsets, .. } => {
1238                    for (field, &offset) in offsets.iter_enumerated() {
1239                        let field = layout.field(cx, field.as_usize());
1240                        union_data_range_uncached(cx, field, base_offset + offset, out);
1241                    }
1242                }
1243            }
1244            // Don't forget potential other variants.
1245            match &layout.variants {
1246                Variants::Single { .. } | Variants::Empty => {
1247                    // Fully handled above.
1248                }
1249                Variants::Multiple { variants, .. } => {
1250                    for variant in variants.indices() {
1251                        let variant = layout.for_variant(cx, variant);
1252                        union_data_range_uncached(cx, variant, base_offset, out);
1253                    }
1254                }
1255            }
1256        }
1257    }
1258}
1259
1260impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
1261    type V = PlaceTy<'tcx, M::Provenance>;
1262
    /// The interpreter context this visitor validates values in.
    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'tcx, M> {
        self.ecx
    }
1267
1268    fn read_discriminant(
1269        &mut self,
1270        val: &PlaceTy<'tcx, M::Provenance>,
1271    ) -> InterpResult<'tcx, VariantIdx> {
1272        self.with_elem(PathElem::EnumTag, move |this| {
1273            interp_ok({
    this.ecx.read_discriminant(val).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidTag(val)) => {
                            {
                                let where_ = &this.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered {0:x}, but expected a valid enum tag",
                                                        val))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        Ub(UninhabitedEnumVariantRead(_)) => {
                            {
                                let where_ = &this.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(::alloc::__export::must_use({
                                                ::alloc::fmt::format(format_args!("encountered an uninhabited enum variant"))
                                            }));
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
}try_validation!(
1274                this.ecx.read_discriminant(val),
1275                this.path,
1276                Ub(InvalidTag(val)) =>
1277                    format!("encountered {val:x}, but expected a valid enum tag"),
1278                Ub(UninhabitedEnumVariantRead(_)) =>
1279                    format!("encountered an uninhabited enum variant"),
1280                // Uninit / bad provenance are not possible since the field was already previously
1281                // checked at its integer type.
1282            ))
1283        })
1284    }
1285
    /// Visit one field of an aggregate: record the field in the error path,
    /// then recurse into its value.
    #[inline]
    fn visit_field(
        &mut self,
        old_val: &PlaceTy<'tcx, M::Provenance>,
        field: usize,
        new_val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let elem = self.aggregate_field_path_elem(old_val.layout, field, new_val.layout.ty);
        self.with_elem(elem, move |this| this.visit_value(new_val))
    }
1296
1297    #[inline]
1298    fn visit_variant(
1299        &mut self,
1300        old_val: &PlaceTy<'tcx, M::Provenance>,
1301        variant_id: VariantIdx,
1302        new_val: &PlaceTy<'tcx, M::Provenance>,
1303    ) -> InterpResult<'tcx> {
1304        let name = match old_val.layout.ty.kind() {
1305            ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
1306            // Coroutines also have variants
1307            ty::Coroutine(..) => PathElem::CoroutineState(variant_id),
1308            _ => ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected type with variant: {0:?}",
        old_val.layout.ty))bug!("Unexpected type with variant: {:?}", old_val.layout.ty),
1309        };
1310        self.with_elem(name, move |this| this.visit_value(new_val))
1311    }
1312
1313    #[inline(always)]
1314    fn visit_union(
1315        &mut self,
1316        val: &PlaceTy<'tcx, M::Provenance>,
1317        _fields: NonZero<usize>,
1318    ) -> InterpResult<'tcx> {
1319        // Special check for CTFE validation, preventing `UnsafeCell` inside unions in immutable memory.
1320        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
1321            // Unsized unions are currently not a thing, but let's keep this code consistent with
1322            // the check in `visit_value`.
1323            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
1324            if !zst && !val.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.typing_env) {
1325                if !self.in_mutable_memory(val) {
1326                    do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered `UnsafeCell` in read-only memory"))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    };throw_validation_failure!(
1327                        self.path,
1328                        format!("encountered `UnsafeCell` in read-only memory")
1329                    );
1330                }
1331            }
1332        }
1333        if self.reset_provenance_and_padding
1334            && let Some(data_bytes) = self.data_bytes.as_mut()
1335        {
1336            let base_offset = Self::data_range_offset(self.ecx, val);
1337            // Determine and add data range for this union.
1338            let union_data_range = Self::union_data_range(self.ecx, val.layout);
1339            for &(offset, size) in union_data_range.0.iter() {
1340                data_bytes.add_range(base_offset + offset, size);
1341            }
1342        }
1343        interp_ok(())
1344    }
1345
    /// Validate a `Box`: it is checked like any other safe pointer, tagged as
    /// `PtrKind::Box` so that error messages mention the box.
    #[inline]
    fn visit_box(
        &mut self,
        box_ty: Ty<'tcx>,
        val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.check_safe_pointer(&val, box_ty, PtrKind::Box)?;
        interp_ok(())
    }
1355
1356    #[inline]
1357    fn visit_value(&mut self, val: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
1358        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1358",
                        "rustc_const_eval::interpret::validity",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                        ::tracing_core::__macro_support::Option::Some(1358u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("visit_value: {0:?}, {1:?}",
                                                    *val, val.layout) as &dyn Value))])
            });
    } else { ; }
};trace!("visit_value: {:?}, {:?}", *val, val.layout);
1359
1360        // Check primitive types -- the leaves of our recursive descent.
1361        // This is called even for enum discriminants (which are "fields" of their enum),
1362        // so for integer-typed discriminants the provenance reset will happen here.
1363        // We assume that the Scalar validity range does not restrict these values
1364        // any further than `try_visit_primitive` does!
1365        if self.try_visit_primitive(val)? {
1366            return interp_ok(());
1367        }
1368
1369        // Special check preventing `UnsafeCell` in the inner part of constants
1370        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
1371            // Exclude ZST values. We need to compute the dynamic size/align to properly
1372            // handle slices and trait objects.
1373            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
1374            if !zst
1375                && let Some(def) = val.layout.ty.ty_adt_def()
1376                && def.is_unsafe_cell()
1377            {
1378                if !self.in_mutable_memory(val) {
1379                    do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered `UnsafeCell` in read-only memory"))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    };throw_validation_failure!(
1380                        self.path,
1381                        format!("encountered `UnsafeCell` in read-only memory")
1382                    );
1383                }
1384            }
1385        }
1386
1387        // Recursively walk the value at its type. Apply optimizations for some large types.
1388        match val.layout.ty.kind() {
1389            ty::Str => {
1390                let mplace = val.assert_mem_place(); // strings are unsized and hence never immediate
1391                let len = mplace.len(self.ecx)?;
1392                let expected = ExpectedKind::Str;
1393                {
    self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(),
                Size::from_bytes(len)).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidUninitBytes(..)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg = ValidationErrorKind::from(Uninit { expected });
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        Unsup(ReadPointerAsInt(_)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(PointerAsInt { expected });
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
1394                    self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),
1395                    self.path,
1396                    Ub(InvalidUninitBytes(..)) =>
1397                        Uninit { expected },
1398                    Unsup(ReadPointerAsInt(_)) =>
1399                        PointerAsInt { expected },
1400                );
1401            }
1402            ty::Array(tys, ..) | ty::Slice(tys)
1403                // This optimization applies for types that can hold arbitrary non-provenance bytes (such as
1404                // integer and floating point types).
1405                // FIXME(wesleywiser) This logic could be extended further to arbitrary structs or
1406                // tuples made up of integer/floating point types or inhabited ZSTs with no padding.
1407                if #[allow(non_exhaustive_omitted_patterns)] match tys.kind() {
    ty::Int(..) | ty::Uint(..) | ty::Float(..) => true,
    _ => false,
}matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))
1408                =>
1409            {
1410                let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };
1411                // Optimized handling for arrays of integer/float type.
1412
1413                // This is the length of the array/slice.
1414                let len = val.len(self.ecx)?;
1415                // This is the element type size.
1416                let layout = self.ecx.layout_of(*tys)?;
1417                // This is the size in bytes of the whole array. (This checks for overflow.)
1418                let size = layout.size * len;
1419                // If the size is 0, there is nothing to check.
1420                // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)
1421                if size == Size::ZERO {
1422                    return interp_ok(());
1423                }
1424                // Now that we definitely have a non-ZST array, we know it lives in memory -- except it may
1425                // be an uninitialized local variable, those are also "immediate".
1426                let mplace = match val.to_op(self.ecx)?.as_mplace_or_imm() {
1427                    Left(mplace) => mplace,
1428                    Right(imm) => match *imm {
1429                        Immediate::Uninit =>
1430                            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg = ValidationErrorKind::from(Uninit { expected });
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    }throw_validation_failure!(
1431                                self.path,
1432                                Uninit { expected }
1433                            ),
1434                        Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
1435                            ::rustc_middle::util::bug::bug_fmt(format_args!("arrays/slices can never have Scalar/ScalarPair layout"))bug!("arrays/slices can never have Scalar/ScalarPair layout"),
1436                    }
1437                };
1438
1439                // Optimization: we just check the entire range at once.
1440                // NOTE: Keep this in sync with the handling of integer and float
1441                // types above, in `visit_primitive`.
1442                // No need for an alignment check here, this is not an actual memory access.
1443                let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");
1444
1445                alloc.get_bytes_strip_provenance().map_err_kind(|kind| {
1446                    // Some error happened, try to provide a more detailed description.
1447                    // For some errors we might be able to provide extra information.
1448                    // (This custom logic does not fit the `try_validation!` macro.)
1449                    match kind {
1450                        Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {
1451                            // Some byte was uninitialized, determine which
1452                            // element that byte belongs to so we can
1453                            // provide an index.
1454                            let i = usize::try_from(
1455                                access.bad.start.bytes() / layout.size.bytes(),
1456                            )
1457                            .unwrap();
1458                            self.path.projs.push(PathElem::ArrayElem(i));
1459
1460                            if #[allow(non_exhaustive_omitted_patterns)] match kind {
    Ub(InvalidUninitBytes(_)) => true,
    _ => false,
}matches!(kind, Ub(InvalidUninitBytes(_))) {
1461                                {
    let where_ = &self.path;
    let path =
        if !where_.projs.is_empty() {
            let mut path = String::new();
            write_path(&mut path, &where_.projs);
            Some(path)
        } else { None };
    #[allow(unused)]
    use ValidationErrorKind::*;
    let msg = ValidationErrorKind::from(Uninit { expected });
    ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
            orig_ty: where_.orig_ty,
            path,
            ptr_bytes_warning: msg.ptr_bytes_warning(),
            msg: msg.to_string(),
        })
}err_validation_failure!(self.path, Uninit { expected })
1462                            } else {
1463                                {
    let where_ = &self.path;
    let path =
        if !where_.projs.is_empty() {
            let mut path = String::new();
            write_path(&mut path, &where_.projs);
            Some(path)
        } else { None };
    #[allow(unused)]
    use ValidationErrorKind::*;
    let msg = ValidationErrorKind::from(PointerAsInt { expected });
    ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
            orig_ty: where_.orig_ty,
            path,
            ptr_bytes_warning: msg.ptr_bytes_warning(),
            msg: msg.to_string(),
        })
}err_validation_failure!(self.path, PointerAsInt {expected})
1464                            }
1465                        }
1466
1467                        // Propagate upwards (that will also check for unexpected errors).
1468                        err => err,
1469                    }
1470                })?;
1471
1472                // Don't forget that these are all non-pointer types, and thus do not preserve
1473                // provenance.
1474                if self.reset_provenance_and_padding {
1475                    // We can't share this with above as above, we might be looking at read-only memory.
1476                    let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");
1477                    alloc.clear_provenance();
1478                    // Also, mark this as containing data, not padding.
1479                    self.add_data_range(mplace.ptr(), size);
1480                }
1481            }
1482            // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
1483            // of an array and not all of them, because there's only a single value of a specific
1484            // ZST type, so either validation fails for all elements or none.
1485            ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
1486                // Validate just the first element (if any).
1487                if val.len(self.ecx)? > 0 {
1488                    self.visit_field(val, 0, &self.ecx.project_index(val, 0)?)?;
1489                }
1490            }
1491            ty::Pat(base, pat) => {
1492                // First check that the base type is valid
1493                self.visit_value(&val.transmute(self.ecx.layout_of(*base)?, self.ecx)?)?;
1494                // When you extend this match, make sure to also add tests to
1495                // tests/ui/type/pattern_types/validity.rs
1496                match **pat {
1497                    // Range and non-null patterns are precisely reflected into `valid_range` and thus
1498                    // handled fully by `visit_scalar` (called below).
1499                    ty::PatternKind::Range { .. } => {},
1500                    ty::PatternKind::NotNull => {},
1501
1502                    // FIXME(pattern_types): check that the value is covered by one of the variants.
1503                    // For now, we rely on layout computation setting the scalar's `valid_range` to
1504                    // match the pattern. However, this cannot always work; the layout may
1505                    // pessimistically cover actually illegal ranges and Miri would miss that UB.
1506                    // The consolation here is that codegen also will miss that UB, so at least
1507                    // we won't see optimizations actually breaking such programs.
1508                    ty::PatternKind::Or(_patterns) => {}
1509                }
1510                // FIXME(pattern_types): handle everything based on the pattern, not on the layout.
1511                // it's ok to run scalar validation even if the pattern type is `u8 is 0..=255` and thus
1512                // allows uninit values, because that's rare and so not a perf issue.
1513                match val.layout.backend_repr {
1514                    BackendRepr::Scalar(scalar_layout) => {
1515                        if !scalar_layout.is_uninit_valid() {
1516                            // There is something to check here.
1517                            // We read directly via `ecx` since the read cannot fail -- we already read
1518                            // this field above when recursing into the field.
1519                            let scalar = self.ecx.read_scalar(val)?;
1520                            self.visit_scalar(scalar, scalar_layout)?;
1521                        }
1522                    }
1523                    BackendRepr::ScalarPair(a_layout, b_layout) => {
1524                        // We can only proceed if *both* scalars need to be initialized.
1525                        // FIXME: find a way to also check ScalarPair when one side can be uninit but
1526                        // the other must be init.
1527                        if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
1528                            // We read directly via `ecx` since the read cannot fail -- we already read
1529                            // this field above when recursing into the field.
1530                            let (a, b) = self.ecx.read_immediate(val)?.to_scalar_pair();
1531                            self.visit_scalar(a, a_layout)?;
1532                            self.visit_scalar(b, b_layout)?;
1533                        }
1534                    }
1535                    BackendRepr::SimdVector { .. } | BackendRepr::SimdScalableVector { .. } => ::core::panicking::panic("internal error: entered unreachable code")unreachable!(),
1536                    BackendRepr::Memory { .. } => ::core::panicking::panic("internal error: entered unreachable code")unreachable!()
1537                }
1538            }
1539            ty::Adt(adt, _) if adt.is_maybe_dangling() => {
1540                let old_may_dangle = mem::replace(&mut self.may_dangle, true);
1541
1542                let inner = self.ecx.project_field(val, FieldIdx::ZERO)?;
1543                self.visit_value(&inner)?;
1544
1545                self.may_dangle = old_may_dangle;
1546            }
1547            _ => {
1548                // default handler
1549                {
    self.walk_value(val).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type
                            }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.projs.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, &where_.projs);
                                        Some(path)
                                    } else { None };
                                #[allow(unused)]
                                use ValidationErrorKind::*;
                                let msg =
                                    ValidationErrorKind::from(InvalidMetaWrongTrait {
                                            expected_dyn_type,
                                            vtable_dyn_type,
                                        });
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                                        orig_ty: where_.orig_ty,
                                        path,
                                        ptr_bytes_warning: msg.ptr_bytes_warning(),
                                        msg: msg.to_string(),
                                    })
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
1550                    self.walk_value(val),
1551                    self.path,
1552                    // It's not great to catch errors here, since we can't give a very good path,
1553                    // but it's better than ICEing.
1554                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) =>
1555                        InvalidMetaWrongTrait { expected_dyn_type, vtable_dyn_type },
1556                );
1557            }
1558        }
1559
1560        // *After* all of this, check further information stored in the layout.
1561        // On leaf types like `!` or empty enums, this will raise the error.
1562        // This means that for types wrapping such a type, we won't ever get here, but it's
1563        // just the simplest way to check for this case.
1564        //
1565        // FIXME: We could avoid some redundant checks here. For newtypes wrapping
1566        // scalars, we do the same check on every "level" (e.g., first we check
1567        // the fields of MyNewtype, and then we check MyNewType again).
1568        if val.layout.is_uninhabited() {
1569            let ty = val.layout.ty;
1570            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.projs.is_empty() {
                let mut path = String::new();
                write_path(&mut path, &where_.projs);
                Some(path)
            } else { None };
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg =
            ValidationErrorKind::from(::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("encountered a value of uninhabited type `{0}`",
                                ty))
                    }));
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
                orig_ty: where_.orig_ty,
                path,
                ptr_bytes_warning: msg.ptr_bytes_warning(),
                msg: msg.to_string(),
            })
    };throw_validation_failure!(
1571                self.path,
1572                format!("encountered a value of uninhabited type `{ty}`")
1573            );
1574        }
1575        if truecfg!(debug_assertions) {
1576            // Check that we don't miss any new changes to layout computation in our checks above.
1577            match val.layout.backend_repr {
1578                BackendRepr::Scalar(scalar_layout) => {
1579                    if !scalar_layout.is_uninit_valid() {
1580                        // There is something to check here.
1581                        // We read directly via `ecx` since the read cannot fail -- we already read
1582                        // this field above when recursing into the field.
1583                        let scalar = self
1584                            .ecx
1585                            .read_scalar(val)
1586                            .expect("the above checks should have fully handled this situation");
1587                        self.visit_scalar(scalar, scalar_layout)
1588                            .expect("the above checks should have fully handled this situation");
1589                    }
1590                }
1591                BackendRepr::ScalarPair(a_layout, b_layout) => {
1592                    // We can only proceed if *both* scalars need to be initialized.
1593                    // FIXME: find a way to also check ScalarPair when one side can be uninit but
1594                    // the other must be init.
1595                    if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
1596                        let (a, b) = self
1597                            .ecx
1598                            .read_immediate(val)
1599                            .expect("the above checks should have fully handled this situation")
1600                            .to_scalar_pair();
1601                        self.visit_scalar(a, a_layout)
1602                            .expect("the above checks should have fully handled this situation");
1603                        self.visit_scalar(b, b_layout)
1604                            .expect("the above checks should have fully handled this situation");
1605                    }
1606                }
1607                BackendRepr::SimdVector { .. } | BackendRepr::SimdScalableVector { .. } => {}
1608                BackendRepr::Memory { .. } => {}
1609            }
1610        }
1611
1612        interp_ok(())
1613    }
1614}
1615
1616impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
1617    /// The internal core entry point for all validation operations.
1618    fn validate_operand_internal(
1619        &mut self,
1620        val: &PlaceTy<'tcx, M::Provenance>,
1621        path: Path<'tcx>,
1622        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>>,
1623        ctfe_mode: Option<CtfeValidationMode>,
1624        reset_provenance_and_padding: bool,
1625        start_in_may_dangle: bool,
1626    ) -> InterpResult<'tcx> {
1627        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1627",
                        "rustc_const_eval::interpret::validity",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                        ::tracing_core::__macro_support::Option::Some(1627u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("validate_operand_internal: {0:?}, {1:?}",
                                                    *val, val.layout.ty) as &dyn Value))])
            });
    } else { ; }
};trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty);
1628
1629        // Run the visitor.
1630        self.run_for_validation_mut(|ecx| {
1631            let reset_padding = reset_provenance_and_padding && {
1632                // Check if `val` is actually stored in memory. If not, padding is not even
1633                // represented and we need not reset it.
1634                ecx.place_to_op(val)?.as_mplace_or_imm().is_left()
1635            };
1636            let mut v = ValidityVisitor {
1637                path,
1638                ref_tracking,
1639                ctfe_mode,
1640                ecx,
1641                reset_provenance_and_padding,
1642                data_bytes: reset_padding.then_some(RangeSet(Vec::new())),
1643                may_dangle: start_in_may_dangle,
1644            };
1645            v.visit_value(val)?;
1646            v.reset_padding(val)?;
1647            interp_ok(())
1648        })
1649        .map_err_info(|err| {
1650            if !#[allow(non_exhaustive_omitted_patterns)] match err.kind() {
    InterpErrorKind::UndefinedBehavior(ValidationError { .. }) |
        InterpErrorKind::InvalidProgram(_) | InterpErrorKind::Unsupported(_) |
        InterpErrorKind::MachineStop(_) => true,
    _ => false,
}matches!(
1651                err.kind(),
1652                InterpErrorKind::UndefinedBehavior(ValidationError { .. })
1653                    | InterpErrorKind::InvalidProgram(_)
1654                    | InterpErrorKind::Unsupported(_)
1655                // We have to also ignore machine-specific errors since we do retagging
1656                // during validation.
1657                | InterpErrorKind::MachineStop(_)
1658            ) {
1659                ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected error during validation: {0}",
        format_interp_error(err)));bug!("Unexpected error during validation: {}", format_interp_error(err));
1660            }
1661            err
1662        })
1663    }
1664
1665    /// This function checks the data at `val` to be const-valid.
1666    /// `val` is assumed to cover valid memory if it is an indirect operand.
1667    /// It will error if the bits at the destination do not match the ones described by the layout.
1668    ///
1669    /// `ref_tracking` is used to record references that we encounter so that they
1670    /// can be checked recursively by an outside driving loop.
1671    ///
1672    /// `constant` controls whether this must satisfy the rules for constants:
1673    /// - no pointers to statics.
1674    /// - no `UnsafeCell` or non-ZST `&mut`.
1675    #[inline(always)]
1676    pub(crate) fn const_validate_operand(
1677        &mut self,
1678        val: &PlaceTy<'tcx, M::Provenance>,
1679        path: Path<'tcx>,
1680        ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>,
1681        ctfe_mode: CtfeValidationMode,
1682    ) -> InterpResult<'tcx> {
1683        self.validate_operand_internal(
1684            val,
1685            path,
1686            Some(ref_tracking),
1687            Some(ctfe_mode),
1688            /*reset_provenance*/ false,
1689            /*start_in_may_dangle*/ false,
1690        )
1691    }
1692
1693    /// This function checks the data at `val` to be runtime-valid.
1694    /// `val` is assumed to cover valid memory if it is an indirect operand.
1695    /// It will error if the bits at the destination do not match the ones described by the layout.
1696    #[inline(always)]
1697    pub fn validate_operand(
1698        &mut self,
1699        val: &PlaceTy<'tcx, M::Provenance>,
1700        recursive: bool,
1701        reset_provenance_and_padding: bool,
1702    ) -> InterpResult<'tcx> {
1703        let _trace = <M as
        crate::interpret::Machine>::enter_trace_span(||
        {
            use ::tracing::__macro_support::Callsite as _;
            static __CALLSITE: ::tracing::callsite::DefaultCallsite =
                {
                    static META: ::tracing::Metadata<'static> =
                        {
                            ::tracing_core::metadata::Metadata::new("validate_operand",
                                "rustc_const_eval::interpret::validity",
                                ::tracing::Level::INFO,
                                ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                                ::tracing_core::__macro_support::Option::Some(1703u32),
                                ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                                ::tracing_core::field::FieldSet::new(&["recursive",
                                                "reset_provenance_and_padding", "val"],
                                    ::tracing_core::callsite::Identifier(&__CALLSITE)),
                                ::tracing::metadata::Kind::SPAN)
                        };
                    ::tracing::callsite::DefaultCallsite::new(&META)
                };
            let mut interest = ::tracing::subscriber::Interest::never();
            if ::tracing::Level::INFO <=
                                ::tracing::level_filters::STATIC_MAX_LEVEL &&
                            ::tracing::Level::INFO <=
                                ::tracing::level_filters::LevelFilter::current() &&
                        { interest = __CALLSITE.interest(); !interest.is_never() }
                    &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest) {
                let meta = __CALLSITE.metadata();
                ::tracing::Span::new(meta,
                    &{
                            #[allow(unused_imports)]
                            use ::tracing::field::{debug, display, Value};
                            let mut iter = meta.fields().iter();
                            meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                ::tracing::__macro_support::Option::Some(&recursive as
                                                        &dyn Value)),
                                            (&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                ::tracing::__macro_support::Option::Some(&reset_provenance_and_padding
                                                        as &dyn Value)),
                                            (&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                ::tracing::__macro_support::Option::Some(&debug(&val) as
                                                        &dyn Value))])
                        })
            } else {
                let span =
                    ::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
                {};
                span
            }
        })enter_trace_span!(
1704            M,
1705            "validate_operand",
1706            recursive,
1707            reset_provenance_and_padding,
1708            ?val,
1709        );
1710        // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
1711        // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
1712        // value, it rules out things like `UnsafeCell` in awkward places.
1713        if !recursive {
1714            return self.validate_operand_internal(
1715                val,
1716                Path::new(val.layout.ty),
1717                None,
1718                None,
1719                reset_provenance_and_padding,
1720                /*start_in_may_dangle*/ false,
1721            );
1722        }
1723        // Do a recursive check.
1724        let mut ref_tracking = RefTracking::empty();
1725        self.validate_operand_internal(
1726            val,
1727            Path::new(val.layout.ty),
1728            Some(&mut ref_tracking),
1729            None,
1730            reset_provenance_and_padding,
1731            /*start_in_may_dangle*/ false,
1732        )?;
1733        while let Some((mplace, path)) = ref_tracking.todo.pop() {
1734            // Things behind reference do *not* have the provenance reset. In fact
1735            // we treat the entire thing as being inside MaybeDangling, i.e., references
1736            // do not have to be dereferenceable.
1737            self.validate_operand_internal(
1738                &mplace.into(),
1739                path,
1740                None, // no further recursion
1741                None,
1742                /*reset_provenance_and_padding*/ false,
1743                /*start_in_may_dangle*/ true,
1744            )?;
1745        }
1746        interp_ok(())
1747    }
1748}