//! Check the validity invariant of a given value, and tell the user
//! where in the value it got violated.
//! In const context, this goes even further and tries to approximate const safety.
//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
//! to be const-safe.
use std::borrow::Cow;
use std::fmt::{self, Write};
use std::hash::Hash;
use std::mem;
use std::num::NonZero;

use either::{Left, Right};
use hir::def::DefKind;
use rustc_abi::{
    BackendRepr, FieldIdx, FieldsShape, Scalar as ScalarAbi, Size, VariantIdx, Variants,
    WrappingRange,
};
use rustc_ast::Mutability;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_middle::bug;
use rustc_middle::mir::interpret::{
    InterpErrorKind, InvalidMetaKind, Misalignment, Provenance, alloc_range, interp_ok,
};
use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_span::{Symbol, sym};
use tracing::trace;

use super::machine::AllocMap;
use super::{
    AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
    Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
    format_interp_error,
};
use crate::enter_trace_span;

// for the validation errors
#[rustfmt::skip]
use super::InterpErrorKind::UndefinedBehavior as Ub;
use super::InterpErrorKind::Unsupported as Unsup;
use super::UndefinedBehaviorInfo::*;
use super::UnsupportedOpInfo::*;
/// Construct (but do not throw) a `ValidationError` for the location described by
/// `$where` (a `Path`) with the message `$msg` (anything convertible to
/// `ValidationErrorKind`, i.e. a variant of that enum or a `String`).
macro_rules! err_validation_failure {
    ($where:expr, $msg:expr) => {{
        let where_ = &$where;
        // Only render a projection path if there is one; an empty path is implied
        // by `orig_ty` alone.
        let path = if !where_.projs.is_empty() {
            let mut path = String::new();
            write_path(&mut path, &where_.projs);
            Some(path)
        } else {
            None
        };

        // Bring the variants into scope so `$msg` can name them unqualified.
        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg = ValidationErrorKind::from($msg);
        err_ub!(ValidationError {
            orig_ty: where_.orig_ty,
            path,
            ptr_bytes_warning: msg.ptr_bytes_warning(),
            msg: msg.to_string(),
        })
    }};
}
/// Like `err_validation_failure!`, but immediately throws the error
/// (via the `do yeet` try-block early-exit).
macro_rules! throw_validation_failure {
    ($where:expr, $msg:expr) => {
        do yeet err_validation_failure!($where, $msg)
    };
}
/// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(x), some_path, {
///     Foo | Bar | Baz => format!("some failure involving {x}"),
/// });
/// ```
///
/// The patterns must be of type `UndefinedBehaviorInfo`.
macro_rules! try_validation {
    ($e:expr, $where:expr,
    $( $( $p:pat_param )|+ => $msg:expr ),+ $(,)?
    ) => {{
        $e.map_err_kind(|e| {
            // We catch the error and turn it into a validation failure. We are okay with
            // allocation here as this can only slow down builds that fail anyway.
            match e {
                $(
                    $($p)|+ => {
                        err_validation_failure!(
                            $where,
                            $msg
                        )
                    }
                ),+,
                e => e,
            }
        })?
    }};
}
109110#[derive(#[automatically_derived]
impl ::core::fmt::Debug for PtrKind {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
PtrKind::Ref(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Ref",
&__self_0),
PtrKind::Box => ::core::fmt::Formatter::write_str(f, "Box"),
}
}
}Debug, #[automatically_derived]
impl ::core::clone::Clone for PtrKind {
#[inline]
fn clone(&self) -> PtrKind {
let _: ::core::clone::AssertParamIsClone<Mutability>;
*self
}
}Clone, #[automatically_derived]
impl ::core::marker::Copy for PtrKind { }Copy, #[automatically_derived]
impl ::core::cmp::PartialEq for PtrKind {
#[inline]
fn eq(&self, other: &PtrKind) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(PtrKind::Ref(__self_0), PtrKind::Ref(__arg1_0)) =>
__self_0 == __arg1_0,
_ => true,
}
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for PtrKind {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_fields_are_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Mutability>;
}
}Eq)]
111enum PtrKind {
112 Ref(Mutability),
113 Box,
114}
115116impl fmt::Displayfor PtrKind {
117fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
118let str = match self {
119 PtrKind::Ref(_) => "reference",
120 PtrKind::Box => "box",
121 };
122f.write_fmt(format_args!("{0}", str))write!(f, "{str}")123 }
124}
/// What kind of value validation expected to find; used to phrase
/// "encountered X, but expected Y" diagnostics.
#[derive(Debug)]
enum ExpectedKind {
    Reference,
    Box,
    RawPtr,
    Bool,
    Char,
    Float,
    Int,
    FnPtr,
    Str,
}
138139impl fmt::Displayfor ExpectedKind {
140fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
141let str = match self {
142 ExpectedKind::Reference => "expected a reference",
143 ExpectedKind::Box => "expected a box",
144 ExpectedKind::RawPtr => "expected a raw pointer",
145 ExpectedKind::Bool => "expected a boolean",
146 ExpectedKind::Char => "expected a unicode scalar value",
147 ExpectedKind::Float => "expected a floating point number",
148 ExpectedKind::Int => "expected an integer",
149 ExpectedKind::FnPtr => "expected a function pointer",
150 ExpectedKind::Str => "expected a string",
151 };
152f.write_fmt(format_args!("{0}", str))write!(f, "{str}")153 }
154}
155156impl From<PtrKind> for ExpectedKind {
157fn from(x: PtrKind) -> ExpectedKind {
158match x {
159 PtrKind::Box => ExpectedKind::Box,
160 PtrKind::Ref(_) => ExpectedKind::Reference,
161 }
162 }
163}
164165/// Validation errors that can be emitted in one than one place get a variant here so that
166/// we format them consistently. Everything else uses the `String` fallback.
167#[derive(#[automatically_derived]
impl<'tcx> ::core::fmt::Debug for ValidationErrorKind<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
ValidationErrorKind::Uninit { expected: __self_0 } =>
::core::fmt::Formatter::debug_struct_field1_finish(f,
"Uninit", "expected", &__self_0),
ValidationErrorKind::PointerAsInt { expected: __self_0 } =>
::core::fmt::Formatter::debug_struct_field1_finish(f,
"PointerAsInt", "expected", &__self_0),
ValidationErrorKind::PartialPointer =>
::core::fmt::Formatter::write_str(f, "PartialPointer"),
ValidationErrorKind::InvalidMetaWrongTrait {
vtable_dyn_type: __self_0, expected_dyn_type: __self_1 } =>
::core::fmt::Formatter::debug_struct_field2_finish(f,
"InvalidMetaWrongTrait", "vtable_dyn_type", __self_0,
"expected_dyn_type", &__self_1),
ValidationErrorKind::GeneralError { msg: __self_0 } =>
::core::fmt::Formatter::debug_struct_field1_finish(f,
"GeneralError", "msg", &__self_0),
}
}
}Debug)]
168enum ValidationErrorKind<'tcx> {
169 Uninit {
170 expected: ExpectedKind,
171 },
172 PointerAsInt {
173 expected: ExpectedKind,
174 },
175 PartialPointer,
176 InvalidMetaWrongTrait {
177/// The vtable that was actually referenced by the wide pointer metadata.
178vtable_dyn_type: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
179/// The vtable that was expected at the point in MIR that it was accessed.
180expected_dyn_type: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
181 },
182 GeneralError {
183 msg: String,
184 },
185}
186187impl<'tcx> ValidationErrorKind<'tcx> {
188// We don't do this via `fmt::Display` to so that we can do a move in the `GeneralError` case.
189fn to_string(self) -> String {
190use ValidationErrorKind::*;
191match self {
192Uninit { expected } => ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered uninitialized memory, but {0}",
expected))
})format!("encountered uninitialized memory, but {expected}"),
193PointerAsInt { expected } => ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a pointer, but {0}",
expected))
})format!("encountered a pointer, but {expected}"),
194PartialPointer => ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a partial pointer or a mix of pointers"))
})format!("encountered a partial pointer or a mix of pointers"),
195InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type } => ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("wrong trait in wide pointer vtable: expected `{0}`, but encountered `{1}`",
expected_dyn_type, vtable_dyn_type))
})format!(
196"wrong trait in wide pointer vtable: expected `{expected_dyn_type}`, but encountered `{vtable_dyn_type}`"
197),
198GeneralError { msg } => msg,
199 }
200 }
201202fn ptr_bytes_warning(&self) -> bool {
203use ValidationErrorKind::*;
204#[allow(non_exhaustive_omitted_patterns)] match self {
PointerAsInt { .. } | PartialPointer => true,
_ => false,
}matches!(self, PointerAsInt { .. } | PartialPointer)205 }
206}
207208impl<'tcx> From<String> for ValidationErrorKind<'tcx> {
209fn from(msg: String) -> Self {
210 ValidationErrorKind::GeneralError { msg }
211 }
212}
213214fn fmt_range(r: WrappingRange, max_hi: u128) -> String {
215let WrappingRange { start: lo, end: hi } = r;
216if !(hi <= max_hi) {
::core::panicking::panic("assertion failed: hi <= max_hi")
};assert!(hi <= max_hi);
217if lo > hi {
218::alloc::__export::must_use({
::alloc::fmt::format(format_args!("less or equal to {0}, or greater or equal to {1}",
hi, lo))
})format!("less or equal to {hi}, or greater or equal to {lo}")219 } else if lo == hi {
220::alloc::__export::must_use({
::alloc::fmt::format(format_args!("equal to {0}", lo))
})format!("equal to {lo}")221 } else if lo == 0 {
222if !(hi < max_hi) {
{
::core::panicking::panic_fmt(format_args!("should not be printing if the range covers everything"));
}
};assert!(hi < max_hi, "should not be printing if the range covers everything");
223::alloc::__export::must_use({
::alloc::fmt::format(format_args!("less or equal to {0}", hi))
})format!("less or equal to {hi}")224 } else if hi == max_hi {
225if !(lo > 0) {
{
::core::panicking::panic_fmt(format_args!("should not be printing if the range covers everything"));
}
};assert!(lo > 0, "should not be printing if the range covers everything");
226::alloc::__export::must_use({
::alloc::fmt::format(format_args!("greater or equal to {0}", lo))
})format!("greater or equal to {lo}")227 } else {
228::alloc::__export::must_use({
::alloc::fmt::format(format_args!("in the range {0}..={1}", lo, hi))
})format!("in the range {lo}..={hi}")229 }
230}
231232/// We want to show a nice path to the invalid field for diagnostics,
233/// but avoid string operations in the happy case where no error happens.
234/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
235/// need to later print something for the user.
236#[derive(#[automatically_derived]
impl<'tcx> ::core::marker::Copy for PathElem<'tcx> { }Copy, #[automatically_derived]
impl<'tcx> ::core::clone::Clone for PathElem<'tcx> {
#[inline]
fn clone(&self) -> PathElem<'tcx> {
let _: ::core::clone::AssertParamIsClone<Symbol>;
let _: ::core::clone::AssertParamIsClone<VariantIdx>;
let _: ::core::clone::AssertParamIsClone<usize>;
let _: ::core::clone::AssertParamIsClone<Ty<'tcx>>;
*self
}
}Clone, #[automatically_derived]
impl<'tcx> ::core::fmt::Debug for PathElem<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
PathElem::Field(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Field",
&__self_0),
PathElem::Variant(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"Variant", &__self_0),
PathElem::CoroutineState(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"CoroutineState", &__self_0),
PathElem::CapturedVar(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"CapturedVar", &__self_0),
PathElem::ArrayElem(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"ArrayElem", &__self_0),
PathElem::TupleElem(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"TupleElem", &__self_0),
PathElem::Deref => ::core::fmt::Formatter::write_str(f, "Deref"),
PathElem::EnumTag =>
::core::fmt::Formatter::write_str(f, "EnumTag"),
PathElem::CoroutineTag =>
::core::fmt::Formatter::write_str(f, "CoroutineTag"),
PathElem::DynDowncast(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"DynDowncast", &__self_0),
PathElem::Vtable =>
::core::fmt::Formatter::write_str(f, "Vtable"),
}
}
}Debug)]
237pub enum PathElem<'tcx> {
238 Field(Symbol),
239 Variant(Symbol),
240 CoroutineState(VariantIdx),
241 CapturedVar(Symbol),
242 ArrayElem(usize),
243 TupleElem(usize),
244 Deref,
245 EnumTag,
246 CoroutineTag,
247 DynDowncast(Ty<'tcx>),
248 Vtable,
249}
250251#[derive(#[automatically_derived]
impl<'tcx> ::core::clone::Clone for Path<'tcx> {
#[inline]
fn clone(&self) -> Path<'tcx> {
Path {
orig_ty: ::core::clone::Clone::clone(&self.orig_ty),
projs: ::core::clone::Clone::clone(&self.projs),
}
}
}Clone, #[automatically_derived]
impl<'tcx> ::core::fmt::Debug for Path<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "Path",
"orig_ty", &self.orig_ty, "projs", &&self.projs)
}
}Debug)]
252pub struct Path<'tcx> {
253 orig_ty: Ty<'tcx>,
254 projs: Vec<PathElem<'tcx>>,
255}
256257impl<'tcx> Path<'tcx> {
258fn new(ty: Ty<'tcx>) -> Self {
259Self { orig_ty: ty, projs: ::alloc::vec::Vec::new()vec![] }
260 }
261}
262263/// Extra things to check for during validation of CTFE results.
264#[derive(#[automatically_derived]
impl ::core::marker::Copy for CtfeValidationMode { }Copy, #[automatically_derived]
impl ::core::clone::Clone for CtfeValidationMode {
#[inline]
fn clone(&self) -> CtfeValidationMode {
let _: ::core::clone::AssertParamIsClone<Mutability>;
let _: ::core::clone::AssertParamIsClone<bool>;
*self
}
}Clone)]
265pub enum CtfeValidationMode {
266/// Validation of a `static`
267Static { mutbl: Mutability },
268/// Validation of a promoted.
269Promoted,
270/// Validation of a `const`.
271 /// `allow_immutable_unsafe_cell` says whether we allow `UnsafeCell` in immutable memory (which is the
272 /// case for the top-level allocation of a `const`, where this is fine because the allocation will be
273 /// copied at each use site).
274Const { allow_immutable_unsafe_cell: bool },
275}
276277impl CtfeValidationMode {
278fn allow_immutable_unsafe_cell(self) -> bool {
279match self {
280 CtfeValidationMode::Static { .. } => false,
281 CtfeValidationMode::Promoted { .. } => false,
282 CtfeValidationMode::Const { allow_immutable_unsafe_cell, .. } => {
283allow_immutable_unsafe_cell284 }
285 }
286 }
287}
288289/// State for tracking recursive validation of references
290pub struct RefTracking<T, PATH = ()> {
291 seen: FxHashSet<T>,
292 todo: Vec<(T, PATH)>,
293}
294295impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH> RefTracking<T, PATH> {
296pub fn empty() -> Self {
297RefTracking { seen: FxHashSet::default(), todo: ::alloc::vec::Vec::new()vec![] }
298 }
299pub fn next(&mut self) -> Option<(T, PATH)> {
300self.todo.pop()
301 }
302303fn track(&mut self, val: T, path: impl FnOnce() -> PATH) {
304if self.seen.insert(val.clone()) {
305{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:305",
"rustc_const_eval::interpret::validity",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(305u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("Recursing below ptr {0:#?}",
val) as &dyn Value))])
});
} else { ; }
};trace!("Recursing below ptr {:#?}", val);
306let path = path();
307// Remember to come back to this later.
308self.todo.push((val, path));
309 }
310 }
311}
312313impl<'tcx, T: Clone + Eq + Hash + std::fmt::Debug> RefTracking<T, Path<'tcx>> {
314pub fn new(val: T, ty: Ty<'tcx>) -> Self {
315let mut ref_tracking_for_consts =
316RefTracking { seen: FxHashSet::default(), todo: ::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[(val.clone(), Path::new(ty))]))vec![(val.clone(), Path::new(ty))] };
317ref_tracking_for_consts.seen.insert(val);
318ref_tracking_for_consts319 }
320}
321322/// Format a path
323fn write_path(out: &mut String, path: &[PathElem<'_>]) {
324use self::PathElem::*;
325326for elem in path.iter() {
327match elem {
328 Field(name) => out.write_fmt(format_args!(".{0}", name))write!(out, ".{name}"),
329 EnumTag => out.write_fmt(format_args!(".<enum-tag>"))write!(out, ".<enum-tag>"),
330 Variant(name) => out.write_fmt(format_args!(".<enum-variant({0})>", name))write!(out, ".<enum-variant({name})>"),
331 CoroutineTag => out.write_fmt(format_args!(".<coroutine-tag>"))write!(out, ".<coroutine-tag>"),
332 CoroutineState(idx) => out.write_fmt(format_args!(".<coroutine-state({0})>", idx.index()))write!(out, ".<coroutine-state({})>", idx.index()),
333 CapturedVar(name) => out.write_fmt(format_args!(".<captured-var({0})>", name))write!(out, ".<captured-var({name})>"),
334 TupleElem(idx) => out.write_fmt(format_args!(".{0}", idx))write!(out, ".{idx}"),
335 ArrayElem(idx) => out.write_fmt(format_args!("[{0}]", idx))write!(out, "[{idx}]"),
336// `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
337 // some of the other items here also are not Rust syntax. Actually we can't
338 // even use the usual syntax because we are just showing the projections,
339 // not the root.
340 Deref => out.write_fmt(format_args!(".<deref>"))write!(out, ".<deref>"),
341 DynDowncast(ty) => out.write_fmt(format_args!(".<dyn-downcast({0})>", ty))write!(out, ".<dyn-downcast({ty})>"),
342 Vtable => out.write_fmt(format_args!(".<vtable>"))write!(out, ".<vtable>"),
343 }
344 .unwrap()
345 }
346}
347348/// Represents a set of `Size` values as a sorted list of ranges.
349// These are (offset, length) pairs, and they are sorted and mutually disjoint,
350// and never adjacent (i.e. there's always a gap between two of them).
351#[derive(#[automatically_derived]
impl ::core::fmt::Debug for RangeSet {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f, "RangeSet",
&&self.0)
}
}Debug, #[automatically_derived]
impl ::core::clone::Clone for RangeSet {
#[inline]
fn clone(&self) -> RangeSet {
RangeSet(::core::clone::Clone::clone(&self.0))
}
}Clone)]
352pub struct RangeSet(Vec<(Size, Size)>);
353354impl RangeSet {
355fn add_range(&mut self, offset: Size, size: Size) {
356if size.bytes() == 0 {
357// No need to track empty ranges.
358return;
359 }
360let v = &mut self.0;
361// We scan for a partition point where the left partition is all the elements that end
362 // strictly before we start. Those are elements that are too "low" to merge with us.
363let idx =
364v.partition_point(|&(other_offset, other_size)| other_offset + other_size < offset);
365// Now we want to either merge with the first element of the second partition, or insert ourselves before that.
366if let Some(&(other_offset, other_size)) = v.get(idx)
367 && offset + size >= other_offset368 {
369// Their end is >= our start (otherwise it would not be in the 2nd partition) and
370 // our end is >= their start. This means we can merge the ranges.
371let new_start = other_offset.min(offset);
372let mut new_end = (other_offset + other_size).max(offset + size);
373// We grew to the right, so merge with overlapping/adjacent elements.
374 // (We also may have grown to the left, but that can never make us adjacent with
375 // anything there since we selected the first such candidate via `partition_point`.)
376let mut scan_right = 1;
377while let Some(&(next_offset, next_size)) = v.get(idx + scan_right)
378 && new_end >= next_offset
379 {
380// Increase our size to absorb the next element.
381new_end = new_end.max(next_offset + next_size);
382// Look at the next element.
383scan_right += 1;
384 }
385// Update the element we grew.
386v[idx] = (new_start, new_end - new_start);
387// Remove the elements we absorbed (if any).
388if scan_right > 1 {
389drop(v.drain((idx + 1)..(idx + scan_right)));
390 }
391 } else {
392// Insert new element.
393v.insert(idx, (offset, size));
394 }
395 }
396}
397398struct ValidityVisitor<'rt, 'tcx, M: Machine<'tcx>> {
399/// The `path` may be pushed to, but the part that is present when a function
400 /// starts must not be changed! `with_elem` relies on this stack discipline.
401path: Path<'tcx>,
402 ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>>,
403/// `None` indicates this is not validating for CTFE (but for runtime).
404ctfe_mode: Option<CtfeValidationMode>,
405 ecx: &'rt mut InterpCx<'tcx, M>,
406/// Whether provenance should be reset outside of pointers (emulating the effect of a typed
407 /// copy).
408reset_provenance_and_padding: bool,
409/// This tracks which byte ranges in this value contain data; the remaining bytes are padding.
410 /// The ideal representation here would be pointer-length pairs, but to keep things more compact
411 /// we only store a (range) set of offsets -- the base pointer is the same throughout the entire
412 /// visit, after all.
413 /// If this is `Some`, then `reset_provenance_and_padding` must be true (but not vice versa:
414 /// we might not track data vs padding bytes if the operand isn't stored in memory anyway).
415data_bytes: Option<RangeSet>,
416/// True if we are inside of `MaybeDangling`. This disables pointer access checks.
417may_dangle: bool,
418}
impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
    /// Compute the `PathElem` for projecting to `field` of an aggregate with the
    /// given `layout` -- either a variant/tag element or a named/indexed field.
    fn aggregate_field_path_elem(
        &mut self,
        layout: TyAndLayout<'tcx>,
        field: usize,
        field_ty: Ty<'tcx>,
    ) -> PathElem<'tcx> {
        // First, check if we are projecting to a variant.
        match layout.variants {
            Variants::Multiple { tag_field, .. } => {
                if tag_field.as_usize() == field {
                    return match layout.ty.kind() {
                        ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
                        ty::Coroutine(..) => PathElem::CoroutineTag,
                        _ => bug!("non-variant type {:?}", layout.ty),
                    };
                }
            }
            Variants::Single { .. } | Variants::Empty => {}
        }

        // Now we know we are projecting to a field, so figure out which one.
        match layout.ty.kind() {
            // coroutines, closures, and coroutine-closures all have upvars that may be named.
            ty::Closure(def_id, _) | ty::Coroutine(def_id, _) | ty::CoroutineClosure(def_id, _) => {
                let mut name = None;
                // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
                // https://github.com/rust-lang/project-rfc-2229/issues/46
                if let Some(local_def_id) = def_id.as_local() {
                    let captures = self.ecx.tcx.closure_captures(local_def_id);
                    if let Some(captured_place) = captures.get(field) {
                        // Sometimes the index is beyond the number of upvars (seen
                        // for a coroutine).
                        let var_hir_id = captured_place.get_root_variable();
                        let node = self.ecx.tcx.hir_node(var_hir_id);
                        if let hir::Node::Pat(pat) = node
                            && let hir::PatKind::Binding(_, _, ident, _) = pat.kind
                        {
                            name = Some(ident.name);
                        }
                    }
                }

                PathElem::CapturedVar(name.unwrap_or_else(|| {
                    // Fall back to showing the field index.
                    sym::integer(field)
                }))
            }

            // tuples
            ty::Tuple(_) => PathElem::TupleElem(field),

            // enums
            ty::Adt(def, ..) if def.is_enum() => {
                // we might be projecting *to* a variant, or to a field *in* a variant.
                match layout.variants {
                    Variants::Single { index } => {
                        // Inside a variant
                        PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
                    }
                    Variants::Empty => panic!("there is no field in Variants::Empty types"),
                    Variants::Multiple { .. } => bug!("we handled variants above"),
                }
            }

            // other ADTs
            ty::Adt(def, _) => {
                PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
            }

            // arrays/slices
            ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),

            // dyn traits
            ty::Dynamic(..) => {
                assert_eq!(field, 0);
                PathElem::DynDowncast(field_ty)
            }

            // nothing else has an aggregate layout
            _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
        }
    }
503504fn with_elem<R>(
505&mut self,
506 elem: PathElem<'tcx>,
507 f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
508 ) -> InterpResult<'tcx, R> {
509// Remember the old state
510let path_len = self.path.projs.len();
511// Record new element
512self.path.projs.push(elem);
513// Perform operation
514let r = f(self)?;
515// Undo changes
516self.path.projs.truncate(path_len);
517// Done
518interp_ok(r)
519 }
520521fn read_immediate(
522&self,
523 val: &PlaceTy<'tcx, M::Provenance>,
524 expected: ExpectedKind,
525 ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
526interp_ok({
self.ecx.read_immediate(val).map_err_kind(|e|
{
match e {
Ub(InvalidUninitBytes(_)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg = ValidationErrorKind::from(Uninit { expected });
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
Unsup(ReadPointerAsInt(_)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(PointerAsInt { expected });
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
Unsup(ReadPartialPointer(_)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg = ValidationErrorKind::from(PartialPointer);
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
}try_validation!(
527self.ecx.read_immediate(val),
528self.path,
529 Ub(InvalidUninitBytes(_)) =>
530 Uninit { expected },
531// The `Unsup` cases can only occur during CTFE
532Unsup(ReadPointerAsInt(_)) =>
533 PointerAsInt { expected },
534 Unsup(ReadPartialPointer(_)) =>
535 PartialPointer,
536 ))
537 }
538539fn read_scalar(
540&self,
541 val: &PlaceTy<'tcx, M::Provenance>,
542 expected: ExpectedKind,
543 ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
544interp_ok(self.read_immediate(val, expected)?.to_scalar())
545 }
546547/// Given a place and a pointer loaded from that place, ensure that the place does
548 /// not store any more provenance than the pointer does. IOW, if any provenance
549 /// was discarded when loading the pointer, it will also get discarded in-memory.
550fn reset_pointer_provenance(
551&mut self,
552 place: &PlaceTy<'tcx, M::Provenance>,
553 ptr: &ImmTy<'tcx, M::Provenance>,
554 ) -> InterpResult<'tcx> {
555if #[allow(non_exhaustive_omitted_patterns)] match ptr.layout.backend_repr {
BackendRepr::Scalar(..) => true,
_ => false,
}matches!(ptr.layout.backend_repr, BackendRepr::Scalar(..)) {
556// A thin pointer. If it has provenance, we don't have to do anything.
557 // If it does not, ensure we clear the provenance in memory.
558if !#[allow(non_exhaustive_omitted_patterns)] match ptr.to_scalar() {
Scalar::Ptr(..) => true,
_ => false,
}matches!(ptr.to_scalar(), Scalar::Ptr(..)) {
559// The loaded pointer has no provenance. Some bytes of its representation still
560 // might have provenance, which we have to clear.
561self.ecx.clear_provenance(place)?;
562 }
563 } else {
564// A wide pointer. This means we have to worry both about the pointer itself and the
565 // metadata. We do the lazy thing and just write back the value we got. Just
566 // clearing provenance in a targeted manner would be more efficient, but unless this
567 // is a perf hotspot it's just not worth the effort.
568self.ecx.write_immediate_no_validate(**ptr, place)?;
569 }
570interp_ok(())
571 }
572573fn check_wide_ptr_meta(
574&mut self,
575 meta: MemPlaceMeta<M::Provenance>,
576 pointee: TyAndLayout<'tcx>,
577 ) -> InterpResult<'tcx> {
578let tail = self.ecx.tcx.struct_tail_for_codegen(pointee.ty, self.ecx.typing_env);
579match tail.kind() {
580 ty::Dynamic(data, _) => {
581let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
582// Make sure it is a genuine vtable pointer for the right trait.
583{
self.ecx.get_ptr_vtable_ty(vtable,
Some(data)).map_err_kind(|e|
{
match e {
Ub(DanglingIntPointer { .. } | InvalidVTablePointer(..)) =>
{
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered {0}, but expected a vtable pointer",
vtable))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type
}) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(InvalidMetaWrongTrait {
expected_dyn_type,
vtable_dyn_type,
});
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
};try_validation!(
584self.ecx.get_ptr_vtable_ty(vtable, Some(data)),
585self.path,
586 Ub(DanglingIntPointer{ .. } | InvalidVTablePointer(..)) =>
587format!("encountered {vtable}, but expected a vtable pointer"),
588 Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) =>
589 InvalidMetaWrongTrait { expected_dyn_type, vtable_dyn_type },
590 );
591 }
592 ty::Slice(..) | ty::Str => {
593let _len = meta.unwrap_meta().to_target_usize(self.ecx)?;
594// We do not check that `len * elem_size <= isize::MAX`:
595 // that is only required for references, and there it falls out of the
596 // "dereferenceable" check performed by Stacked Borrows.
597}
598 ty::Foreign(..) => {
599// Unsized, but not wide.
600}
601_ => ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected unsized type tail: {0:?}",
tail))bug!("Unexpected unsized type tail: {:?}", tail),
602 }
603604interp_ok(())
605 }
606607/// Check a reference or `Box`.
608 ///
609 /// `ty` is the actual type of `value`; for a Box, `value` will be just the inner raw pointer.
610fn check_safe_pointer(
611&mut self,
612 value: &PlaceTy<'tcx, M::Provenance>,
613 ty: Ty<'tcx>,
614 ptr_kind: PtrKind,
615 ) -> InterpResult<'tcx> {
616let ptr = self.read_immediate(value, ptr_kind.into())?;
617if self.reset_provenance_and_padding {
618// There's no padding in a pointer.
619self.add_data_range_place(value);
620// Resetting provenance is done below, together with retagging, to avoid
621 // redundant writes.
622}
623let place = self.ecx.imm_ptr_to_mplace(&ptr)?;
624// Handle wide pointers.
625 // Check metadata early, for better diagnostics
626if place.layout.is_unsized() {
627self.check_wide_ptr_meta(place.meta(), place.layout)?;
628 }
629630// Determine size and alignment of pointee.
631let size_and_align = {
self.ecx.size_and_align_of_val(&place).map_err_kind(|e|
{
match e {
Ub(InvalidMeta(msg)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered invalid {1} metadata: {0}",
match msg {
InvalidMetaKind::SliceTooBig =>
"slice is bigger than largest supported object",
InvalidMetaKind::TooBig =>
"total size is bigger than largest supported object",
}, ptr_kind))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
}try_validation!(
632self.ecx.size_and_align_of_val(&place),
633self.path,
634 Ub(InvalidMeta(msg)) => format!(
635"encountered invalid {ptr_kind} metadata: {}",
636match msg {
637 InvalidMetaKind::SliceTooBig => "slice is bigger than largest supported object",
638 InvalidMetaKind::TooBig => "total size is bigger than largest supported object",
639 }
640 )
641 );
642let (size, align) = size_and_align643// for the purpose of validity, consider foreign types to have
644 // alignment and size determined by the layout (size will be 0,
645 // alignment should take attributes into account).
646.unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
647648// If we're not allow to dangle, make sure this is dereferenceable and retag it for
649 // the aliasing model.
650let adjusted_ptr = if !self.may_dangle {
651{
self.ecx.check_ptr_access(place.ptr(), size,
CheckInAllocMsg::Dereferenceable).map_err_kind(|e|
{
match e {
Ub(DanglingIntPointer { addr: 0, .. }) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a null {0}",
ptr_kind))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
Ub(DanglingIntPointer { addr: i, .. }) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a dangling {1} ({0} has no provenance)",
Pointer::<Option<AllocId>>::without_provenance(i),
ptr_kind))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
Ub(PointerOutOfBounds { .. }) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a dangling {0} (going beyond the bounds of its allocation)",
ptr_kind))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
Ub(PointerUseAfterFree(..)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a dangling {0} (use-after-free)",
ptr_kind))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
};try_validation!(
652self.ecx.check_ptr_access(
653 place.ptr(),
654 size,
655 CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message
656),
657self.path,
658 Ub(DanglingIntPointer { addr: 0, .. }) =>
659format!("encountered a null {ptr_kind}"),
660 Ub(DanglingIntPointer { addr: i, .. }) =>
661format!(
662"encountered a dangling {ptr_kind} ({ptr} has no provenance)",
663 ptr = Pointer::<Option<AllocId>>::without_provenance(i)
664 ),
665 Ub(PointerOutOfBounds { .. }) =>
666format!("encountered a dangling {ptr_kind} (going beyond the bounds of its allocation)"),
667 Ub(PointerUseAfterFree(..)) =>
668format!("encountered a dangling {ptr_kind} (use-after-free)"),
669 );
670if self.reset_provenance_and_padding {
671 M::retag_ptr_value(self.ecx, &ptr, ty).map_err_kind(|e| match e {
672Ub(WriteToReadOnly(_)) => {
673{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered {0} pointing to read-only memory",
if ptr_kind == PtrKind::Box {
"box"
} else { "mutable reference" }))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}err_validation_failure!(
674self.path,
675format!(
676"encountered {} pointing to read-only memory",
677if ptr_kind == PtrKind::Box { "box" } else { "mutable reference" },
678 )
679 )680 }
681 InterpErrorKind::MachineStop(mut machine_err) => {
682// Enhance the aliasing model error with the current path.
683if !self.path.projs.is_empty() {
684let mut path = String::new();
685write_path(&mut path, &self.path.projs);
686machine_err.with_validation_path(path);
687 }
688 InterpErrorKind::MachineStop(machine_err)
689 }
690 e => e,
691 })?
692} else {
693// We can't retag if we're not resetting provenance.
694None695 }
696 } else {
697// Pointer remains unchanged.
698None699 };
700// If the pointer needs adjusting, write back adjusted pointer. This automatically
701 // also clears any excess provenance. Otherwise, just clear the provenance.
702if let Some(ptr) = adjusted_ptr {
703self.ecx.write_immediate_no_validate(*ptr, value)?;
704 } else if self.reset_provenance_and_padding {
705self.reset_pointer_provenance(value, &ptr)?;
706 }
707708// Check alignment after dereferenceable (if both are violated, trigger the error above).
709{
self.ecx.check_ptr_align(place.ptr(),
align).map_err_kind(|e|
{
match e {
Ub(AlignmentCheckFailed(Misalignment { required, has },
_msg)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered an unaligned {2} (required {0} byte alignment but found {1})",
required.bytes(), has.bytes(), ptr_kind))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
};try_validation!(
710self.ecx.check_ptr_align(
711 place.ptr(),
712 align,
713 ),
714self.path,
715 Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => format!(
716"encountered an unaligned {ptr_kind} (required {required_bytes} byte alignment but found {found_bytes})",
717 required_bytes = required.bytes(),
718 found_bytes = has.bytes()
719 ),
720 );
721722// Make sure this is non-null. This is obviously needed when `may_dangle` is set,
723 // but even if we did check dereferenceability above that would still allow null
724 // pointers if `size` is zero.
725let scalar = Scalar::from_maybe_pointer(place.ptr(), self.ecx);
726if self.ecx.scalar_may_be_null(scalar)? {
727let maybe = !M::Provenance::OFFSET_IS_ADDR && #[allow(non_exhaustive_omitted_patterns)] match scalar {
Scalar::Ptr(..) => true,
_ => false,
}matches!(scalar, Scalar::Ptr(..));
728do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a {0}null {1}",
if maybe { "maybe-" } else { "" }, ptr_kind))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}throw_validation_failure!(
729self.path,
730format!(
731"encountered a {maybe}null {ptr_kind}",
732 maybe = if maybe { "maybe-" } else { "" }
733 )
734 )735 }
736// Do not allow references to uninhabited types.
737if place.layout.is_uninhabited() {
738let ty = place.layout.ty;
739do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a {0} pointing to uninhabited type {1}",
ptr_kind, ty))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}throw_validation_failure!(
740self.path,
741format!("encountered a {ptr_kind} pointing to uninhabited type {ty}")
742 )743 }
744745// Recursive checking (but not inside `MaybeDangling` of course).
746if let Some(ref_tracking) = self.ref_tracking.as_deref_mut()
747 && !self.may_dangle
748 {
749// Proceed recursively even for ZST, no reason to skip them!
750 // `!` is a ZST and we want to validate it.
751if let Some(ctfe_mode) = self.ctfe_mode {
752let mut skip_recursive_check = false;
753// CTFE imposes restrictions on what references can point to.
754if let Ok((alloc_id, _offset, _prov)) =
755self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
756 {
757// Everything should be already interned.
758let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
759if self.ecx.memory.alloc_map.contains_key(&alloc_id) {
760// This can happen when interning didn't complete due to, e.g.
761 // missing `make_global`. This must mean other errors are already
762 // being reported.
763self.ecx.tcx.dcx().delayed_bug(
764"interning did not complete, there should be an error",
765 );
766return interp_ok(());
767 }
768// We can't have *any* references to non-existing allocations in const-eval
769 // as the rest of rustc isn't happy with them... so we throw an error, even
770 // though for zero-sized references this isn't really UB.
771 // A potential future alternative would be to resurrect this as a zero-sized allocation
772 // (which codegen will then compile to an aligned dummy pointer anyway).
773do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a dangling {0} (use-after-free)",
ptr_kind))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
};throw_validation_failure!(
774self.path,
775format!("encountered a dangling {ptr_kind} (use-after-free)")
776 );
777 };
778let (size, _align) =
779global_alloc.size_and_align(*self.ecx.tcx, self.ecx.typing_env);
780let alloc_actual_mutbl =
781global_alloc.mutability(*self.ecx.tcx, self.ecx.typing_env);
782783match global_alloc {
784 GlobalAlloc::Static(did) => {
785let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
786::rustc_middle::util::bug::bug_fmt(format_args!("impossible case reached"))bug!()787 };
788if !!self.ecx.tcx.is_thread_local_static(did) {
::core::panicking::panic("assertion failed: !self.ecx.tcx.is_thread_local_static(did)")
};assert!(!self.ecx.tcx.is_thread_local_static(did));
789if !self.ecx.tcx.is_static(did) {
::core::panicking::panic("assertion failed: self.ecx.tcx.is_static(did)")
};assert!(self.ecx.tcx.is_static(did));
790match ctfe_mode {
791 CtfeValidationMode::Static { .. }
792 | CtfeValidationMode::Promoted { .. } => {
793// We skip recursively checking other statics. These statics must be sound by
794 // themselves, and the only way to get broken statics here is by using
795 // unsafe code.
796 // The reasons we don't check other statics is twofold. For one, in all
797 // sound cases, the static was already validated on its own, and second, we
798 // trigger cycle errors if we try to compute the value of the other static
799 // and that static refers back to us (potentially through a promoted).
800 // This could miss some UB, but that's fine.
801 // We still walk nested allocations, as they are fundamentally part of this validation run.
802 // This means we will also recurse into nested statics of *other*
803 // statics, even though we do not recurse into other statics directly.
804 // That's somewhat inconsistent but harmless.
805skip_recursive_check = !nested;
806 }
807 CtfeValidationMode::Const { .. } => {
808// If this is mutable memory or an `extern static`, there's no point in checking it -- we'd
809 // just get errors trying to read the value.
810if alloc_actual_mutbl.is_mut()
811 || self.ecx.tcx.is_foreign_item(did)
812 {
813skip_recursive_check = true;
814 }
815 }
816 }
817 }
818_ => (),
819 }
820821// If this allocation has size zero, there is no actual mutability here.
822if size != Size::ZERO {
823// Determine whether this pointer expects to be pointing to something mutable.
824let ptr_expected_mutbl = match ptr_kind {
825 PtrKind::Box => Mutability::Mut,
826 PtrKind::Ref(mutbl) => {
827// We do not take into account interior mutability here since we cannot know if
828 // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
829 // that in the recursive descent behind this reference (controlled by
830 // `allow_immutable_unsafe_cell`).
831mutbl832 }
833 };
834// Mutable pointer to immutable memory is no good.
835if ptr_expected_mutbl == Mutability::Mut836 && alloc_actual_mutbl == Mutability::Not837 {
838// This can actually occur with transmutes.
839do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered mutable reference or box pointing to read-only memory"))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
};throw_validation_failure!(
840self.path,
841format!(
842"encountered mutable reference or box pointing to read-only memory"
843)
844 );
845 }
846 }
847 }
848// Potentially skip recursive check.
849if skip_recursive_check {
850return interp_ok(());
851 }
852 } else {
853// This is not CTFE, so it's Miri with recursive checking.
854 // FIXME: should we skip `UnsafeCell` behind shared references? Currently that is
855 // not needed since validation reads bypass Stacked Borrows and data race checks,
856 // but is that really coherent?
857}
858let path = &self.path;
859ref_tracking.track(place, || {
860// We need to clone the path anyway, make sure it gets created
861 // with enough space for the additional `Deref`.
862let mut new_projs = Vec::with_capacity(path.projs.len() + 1);
863new_projs.extend(&path.projs);
864new_projs.push(PathElem::Deref);
865Path { projs: new_projs, orig_ty: path.orig_ty }
866 });
867 }
868interp_ok(())
869 }
870871/// Check if this is a value of primitive type, and if yes check the validity of the value
872 /// at that type. Return `true` if the type is indeed primitive.
873 ///
874 /// Note that not all of these have `FieldsShape::Primitive`, e.g. wide references.
875fn try_visit_primitive(
876&mut self,
877 value: &PlaceTy<'tcx, M::Provenance>,
878 ) -> InterpResult<'tcx, bool> {
879// Go over all the primitive types
880let ty = value.layout.ty;
881match ty.kind() {
882 ty::Bool => {
883let scalar = self.read_scalar(value, ExpectedKind::Bool)?;
884{
scalar.to_bool().map_err_kind(|e|
{
match e {
Ub(InvalidBool(..)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered {0:x}, but expected a boolean",
scalar))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
};try_validation!(
885 scalar.to_bool(),
886self.path,
887 Ub(InvalidBool(..)) =>
888format!("encountered {scalar:x}, but expected a boolean"),
889 );
890if self.reset_provenance_and_padding {
891self.ecx.clear_provenance(value)?;
892self.add_data_range_place(value);
893 }
894interp_ok(true)
895 }
896 ty::Char => {
897let scalar = self.read_scalar(value, ExpectedKind::Char)?;
898{
scalar.to_char().map_err_kind(|e|
{
match e {
Ub(InvalidChar(..)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered {0:x}, but expected a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)",
scalar))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
};try_validation!(
899 scalar.to_char(),
900self.path,
901 Ub(InvalidChar(..)) =>
902format!("encountered {scalar:x}, but expected a valid unicode scalar value \
903 (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)")
904 );
905if self.reset_provenance_and_padding {
906self.ecx.clear_provenance(value)?;
907self.add_data_range_place(value);
908 }
909interp_ok(true)
910 }
911 ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
912// NOTE: Keep this in sync with the array optimization for int/float
913 // types below!
914self.read_scalar(
915value,
916if #[allow(non_exhaustive_omitted_patterns)] match ty.kind() {
ty::Float(..) => true,
_ => false,
}matches!(ty.kind(), ty::Float(..)) {
917 ExpectedKind::Float918 } else {
919 ExpectedKind::Int920 },
921 )?;
922if self.reset_provenance_and_padding {
923self.ecx.clear_provenance(value)?;
924self.add_data_range_place(value);
925 }
926interp_ok(true)
927 }
928 ty::RawPtr(..) => {
929let ptr = self.read_immediate(value, ExpectedKind::RawPtr)?;
930if self.reset_provenance_and_padding {
931self.reset_pointer_provenance(value, &ptr)?;
932// There's no padding in a pointer.
933self.add_data_range_place(value);
934 }
935936let place = self.ecx.imm_ptr_to_mplace(&ptr)?;
937if place.layout.is_unsized() {
938self.check_wide_ptr_meta(place.meta(), place.layout)?;
939 }
940interp_ok(true)
941 }
942 ty::Ref(_, _ty, mutbl) => {
943self.check_safe_pointer(value, ty, PtrKind::Ref(*mutbl))?;
944interp_ok(true)
945 }
946 ty::FnPtr(..) => {
947let scalar = self.read_scalar(value, ExpectedKind::FnPtr)?;
948949// If we check references recursively, also check that this points to a function.
950if let Some(_) = self.ref_tracking {
951let ptr = scalar.to_pointer(self.ecx)?;
952let _fn = {
self.ecx.get_ptr_fn(ptr).map_err_kind(|e|
{
match e {
Ub(DanglingIntPointer { .. } | InvalidFunctionPointer(..))
=> {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered {0}, but expected a function pointer",
ptr))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
}try_validation!(
953self.ecx.get_ptr_fn(ptr),
954self.path,
955 Ub(DanglingIntPointer{ .. } | InvalidFunctionPointer(..)) =>
956format!("encountered {ptr}, but expected a function pointer"),
957 );
958// FIXME: Check if the signature matches
959} else {
960// Otherwise (for standalone Miri and for `-Zextra-const-ub-checks`),
961 // we have to still check it to be non-null.
962if self.ecx.scalar_may_be_null(scalar)? {
963let maybe =
964 !M::Provenance::OFFSET_IS_ADDR && #[allow(non_exhaustive_omitted_patterns)] match scalar {
Scalar::Ptr(..) => true,
_ => false,
}matches!(scalar, Scalar::Ptr(..));
965do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a {0}null function pointer",
if maybe { "maybe-" } else { "" }))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
};throw_validation_failure!(
966self.path,
967format!(
968"encountered a {maybe}null function pointer",
969 maybe = if maybe { "maybe-" } else { "" }
970 )
971 );
972 }
973 }
974if self.reset_provenance_and_padding {
975// Make sure we do not preserve partial provenance. This matches the thin
976 // pointer handling in `deref_pointer`.
977if #[allow(non_exhaustive_omitted_patterns)] match scalar {
Scalar::Int(..) => true,
_ => false,
}matches!(scalar, Scalar::Int(..)) {
978self.ecx.clear_provenance(value)?;
979 }
980self.add_data_range_place(value);
981 }
982interp_ok(true)
983 }
984 ty::Never => {
985do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a value of the never type `!`"))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}throw_validation_failure!(
986self.path,
987format!("encountered a value of the never type `!`")
988 )989 }
990 ty::Foreign(..) | ty::FnDef(..) => {
991// Nothing to check.
992interp_ok(true)
993 }
994 ty::UnsafeBinder(_) => {
::core::panicking::panic_fmt(format_args!("not yet implemented: {0}",
format_args!("FIXME(unsafe_binder)")));
}todo!("FIXME(unsafe_binder)"),
995// The above should be all the primitive types. The rest is compound, we
996 // check them by visiting their fields/variants.
997ty::Adt(..)
998 | ty::Tuple(..)
999 | ty::Array(..)
1000 | ty::Slice(..)
1001 | ty::Str1002 | ty::Dynamic(..)
1003 | ty::Closure(..)
1004 | ty::Pat(..)
1005 | ty::CoroutineClosure(..)
1006 | ty::Coroutine(..) => interp_ok(false),
1007// Some types only occur during typechecking, they have no layout.
1008 // We should not see them here and we could not check them anyway.
1009ty::Error(_)
1010 | ty::Infer(..)
1011 | ty::Placeholder(..)
1012 | ty::Bound(..)
1013 | ty::Param(..)
1014 | ty::Alias(..)
1015 | ty::CoroutineWitness(..) => ::rustc_middle::util::bug::bug_fmt(format_args!("Encountered invalid type {0:?}",
ty))bug!("Encountered invalid type {:?}", ty),
1016 }
1017 }
10181019fn visit_scalar(
1020&mut self,
1021 scalar: Scalar<M::Provenance>,
1022 scalar_layout: ScalarAbi,
1023 ) -> InterpResult<'tcx> {
1024let size = scalar_layout.size(self.ecx);
1025let valid_range = scalar_layout.valid_range(self.ecx);
1026let WrappingRange { start, end } = valid_range;
1027let max_value = size.unsigned_int_max();
1028if !(end <= max_value) {
::core::panicking::panic("assertion failed: end <= max_value")
};assert!(end <= max_value);
1029let bits = match scalar.try_to_scalar_int() {
1030Ok(int) => int.to_bits(size),
1031Err(_) => {
1032// So this is a pointer then, and casting to an int failed.
1033 // Can only happen during CTFE.
1034 // We support 2 kinds of ranges here: full range, and excluding zero.
1035if start == 1 && end == max_value {
1036// Only null is the niche. So make sure the ptr is NOT null.
1037if self.ecx.scalar_may_be_null(scalar)? {
1038do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a maybe-null pointer, but expected something that is definitely non-zero"))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}throw_validation_failure!(
1039self.path,
1040format!(
1041"encountered a maybe-null pointer, but expected something that is definitely non-zero"
1042)
1043 )1044 } else {
1045return interp_ok(());
1046 }
1047 } else if scalar_layout.is_always_valid(self.ecx) {
1048// Easy. (This is reachable if `enforce_number_validity` is set.)
1049return interp_ok(());
1050 } else {
1051// Conservatively, we reject, because the pointer *could* have a bad value.
1052do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a pointer with unknown absolute address, but expected something that is definitely {0}",
fmt_range(valid_range, max_value)))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}throw_validation_failure!(
1053self.path,
1054format!(
1055"encountered a pointer with unknown absolute address, but expected something that is definitely {in_range}",
1056 in_range = fmt_range(valid_range, max_value)
1057 )
1058 )1059 }
1060 }
1061 };
1062// Now compare.
1063if valid_range.contains(bits) {
1064interp_ok(())
1065 } else {
1066do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered {1}, but expected something {0}",
fmt_range(valid_range, max_value), bits))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}throw_validation_failure!(
1067self.path,
1068format!(
1069"encountered {bits}, but expected something {in_range}",
1070 in_range = fmt_range(valid_range, max_value)
1071 )
1072 )1073 }
1074 }
10751076fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
1077if true {
if !self.ctfe_mode.is_some() {
::core::panicking::panic("assertion failed: self.ctfe_mode.is_some()")
};
};debug_assert!(self.ctfe_mode.is_some());
1078if let Some(mplace) = val.as_mplace_or_local().left() {
1079if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
1080let tcx = *self.ecx.tcx;
1081// Everything must be already interned.
1082let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.typing_env);
1083if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
1084match (&alloc.mutability, &mutbl) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(alloc.mutability, mutbl);
1085 }
1086mutbl.is_mut()
1087 } else {
1088// No memory at all.
1089false
1090}
1091 } else {
1092// A local variable -- definitely mutable.
1093true
1094}
1095 }
10961097/// Add the given pointer-length pair to the "data" range of this visit.
1098fn add_data_range(&mut self, ptr: Pointer<Option<M::Provenance>>, size: Size) {
1099if let Some(data_bytes) = self.data_bytes.as_mut() {
1100// We only have to store the offset, the rest is the same for all pointers here.
1101 // The logic is agnostic to whether the offset is relative or absolute as long as
1102 // it is consistent.
1103let (_prov, offset) = ptr.into_raw_parts();
1104// Add this.
1105data_bytes.add_range(offset, size);
1106 };
1107 }
11081109/// Add the entire given place to the "data" range of this visit.
1110fn add_data_range_place(&mut self, place: &PlaceTy<'tcx, M::Provenance>) {
1111// Only sized places can be added this way.
1112if true {
if !place.layout.is_sized() {
::core::panicking::panic("assertion failed: place.layout.is_sized()")
};
};debug_assert!(place.layout.is_sized());
1113if let Some(data_bytes) = self.data_bytes.as_mut() {
1114let offset = Self::data_range_offset(self.ecx, place);
1115data_bytes.add_range(offset, place.layout.size);
1116 }
1117 }
11181119/// Convert a place into the offset it starts at, for the purpose of data_range tracking.
1120 /// Must only be called if `data_bytes` is `Some(_)`.
1121fn data_range_offset(ecx: &InterpCx<'tcx, M>, place: &PlaceTy<'tcx, M::Provenance>) -> Size {
1122// The presence of `data_bytes` implies that our place is in memory.
1123let ptr = ecx1124 .place_to_op(place)
1125 .expect("place must be in memory")
1126 .as_mplace_or_imm()
1127 .expect_left("place must be in memory")
1128 .ptr();
1129let (_prov, offset) = ptr.into_raw_parts();
1130offset1131 }
11321133fn reset_padding(&mut self, place: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
1134let Some(data_bytes) = self.data_bytes.as_mut() else { return interp_ok(()) };
1135// Our value must be in memory, otherwise we would not have set up `data_bytes`.
1136let mplace = self.ecx.force_allocation(place)?;
1137// Determine starting offset and size.
1138let (_prov, start_offset) = mplace.ptr().into_raw_parts();
1139let (size, _align) = self1140 .ecx
1141 .size_and_align_of_val(&mplace)?
1142.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
1143// If there is no padding at all, we can skip the rest: check for
1144 // a single data range covering the entire value.
1145if data_bytes.0 == &[(start_offset, size)] {
1146return interp_ok(());
1147 }
1148// Get a handle for the allocation. Do this only once, to avoid looking up the same
1149 // allocation over and over again. (Though to be fair, iterating the value already does
1150 // exactly that.)
1151let Some(mut alloc) = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)? else {
1152// A ZST, no padding to clear.
1153return interp_ok(());
1154 };
1155// Add a "finalizer" data range at the end, so that the iteration below finds all gaps
1156 // between ranges.
1157data_bytes.0.push((start_offset + size, Size::ZERO));
1158// Iterate, and reset gaps.
1159let mut padding_cleared_until = start_offset;
1160for &(offset, size) in data_bytes.0.iter() {
1161if !(offset >= padding_cleared_until) {
{
::core::panicking::panic_fmt(format_args!("reset_padding on {0}: previous field ended at offset {1}, next field starts at {2} (and has a size of {3} bytes)",
mplace.layout.ty,
(padding_cleared_until - start_offset).bytes(),
(offset - start_offset).bytes(), size.bytes()));
}
};assert!(
1162 offset >= padding_cleared_until,
1163"reset_padding on {}: previous field ended at offset {}, next field starts at {} (and has a size of {} bytes)",
1164 mplace.layout.ty,
1165 (padding_cleared_until - start_offset).bytes(),
1166 (offset - start_offset).bytes(),
1167 size.bytes(),
1168 );
1169if offset > padding_cleared_until {
1170// We found padding. Adjust the range to be relative to `alloc`, and make it uninit.
1171let padding_start = padding_cleared_until - start_offset;
1172let padding_size = offset - padding_cleared_until;
1173let range = alloc_range(padding_start, padding_size);
1174{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1174",
"rustc_const_eval::interpret::validity",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(1174u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("reset_padding on {0}: resetting padding range {1:?}",
mplace.layout.ty, range) as &dyn Value))])
});
} else { ; }
};trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty);
1175 alloc.write_uninit(range);
1176 }
1177 padding_cleared_until = offset + size;
1178 }
1179if !(padding_cleared_until == start_offset + size) {
::core::panicking::panic("assertion failed: padding_cleared_until == start_offset + size")
};assert!(padding_cleared_until == start_offset + size);
1180interp_ok(())
1181 }
11821183/// Computes the data range of this union type:
1184 /// which bytes are inside a field (i.e., not padding.)
1185fn union_data_range<'e>(
1186 ecx: &'e mut InterpCx<'tcx, M>,
1187 layout: TyAndLayout<'tcx>,
1188 ) -> Cow<'e, RangeSet> {
1189if !layout.ty.is_union() {
::core::panicking::panic("assertion failed: layout.ty.is_union()")
};assert!(layout.ty.is_union());
1190if !layout.is_sized() {
{
::core::panicking::panic_fmt(format_args!("there are no unsized unions"));
}
};assert!(layout.is_sized(), "there are no unsized unions");
1191let layout_cx = LayoutCx::new(*ecx.tcx, ecx.typing_env);
1192return M::cached_union_data_range(ecx, layout.ty, || {
1193let mut out = RangeSet(Vec::new());
1194union_data_range_uncached(&layout_cx, layout, Size::ZERO, &mut out);
1195out1196 });
11971198/// Helper for recursive traversal: add data ranges of the given type to `out`.
1199fn union_data_range_uncached<'tcx>(
1200 cx: &LayoutCx<'tcx>,
1201 layout: TyAndLayout<'tcx>,
1202 base_offset: Size,
1203 out: &mut RangeSet,
1204 ) {
1205// If this is a ZST, we don't contain any data. In particular, this helps us to quickly
1206 // skip over huge arrays of ZST.
1207if layout.is_zst() {
1208return;
1209 }
1210// Just recursively add all the fields of everything to the output.
1211match &layout.fields {
1212 FieldsShape::Primitive => {
1213out.add_range(base_offset, layout.size);
1214 }
1215&FieldsShape::Union(fields) => {
1216// Currently, all fields start at offset 0 (relative to `base_offset`).
1217for field in 0..fields.get() {
1218let field = layout.field(cx, field);
1219 union_data_range_uncached(cx, field, base_offset, out);
1220 }
1221 }
1222&FieldsShape::Array { stride, count } => {
1223let elem = layout.field(cx, 0);
12241225// Fast-path for large arrays of simple types that do not contain any padding.
1226if elem.backend_repr.is_scalar() {
1227out.add_range(base_offset, elem.size * count);
1228 } else {
1229for idx in 0..count {
1230// This repeats the same computation for every array element... but the alternative
1231 // is to allocate temporary storage for a dedicated `out` set for the array element,
1232 // and replicating that N times. Is that better?
1233union_data_range_uncached(cx, elem, base_offset + idx * stride, out);
1234 }
1235 }
1236 }
1237 FieldsShape::Arbitrary { offsets, .. } => {
1238for (field, &offset) in offsets.iter_enumerated() {
1239let field = layout.field(cx, field.as_usize());
1240 union_data_range_uncached(cx, field, base_offset + offset, out);
1241 }
1242 }
1243 }
1244// Don't forget potential other variants.
1245match &layout.variants {
1246 Variants::Single { .. } | Variants::Empty => {
1247// Fully handled above.
1248}
1249 Variants::Multiple { variants, .. } => {
1250for variant in variants.indices() {
1251let variant = layout.for_variant(cx, variant);
1252 union_data_range_uncached(cx, variant, base_offset, out);
1253 }
1254 }
1255 }
1256 }
1257 }
1258}
12591260impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
1261type V = PlaceTy<'tcx, M::Provenance>;
12621263#[inline(always)]
1264fn ecx(&self) -> &InterpCx<'tcx, M> {
1265self.ecx
1266 }
12671268fn read_discriminant(
1269&mut self,
1270 val: &PlaceTy<'tcx, M::Provenance>,
1271 ) -> InterpResult<'tcx, VariantIdx> {
1272self.with_elem(PathElem::EnumTag, move |this| {
1273interp_ok({
this.ecx.read_discriminant(val).map_err_kind(|e|
{
match e {
Ub(InvalidTag(val)) => {
{
let where_ = &this.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered {0:x}, but expected a valid enum tag",
val))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
Ub(UninhabitedEnumVariantRead(_)) => {
{
let where_ = &this.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered an uninhabited enum variant"))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
}try_validation!(
1274 this.ecx.read_discriminant(val),
1275 this.path,
1276 Ub(InvalidTag(val)) =>
1277format!("encountered {val:x}, but expected a valid enum tag"),
1278 Ub(UninhabitedEnumVariantRead(_)) =>
1279format!("encountered an uninhabited enum variant"),
1280// Uninit / bad provenance are not possible since the field was already previously
1281 // checked at its integer type.
1282))
1283 })
1284 }
12851286#[inline]
1287fn visit_field(
1288&mut self,
1289 old_val: &PlaceTy<'tcx, M::Provenance>,
1290 field: usize,
1291 new_val: &PlaceTy<'tcx, M::Provenance>,
1292 ) -> InterpResult<'tcx> {
1293let elem = self.aggregate_field_path_elem(old_val.layout, field, new_val.layout.ty);
1294self.with_elem(elem, move |this| this.visit_value(new_val))
1295 }
12961297#[inline]
1298fn visit_variant(
1299&mut self,
1300 old_val: &PlaceTy<'tcx, M::Provenance>,
1301 variant_id: VariantIdx,
1302 new_val: &PlaceTy<'tcx, M::Provenance>,
1303 ) -> InterpResult<'tcx> {
1304let name = match old_val.layout.ty.kind() {
1305 ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
1306// Coroutines also have variants
1307ty::Coroutine(..) => PathElem::CoroutineState(variant_id),
1308_ => ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected type with variant: {0:?}",
old_val.layout.ty))bug!("Unexpected type with variant: {:?}", old_val.layout.ty),
1309 };
1310self.with_elem(name, move |this| this.visit_value(new_val))
1311 }
13121313#[inline(always)]
1314fn visit_union(
1315&mut self,
1316 val: &PlaceTy<'tcx, M::Provenance>,
1317 _fields: NonZero<usize>,
1318 ) -> InterpResult<'tcx> {
1319// Special check for CTFE validation, preventing `UnsafeCell` inside unions in immutable memory.
1320if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
1321// Unsized unions are currently not a thing, but let's keep this code consistent with
1322 // the check in `visit_value`.
1323let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
1324if !zst && !val.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.typing_env) {
1325if !self.in_mutable_memory(val) {
1326do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered `UnsafeCell` in read-only memory"))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
};throw_validation_failure!(
1327self.path,
1328format!("encountered `UnsafeCell` in read-only memory")
1329 );
1330 }
1331 }
1332 }
1333if self.reset_provenance_and_padding
1334 && let Some(data_bytes) = self.data_bytes.as_mut()
1335 {
1336let base_offset = Self::data_range_offset(self.ecx, val);
1337// Determine and add data range for this union.
1338let union_data_range = Self::union_data_range(self.ecx, val.layout);
1339for &(offset, size) in union_data_range.0.iter() {
1340 data_bytes.add_range(base_offset + offset, size);
1341 }
1342 }
1343interp_ok(())
1344 }
13451346#[inline]
1347fn visit_box(
1348&mut self,
1349 box_ty: Ty<'tcx>,
1350 val: &PlaceTy<'tcx, M::Provenance>,
1351 ) -> InterpResult<'tcx> {
1352self.check_safe_pointer(&val, box_ty, PtrKind::Box)?;
1353interp_ok(())
1354 }
13551356#[inline]
1357fn visit_value(&mut self, val: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
1358{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1358",
"rustc_const_eval::interpret::validity",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(1358u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("visit_value: {0:?}, {1:?}",
*val, val.layout) as &dyn Value))])
});
} else { ; }
};trace!("visit_value: {:?}, {:?}", *val, val.layout);
13591360// Check primitive types -- the leaves of our recursive descent.
1361 // This is called even for enum discriminants (which are "fields" of their enum),
1362 // so for integer-typed discriminants the provenance reset will happen here.
1363 // We assume that the Scalar validity range does not restrict these values
1364 // any further than `try_visit_primitive` does!
1365if self.try_visit_primitive(val)? {
1366return interp_ok(());
1367 }
13681369// Special check preventing `UnsafeCell` in the inner part of constants
1370if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
1371// Exclude ZST values. We need to compute the dynamic size/align to properly
1372 // handle slices and trait objects.
1373let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
1374if !zst1375 && let Some(def) = val.layout.ty.ty_adt_def()
1376 && def.is_unsafe_cell()
1377 {
1378if !self.in_mutable_memory(val) {
1379do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered `UnsafeCell` in read-only memory"))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
};throw_validation_failure!(
1380self.path,
1381format!("encountered `UnsafeCell` in read-only memory")
1382 );
1383 }
1384 }
1385 }
13861387// Recursively walk the value at its type. Apply optimizations for some large types.
1388match val.layout.ty.kind() {
1389 ty::Str => {
1390let mplace = val.assert_mem_place(); // strings are unsized and hence never immediate
1391let len = mplace.len(self.ecx)?;
1392let expected = ExpectedKind::Str;
1393{
self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(),
Size::from_bytes(len)).map_err_kind(|e|
{
match e {
Ub(InvalidUninitBytes(..)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg = ValidationErrorKind::from(Uninit { expected });
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
Unsup(ReadPointerAsInt(_)) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(PointerAsInt { expected });
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
};try_validation!(
1394self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),
1395self.path,
1396 Ub(InvalidUninitBytes(..)) =>
1397 Uninit { expected },
1398 Unsup(ReadPointerAsInt(_)) =>
1399 PointerAsInt { expected },
1400 );
1401 }
1402 ty::Array(tys, ..) | ty::Slice(tys)
1403// This optimization applies for types that can hold arbitrary non-provenance bytes (such as
1404 // integer and floating point types).
1405 // FIXME(wesleywiser) This logic could be extended further to arbitrary structs or
1406 // tuples made up of integer/floating point types or inhabited ZSTs with no padding.
1407if #[allow(non_exhaustive_omitted_patterns)] match tys.kind() {
ty::Int(..) | ty::Uint(..) | ty::Float(..) => true,
_ => false,
}matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))1408 =>
1409 {
1410let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };
1411// Optimized handling for arrays of integer/float type.
14121413 // This is the length of the array/slice.
1414let len = val.len(self.ecx)?;
1415// This is the element type size.
1416let layout = self.ecx.layout_of(*tys)?;
1417// This is the size in bytes of the whole array. (This checks for overflow.)
1418let size = layout.size * len;
1419// If the size is 0, there is nothing to check.
1420 // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)
1421if size == Size::ZERO {
1422return interp_ok(());
1423 }
1424// Now that we definitely have a non-ZST array, we know it lives in memory -- except it may
1425 // be an uninitialized local variable, those are also "immediate".
1426let mplace = match val.to_op(self.ecx)?.as_mplace_or_imm() {
1427Left(mplace) => mplace,
1428Right(imm) => match *imm {
1429 Immediate::Uninit =>
1430do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg = ValidationErrorKind::from(Uninit { expected });
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}throw_validation_failure!(
1431self.path,
1432 Uninit { expected }
1433 ),
1434 Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
1435::rustc_middle::util::bug::bug_fmt(format_args!("arrays/slices can never have Scalar/ScalarPair layout"))bug!("arrays/slices can never have Scalar/ScalarPair layout"),
1436 }
1437 };
14381439// Optimization: we just check the entire range at once.
1440 // NOTE: Keep this in sync with the handling of integer and float
1441 // types above, in `visit_primitive`.
1442 // No need for an alignment check here, this is not an actual memory access.
1443let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");
14441445alloc.get_bytes_strip_provenance().map_err_kind(|kind| {
1446// Some error happened, try to provide a more detailed description.
1447 // For some errors we might be able to provide extra information.
1448 // (This custom logic does not fit the `try_validation!` macro.)
1449match kind {
1450Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {
1451// Some byte was uninitialized, determine which
1452 // element that byte belongs to so we can
1453 // provide an index.
1454let i = usize::try_from(
1455access.bad.start.bytes() / layout.size.bytes(),
1456 )
1457 .unwrap();
1458self.path.projs.push(PathElem::ArrayElem(i));
14591460if #[allow(non_exhaustive_omitted_patterns)] match kind {
Ub(InvalidUninitBytes(_)) => true,
_ => false,
}matches!(kind, Ub(InvalidUninitBytes(_))) {
1461{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg = ValidationErrorKind::from(Uninit { expected });
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}err_validation_failure!(self.path, Uninit { expected })1462 } else {
1463{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg = ValidationErrorKind::from(PointerAsInt { expected });
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}err_validation_failure!(self.path, PointerAsInt {expected})1464 }
1465 }
14661467// Propagate upwards (that will also check for unexpected errors).
1468err => err,
1469 }
1470 })?;
14711472// Don't forget that these are all non-pointer types, and thus do not preserve
1473 // provenance.
1474if self.reset_provenance_and_padding {
1475// We can't share this with above as above, we might be looking at read-only memory.
1476let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");
1477alloc.clear_provenance();
1478// Also, mark this as containing data, not padding.
1479self.add_data_range(mplace.ptr(), size);
1480 }
1481 }
1482// Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
1483 // of an array and not all of them, because there's only a single value of a specific
1484 // ZST type, so either validation fails for all elements or none.
1485ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
1486// Validate just the first element (if any).
1487if val.len(self.ecx)? > 0 {
1488self.visit_field(val, 0, &self.ecx.project_index(val, 0)?)?;
1489 }
1490 }
1491 ty::Pat(base, pat) => {
1492// First check that the base type is valid
1493self.visit_value(&val.transmute(self.ecx.layout_of(*base)?, self.ecx)?)?;
1494// When you extend this match, make sure to also add tests to
1495 // tests/ui/type/pattern_types/validity.rs((
1496match **pat {
1497// Range and non-null patterns are precisely reflected into `valid_range` and thus
1498 // handled fully by `visit_scalar` (called below).
1499ty::PatternKind::Range { .. } => {},
1500 ty::PatternKind::NotNull => {},
15011502// FIXME(pattern_types): check that the value is covered by one of the variants.
1503 // For now, we rely on layout computation setting the scalar's `valid_range` to
1504 // match the pattern. However, this cannot always work; the layout may
1505 // pessimistically cover actually illegal ranges and Miri would miss that UB.
1506 // The consolation here is that codegen also will miss that UB, so at least
1507 // we won't see optimizations actually breaking such programs.
1508ty::PatternKind::Or(_patterns) => {}
1509 }
1510 }
1511 ty::Adt(adt, _) if adt.is_maybe_dangling() => {
1512let old_may_dangle = mem::replace(&mut self.may_dangle, true);
15131514let inner = self.ecx.project_field(val, FieldIdx::ZERO)?;
1515self.visit_value(&inner)?;
15161517self.may_dangle = old_may_dangle;
1518 }
1519_ => {
1520// default handler
1521{
self.walk_value(val).map_err_kind(|e|
{
match e {
Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type
}) => {
{
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(InvalidMetaWrongTrait {
expected_dyn_type,
vtable_dyn_type,
});
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
}
}
e => e,
}
})?
};try_validation!(
1522self.walk_value(val),
1523self.path,
1524// It's not great to catch errors here, since we can't give a very good path,
1525 // but it's better than ICEing.
1526Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) =>
1527 InvalidMetaWrongTrait { expected_dyn_type, vtable_dyn_type },
1528 );
1529 }
1530 }
15311532// *After* all of this, check further information stored in the layout. We need to check
1533 // this to handle types like `NonNull` where the `Scalar` info is more restrictive than what
1534 // the fields say (`rustc_layout_scalar_valid_range_start`). But in most cases, this will
1535 // just propagate what the fields say, and then we want the error to point at the field --
1536 // so, we first recurse, then we do this check.
1537 //
1538 // FIXME: We could avoid some redundant checks here. For newtypes wrapping
1539 // scalars, we do the same check on every "level" (e.g., first we check
1540 // MyNewtype and then the scalar in there).
1541if val.layout.is_uninhabited() {
1542let ty = val.layout.ty;
1543do yeet {
let where_ = &self.path;
let path =
if !where_.projs.is_empty() {
let mut path = String::new();
write_path(&mut path, &where_.projs);
Some(path)
} else { None };
#[allow(unused)]
use ValidationErrorKind::*;
let msg =
ValidationErrorKind::from(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered a value of uninhabited type `{0}`",
ty))
}));
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
orig_ty: where_.orig_ty,
path,
ptr_bytes_warning: msg.ptr_bytes_warning(),
msg: msg.to_string(),
})
};throw_validation_failure!(
1544self.path,
1545format!("encountered a value of uninhabited type `{ty}`")
1546 );
1547 }
1548match val.layout.backend_repr {
1549 BackendRepr::Scalar(scalar_layout) => {
1550if !scalar_layout.is_uninit_valid() {
1551// There is something to check here.
1552 // We read directly via `ecx` since the read cannot fail -- we already read
1553 // this field above when recursing into the field.
1554let scalar = self.ecx.read_scalar(val)?;
1555self.visit_scalar(scalar, scalar_layout)?;
1556 }
1557 }
1558 BackendRepr::ScalarPair(a_layout, b_layout) => {
1559// We can only proceed if *both* scalars need to be initialized.
1560 // FIXME: find a way to also check ScalarPair when one side can be uninit but
1561 // the other must be init.
1562if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
1563// We read directly via `ecx` since the read cannot fail -- we already read
1564 // this field above when recursing into the field.
1565let (a, b) = self.ecx.read_immediate(val)?.to_scalar_pair();
1566self.visit_scalar(a, a_layout)?;
1567self.visit_scalar(b, b_layout)?;
1568 }
1569 }
1570 BackendRepr::SimdVector { .. } | BackendRepr::SimdScalableVector { .. } => {
1571// No checks here, we assume layout computation gets this right.
1572 // (This is harder to check since Miri does not represent these as `Immediate`. We
1573 // also cannot use field projections since this might be a newtype around a vector.)
1574}
1575 BackendRepr::Memory { .. } => {
1576// Nothing to do.
1577}
1578 }
15791580interp_ok(())
1581 }
1582}
15831584impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
1585/// The internal core entry point for all validation operations.
1586fn validate_operand_internal(
1587&mut self,
1588 val: &PlaceTy<'tcx, M::Provenance>,
1589 path: Path<'tcx>,
1590 ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>>,
1591 ctfe_mode: Option<CtfeValidationMode>,
1592 reset_provenance_and_padding: bool,
1593 start_in_may_dangle: bool,
1594 ) -> InterpResult<'tcx> {
1595{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1595",
"rustc_const_eval::interpret::validity",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(1595u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("validate_operand_internal: {0:?}, {1:?}",
*val, val.layout.ty) as &dyn Value))])
});
} else { ; }
};trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty);
15961597// Run the visitor.
1598self.run_for_validation_mut(|ecx| {
1599let reset_padding = reset_provenance_and_padding && {
1600// Check if `val` is actually stored in memory. If not, padding is not even
1601 // represented and we need not reset it.
1602ecx.place_to_op(val)?.as_mplace_or_imm().is_left()
1603 };
1604let mut v = ValidityVisitor {
1605path,
1606ref_tracking,
1607ctfe_mode,
1608ecx,
1609reset_provenance_and_padding,
1610 data_bytes: reset_padding.then_some(RangeSet(Vec::new())),
1611 may_dangle: start_in_may_dangle,
1612 };
1613v.visit_value(val)?;
1614v.reset_padding(val)?;
1615interp_ok(())
1616 })
1617 .map_err_info(|err| {
1618if !#[allow(non_exhaustive_omitted_patterns)] match err.kind() {
InterpErrorKind::UndefinedBehavior(ValidationError { .. }) |
InterpErrorKind::InvalidProgram(_) | InterpErrorKind::Unsupported(_) |
InterpErrorKind::MachineStop(_) => true,
_ => false,
}matches!(
1619 err.kind(),
1620 InterpErrorKind::UndefinedBehavior(ValidationError { .. })
1621 | InterpErrorKind::InvalidProgram(_)
1622 | InterpErrorKind::Unsupported(_)
1623// We have to also ignore machine-specific errors since we do retagging
1624 // during validation.
1625| InterpErrorKind::MachineStop(_)
1626 ) {
1627::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected error during validation: {0}",
format_interp_error(err)));bug!("Unexpected error during validation: {}", format_interp_error(err));
1628 }
1629err1630 })
1631 }
16321633/// This function checks the data at `val` to be const-valid.
1634 /// `val` is assumed to cover valid memory if it is an indirect operand.
1635 /// It will error if the bits at the destination do not match the ones described by the layout.
1636 ///
1637 /// `ref_tracking` is used to record references that we encounter so that they
1638 /// can be checked recursively by an outside driving loop.
1639 ///
1640 /// `constant` controls whether this must satisfy the rules for constants:
1641 /// - no pointers to statics.
1642 /// - no `UnsafeCell` or non-ZST `&mut`.
1643#[inline(always)]
1644pub(crate) fn const_validate_operand(
1645&mut self,
1646 val: &PlaceTy<'tcx, M::Provenance>,
1647 path: Path<'tcx>,
1648 ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>,
1649 ctfe_mode: CtfeValidationMode,
1650 ) -> InterpResult<'tcx> {
1651self.validate_operand_internal(
1652val,
1653path,
1654Some(ref_tracking),
1655Some(ctfe_mode),
1656/*reset_provenance*/ false,
1657/*start_in_may_dangle*/ false,
1658 )
1659 }
16601661/// This function checks the data at `val` to be runtime-valid.
1662 /// `val` is assumed to cover valid memory if it is an indirect operand.
1663 /// It will error if the bits at the destination do not match the ones described by the layout.
1664#[inline(always)]
1665pub fn validate_operand(
1666&mut self,
1667 val: &PlaceTy<'tcx, M::Provenance>,
1668 recursive: bool,
1669 reset_provenance_and_padding: bool,
1670 ) -> InterpResult<'tcx> {
1671let _trace = <M as
crate::interpret::Machine>::enter_trace_span(||
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("validate_operand",
"rustc_const_eval::interpret::validity",
::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(1671u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["recursive",
"reset_provenance_and_padding", "val"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::INFO <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&recursive as
&dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&reset_provenance_and_padding
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&val) as
&dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
})enter_trace_span!(
1672 M,
1673"validate_operand",
1674 recursive,
1675 reset_provenance_and_padding,
1676?val,
1677 );
1678// Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
1679 // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
1680 // value, it rules out things like `UnsafeCell` in awkward places.
1681if !recursive {
1682return self.validate_operand_internal(
1683val,
1684Path::new(val.layout.ty),
1685None,
1686None,
1687reset_provenance_and_padding,
1688/*start_in_may_dangle*/ false,
1689 );
1690 }
1691// Do a recursive check.
1692let mut ref_tracking = RefTracking::empty();
1693self.validate_operand_internal(
1694val,
1695Path::new(val.layout.ty),
1696Some(&mut ref_tracking),
1697None,
1698reset_provenance_and_padding,
1699/*start_in_may_dangle*/ false,
1700 )?;
1701while let Some((mplace, path)) = ref_tracking.todo.pop() {
1702// Things behind reference do *not* have the provenance reset. In fact
1703 // we treat the entire thing as being inside MaybeDangling, i.e., references
1704 // do not have to be dereferenceable.
1705self.validate_operand_internal(
1706&mplace.into(),
1707 path,
1708None, // no further recursion
1709None,
1710/*reset_provenance_and_padding*/ false,
1711/*start_in_may_dangle*/ true,
1712 )?;
1713 }
1714interp_ok(())
1715 }
1716}