//! Tree Borrows implementation (`miri/borrow_tracker/tree_borrows/mod.rs`).
use rustc_abi::Size;
use rustc_hir::find_attr;
use rustc_middle::mir::Mutability;
use rustc_middle::ty::layout::HasTypingEnv;
use rustc_middle::ty::{self, Ty};

use self::foreign_access_skipping::IdempotentForeignAccess;
use self::tree::LocationState;
use crate::borrow_tracker::{AccessKind, GlobalState, GlobalStateInner, ProtectorKind};
use crate::concurrency::data_race::{NaReadType, NaWriteType};
use crate::*;

pub mod diagnostics;
mod foreign_access_skipping;
mod perms;
mod tree;
mod tree_visitor;
mod unimap;
mod wildcard;

#[cfg(test)]
mod exhaustive;

use self::perms::Permission;
pub use self::tree::Tree;

pub type AllocState = Tree;
28
29impl<'tcx> Tree {
30    /// Create a new allocation, i.e. a new tree
31    pub fn new_allocation(
32        id: AllocId,
33        size: Size,
34        state: &mut GlobalStateInner,
35        _kind: MemoryKind,
36        machine: &MiriMachine<'tcx>,
37    ) -> Self {
38        let tag = state.root_ptr_tag(id, machine); // Fresh tag for the root
39        let span = machine.current_user_relevant_span();
40        Tree::new(tag, size, span)
41    }
42
43    /// Check that an access on the entire range is permitted, and update
44    /// the tree.
45    pub fn before_memory_access(
46        &mut self,
47        access_kind: AccessKind,
48        alloc_id: AllocId,
49        prov: ProvenanceExtra,
50        range: AllocRange,
51        machine: &MiriMachine<'tcx>,
52    ) -> InterpResult<'tcx> {
53        trace!(
54            "{} with tag {:?}: {:?}, size {}",
55            access_kind,
56            prov,
57            interpret::Pointer::new(alloc_id, range.start),
58            range.size.bytes(),
59        );
60        let global = machine.borrow_tracker.as_ref().unwrap();
61        let span = machine.current_user_relevant_span();
62        self.perform_access(
63            prov,
64            range,
65            access_kind,
66            diagnostics::AccessCause::Explicit(access_kind),
67            global,
68            alloc_id,
69            span,
70        )
71    }
72
73    /// Check that this pointer has permission to deallocate this range.
74    pub fn before_memory_deallocation(
75        &mut self,
76        alloc_id: AllocId,
77        prov: ProvenanceExtra,
78        size: Size,
79        machine: &MiriMachine<'tcx>,
80    ) -> InterpResult<'tcx> {
81        let global = machine.borrow_tracker.as_ref().unwrap();
82        let span = machine.current_user_relevant_span();
83        self.dealloc(prov, alloc_range(Size::ZERO, size), global, alloc_id, span)
84    }
85
86    /// A tag just lost its protector.
87    ///
88    /// This emits a special kind of access that is only applied
89    /// to accessed locations, as a protection against other
90    /// tags not having been made aware of the existence of this
91    /// protector.
92    pub fn release_protector(
93        &mut self,
94        machine: &MiriMachine<'tcx>,
95        global: &GlobalState,
96        tag: BorTag,
97        alloc_id: AllocId, // diagnostics
98    ) -> InterpResult<'tcx> {
99        let span = machine.current_user_relevant_span();
100        self.perform_protector_end_access(tag, global, alloc_id, span)?;
101
102        self.update_exposure_for_protector_release(tag);
103
104        interp_ok(())
105    }
106}
107
/// Policy for a new borrow: which initial permissions the new tag gets,
/// and whether it is protected for the duration of a function call.
#[derive(Debug, Clone, Copy)]
pub struct NewPermission {
    /// Permission for the frozen part of the range
    /// (i.e. bytes not inside an `UnsafeCell`).
    freeze_perm: Permission,
    /// Permission for the non-frozen part of the range
    /// (i.e. bytes inside an `UnsafeCell`).
    nonfreeze_perm: Permission,
    /// Permission for memory outside the range covered by the reborrow.
    outside_perm: Permission,
    /// Whether this pointer is part of the arguments of a function call.
    /// `protector` is `Some(_)` for all pointers marked `noalias`.
    protector: Option<ProtectorKind>,
}
121
122impl<'tcx> NewPermission {
123    /// Determine NewPermission of the reference/Box from the type of the pointee.
124    ///
125    /// A `ref_mutability` of `None` indicates a `Box` type.
126    fn new(
127        pointee: Ty<'tcx>,
128        ref_mutability: Option<Mutability>,
129        mode: RetagMode,
130        cx: &crate::MiriInterpCx<'tcx>,
131    ) -> Option<Self> {
132        if mode == RetagMode::None {
133            return None;
134        }
135
136        let ty_is_unpin = pointee.is_unpin(*cx.tcx, cx.typing_env())
137            && pointee.is_unsafe_unpin(*cx.tcx, cx.typing_env());
138        let ty_is_freeze = pointee.is_freeze(*cx.tcx, cx.typing_env());
139        let is_protected = mode == RetagMode::FnEntry;
140
141        // Check if the implicit writes feature is globally enabled, using the
142        // `-Zmiri-tree-borrows-implicit-writes` flag, and not locally disabled using the
143        // `#[rustc_no_writable]` attribute. For performance reasons, only performs the lookup if
144        // is_protected is true as implicit writes are only performed for protected references.
145        let implicit_writes_enabled = is_protected && {
146            let implicit_writes = cx
147                .machine
148                .borrow_tracker
149                .as_ref()
150                .unwrap()
151                .borrow()
152                .borrow_tracker_method
153                .get_tree_borrows_params()
154                .implicit_writes;
155            let def_id = cx.frame().instance().def_id();
156            implicit_writes && !find_attr!(cx.tcx, def_id, RustcNoWritable)
157        };
158
159        if matches!(ref_mutability, Some(Mutability::Mut) | None if !ty_is_unpin) {
160            // Mutable reference / Box to pinning type: retagging is a NOP.
161            // FIXME: with `UnsafePinned`, this should do proper per-byte tracking.
162            return None;
163        }
164
165        enum Part {
166            InsideFrozen,
167            InsideUnsafeCell,
168            Outside,
169        }
170        use Part::*;
171
172        let perm = |part: Part| {
173            // Whether we should consider this byte to be frozen.
174            // Outside bytes are frozen only if the entire type is frozen.
175            let frozen = match part {
176                InsideFrozen => true,
177                InsideUnsafeCell => false,
178                Outside => ty_is_freeze,
179            };
180            match ref_mutability {
181                // Shared references
182                Some(Mutability::Not) =>
183                    if frozen {
184                        Permission::new_frozen()
185                    } else {
186                        Permission::new_cell()
187                    },
188                // Mutable references
189                Some(Mutability::Mut) => {
190                    if implicit_writes_enabled && !matches!(part, Outside) {
191                        // We cannot use `Unique` for the outside part.
192                        Permission::new_unique()
193                    } else if is_protected || frozen {
194                        // We also use this for protected `&mut UnsafeCell` as otherwise adding
195                        // `noalias` would not be sound.
196                        Permission::new_reserved_frz()
197                    } else {
198                        Permission::new_reserved_im()
199                    }
200                }
201                // Boxes
202                None => {
203                    if implicit_writes_enabled && !matches!(part, Outside) {
204                        // Boxes are treated the same as mutable references.
205                        Permission::new_unique()
206                    } else if is_protected || frozen {
207                        // We also use this for protected `Box<UnsafeCell>` as otherwise adding
208                        // `noalias` would not be sound.
209                        Permission::new_reserved_frz()
210                    } else {
211                        Permission::new_reserved_im()
212                    }
213                }
214            }
215        };
216
217        Some(NewPermission {
218            freeze_perm: perm(InsideFrozen),
219            nonfreeze_perm: perm(InsideUnsafeCell),
220            outside_perm: perm(Outside),
221            protector: is_protected.then_some(if ref_mutability.is_some() {
222                // Strong protector for references
223                ProtectorKind::StrongProtector
224            } else {
225                // Weak protector for boxes
226                ProtectorKind::WeakProtector
227            }),
228        })
229    }
230}
231
/// Retagging/reborrowing.
/// Policy on which permission to grant to each pointer should be left to
/// the implementation of NewPermission.
impl<'tcx> EvalContextPrivExt<'tcx> for crate::MiriInterpCx<'tcx> {}
trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
    /// Perform the actual reborrow: register a new tag as a child of the parent tag
    /// found in `place`, performing the accesses and protector registration that
    /// this entails. Returns the provenance that should be used henceforth.
    fn tb_reborrow(
        &mut self,
        place: &MPlaceTy<'tcx>, // parent tag extracted from here
        ptr_size: Size,
        new_perm: NewPermission,
        new_tag: BorTag,
    ) -> InterpResult<'tcx, Option<Provenance>> {
        let this = self.eval_context_mut();
        // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050).
        this.check_ptr_access(place.ptr(), ptr_size, CheckInAllocMsg::Dereferenceable)?;

        // It is crucial that this gets called on all code paths, to ensure we track tag creation.
        let log_creation = |this: &MiriInterpCx<'tcx>,
                            loc: Option<(AllocId, Size, ProvenanceExtra)>| // alloc_id, base_offset, orig_tag
         -> InterpResult<'tcx> {
            let global = this.machine.borrow_tracker.as_ref().unwrap().borrow();
            let ty = place.layout.ty;
            if global.tracked_pointer_tags.contains(&new_tag) {
                 // Describe the tag's initial state in the diagnostic: a single permission
                 // for `Freeze` pointees, an outside/inside-`UnsafeCell` pair otherwise.
                 let ty_is_freeze = ty.is_freeze(*this.tcx, this.typing_env());
                 let kind_str =
                     if ty_is_freeze {
                         format!("initial state {} (pointee type {ty})", new_perm.freeze_perm)
                     } else {
                         format!("initial state {}/{} outside/inside UnsafeCell (pointee type {ty})", new_perm.freeze_perm, new_perm.nonfreeze_perm)
                     };
                this.emit_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(
                    new_tag.inner(),
                    Some(kind_str),
                    loc.map(|(alloc_id, base_offset, orig_tag)| (alloc_id, alloc_range(base_offset, ptr_size), orig_tag)),
                ));
            }
            drop(global); // don't hold that reference any longer than we have to
            interp_ok(())
        };

        trace!("Reborrow of size {:?}", ptr_size);
        // Unlike SB, we *do* a proper retag for size 0 if can identify the allocation.
        // After all, the pointer may be lazily initialized outside this initial range.
        let Ok((alloc_id, base_offset, parent_prov)) = this.ptr_try_get_alloc_id(place.ptr(), 0)
        else {
            assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
            // This pointer doesn't come with an AllocId, so there's no
            // memory to do retagging in.
            let new_prov = place.ptr().provenance;
            trace!("reborrow of size 0: reusing {:?} (pointee {})", place.ptr(), place.layout.ty,);
            log_creation(this, None)?;
            // Keep original provenance.
            return interp_ok(new_prov);
        };
        let new_prov = Provenance::Concrete { alloc_id, tag: new_tag };

        log_creation(this, Some((alloc_id, base_offset, parent_prov)))?;

        trace!(
            "reborrow: reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
            new_tag,
            parent_prov,
            place.layout.ty,
            interpret::Pointer::new(alloc_id, base_offset),
            ptr_size.bytes()
        );

        if let Some(protect) = new_perm.protector {
            // We register the protection in two different places.
            // This makes creating a protector slower, but checking whether a tag
            // is protected faster.
            // Per-frame list: used to release the protector when the call returns.
            this.frame_mut()
                .extra
                .borrow_tracker
                .as_mut()
                .unwrap()
                .protected_tags
                .push((alloc_id, new_tag));
            // Global map: used for the fast "is this tag protected?" check.
            this.machine
                .borrow_tracker
                .as_mut()
                .expect("We should have borrow tracking data")
                .get_mut()
                .protected_tags
                .insert(new_tag, protect);
        }

        let alloc_kind = this.get_alloc_info(alloc_id).kind;
        if !matches!(alloc_kind, AllocKind::LiveData) {
            assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
            // There's not actually any bytes here where accesses could even be tracked.
            // Just produce the new provenance, nothing else to do.
            return interp_ok(Some(new_prov));
        }

        let protected = new_perm.protector.is_some();
        // Whether interior mutability is tracked per byte; configurable via the
        // tree borrows parameters.
        let precise_interior_mut = this
            .machine
            .borrow_tracker
            .as_mut()
            .unwrap()
            .get_mut()
            .borrow_tracker_method
            .get_tree_borrows_params()
            .precise_interior_mut;

        // Compute initial "inside" permissions.
        let loc_state = |frozen: bool| -> LocationState {
            let perm = if frozen { new_perm.freeze_perm } else { new_perm.nonfreeze_perm };
            let sifa = perm.strongest_idempotent_foreign_access(protected);

            // Permissions with an associated access start out as already accessed.
            if perm.associated_access().is_some() {
                LocationState::new_accessed(perm, sifa)
            } else {
                LocationState::new_non_accessed(perm, sifa)
            }
        };
        let inside_perms = if !precise_interior_mut {
            // For `!Freeze` types, just pretend the entire thing is an `UnsafeCell`.
            let ty_is_freeze = place.layout.ty.is_freeze(*this.tcx, this.typing_env());
            DedupRangeMap::new(ptr_size, loc_state(ty_is_freeze))
        } else {
            // The initial state will be overwritten by the visitor below.
            let mut perms_map = DedupRangeMap::new(
                ptr_size,
                LocationState::new_accessed(
                    Permission::new_disabled(),
                    IdempotentForeignAccess::None,
                ),
            );
            // Walk the type's frozen/non-frozen byte ranges and fill in the map.
            this.visit_freeze_sensitive(place, ptr_size, |range, frozen| {
                let state = loc_state(frozen);
                for (_loc_range, loc) in perms_map.iter_mut(range.start, range.size) {
                    *loc = state;
                }
                interp_ok(())
            })?;
            perms_map
        };

        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut();

        for (perm_range, loc_state) in inside_perms.iter_all() {
            if let Some(access) = loc_state.permission().associated_access() {
                // Some reborrows incur a read/write access to the parent.
                // As a write also implies a read, a single write is performed instead of a read and a write.

                // writing to an immutable allocation (static variables) is UB, check this here
                if access == AccessKind::Write
                    && this.get_alloc_mutability(alloc_id).unwrap().is_not()
                {
                    throw_ub!(WriteToReadOnly(alloc_id))
                }

                // Adjust range to be relative to allocation start (rather than to `place`).
                let range_in_alloc = AllocRange {
                    start: Size::from_bytes(perm_range.start) + base_offset,
                    size: Size::from_bytes(perm_range.end - perm_range.start),
                };

                tree_borrows.perform_access(
                    parent_prov,
                    range_in_alloc,
                    access,
                    diagnostics::AccessCause::Reborrow(access),
                    this.machine.borrow_tracker.as_ref().unwrap(),
                    alloc_id,
                    this.machine.current_user_relevant_span(),
                )?;

                // Also inform the data race model (but only if any bytes are actually affected).
                if range_in_alloc.size.bytes() > 0 {
                    if let Some(data_race) = alloc_extra.data_race.as_vclocks_ref() {
                        match access {
                            AccessKind::Read =>
                                data_race.read_non_atomic(
                                    alloc_id,
                                    range_in_alloc,
                                    NaReadType::Retag,
                                    Some(place.layout.ty),
                                    &this.machine,
                                )?,
                            AccessKind::Write =>
                                data_race.write_non_atomic(
                                    alloc_id,
                                    range_in_alloc,
                                    NaWriteType::Retag,
                                    Some(place.layout.ty),
                                    &this.machine,
                                )?,
                        };
                    }
                }
            }
        }

        // Record the parent-child pair in the tree.
        tree_borrows.new_child(
            base_offset,
            parent_prov,
            new_tag,
            inside_perms,
            new_perm.outside_perm,
            protected,
            this.machine.current_user_relevant_span(),
        )?;

        interp_ok(Some(new_prov))
    }

    /// Retag `place` with a fresh tag and the permission policy `new_perm`,
    /// returning the place with its provenance adjusted.
    fn tb_retag_place(
        &mut self,
        place: &MPlaceTy<'tcx>,
        new_perm: NewPermission,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_mut();

        // Determine the size of the reborrow.
        // For most types this is the entire size of the place, however
        // - when `extern type` is involved we use the size of the known prefix,
        // - if the pointer is not reborrowed (raw pointer) then we override the size
        //   to do a zero-length reborrow.
        let reborrow_size =
            this.size_and_align_of_val(place)?.map(|(size, _)| size).unwrap_or(place.layout.size);
        trace!("Creating new permission: {:?} with size {:?}", new_perm, reborrow_size);

        // This new tag is not guaranteed to actually be used.
        //
        // If you run out of tags, consider the following optimization: adjust `tb_reborrow`
        // so that rather than taking as input a fresh tag and deciding whether it uses this
        // one or the parent it instead just returns whether a new tag should be created.
        // This will avoid creating tags than end up never being used.
        let new_tag = this.machine.borrow_tracker.as_mut().unwrap().get_mut().new_ptr();

        // Compute the actual reborrow.
        let new_prov = this.tb_reborrow(place, reborrow_size, new_perm, new_tag)?;

        // Adjust place.
        // (If the closure gets called, that means the old provenance was `Some`, and hence the new
        // one must also be `Some`.)
        interp_ok(place.clone().map_provenance(|_| new_prov.unwrap()))
    }
}
477
478impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
479pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
480    /// Retag a pointer. References are passed to `from_ref_ty` and
481    /// raw pointers are never reborrowed.
482    fn tb_retag_ptr_value(
483        &mut self,
484        val: &ImmTy<'tcx>,
485        ty: Ty<'tcx>,
486        mode: RetagMode,
487    ) -> InterpResult<'tcx, Option<ImmTy<'tcx>>> {
488        let this = self.eval_context_mut();
489        let new_perm = match *ty.kind() {
490            _ if ty.is_box_global(*this.tcx) => {
491                // The `None` marks this as a Box.
492                NewPermission::new(ty.builtin_deref(true).unwrap(), None, mode, this)
493            }
494            ty::Ref(_, pointee, mutability) =>
495                NewPermission::new(pointee, Some(mutability), mode, this),
496
497            ty::RawPtr(..) => {
498                assert!(mode == RetagMode::Raw);
499                // We don't give new tags to raw pointers.
500                None
501            }
502            _ if ty.is_box() => {
503                // No retagging for boxes with local allocators.
504                None
505            }
506            _ => panic!("tb_retag_ptr_value: invalid type {ty}"),
507        };
508        if let Some(new_perm) = new_perm {
509            let place = this.imm_ptr_to_mplace(val)?;
510            let new_place = this.tb_retag_place(&place, new_perm)?;
511            interp_ok(Some(ImmTy::from_immediate(new_place.to_ref(this), val.layout)))
512        } else {
513            interp_ok(None)
514        }
515    }
516
517    /// Protect a place so that it cannot be used any more for the duration of the current function
518    /// call.
519    ///
520    /// This is used to ensure soundness of in-place function argument/return passing.
521    fn tb_protect_place(&mut self, place: &MPlaceTy<'tcx>) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
522        let this = self.eval_context_mut();
523
524        // Retag it. With protection! That is the entire point.
525        let new_perm = NewPermission {
526            // Note: If we are creating a protected Reserved, which can
527            // never be ReservedIM, the value of the `ty_is_freeze`
528            // argument doesn't matter
529            // (`ty_is_freeze || true` in `new_reserved` will always be `true`).
530            freeze_perm: Permission::new_reserved_frz(),
531            nonfreeze_perm: Permission::new_reserved_frz(),
532            outside_perm: Permission::new_reserved_frz(),
533            protector: Some(ProtectorKind::StrongProtector),
534        };
535        this.tb_retag_place(place, new_perm)
536    }
537
538    /// Mark the given tag as exposed. It was found on a pointer with the given AllocId.
539    fn tb_expose_tag(&self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
540        let this = self.eval_context_ref();
541
542        // Function pointers and dead objects don't have an alloc_extra so we ignore them.
543        // This is okay because accessing them is UB anyway, no need for any Tree Borrows checks.
544        // NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
545        let kind = this.get_alloc_info(alloc_id).kind;
546        match kind {
547            AllocKind::LiveData => {
548                // This should have alloc_extra data, but `get_alloc_extra` can still fail
549                // if converting this alloc_id from a global to a local one
550                // uncovers a non-supported `extern static`.
551                let alloc_extra = this.get_alloc_extra(alloc_id)?;
552                trace!("Tree Borrows tag {tag:?} exposed in {alloc_id:?}");
553
554                let global = this.machine.borrow_tracker.as_ref().unwrap();
555                let protected_tags = &global.borrow().protected_tags;
556                let protected = protected_tags.contains_key(&tag);
557                alloc_extra.borrow_tracker_tb().borrow_mut().expose_tag(tag, protected);
558            }
559            AllocKind::Function
560            | AllocKind::VTable
561            | AllocKind::TypeId
562            | AllocKind::Dead
563            | AllocKind::VaList => {
564                // No tree borrows on these allocations.
565            }
566        }
567        interp_ok(())
568    }
569
570    /// Display the tree.
571    fn print_tree(&mut self, alloc_id: AllocId, show_unnamed: bool) -> InterpResult<'tcx> {
572        let this = self.eval_context_mut();
573        let alloc_extra = this.get_alloc_extra(alloc_id)?;
574        let tree_borrows = alloc_extra.borrow_tracker_tb().borrow();
575        let borrow_tracker = &this.machine.borrow_tracker.as_ref().unwrap().borrow();
576        tree_borrows.print_tree(&borrow_tracker.protected_tags, show_unnamed)
577    }
578
579    /// Give a name to the pointer, usually the name it has in the source code (for debugging).
580    /// The name given is `name` and the pointer that receives it is the `nth_parent`
581    /// of `ptr` (with 0 representing `ptr` itself)
582    fn tb_give_pointer_debug_name(
583        &mut self,
584        ptr: Pointer,
585        nth_parent: u8,
586        name: &str,
587    ) -> InterpResult<'tcx> {
588        let this = self.eval_context_mut();
589        let (tag, alloc_id) = match ptr.provenance {
590            Some(Provenance::Concrete { tag, alloc_id }) => (tag, alloc_id),
591            Some(Provenance::Wildcard) => {
592                eprintln!("Can't give the name {name} to wildcard pointer");
593                return interp_ok(());
594            }
595            None => {
596                eprintln!("Can't give the name {name} to pointer without provenance");
597                return interp_ok(());
598            }
599        };
600        let alloc_extra = this.get_alloc_extra(alloc_id)?;
601        let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut();
602        tree_borrows.give_pointer_debug_name(tag, nth_parent, name)
603    }
604}