use std::cell::RefCell;
use std::collections::HashSet;
use std::fmt;
use std::num::NonZeroU64;
use std::rc::Rc;
use rustc::ty::{self, layout::Size};
use rustc::hir::{MutMutable, MutImmutable};
use rustc::mir::RetagKind;
    EvalResult, InterpError, MiriEvalContext, HelpersEvalContextExt, Evaluator, MutValueVisitor,
    MemoryKind, MiriMemoryKind, RangeMap, Allocation, AllocationExtra,
    Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy,
pub type PtrId = NonZeroU64;
pub type CallId = NonZeroU64;
/// Tracking pointer provenance
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
impl fmt::Display for Tag {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            Tag::Tagged(id) => write!(f, "{}", id),
            Tag::Untagged => write!(f, "<untagged>"),
/// Indicates which permission is granted (by this item to some pointers)
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
    /// Grants unique mutable access.
    /// Grants shared mutable access.
    /// Grants shared read-only access.
/// An item in the per-location borrow stack.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
    /// The permission this item grants.
    /// The pointers the permission is granted to.
    /// An optional protector, ensuring the item cannot get popped until `CallId` is over.
    protector: Option<CallId>,
impl fmt::Display for Item {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{:?} for {}", self.perm, self.tag)?;
        if let Some(call) = self.protector {
            write!(f, " (call {})", call)?;
/// Extra per-location state.
#[derive(Clone, Debug, PartialEq, Eq)]
    /// Used *mostly* as a stack; never empty.
    /// We sometimes push into the middle but never remove from the middle.
    /// The same tag may occur multiple times, e.g. from a two-phase borrow.
    /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
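    ///
    /// An illustrative sketch (not from the original comments): using `Item`'s `Display`
    /// output, a stack for one location might read, bottom first,
    /// `[Unique for 1340]`, `[SharedReadWrite for <untagged>]`, `[SharedReadOnly for 1341]`;
    /// at that point, only further `SharedReadOnly` items may be pushed on top.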
/// Extra per-allocation state.
#[derive(Clone, Debug)]
    // Even reading memory can have effects on the stack, so we need a `RefCell` here.
    stacks: RefCell<RangeMap<Stack>>,
    // Pointer to global state.
/// Extra global state, available to the memory access hooks.
pub struct GlobalState {
    active_calls: HashSet<CallId>,
pub type MemoryState = Rc<RefCell<GlobalState>>;
/// Indicates which kind of access is being performed.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum AccessKind {
impl fmt::Display for AccessKind {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            AccessKind::Read => write!(f, "read"),
            AccessKind::Write => write!(f, "write"),
/// Indicates which kind of reference is being created.
/// Used by high-level `reborrow` to compute which permissions to grant to the
/// new pointer.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
    /// `&mut` and `Box`.
    /// `&` with or without interior mutability.
    /// `*mut`/`*const` (raw pointers).
    Raw { mutable: bool },
impl fmt::Display for RefKind {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            RefKind::Unique => write!(f, "unique"),
            RefKind::Shared => write!(f, "shared"),
            RefKind::Raw { mutable: true } => write!(f, "raw (mutable)"),
            RefKind::Raw { mutable: false } => write!(f, "raw (constant)"),
/// Utilities for initialization and ID generation
impl Default for GlobalState {
    fn default() -> Self {
            next_ptr_id: NonZeroU64::new(1).unwrap(),
            next_call_id: NonZeroU64::new(1).unwrap(),
            active_calls: HashSet::default(),
    pub fn new_ptr(&mut self) -> PtrId {
        let id = self.next_ptr_id;
        self.next_ptr_id = NonZeroU64::new(id.get() + 1).unwrap();
    pub fn new_call(&mut self) -> CallId {
        let id = self.next_call_id;
        trace!("new_call: Assigning ID {}", id);
        self.active_calls.insert(id);
        self.next_call_id = NonZeroU64::new(id.get() + 1).unwrap();
    pub fn end_call(&mut self, id: CallId) {
        assert!(self.active_calls.remove(&id));
    fn is_active(&self, id: CallId) -> bool {
        self.active_calls.contains(&id)
// # Stacked Borrows Core Begin
/// We need to make at least the following things true:
/// U1: After creating a `Uniq`, it is at the top.
/// U2: If the top is `Uniq`, accesses must be through that `Uniq` or remove it.
/// U3: If an access happens with a `Uniq`, it requires the `Uniq` to be in the stack.
/// F1: After creating a `&`, the parts outside `UnsafeCell` have our `SharedReadOnly` on top.
/// F2: If a write access happens, it pops the `SharedReadOnly`. This has three pieces:
/// F2a: If a write happens granted by an item below our `SharedReadOnly`, the `SharedReadOnly`
///      gets popped.
/// F2b: No `SharedReadWrite` or `Unique` will ever be added on top of our `SharedReadOnly`.
/// F3: If an access happens with an `&` outside `UnsafeCell`,
///     it requires the `SharedReadOnly` to still be in the stack.
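///
/// An illustrative sketch of U1-U3 in action (an assumed example, not from the original file):
/// ```rust,ignore
/// let x = &mut 0u8;         // a `Uniq` item for `x` is pushed on top (U1)
/// let raw = x as *mut u8;   // a `SharedReadWrite` item for `raw` sits above it
/// let _y = &mut *x;         // acts like a write with `x`'s tag, popping `raw`'s item (U2)
/// unsafe { *raw = 1 };      // UB: no item for `raw` is left in the stack (U3)
/// ```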
impl Default for Tag {
    fn default() -> Tag {
/// Core relations on `Permission` define which accesses are allowed:
/// On every access, we try to find a *granting* item, and then we remove all
/// *incompatible* items above it.
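///
/// For example (a sketch): on a write access with tag `t`, if the stack is
/// `[SharedReadWrite for t]` with a `[SharedReadOnly for s]` above it, the lower item
/// grants the access, and the `SharedReadOnly` is incompatible with writes, so it is
/// removed -- invalidating pointers tagged `s`.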
    /// This defines, for a given permission, whether it permits the given kind of access.
    fn grants(self, access: AccessKind) -> bool {
        match (self, access) {
            // Unique and SharedReadWrite allow any kind of access.
            (Permission::Unique, _) |
            (Permission::SharedReadWrite, _) =>
            // SharedReadOnly only permits read access.
            (Permission::SharedReadOnly, AccessKind::Read) =>
            (Permission::SharedReadOnly, AccessKind::Write) =>
    /// This defines, for a given permission, which other permissions it can tolerate "above" itself
    /// for which kinds of accesses.
    /// If true, then `other` is allowed to remain on top of `self` when `access` happens.
    fn compatible_with(self, access: AccessKind, other: Permission) -> bool {
        use self::Permission::*;
        match (self, access, other) {
            // Some cases are impossible.
            (SharedReadOnly, _, SharedReadWrite) |
            (SharedReadOnly, _, Unique) =>
                bug!("There can never be a SharedReadWrite or a Unique on top of a SharedReadOnly"),
            // When `other` is `SharedReadOnly`, that is NEVER compatible with a write access.
            // This makes sure read-only pointers become invalid on write accesses (ensures F2a).
            (_, AccessKind::Write, SharedReadOnly) =>
            // When `other` is `Unique`, that is compatible with nothing.
            // This makes sure unique pointers become invalid on incompatible accesses (ensures U2).
            // When we are unique and this is a write/dealloc, we tolerate nothing.
            // This makes sure we re-assert uniqueness ("being on top") on write accesses.
            // (This is particularly important such that when a new mutable ref gets created, it gets
            // pushed onto the right item -- this behaves like a write and we assert uniqueness of the
            // pointer from which this comes, *if* it was a unique pointer.)
            (Unique, AccessKind::Write, _) =>
            // `SharedReadWrite` items can tolerate any other `SharedReadWrite` items for any kind of access.
            (SharedReadWrite, _, SharedReadWrite) =>
            // Any item can tolerate read accesses for shared items.
            // This includes unique items! Reads from unique pointers do not invalidate
            // other pointers.
            (_, AccessKind::Read, SharedReadWrite) |
            (_, AccessKind::Read, SharedReadOnly) =>
/// Core per-location operations: access, dealloc, reborrow.
    /// Find the item granting the given kind of access to the given tag, and where that item is in the stack.
    fn find_granting(&self, access: AccessKind, tag: Tag) -> Option<(usize, Permission)> {
            .enumerate() // we also need to know *where* in the stack
            .rev() // search top-to-bottom
            // Return the permission of the first item that grants access.
            // We require a permission with the right tag, ensuring U3 and F3.
            .find_map(|(idx, item)|
                if item.perm.grants(access) && tag == item.tag {
                    Some((idx, item.perm))
    /// Test if a memory `access` using pointer tagged `tag` is granted.
    /// If yes, return the index of the item that granted it.
        global: &GlobalState,
    ) -> EvalResult<'tcx, usize> {
        // Two main steps: Find the granting item, then remove all incompatible items above it.
        // Step 1: Find the granting item.
        let (granting_idx, granting_perm) = self.find_granting(access, tag)
            .ok_or_else(|| InterpError::MachineError(format!(
                "no item granting {} access to tag {} found in borrow stack",
        // Step 2: Remove everything incompatible above it. Make sure we do not remove protected
        // items.
        // We do *not* maintain a stack discipline here. We could, in principle, decide to only
        // keep the items immediately above `granting_idx` that are compatible, and then pop the rest.
        // However, that kills off entire "branches" of pointer derivation too easily:
        // in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement would pop the `Unique`
        // from the reborrow of the first statement, and subsequently also pop the `SharedReadWrite` for `raw`.
        // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
        // reference and use that.
        // Implemented with indices because there does not seem to be a nice iterator and range-based
        // way to do this.
        let mut cur = granting_idx + 1;
        while let Some(item) = self.borrows.get(cur) {
            if granting_perm.compatible_with(access, item.perm) {
                // Keep this, check next.
                // Aha! This is a bad one, remove it, and make sure it is not protected.
                let item = self.borrows.remove(cur);
                if let Some(call) = item.protector {
                    if global.is_active(call) {
                        return err!(MachineError(format!(
                            "not granting {} access to tag {} because incompatible item {} is protected",
                trace!("access: removing item {}", item);
        return Ok(granting_idx);
    /// Deallocate a location: Like a write access, but also there must be no
    /// active protectors at all.
        global: &GlobalState,
    ) -> EvalResult<'tcx> {
        // Step 1: Find the granting item.
        self.find_granting(AccessKind::Write, tag)
            .ok_or_else(|| InterpError::MachineError(format!(
                "no item granting write access for deallocation to tag {} found in borrow stack",
        // We must make sure there are no protected items remaining on the stack.
        // Also clear the stack; no more accesses are possible.
        for item in self.borrows.drain(..) {
            if let Some(call) = item.protector {
                if global.is_active(call) {
                    return err!(MachineError(format!(
                        "deallocating with active protector ({})", call
    /// `reborrow` helper function: test that the stack invariants are still maintained.
    fn test_invariants(&self) {
        let mut saw_shared_read_only = false;
        for item in self.borrows.iter() {
                Permission::SharedReadOnly => {
                    saw_shared_read_only = true;
                // Otherwise, if we saw one before, that's a bug.
                perm if saw_shared_read_only => {
                    bug!("Found {:?} on top of a SharedReadOnly!", perm);
    /// Derive a new pointer from one with the given tag.
    /// `weak` controls whether this is a weak reborrow: weak reborrows do not act as
    /// accesses, and they add the new item directly on top of the one it is derived
    /// from instead of all the way at the top of the stack.
        global: &GlobalState,
    ) -> EvalResult<'tcx> {
        // Figure out which access `perm` corresponds to.
        let access = if new.perm.grants(AccessKind::Write) {
        // Now we figure out which item grants our parent (`derived_from`) this kind of access.
        // We use that to determine where to put the new item.
        let (derived_from_idx, _) = self.find_granting(access, derived_from)
            .ok_or_else(|| InterpError::MachineError(format!(
                "no item to reborrow for {:?} from tag {} found in borrow stack", new.perm, derived_from,
        // Compute where to put the new item.
        // Either way, we ensure that between `derived_from` and the new item, there are
        // only items *compatible with* `derived_from`.
        let new_idx = if weak {
            // A weak SharedReadOnly reborrow might be added below other items, violating the
            // invariant that only SharedReadOnly can sit on top of SharedReadOnly.
            assert!(new.perm != Permission::SharedReadOnly, "Weak SharedReadOnly reborrows don't work");
            // A very liberal reborrow, because the new pointer does not expect any kind of aliasing guarantee:
            // just insert the new permission as a child of the old permission, and keep everything else.
            // This inserts "as far down as possible", which is good because it makes this pointer as
            // long-lived as possible *and* we want all the items that are incompatible with this
            // to actually get removed from the stack. If we pushed a `SharedReadWrite` on top of
            // a `SharedReadOnly`, we'd violate the invariant that `SharedReadOnly` are at the top
            // and we'd allow write access without invalidating frozen shared references!
            // This ensures F2b for `SharedReadWrite` by adding the new item below any
            // potentially existing `SharedReadOnly`.
            // A "safe" reborrow for a pointer that actually expects some aliasing guarantees.
            // Here, creating a reference actually counts as an access, and pops incompatible
            // stuff off the stack.
            // This ensures F2b for `Unique`, by removing offending `SharedReadOnly`.
            let check_idx = self.access(access, derived_from, global)?;
            assert_eq!(check_idx, derived_from_idx, "somehow we saw different items??");
            // We insert "as far up as possible": we know only compatible items remain
            // on top of `derived_from`, and we want the new item at the top so that we
            // get the strongest possible guarantees.
            // This ensures U1 and F1.
        // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors.
        if self.borrows[new_idx - 1] == new || self.borrows.get(new_idx) == Some(&new) {
            // Optimization applies, done.
            trace!("reborrow: avoiding adding redundant item {}", new);
            trace!("reborrow: adding item {}", new);
            self.borrows.insert(new_idx, new);
        // Make sure that after all this, the stack's invariant is still maintained.
        if cfg!(debug_assertions) {
            self.test_invariants();
// # Stacked Borrows Core End
/// Map per-stack operations to higher-level per-location-range operations.
    /// Creates a new stack with an initial tag.
        let item = Item { perm: Permission::Unique, tag, protector: None };
            stacks: RefCell::new(RangeMap::new(size, stack)),
    /// Call `f` on every stack in the range.
        f: impl Fn(&mut Stack, &GlobalState) -> EvalResult<'tcx>,
    ) -> EvalResult<'tcx> {
        let global = self.global.borrow();
        let mut stacks = self.stacks.borrow_mut();
        for stack in stacks.iter_mut(ptr.offset, size) {
/// Glue code to connect with Miri Machine Hooks
    pub fn new_allocation(
        kind: MemoryKind<MiriMemoryKind>,
        let tag = match kind {
            MemoryKind::Stack => {
                // New unique borrow. This `Uniq` is not accessible by the program,
                // so it will only ever be used when using the local directly (i.e.,
                // not through a pointer). That is, whenever we directly use a local, this will pop
                // everything else off the stack, invalidating all previous pointers,
                // and in particular, *all* raw pointers. This subsumes the explicit
                // `reset` which the blog post [1] says to perform when accessing a local.
                // [1]: <https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html>
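                //
                // An illustrative sketch of the effect (not from the original comments):
                //
                //     let mut local = 0;
                //     let raw = &mut local as *mut i32; // `raw`'s item sits above the local's `Uniq`
                //     local = 17;                       // direct use of the local pops `raw`'s item
                //     unsafe { *raw };                  // UB under Stacked Borrows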
                Tag::Tagged(extra.borrow_mut().new_ptr())
        let stack = Stacks::new(size, tag, Rc::clone(extra));
impl AllocationExtra<Tag> for Stacks {
    fn memory_read<'tcx>(
        alloc: &Allocation<Tag, Stacks>,
    ) -> EvalResult<'tcx> {
        trace!("read access with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes());
        alloc.extra.for_each(ptr, size, |stack, global| {
            stack.access(AccessKind::Read, ptr.tag, global)?;
    fn memory_written<'tcx>(
        alloc: &mut Allocation<Tag, Stacks>,
    ) -> EvalResult<'tcx> {
        trace!("write access with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes());
        alloc.extra.for_each(ptr, size, |stack, global| {
            stack.access(AccessKind::Write, ptr.tag, global)?;
    fn memory_deallocated<'tcx>(
        alloc: &mut Allocation<Tag, Stacks>,
    ) -> EvalResult<'tcx> {
        trace!("deallocation with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes());
        alloc.extra.for_each(ptr, size, |stack, global| {
            stack.dealloc(ptr.tag, global)
/// Retagging/reborrowing. There is some policy in here, such as which permissions
/// to grant for which references, when to add protectors, and how to realize two-phase
/// borrows in terms of the primitives above.
impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {}
trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> {
        place: MPlaceTy<'tcx, Tag>,
    ) -> EvalResult<'tcx> {
        let this = self.eval_context_mut();
        let protector = if protect { Some(this.frame().extra) } else { None };
        let ptr = place.ptr.to_ptr()?;
        trace!("reborrow: {:?} reference {} derived from {} (pointee {}): {:?}, size {}",
            kind, new_tag, ptr.tag, place.layout.ty, ptr, size.bytes());
        // Get the allocation. It might not be mutable, so we cannot use `get_mut`.
        let alloc = this.memory().get(ptr.alloc_id)?;
        alloc.check_bounds(this, ptr, size)?;
        // Update the stacks.
        // Make sure that raw pointers and mutable shared references are reborrowed "weak":
        // there could be existing unique pointers reborrowed from them that should remain valid!
        let perm = match kind {
            RefKind::Unique => Permission::Unique,
            RefKind::Raw { mutable: true } => Permission::SharedReadWrite,
            RefKind::Shared | RefKind::Raw { mutable: false } => {
                // Shared references and `*const` are a whole different kind of game: the
                // permission is not uniform across the entire range!
                // We need a frozen-sensitive reborrow.
                return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
                    // We are only ever `SharedReadOnly` inside the frozen bits.
                    let perm = if frozen { Permission::SharedReadOnly } else { Permission::SharedReadWrite };
                    let weak = perm == Permission::SharedReadWrite;
                    let item = Item { perm, tag: new_tag, protector };
                    alloc.extra.for_each(cur_ptr, size, |stack, global| {
                        stack.reborrow(cur_ptr.tag, force_weak || weak, item, global)
        debug_assert_ne!(perm, Permission::SharedReadOnly, "SharedReadOnly must be used frozen-sensitive");
        let weak = perm == Permission::SharedReadWrite;
        let item = Item { perm, tag: new_tag, protector };
        alloc.extra.for_each(ptr, size, |stack, global| {
            stack.reborrow(ptr.tag, force_weak || weak, item, global)
    /// Retags an individual pointer, returning the retagged version.
    /// Pass `RefKind::Raw { .. }` as `kind` to make this a raw pointer.
        val: ImmTy<'tcx, Tag>,
    ) -> EvalResult<'tcx, Immediate<Tag>> {
        let this = self.eval_context_mut();
        // We want a place for where the ptr *points to*, so we get one.
        let place = this.ref_to_mplace(val)?;
        let size = this.size_and_align_of_mplace(place)?
            .map(|(size, _)| size)
            .unwrap_or_else(|| place.layout.size);
        if size == Size::ZERO {
            // Nothing to do for ZSTs.
        // Compute the new borrow.
        let new_tag = match kind {
            RefKind::Raw { .. } => Tag::Untagged,
            _ => Tag::Tagged(this.memory().extra.borrow_mut().new_ptr()),
        // TODO: With `two_phase == true`, this performs a weak reborrow for a `Unique`. That
        // can lead to some possibly surprising effects: if the parent permission is
        // `SharedReadWrite`, then we now have a `Unique` in the middle of them, which "splits"
        // them in terms of what remains valid when the `Unique` gets used. Is that really
        // what we want?
        this.reborrow(place, size, kind, new_tag, /*force_weak:*/ two_phase, protect)?;
        let new_place = place.replace_tag(new_tag);
        // Handle two-phase borrows.
            assert!(kind == RefKind::Unique, "two-phase shared borrows make no sense");
            // Grant read access *to the parent pointer* with the old tag, *derived from the new tag* (`new_place`).
            // This means the old pointer has multiple items in the stack now, which otherwise cannot happen
            // for unique references -- but in this case it precisely expresses the semantics we want.
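            // (Two-phase borrows arise from nested method calls such as `vec.push(vec.len())`:
            // the autoref `&mut vec` is created first, but `vec` must still be readable through
            // the old tag until the call actually starts; the extra read item expresses exactly that.)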
            let old_tag = place.ptr.to_ptr().unwrap().tag;
            this.reborrow(new_place, size, RefKind::Shared, old_tag, /*force_weak:*/ false, /*protect:*/ false)?;
        // Return the new pointer.
        Ok(new_place.to_ref())
impl<'a, 'mir, 'tcx> EvalContextExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {}
pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> {
        place: PlaceTy<'tcx, Tag>
    ) -> EvalResult<'tcx> {
        let this = self.eval_context_mut();
        // Determine mutability and whether to add a protector.
        // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
        // making it useless.
        fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> {
                // References are simple.
                ty::Ref(_, _, MutMutable) =>
                    Some((RefKind::Unique, kind == RetagKind::FnEntry)),
                ty::Ref(_, _, MutImmutable) =>
                    Some((RefKind::Shared, kind == RetagKind::FnEntry)),
                // Raw pointers need to be enabled.
                ty::RawPtr(tym) if kind == RetagKind::Raw =>
                    Some((RefKind::Raw { mutable: tym.mutbl == MutMutable }, false)),
                // Boxes do not get a protector: protectors reflect that references outlive the call
                // they were passed in to; that's just not the case for boxes.
                ty::Adt(..) if ty.is_box() => Some((RefKind::Unique, false)),
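                // (An illustrative example: for a `&mut i32` under `RetagKind::FnEntry`,
                // `qualify` returns `Some((RefKind::Unique, true))` -- a unique reborrow
                // that also gets a protector for the duration of the call.)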
        // We need a visitor to visit all references. However, that requires
        // a `MemPlace`, so we have a fast path for reference types that
        // avoids allocating.
        if let Some((mutbl, protector)) = qualify(place.layout.ty, kind) {
            let val = this.read_immediate(this.place_to_op(place)?)?;
            let val = this.retag_reference(val, mutbl, protector, kind == RetagKind::TwoPhase)?;
            this.write_immediate(val, place)?;
        let place = this.force_allocation(place)?;
        let mut visitor = RetagVisitor { ecx: this, kind };
        visitor.visit_value(place)?;
        // The actual visitor.
        struct RetagVisitor<'ecx, 'a, 'mir, 'tcx> {
            ecx: &'ecx mut MiriEvalContext<'a, 'mir, 'tcx>,
        impl<'ecx, 'a, 'mir, 'tcx>
            MutValueVisitor<'a, 'mir, 'tcx, Evaluator<'tcx>>
            RetagVisitor<'ecx, 'a, 'mir, 'tcx>
            type V = MPlaceTy<'tcx, Tag>;
            fn ecx(&mut self) -> &mut MiriEvalContext<'a, 'mir, 'tcx> {
            // Primitives of reference type: that is the one thing we are interested in.
            fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx>
                // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
                // making it useless.
                if let Some((mutbl, protector)) = qualify(place.layout.ty, self.kind) {
                    let val = self.ecx.read_immediate(place.into())?;
                    let val = self.ecx.retag_reference(
                        self.kind == RetagKind::TwoPhase
                    self.ecx.write_immediate(val, place.into())?;