use std::cell::RefCell;
use std::collections::HashSet;
use std::fmt;
use std::num::NonZeroU64;
use std::rc::Rc;

use rustc::ty::{self, layout::Size};
use rustc::hir::{MutMutable, MutImmutable};
use rustc::mir::RetagKind;

use crate::{
    EvalResult, InterpError, MiriEvalContext, HelpersEvalContextExt, Evaluator, MutValueVisitor,
    MemoryKind, MiriMemoryKind, RangeMap, Allocation, AllocationExtra, CheckInAllocMsg,
    Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy,
};
pub type PtrId = NonZeroU64;
pub type CallId = NonZeroU64;
/// Tracking pointer provenance
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub enum Tag {
    Tagged(PtrId),
    Untagged,
}

impl fmt::Debug for Tag {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Tag::Tagged(id) => write!(f, "<{}>", id),
            Tag::Untagged => write!(f, "<untagged>"),
        }
    }
}
/// Indicates which permission is granted (by this item to some pointers).
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum Permission {
    /// Grants unique mutable access.
    Unique,
    /// Grants shared mutable access.
    SharedReadWrite,
    /// Grants shared read-only access.
    SharedReadOnly,
    /// Grants no access, but separates two groups of SharedReadWrite so they are not
    /// all considered mutually compatible.
    Disabled,
}
/// An item in the per-location borrow stack.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct Item {
    /// The permission this item grants.
    perm: Permission,
    /// The pointers the permission is granted to.
    tag: Tag,
    /// An optional protector, ensuring the item cannot get popped until the call
    /// with this `CallId` has ended.
    protector: Option<CallId>,
}

impl fmt::Debug for Item {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{:?} for {:?}", self.perm, self.tag)?;
        if let Some(call) = self.protector {
            write!(f, " (call {})", call)?;
        }
        write!(f, "]")?;
        Ok(())
    }
}
/// Extra per-location state.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Stack {
    /// Used *mostly* as a stack; never empty. Invariants:
    /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
    /// * Except for `Untagged`, no tag occurs in the stack more than once.
    borrows: Vec<Item>,
}
/// Extra per-allocation state.
#[derive(Clone, Debug)]
pub struct Stacks {
    // Even reading memory can have effects on the stack, so we need a `RefCell` here.
    stacks: RefCell<RangeMap<Stack>>,
    // Pointer to the global state.
    global: MemoryState,
}
/// Extra global state, available to the memory access hooks.
#[derive(Debug)]
pub struct GlobalState {
    next_ptr_id: PtrId,
    next_call_id: CallId,
    active_calls: HashSet<CallId>,
}

pub type MemoryState = Rc<RefCell<GlobalState>>;
/// Indicates which kind of access is being performed.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub enum AccessKind {
    Read,
    Write,
}

impl fmt::Display for AccessKind {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            AccessKind::Read => write!(f, "read access"),
            AccessKind::Write => write!(f, "write access"),
        }
    }
}
/// Indicates which kind of reference is being created.
/// Used by high-level `reborrow` to compute which permissions to grant to the
/// new pointer.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub enum RefKind {
    /// `&mut` and `Box`.
    Unique { two_phase: bool },
    /// `&` with or without interior mutability.
    Shared,
    /// `*mut`/`*const` (raw pointers).
    Raw { mutable: bool },
}

impl fmt::Display for RefKind {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            RefKind::Unique { two_phase: false } => write!(f, "unique"),
            RefKind::Unique { two_phase: true } => write!(f, "unique (two-phase)"),
            RefKind::Shared => write!(f, "shared"),
            RefKind::Raw { mutable: true } => write!(f, "raw (mutable)"),
            RefKind::Raw { mutable: false } => write!(f, "raw (constant)"),
        }
    }
}
/// Utilities for initialization and ID generation.
impl Default for GlobalState {
    fn default() -> Self {
        GlobalState {
            next_ptr_id: NonZeroU64::new(1).unwrap(),
            next_call_id: NonZeroU64::new(1).unwrap(),
            active_calls: HashSet::default(),
        }
    }
}

impl GlobalState {
    pub fn new_ptr(&mut self) -> PtrId {
        let id = self.next_ptr_id;
        self.next_ptr_id = NonZeroU64::new(id.get() + 1).unwrap();
        id
    }

    pub fn new_call(&mut self) -> CallId {
        let id = self.next_call_id;
        trace!("new_call: Assigning ID {}", id);
        self.active_calls.insert(id);
        self.next_call_id = NonZeroU64::new(id.get() + 1).unwrap();
        id
    }

    pub fn end_call(&mut self, id: CallId) {
        assert!(self.active_calls.remove(&id));
    }

    fn is_active(&self, id: CallId) -> bool {
        self.active_calls.contains(&id)
    }
}
// # Stacked Borrows Core Begin

/// We need to make at least the following things true:
///
/// U1: After creating a `Uniq`, it is at the top.
/// U2: If the top is `Uniq`, accesses must be through that `Uniq` or remove it.
/// U3: If an access happens with a `Uniq`, it requires the `Uniq` to be in the stack.
///
/// F1: After creating a `&`, the parts outside `UnsafeCell` have our `SharedReadOnly` on top.
/// F2: If a write access happens, it pops the `SharedReadOnly`. This has two pieces:
/// F2a: If a write happens granted by an item below our `SharedReadOnly`, the `SharedReadOnly`
///      gets popped.
/// F2b: No `SharedReadWrite` or `Unique` will ever be added on top of our `SharedReadOnly`.
/// F3: If an access happens with an `&` outside `UnsafeCell`,
///     it requires the `SharedReadOnly` to still be in the stack.
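
// An illustrative sketch (not part of this module) of what these invariants mean for a
// program; the exact stack contents follow from the rules above, and the violation is
// only detected when the program runs under Miri:
//
//     let mut local = 0i32;
//     let unique = &mut local;       // U1: pushes a `Uniq` for `unique` on top.
//     let raw = unique as *mut i32;  // pushes an untagged `SharedReadWrite` above it.
//     *unique = 1;                   // write via the `Uniq` pops everything above it (U2),
//                                    // including the `SharedReadWrite` for `raw`.
//     unsafe { *raw = 2; }           // ERROR: no item grants write access to `raw` (U3).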
impl Default for Tag {
    fn default() -> Tag {
        Tag::Untagged
    }
}
/// Core relation on `Permission` to define which accesses are allowed.
impl Permission {
    /// This defines, for a given permission, whether it permits the given kind of access.
    fn grants(self, access: AccessKind) -> bool {
        // Disabled grants nothing. Otherwise, all items grant read access,
        // and except for SharedReadOnly they grant write access.
        self != Permission::Disabled
            && (access == AccessKind::Read || self != Permission::SharedReadOnly)
    }
}
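
// For example (illustrative): `Unique.grants(Write)` and `SharedReadWrite.grants(Write)`
// hold, `SharedReadOnly.grants(Write)` does not, and `Disabled` grants neither `Read`
// nor `Write`.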
/// Core per-location operations: access, dealloc, reborrow.
impl<'tcx> Stack {
    /// Find the item granting the given kind of access to the given tag, and return where
    /// it is on the stack.
    fn find_granting(&self, access: AccessKind, tag: Tag) -> Option<usize> {
        self.borrows.iter()
            .enumerate() // we also need to know *where* in the stack
            .rev() // search top-to-bottom
            // Return the first item that grants access.
            // We require a permission with the right tag, ensuring U3 and F3.
            .find_map(|(idx, item)|
                if tag == item.tag && item.perm.grants(access) {
                    Some(idx)
                } else {
                    None
                }
            )
    }
    /// Find the first write-incompatible item above the given one --
    /// i.e., find the height to which the stack will be truncated when writing to `granting`.
    fn find_first_write_incompatible(&self, granting: usize) -> usize {
        let perm = self.borrows[granting].perm;
        match perm {
            Permission::SharedReadOnly =>
                bug!("Cannot use SharedReadOnly for writing"),
            Permission::Disabled =>
                bug!("Cannot use Disabled for anything"),
            Permission::Unique =>
                // On a write, everything above us is incompatible.
                granting + 1,
            Permission::SharedReadWrite => {
                // The SharedReadWrites *just* above us are compatible, so skip those.
                let mut idx = granting + 1;
                while let Some(item) = self.borrows.get(idx) {
                    if item.perm == Permission::SharedReadWrite {
                        idx += 1;
                    } else {
                        // Found first incompatible!
                        break;
                    }
                }
                idx
            }
        }
    }
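
    // A worked example (illustrative; indices are stack positions, bottom = 0):
    // for a stack [ Unique, SharedReadWrite, SharedReadWrite, SharedReadOnly ],
    // find_first_write_incompatible(0) == 1 (everything above a `Unique` is incompatible),
    // while find_first_write_incompatible(1) == 3 (the adjacent `SharedReadWrite` at
    // index 2 is compatible, but the `SharedReadOnly` at index 3 is not).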
    /// Check if the given item is protected.
    fn check_protector(item: &Item, tag: Option<Tag>, global: &GlobalState) -> EvalResult<'tcx> {
        if let Some(call) = item.protector {
            if global.is_active(call) {
                if let Some(tag) = tag {
                    return err!(MachineError(format!(
                        "not granting access to tag {:?} because incompatible item is protected: {:?}",
                        tag, item,
                    )));
                } else {
                    return err!(MachineError(format!(
                        "deallocating while item is protected: {:?}", item,
                    )));
                }
            }
        }
        Ok(())
    }
    /// Test if a memory `access` using pointer tagged `tag` is granted;
    /// if not, report an error.
    fn access(
        &mut self,
        access: AccessKind,
        tag: Tag,
        global: &GlobalState,
    ) -> EvalResult<'tcx> {
        // Two main steps: Find granting item, remove incompatible items above.

        // Step 1: Find granting item.
        let granting_idx = self.find_granting(access, tag)
            .ok_or_else(|| InterpError::MachineError(format!(
                "no item granting {} to tag {:?} found in borrow stack",
                access, tag,
            )))?;

        // Step 2: Remove incompatible items above the granting one. Make sure we do not
        // remove protected items. Behavior differs for reads and writes.
        if access == AccessKind::Write {
            // Remove everything above the write-compatible items, like a proper stack.
            // This makes sure read-only and unique pointers become invalid on write accesses
            // (ensures F2a, and ensures U2 for write accesses).
            let first_incompatible_idx = self.find_first_write_incompatible(granting_idx);
            for item in self.borrows.drain(first_incompatible_idx..).rev() {
                trace!("access: popping item {:?}", item);
                Stack::check_protector(&item, Some(tag), global)?;
            }
        } else {
            // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
            // The reason this is not following the stack discipline (by removing the first `Unique` and
            // everything on top of it) is that in `let raw = &mut *x as *mut _; let _val = *x;`, the second
            // statement would pop the `Unique` from the reborrow of the first statement, and subsequently
            // also pop the `SharedReadWrite` for `raw`.
            // This pattern occurs a lot in the standard library: create a raw pointer, then also create a
            // shared reference and use that.
            // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
            for idx in (granting_idx + 1 .. self.borrows.len()).rev() {
                let item = &mut self.borrows[idx];
                if item.perm == Permission::Unique {
                    trace!("access: disabling item {:?}", item);
                    Stack::check_protector(item, Some(tag), global)?;
                    item.perm = Permission::Disabled;
                }
            }
        }

        Ok(())
    }
    /// Deallocate a location: Like a write access, but also there must be no
    /// active protectors at all because we will remove all items.
    fn dealloc(
        &mut self,
        tag: Tag,
        global: &GlobalState,
    ) -> EvalResult<'tcx> {
        // Step 1: Find granting item.
        self.find_granting(AccessKind::Write, tag)
            .ok_or_else(|| InterpError::MachineError(format!(
                "no item granting write access for deallocation to tag {:?} found in borrow stack",
                tag,
            )))?;

        // Step 2: Remove all items. Also checks for protectors.
        for item in self.borrows.drain(..).rev() {
            Stack::check_protector(&item, None, global)?;
        }

        Ok(())
    }
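
    // For example (an illustrative sketch, detected only under Miri): a protector on a
    // function argument makes deallocation during that call an error:
    //
    //     fn evil(x: &mut i32) {                    // FnEntry retag protects `x` for this call.
    //         let raw = x as *mut i32;
    //         drop(unsafe { Box::from_raw(raw) });  // ERROR: deallocating while the item
    //     }                                         // for `x` is protected.
    //     evil(Box::leak(Box::new(0)));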
    /// `grant` helper function: test that the stack invariants are still maintained.
    fn test_invariants(&self) {
        let mut saw_shared_read_only = false;
        for item in self.borrows.iter() {
            match item.perm {
                Permission::SharedReadOnly => {
                    saw_shared_read_only = true;
                }
                // Otherwise, if we saw one before, that's a bug.
                perm if saw_shared_read_only => {
                    bug!("Found {:?} on top of a SharedReadOnly!", perm);
                }
                _ => {}
            }
        }
    }
    /// Derive a new pointer (`new`) from one with the given tag (`derived_from`).
    /// Granting is weak or strong depending on `new.perm`: weak granting does not act as
    /// an access, and adds the new item directly on top of the one it is derived
    /// from instead of all the way at the top of the stack.
    fn grant(
        &mut self,
        derived_from: Tag,
        new: Item,
        global: &GlobalState,
    ) -> EvalResult<'tcx> {
        // Figure out which access `perm` corresponds to.
        let access = if new.perm.grants(AccessKind::Write) {
            AccessKind::Write
        } else {
            AccessKind::Read
        };
        // Now we figure out which item grants our parent (`derived_from`) this kind of access.
        // We use that to determine where to put the new item.
        let granting_idx = self.find_granting(access, derived_from)
            .ok_or_else(|| InterpError::MachineError(format!(
                "trying to reborrow for {:?}, but parent tag {:?} does not have an appropriate item in the borrow stack",
                new.perm, derived_from,
            )))?;

        // Compute where to put the new item.
        // Either way, we ensure that we insert the new item in a way such that between
        // `derived_from` and the new one, there are only items *compatible with* `derived_from`.
        let new_idx = if new.perm == Permission::SharedReadWrite {
            assert!(access == AccessKind::Write, "this case only makes sense for stack-like accesses");
            // SharedReadWrite can coexist with "existing loans", meaning they don't act like a write
            // access. Instead of popping the stack, we insert the item at the place the stack would
            // be popped to (i.e., we insert it above all the write-compatible items).
            // This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`.
            self.find_first_write_incompatible(granting_idx)
        } else {
            // A "safe" reborrow for a pointer that actually expects some aliasing guarantees.
            // Here, creating a reference actually counts as an access.
            // This ensures F2b for `Unique`, by removing offending `SharedReadOnly`.
            self.access(access, derived_from, global)?;

            // We insert "as far up as possible": We know only compatible items are remaining
            // on top of `derived_from`, and we want the new item at the top so that we
            // get the strongest possible guarantees.
            // This ensures U1 and F1.
            self.borrows.len()
        };

        // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors.
        if self.borrows[new_idx - 1] == new || self.borrows.get(new_idx) == Some(&new) {
            // Optimization applies, done.
            trace!("reborrow: avoiding adding redundant item {:?}", new);
        } else {
            trace!("reborrow: adding item {:?}", new);
            self.borrows.insert(new_idx, new);
        }

        // Make sure that after all this, the stack's invariant is still maintained.
        if cfg!(debug_assertions) {
            self.test_invariants();
        }

        Ok(())
    }
}
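
// A sketch of how `grant` places new items (illustrative; assume the stack
// [ Unique(1), SharedReadWrite(2), SharedReadOnly(3) ], bottom to top):
// * Granting a new `SharedReadWrite` derived from tag 1 is weak: it is inserted at
//   index 1, right above the write-compatible block of its parent, leaving the
//   `SharedReadOnly` intact -- this is what keeps existing shared loans alive.
// * Granting a new `Unique` derived from tag 1 is strong: the implied write access
//   first pops `SharedReadWrite(2)` and `SharedReadOnly(3)`, then the new item is
//   pushed on top, ending up as [ Unique(1), Unique(new) ].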
// # Stacked Borrows Core End
/// Map per-stack operations to higher-level per-location-range operations.
impl<'tcx> Stacks {
    /// Creates a new stack with an initial tag.
    fn new(
        size: Size,
        tag: Tag,
        extra: MemoryState,
    ) -> Self {
        let item = Item { perm: Permission::Unique, tag, protector: None };
        let stack = Stack {
            borrows: vec![item],
        };
        Stacks {
            stacks: RefCell::new(RangeMap::new(size, stack)),
            global: extra,
        }
    }

    /// Call `f` on every stack in the range.
    fn for_each(
        &self,
        ptr: Pointer<Tag>,
        size: Size,
        f: impl Fn(&mut Stack, &GlobalState) -> EvalResult<'tcx>,
    ) -> EvalResult<'tcx> {
        let global = self.global.borrow();
        let mut stacks = self.stacks.borrow_mut();
        for stack in stacks.iter_mut(ptr.offset, size) {
            f(stack, &*global)?;
        }
        Ok(())
    }
}
/// Glue code to connect with Miri Machine Hooks
impl Stacks {
    pub fn new_allocation(
        size: Size,
        extra: &MemoryState,
        kind: MemoryKind<MiriMemoryKind>,
    ) -> (Self, Tag) {
        let tag = match kind {
            MemoryKind::Stack => {
                // New unique borrow. This `Uniq` is not accessible by the program,
                // so it will only ever be used when using the local directly (i.e.,
                // not through a pointer). That is, whenever we directly use a local, this will pop
                // everything else off the stack, invalidating all previous pointers,
                // and in particular, *all* raw pointers. This subsumes the explicit
                // `reset` which the blog post [1] says to perform when accessing a local.
                //
                // [1]: <https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html>
                Tag::Tagged(extra.borrow_mut().new_ptr())
            }
            _ => Tag::Untagged,
        };
        let stack = Stacks::new(size, tag, Rc::clone(extra));
        (stack, tag)
    }
}
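
// For example (an illustrative sketch; only observable under Miri): for a stack-allocated
// local, the hidden `Uniq` created here sits at the very bottom of every stack:
//
//     let mut local = 0i32;            // stack of `local`: [ Uniq(<local>) ]
//     let raw = &mut local as *mut _;  // reborrows push more items on top
//     local = 17;                      // direct use of the local accesses with `Uniq(<local>)`,
//                                      // popping everything above it -- `raw` is now invalid.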
impl AllocationExtra<Tag> for Stacks {
    #[inline(always)]
    fn memory_read<'tcx>(
        alloc: &Allocation<Tag, Stacks>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> EvalResult<'tcx> {
        trace!("read access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
        alloc.extra.for_each(ptr, size, |stack, global| {
            stack.access(AccessKind::Read, ptr.tag, global)?;
            Ok(())
        })
    }

    #[inline(always)]
    fn memory_written<'tcx>(
        alloc: &mut Allocation<Tag, Stacks>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> EvalResult<'tcx> {
        trace!("write access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
        alloc.extra.for_each(ptr, size, |stack, global| {
            stack.access(AccessKind::Write, ptr.tag, global)?;
            Ok(())
        })
    }

    #[inline(always)]
    fn memory_deallocated<'tcx>(
        alloc: &mut Allocation<Tag, Stacks>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> EvalResult<'tcx> {
        trace!("deallocation with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
        alloc.extra.for_each(ptr, size, |stack, global| {
            stack.dealloc(ptr.tag, global)
        })
    }
}
/// Retagging/reborrowing. There is some policy in here, such as which permissions
/// to grant for which references, and when to add protectors.
impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {}
trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> {
    fn reborrow(
        &mut self,
        place: MPlaceTy<'tcx, Tag>,
        size: Size,
        kind: RefKind,
        new_tag: Tag,
        protect: bool,
    ) -> EvalResult<'tcx> {
        let this = self.eval_context_mut();
        let protector = if protect { Some(this.frame().extra) } else { None };
        let ptr = place.ptr.to_ptr()?;
        trace!("reborrow: {} reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
            kind, new_tag, ptr.tag, place.layout.ty, ptr.erase_tag(), size.bytes());

        // Get the allocation. It might not be mutable, so we cannot use `get_mut`.
        let alloc = this.memory().get(ptr.alloc_id)?;
        alloc.check_bounds(this, ptr, size, CheckInAllocMsg::InboundsTest)?;
        // Update the stacks.
        // Make sure that raw pointers and mutable shared references are reborrowed "weak":
        // There could be existing unique pointers reborrowed from them that should remain valid!
        let perm = match kind {
            RefKind::Unique { two_phase: false } => Permission::Unique,
            RefKind::Unique { two_phase: true } => Permission::SharedReadWrite,
            RefKind::Raw { mutable: true } => Permission::SharedReadWrite,
            RefKind::Shared | RefKind::Raw { mutable: false } => {
                // Shared references and `*const` are a whole different kind of game, the
                // permission is not uniform across the entire range!
                // We need a frozen-sensitive reborrow.
                return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
                    // We are only ever `SharedReadOnly` inside the frozen bits.
                    let perm = if frozen { Permission::SharedReadOnly } else { Permission::SharedReadWrite };
                    let item = Item { perm, tag: new_tag, protector };
                    alloc.extra.for_each(cur_ptr, size, |stack, global| {
                        stack.grant(cur_ptr.tag, item, global)
                    })
                });
            }
        };
        let item = Item { perm, tag: new_tag, protector };
        alloc.extra.for_each(ptr, size, |stack, global| {
            stack.grant(ptr.tag, item, global)
        })
    }
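
    // A sketch of the frozen-sensitive case (illustrative): reborrowing `&(i32, Cell<i32>)`
    // grants `SharedReadOnly` on the bytes of the `i32` (they are frozen), but
    // `SharedReadWrite` on the bytes of the `Cell<i32>` (an `UnsafeCell`), so writes
    // through the `Cell` do not invalidate the shared reference.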
    /// Retags an individual pointer, returning the retagged version.
    /// `kind` indicates which kind of reference is being created.
    fn retag_reference(
        &mut self,
        val: ImmTy<'tcx, Tag>,
        kind: RefKind,
        protect: bool,
    ) -> EvalResult<'tcx, Immediate<Tag>> {
        let this = self.eval_context_mut();
        // We want a place for where the ptr *points to*, so we get one.
        let place = this.ref_to_mplace(val)?;
        let size = this.size_and_align_of_mplace(place)?
            .map(|(size, _)| size)
            .unwrap_or_else(|| place.layout.size);
        if size == Size::ZERO {
            // Nothing to do for ZSTs.
            return Ok(*val);
        }

        // Compute new borrow.
        let new_tag = match kind {
            RefKind::Raw { .. } => Tag::Untagged,
            _ => Tag::Tagged(this.memory().extra.borrow_mut().new_ptr()),
        };

        // Reborrow.
        this.reborrow(place, size, kind, new_tag, protect)?;
        let new_place = place.replace_tag(new_tag);

        // Return new pointer.
        Ok(new_place.to_ref())
    }
}
impl<'a, 'mir, 'tcx> EvalContextExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {}
pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> {
    fn retag(
        &mut self,
        kind: RetagKind,
        place: PlaceTy<'tcx, Tag>,
    ) -> EvalResult<'tcx> {
        let this = self.eval_context_mut();
        // Determine mutability and whether to add a protector.
        // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
        // making it useless.
        fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> {
            match ty.sty {
                // References are simple.
                ty::Ref(_, _, MutMutable) =>
                    Some((RefKind::Unique { two_phase: kind == RetagKind::TwoPhase }, kind == RetagKind::FnEntry)),
                ty::Ref(_, _, MutImmutable) =>
                    Some((RefKind::Shared, kind == RetagKind::FnEntry)),
                // Raw pointers need to be enabled.
                ty::RawPtr(tym) if kind == RetagKind::Raw =>
                    Some((RefKind::Raw { mutable: tym.mutbl == MutMutable }, false)),
                // Boxes do not get a protector: protectors reflect that references outlive the call
                // they were passed in to; that's just not the case for boxes.
                ty::Adt(..) if ty.is_box() => Some((RefKind::Unique { two_phase: false }, false)),
                _ => None,
            }
        }
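
        // For instance (illustrative): under `RetagKind::FnEntry`, `&mut i32` qualifies as
        // `(Unique { two_phase: false }, true)` -- protected for the duration of the call --
        // while `&i32` qualifies as `(Shared, true)`. A `*mut i32` only qualifies under
        // `RetagKind::Raw`, as `(Raw { mutable: true }, false)`, and `Box<i32>` as
        // `(Unique { two_phase: false }, false)`.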
        // We need a visitor to visit all references. However, that requires
        // a `MemPlace`, so we have a fast path for reference types that
        // avoids allocating.
        if let Some((mutbl, protector)) = qualify(place.layout.ty, kind) {
            // Fast path.
            let val = this.read_immediate(this.place_to_op(place)?)?;
            let val = this.retag_reference(val, mutbl, protector)?;
            this.write_immediate(val, place)?;
            return Ok(());
        }
        let place = this.force_allocation(place)?;

        let mut visitor = RetagVisitor { ecx: this, kind };
        visitor.visit_value(place)?;
        // The actual visitor.
        struct RetagVisitor<'ecx, 'a, 'mir, 'tcx> {
            ecx: &'ecx mut MiriEvalContext<'a, 'mir, 'tcx>,
            kind: RetagKind,
        }
        impl<'ecx, 'a, 'mir, 'tcx>
            MutValueVisitor<'a, 'mir, 'tcx, Evaluator<'tcx>>
        for
            RetagVisitor<'ecx, 'a, 'mir, 'tcx>
        {
            type V = MPlaceTy<'tcx, Tag>;

            #[inline(always)]
            fn ecx(&mut self) -> &mut MiriEvalContext<'a, 'mir, 'tcx> {
                self.ecx
            }

            // Primitives of reference type, that is the one thing we are interested in.
            fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx>
            {
                // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
                // making it useless.
                if let Some((mutbl, protector)) = qualify(place.layout.ty, self.kind) {
                    let val = self.ecx.read_immediate(place.into())?;
                    let val = self.ecx.retag_reference(
                        val,
                        mutbl,
                        protector,
                    )?;
                    self.ecx.write_immediate(val, place.into())?;
                }
                Ok(())
            }
        }

        Ok(())
    }
}