1 use std::cell::RefCell;
2 use std::collections::HashSet;
5 use rustc::ty::{self, layout::Size};
6 use rustc::hir::{Mutability, MutMutable, MutImmutable};
7 use rustc::mir::RetagKind;
10 EvalResult, EvalErrorKind, MiriEvalContext, HelpersEvalContextExt, Evaluator, MutValueVisitor,
11 MemoryKind, MiriMemoryKind, RangeMap, AllocId, Allocation, AllocationExtra,
12 Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy,
/// Logical clock value; `State::increment_clock` hands these out in increasing order.
15 pub type Timestamp = u64;
/// Identifier of a function call, used to tag `FnBarrier` stack items.
16 pub type CallId = u64;
// NOTE(review): this excerpt is incomplete — the embedded original line numbers are
// non-contiguous, so the enum header, the `Uniq` variant line, and the `match`
// scaffolding of the methods below are missing here. Verify against the full file.
18 /// Information about which kind of borrow was used to create the reference this is tagged
20 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
22 /// A unique (mutable) reference.
24 /// A shared reference. This is also used by raw pointers, which do not track details
25 /// of how or when they were created, hence the timestamp is optional.
26 /// Shr(Some(_)) does NOT mean that the destination of this reference is frozen;
27 /// that depends on the type! Only those parts outside of an `UnsafeCell` are actually
29 Shr(Option<Timestamp>),
// Returns `true` for `Shr(_)` tags (shared references and raw pointers).
34 pub fn is_shared(self) -> bool {
36 Borrow::Shr(_) => true,
// Returns `true` for `Uniq(_)` tags (unique/mutable references).
42 pub fn is_unique(self) -> bool {
44 Borrow::Uniq(_) => true,
// Default tag — body not visible in this excerpt; presumably `Borrow::Shr(None)`
// (an untracked raw-pointer tag, cf. the `None => Borrow::default()` use below) — TODO confirm.
50 impl Default for Borrow {
51 fn default() -> Self {
// NOTE(review): the `Uniq(Timestamp)` and `FnBarrier(CallId)` variant lines are
// missing from this excerpt (non-contiguous embedded line numbers); only their
// doc comments survive. Verify against the full file.
56 /// An item in the per-location borrow stack
57 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
58 pub enum BorStackItem {
59 /// Indicates the unique reference that may mutate.
61 /// Indicates that the location has been shared. Used for raw pointers, but
62 /// also for shared references. The latter *additionally* get frozen
63 /// when there is no `UnsafeCell`.
65 /// A barrier, tracking the function it belongs to by its index on the call stack
69 /// Extra per-location state
70 #[derive(Clone, Debug, PartialEq, Eq)]
72 borrows: Vec<BorStackItem>, // used as a stack; never empty
73 frozen_since: Option<Timestamp>, // virtual frozen "item" on top of the stack
// A location counts as frozen iff `frozen_since` carries a timestamp.
78 pub fn is_frozen(&self) -> bool {
79 self.frozen_since.is_some()
// NOTE(review): the `RefKind`/`AccessKind` variant lines and the `next_id` field
// are missing from this excerpt (non-contiguous embedded line numbers).
83 /// What kind of reference is being used?
84 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
88 /// & without interior mutability
90 /// * (raw pointer) or & to `UnsafeCell`
94 /// What kind of access is being performed?
95 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
102 /// Extra global state in the memory, available to the memory access hooks
104 pub struct BarrierTracking {
// Call IDs whose stack frame is still live; barriers for these IDs are "active"
// (see `new_call`/`end_call`/`is_active` below).
106 active_calls: HashSet<CallId>,
108 pub type MemoryState = Rc<RefCell<BarrierTracking>>;
110 impl Default for BarrierTracking {
111 fn default() -> Self {
114 active_calls: HashSet::default(),
119 impl BarrierTracking {
// Registers a fresh call: takes the next ID, marks it active, and (in the full
// file) bumps `next_id` and returns `id`. Those trailing lines are missing from
// this excerpt.
120 pub fn new_call(&mut self) -> CallId {
121 let id = self.next_id;
122 trace!("new_call: Assigning ID {}", id);
123 self.active_calls.insert(id);
// Ends a call; panics if the ID was not active (each ID must be ended exactly once).
128 pub fn end_call(&mut self, id: CallId) {
129 assert!(self.active_calls.remove(&id));
// A barrier is active while its call ID is still in `active_calls`.
132 fn is_active(&self, id: CallId) -> bool {
133 self.active_calls.contains(&id)
137 /// Extra global machine state
138 #[derive(Clone, Debug)]
143 impl Default for State {
144 fn default() -> Self {
// Advances the clock and hands back the pre-increment value, so each caller
// observes a distinct, monotonically increasing timestamp. (The `val` return
// line is not visible in this excerpt — TODO confirm against the full file.)
150 fn increment_clock(&mut self) -> Timestamp {
151 let val = self.clock;
152 self.clock = val + 1;
157 /// Extra per-allocation state
158 #[derive(Clone, Debug)]
160 // Even reading memory can have effects on the stack, so we need a `RefCell` here.
161 stacks: RefCell<RangeMap<Stack>>,
162 barrier_tracking: MemoryState,
// NOTE(review): the `fn deref` signature line and the `match bor { ... }`
// scaffolding are missing from this excerpt (non-contiguous embedded line numbers).
165 /// Core per-location operations: deref, access, create.
166 /// We need to make at least the following things true:
168 /// U1: After creating a Uniq, it is at the top (+unfrozen).
169 /// U2: If the top is Uniq (+unfrozen), accesses must be through that Uniq or pop it.
170 /// U3: If an access (deref sufficient?) happens with a Uniq, it requires the Uniq to be in the stack.
172 /// F1: After creating a &, the parts outside `UnsafeCell` are frozen.
173 /// F2: If a write access happens, it unfreezes.
174 /// F3: If an access (well, a deref) happens with an & outside `UnsafeCell`, it requires the location to still be frozen.
176 /// Deref `bor`: Check if the location is frozen and the tag in the stack.
177 /// This does *not* constitute an access! "Deref" refers to the `*` operator
178 /// in Rust, and includes cases like `&*x` or `(*x).foo` where no or only part
179 /// of the memory actually gets accessed. Also we cannot know if we are
180 /// going to read or write.
181 /// Returns the index of the item we matched, `None` if it was the frozen one.
182 /// `kind` indicates which kind of reference is being dereferenced.
187 ) -> Result<Option<usize>, String> {
188 // Exclude unique ref with frozen tag.
189 if let (RefKind::Unique, Borrow::Shr(Some(_))) = (kind, bor) {
190 return Err(format!("Encountered mutable reference with frozen tag ({:?})", bor));
192 // Checks related to freezing
194 Borrow::Shr(Some(bor_t)) if kind == RefKind::Frozen => {
195 // We need the location to be frozen. This ensures F3.
196 let frozen = self.frozen_since.map_or(false, |itm_t| itm_t <= bor_t);
197 return if frozen { Ok(None) } else {
198 Err(format!("Location is not frozen long enough"))
201 Borrow::Shr(_) if self.frozen_since.is_some() => {
202 return Ok(None) // Shared deref to frozen location, looking good
204 _ => {} // Not sufficient, go on looking.
206 // If we got here, we have to look for our item in the stack.
207 for (idx, &itm) in self.borrows.iter().enumerate().rev() {
209 (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => {
210 // Found matching unique item. This satisfies U3.
213 (BorStackItem::Shr, Borrow::Shr(_)) => {
214 // Found matching shared/raw item.
217 // Go on looking. We ignore barriers! When an `&mut` and an `&` alias,
218 // dereferencing the `&` is still possible (to reborrow), but doing
223 // If we got here, we did not find our item. We have to error to satisfy U3.
224 Err(format!("Borrow being dereferenced ({:?}) does not exist on the stack", bor))
// NOTE(review): the `fn access` signature line, the early `return Ok(())` for
// frozen reads, and the `match` scaffolding are missing from this excerpt
// (non-contiguous embedded line numbers).
227 /// Perform an actual memory access using `bor`. We do not know any types here
228 /// or whether things should be frozen, but we *do* know if this is reading
234 barrier_tracking: &BarrierTracking,
235 ) -> EvalResult<'tcx> {
236 // Check if we can match the frozen "item".
237 // Not possible on writes!
238 if self.is_frozen() {
239 if kind == AccessKind::Read {
240 // When we are frozen, we just accept all reads. No harm in this.
241 // The deref already checked that `Uniq` items are in the stack, and that
242 // the location is frozen if it should be.
245 trace!("access: Unfreezing");
247 // Unfreeze on writes. This ensures F2.
248 self.frozen_since = None;
249 // Pop the stack until we have something matching.
250 while let Some(&itm) = self.borrows.last() {
252 (BorStackItem::FnBarrier(call), _) if barrier_tracking.is_active(call) => {
253 return err!(MachineError(format!(
254 "Stopping looking for borrow being accessed ({:?}) because of barrier ({})",
258 (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => {
259 // Found matching unique item. Continue after the match.
261 (BorStackItem::Shr, _) if kind == AccessKind::Read => {
262 // When reading, everything can use a shared item!
263 // We do not want to do this when writing: Writing to an `&mut`
264 // should reaffirm its exclusivity (i.e., make sure it is
265 // on top of the stack). Continue after the match.
267 (BorStackItem::Shr, Borrow::Shr(_)) => {
268 // Found matching shared item. Continue after the match.
271 // Pop this, go on. This ensures U2.
272 let itm = self.borrows.pop().unwrap();
273 trace!("access: Popping {:?}", itm);
277 // If we got here, we found a matching item. Congratulations!
278 // However, we are not done yet: If this access is deallocating, we must make sure
279 // there are no active barriers remaining on the stack.
280 if kind == AccessKind::Dealloc {
281 for &itm in self.borrows.iter().rev() {
283 BorStackItem::FnBarrier(call) if barrier_tracking.is_active(call) => {
284 return err!(MachineError(format!(
285 "Deallocating with active barrier ({})", call
295 // If we got here, we did not find our item.
296 err!(MachineError(format!(
297 "Borrow being accessed ({:?}) does not exist on the stack",
// NOTE(review): several lines (match-arm closers, the early `return` after the
// redundancy check, the `barrier` doc comment) are missing from this excerpt
// (non-contiguous embedded line numbers).
302 /// Initiate `bor`; mostly this means pushing.
303 /// This operation cannot fail; it is up to the caller to ensure that the precondition
304 /// is met: We cannot push `Uniq` onto frozen stacks.
305 /// `kind` indicates which kind of reference is being created.
306 fn create(&mut self, bor: Borrow, kind: RefKind) {
307 // When creating a frozen reference, freeze. This ensures F1.
308 // We also do *not* push anything else to the stack, making sure that no other kind
309 // of access (like writing through raw pointers) is permitted.
310 if kind == RefKind::Frozen {
311 let bor_t = match bor {
312 Borrow::Shr(Some(t)) => t,
313 _ => bug!("Creating illegal borrow {:?} for frozen ref", bor),
315 // It is possible that we already are frozen (e.g. if we just pushed a barrier,
316 // the redundancy check would not have kicked in).
317 match self.frozen_since {
318 Some(loc_t) => assert!(loc_t <= bor_t, "Trying to freeze location for longer than it was already frozen"),
320 trace!("create: Freezing");
321 self.frozen_since = Some(bor_t);
326 assert!(self.frozen_since.is_none(), "Trying to create non-frozen reference to frozen location");
328 // Push new item to the stack.
329 let itm = match bor {
330 Borrow::Uniq(t) => BorStackItem::Uniq(t),
331 Borrow::Shr(_) => BorStackItem::Shr,
333 if *self.borrows.last().unwrap() == itm {
334 // This is just an optimization, no functional change: Avoid stacking
335 // multiple `Shr` on top of each other.
336 assert!(bor.is_shared());
337 trace!("create: Sharing a shared location is a NOP");
340 trace!("create: Pushing {:?}", itm);
341 self.borrows.push(itm);
// Pushes an `FnBarrier` for `call`, deduplicating an identical barrier on top.
346 fn barrier(&mut self, call: CallId) {
347 let itm = BorStackItem::FnBarrier(call);
348 if *self.borrows.last().unwrap() == itm {
349 // This is just an optimization, no functional change: Avoid stacking
350 // multiple identical barriers on top of each other.
351 // This can happen when a function receives several shared references
353 trace!("barrier: Avoiding redundant extra barrier");
355 trace!("barrier: Pushing barrier for call {}", call);
356 self.borrows.push(itm);
// NOTE(review): method signature lines (`fn deref`, `fn access`, `fn reborrow`),
// `Ok(())` returns, and some match arms are missing from this excerpt
// (non-contiguous embedded line numbers).
361 /// Higher-level per-location operations: deref, access, reborrow.
363 /// Check that this stack is fine with being dereferenced
366 ptr: Pointer<Borrow>,
369 ) -> EvalResult<'tcx> {
370 trace!("deref for tag {:?} as {:?}: {:?}, size {}",
371 ptr.tag, kind, ptr, size.bytes());
372 let stacks = self.stacks.borrow();
373 for stack in stacks.iter(ptr.offset, size) {
374 stack.deref(ptr.tag, kind).map_err(EvalErrorKind::MachineError)?;
379 /// `ptr` got used, reflect that in the stack.
382 ptr: Pointer<Borrow>,
385 ) -> EvalResult<'tcx> {
386 trace!("{:?} access of tag {:?}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes());
387 // Even reads can have a side-effect, by invalidating other references.
388 // This is fundamentally necessary since `&mut` asserts that there
389 // are no accesses through other references, not even reads.
390 let barrier_tracking = self.barrier_tracking.borrow();
391 let mut stacks = self.stacks.borrow_mut();
392 for stack in stacks.iter_mut(ptr.offset, size) {
393 stack.access(ptr.tag, kind, &*barrier_tracking)?;
398 /// Reborrow the given pointer to the new tag for the given kind of reference.
399 /// This works on `&self` because we might encounter references to constant memory.
402 ptr: Pointer<Borrow>,
404 mut barrier: Option<CallId>,
407 ) -> EvalResult<'tcx> {
408 assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique);
409 trace!("reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}",
410 ptr.tag, new_bor, new_kind, ptr, size.bytes());
411 if new_kind == RefKind::Raw {
412 // No barrier for raw, including `&UnsafeCell`. They can rightfully
413 // alias with `&mut`.
414 // FIXME: This means that the `dereferenceable` attribute on non-frozen shared
415 // references is incorrect! They are dereferenceable when the function is
416 // called, but might become non-dereferenceable during the course of execution.
417 // Also see [1], [2].
419 // [1]: <https://internals.rust-lang.org/t/
420 // is-it-possible-to-be-memory-safe-with-deallocated-self/8457/8>,
421 // [2]: <https://lists.llvm.org/pipermail/llvm-dev/2018-July/124555.html>
424 let barrier_tracking = self.barrier_tracking.borrow();
425 let mut stacks = self.stacks.borrow_mut();
426 for stack in stacks.iter_mut(ptr.offset, size) {
427 // Access source `ptr`, create new ref.
428 let ptr_idx = stack.deref(ptr.tag, new_kind).map_err(EvalErrorKind::MachineError)?;
429 // If we can deref the new tag already, and if that tag lives higher on
430 // the stack than the one we come from, just use that.
431 // IOW, we check if `new_bor` *already* is "derived from" `ptr.tag`.
432 // This also checks frozenness, if required.
433 let bor_redundant = barrier.is_none() &&
434 match (ptr_idx, stack.deref(new_bor, new_kind)) {
435 // If the new borrow works with the frozen item, or else if it lives
436 // above the old one in the stack, our job here is done.
437 (_, Ok(None)) => true,
438 (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true,
439 // Otherwise we need to create a new borrow.
443 assert!(new_bor.is_shared(), "A unique reborrow can never be redundant");
444 trace!("reborrow is redundant");
447 // We need to do some actual work.
448 let access_kind = if new_kind == RefKind::Unique {
453 stack.access(ptr.tag, access_kind, &*barrier_tracking)?;
454 if let Some(call) = barrier {
457 stack.create(new_bor, new_kind);
// NOTE(review): `size` parameters of the memory hooks and several closing braces
// are missing from this excerpt (non-contiguous embedded line numbers).
464 impl AllocationExtra<Borrow, MemoryState> for Stacks {
// New allocations start with a single `Shr` item per location.
466 fn memory_allocated<'tcx>(size: Size, extra: &MemoryState) -> Self {
468 borrows: vec![BorStackItem::Shr],
472 stacks: RefCell::new(RangeMap::new(size, stack)),
473 barrier_tracking: Rc::clone(extra),
// The three memory hooks below map directly onto the three `AccessKind`s.
478 fn memory_read<'tcx>(
479 alloc: &Allocation<Borrow, Stacks>,
480 ptr: Pointer<Borrow>,
482 ) -> EvalResult<'tcx> {
483 alloc.extra.access(ptr, size, AccessKind::Read)
487 fn memory_written<'tcx>(
488 alloc: &mut Allocation<Borrow, Stacks>,
489 ptr: Pointer<Borrow>,
491 ) -> EvalResult<'tcx> {
492 alloc.extra.access(ptr, size, AccessKind::Write)
496 fn memory_deallocated<'tcx>(
497 alloc: &mut Allocation<Borrow, Stacks>,
498 ptr: Pointer<Borrow>,
500 ) -> EvalResult<'tcx> {
501 alloc.extra.access(ptr, size, AccessKind::Dealloc)
506 /// Pushes the first item to the stacks.
507 pub(crate) fn first_item(
// Replaces the initial lone `Shr` item with `itm` on every per-location stack.
512 for stack in self.stacks.get_mut().iter_mut(Size::ZERO, size) {
513 assert!(stack.borrows.len() == 1);
514 assert_eq!(stack.borrows.pop().unwrap(), BorStackItem::Shr);
515 stack.borrows.push(itm);
// NOTE(review): the `fn reborrow` signature line and the closure/branch closers
// are missing from this excerpt (non-contiguous embedded line numbers).
520 impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {}
521 trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> {
524 place: MPlaceTy<'tcx, Borrow>,
528 ) -> EvalResult<'tcx> {
529 let this = self.eval_context_mut();
530 let ptr = place.ptr.to_ptr()?;
// `frame().extra` holds the current call's `CallId`; only set when a barrier is requested.
531 let barrier = if fn_barrier { Some(this.frame().extra) } else { None };
532 trace!("reborrow: Creating new reference for {:?} (pointee {}): {:?}",
533 ptr, place.layout.ty, new_bor);
535 // Get the allocation. It might not be mutable, so we cannot use `get_mut`.
536 let alloc = this.memory().get(ptr.alloc_id)?;
537 alloc.check_bounds(this, ptr, size)?;
538 // Update the stacks.
539 if let Borrow::Shr(Some(_)) = new_bor {
540 // Reference that cares about freezing. We need a frozen-sensitive reborrow.
541 this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
542 let kind = if frozen { RefKind::Frozen } else { RefKind::Raw };
543 alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind)
546 // Just treat this as one big chunk.
547 let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw };
548 alloc.extra.reborrow(ptr, size, barrier, new_bor, kind)?;
// NOTE(review): the `fn retag_reference` signature line, the ZST early-return
// body, and the two-phase `if` condition are missing from this excerpt
// (non-contiguous embedded line numbers).
553 /// Retag an individual pointer, returning the retagged version.
554 /// `mutbl` can be `None` to make this a raw pointer.
557 val: ImmTy<'tcx, Borrow>,
558 mutbl: Option<Mutability>,
561 ) -> EvalResult<'tcx, Immediate<Borrow>> {
562 let this = self.eval_context_mut();
563 // We want a place for where the ptr *points to*, so we get one.
564 let place = this.ref_to_mplace(val)?;
565 let size = this.size_and_align_of_mplace(place)?
566 .map(|(size, _)| size)
567 .unwrap_or_else(|| place.layout.size);
568 if size == Size::ZERO {
569 // Nothing to do for ZSTs.
573 // Compute new borrow.
574 let time = this.machine.stacked_borrows.increment_clock();
575 let new_bor = match mutbl {
576 Some(MutMutable) => Borrow::Uniq(time),
577 Some(MutImmutable) => Borrow::Shr(Some(time)),
578 None => Borrow::default(),
// Perform the reborrow, then tag the place with the new borrow.
582 this.reborrow(place, size, fn_barrier, new_bor)?;
583 let new_place = place.with_tag(new_bor);
584 // Handle two-phase borrows.
586 assert!(mutbl == Some(MutMutable), "two-phase shared borrows make no sense");
587 // We immediately share it, to allow read accesses
588 let two_phase_time = this.machine.stacked_borrows.increment_clock();
589 let two_phase_bor = Borrow::Shr(Some(two_phase_time));
590 this.reborrow(new_place, size, /*fn_barrier*/false, two_phase_bor)?;
594 Ok(new_place.to_ref())
// NOTE(review): the `id: AllocId` parameter line, the non-Stack match arm, and
// the final `Borrow::Uniq(time)` return are missing from this excerpt
// (non-contiguous embedded line numbers).
598 impl<'a, 'mir, 'tcx> EvalContextExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {}
599 pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> {
600 fn tag_new_allocation(
603 kind: MemoryKind<MiriMemoryKind>,
605 let this = self.eval_context_mut();
606 let time = match kind {
607 MemoryKind::Stack => {
608 // New unique borrow. This `Uniq` is not accessible by the program,
609 // so it will only ever be used when using the local directly (i.e.,
610 // not through a pointer). IOW, whenever we directly use a local this will pop
611 // everything else off the stack, invalidating all previous pointers
612 // and, in particular, *all* raw pointers. This subsumes the explicit
613 // `reset` which the blog post [1] says to perform when accessing a local.
615 // [1] https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html
616 this.machine.stacked_borrows.increment_clock()
619 // Nothing to do for everything else
620 return Borrow::default()
623 // Make this the active borrow for this allocation
624 let alloc = this.memory_mut().get_mut(id).expect("This is a new allocation, it must still exist");
625 let size = Size::from_bytes(alloc.bytes.len() as u64);
626 alloc.extra.first_item(BorStackItem::Uniq(time), size);
// NOTE(review): the `fn ptr_dereference` signature line, the raw-deref early
// return body, and the closing `Ok(())` are missing from this excerpt
// (non-contiguous embedded line numbers).
630 /// Called for value-to-place conversion. `mutability` is `None` for raw pointers.
632 /// Note that this does NOT mean that all this memory will actually get accessed/referenced!
633 /// We could be in the middle of `&(*var).1`.
636 place: MPlaceTy<'tcx, Borrow>,
638 mutability: Option<Mutability>,
639 ) -> EvalResult<'tcx> {
640 let this = self.eval_context_ref();
641 trace!("ptr_dereference: Accessing {} reference for {:?} (pointee {})",
642 if let Some(mutability) = mutability { format!("{:?}", mutability) } else { format!("raw") },
643 place.ptr, place.layout.ty);
644 let ptr = place.ptr.to_ptr()?;
645 if mutability.is_none() {
646 // No further checks on raw derefs -- only the access itself will be checked.
650 // Get the allocation
651 let alloc = this.memory().get(ptr.alloc_id)?;
652 alloc.check_bounds(this, ptr, size)?;
653 // If we got here, we do some checking, *but* we leave the tag unchanged.
654 if let Borrow::Shr(Some(_)) = ptr.tag {
655 assert_eq!(mutability, Some(MutImmutable));
656 // We need a frozen-sensitive check
657 this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
658 let kind = if frozen { RefKind::Frozen } else { RefKind::Raw };
659 alloc.extra.deref(cur_ptr, size, kind)
662 // Just treat this as one big chunk
663 let kind = if mutability == Some(MutMutable) { RefKind::Unique } else { RefKind::Raw };
664 alloc.extra.deref(ptr, size, kind)?;
// NOTE(review): the `fn retag` signature line, the `match ty.sty` header in
// `qualify`, the visitor's `kind` field, and several closing braces are missing
// from this excerpt (non-contiguous embedded line numbers).
674 place: PlaceTy<'tcx, Borrow>
675 ) -> EvalResult<'tcx> {
676 let this = self.eval_context_mut();
677 // Determine mutability and whether to add a barrier.
678 // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
679 // making it useless.
680 fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(Option<Mutability>, bool)> {
682 // References are simple
683 ty::Ref(_, _, mutbl) => Some((Some(mutbl), kind == RetagKind::FnEntry)),
684 // Raw pointers need to be enabled
685 ty::RawPtr(..) if kind == RetagKind::Raw => Some((None, false)),
686 // Boxes do not get a barrier: Barriers reflect that references outlive the call
687 // they were passed in to; that's just not the case for boxes.
688 ty::Adt(..) if ty.is_box() => Some((Some(MutMutable), false)),
693 // We need a visitor to visit all references. However, that requires
694 // a `MemPlace`, so we have a fast path for reference types that
695 // avoids allocating.
696 if let Some((mutbl, barrier)) = qualify(place.layout.ty, kind) {
698 let val = this.read_immediate(this.place_to_op(place)?)?;
699 let val = this.retag_reference(val, mutbl, barrier, kind == RetagKind::TwoPhase)?;
700 this.write_immediate(val, place)?;
// Slow path: force the place into memory and walk all contained references.
703 let place = this.force_allocation(place)?;
705 let mut visitor = RetagVisitor { ecx: this, kind };
706 visitor.visit_value(place)?;
708 // The actual visitor
709 struct RetagVisitor<'ecx, 'a, 'mir, 'tcx> {
710 ecx: &'ecx mut MiriEvalContext<'a, 'mir, 'tcx>,
713 impl<'ecx, 'a, 'mir, 'tcx>
714 MutValueVisitor<'a, 'mir, 'tcx, Evaluator<'tcx>>
716 RetagVisitor<'ecx, 'a, 'mir, 'tcx>
718 type V = MPlaceTy<'tcx, Borrow>;
721 fn ecx(&mut self) -> &mut MiriEvalContext<'a, 'mir, 'tcx> {
725 // Primitives of reference type, that is the one thing we are interested in.
726 fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx>
728 // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
729 // making it useless.
730 if let Some((mutbl, barrier)) = qualify(place.layout.ty, self.kind) {
731 let val = self.ecx.read_immediate(place.into())?;
732 let val = self.ecx.retag_reference(
736 self.kind == RetagKind::TwoPhase
738 self.ecx.write_immediate(val, place.into())?;