//! Implements "Stacked Borrows". See <https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md>
//! for further information.
+use log::trace;
use std::cell::RefCell;
use std::fmt;
use std::num::NonZeroU64;
-use log::trace;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::Mutability;
&self,
ptr: Pointer<Tag>,
size: Size,
- global: &GlobalState,
- f: impl Fn(Pointer<Tag>, &mut Stack, &GlobalState) -> InterpResult<'tcx>,
+ f: impl Fn(Pointer<Tag>, &mut Stack) -> InterpResult<'tcx>,
) -> InterpResult<'tcx> {
let mut stacks = self.stacks.borrow_mut();
for (offset, stack) in stacks.iter_mut(ptr.offset, size) {
let mut cur_ptr = ptr;
cur_ptr.offset = offset;
- f(cur_ptr, stack, &*global)?;
+ f(cur_ptr, stack)?;
+ }
+ Ok(())
+ }
+
/// Call `f` on every stack in the range.
///
/// Mutable-reference twin of `for_each`: because we hold `&mut self`, we can
/// reach the stacks via `RefCell::get_mut` and skip the dynamic borrow check
/// entirely (no chance of a `BorrowMutError` at runtime).
fn for_each_mut(
    &mut self,
    ptr: Pointer<Tag>,
    size: Size,
    f: impl Fn(Pointer<Tag>, &mut Stack) -> InterpResult<'tcx>,
) -> InterpResult<'tcx> {
    // `get_mut` statically guarantees exclusive access -- no runtime borrow flag.
    let stacks = self.stacks.get_mut();
    // Visit each (offset, stack) pair covered by [ptr.offset, ptr.offset + size).
    for (offset, stack) in stacks.iter_mut(ptr.offset, size) {
        // Rebase the pointer so `f` sees the exact offset of this stack.
        let mut cur_ptr = ptr;
        cur_ptr.offset = offset;
        // Propagate the first error; remaining stacks are not visited.
        f(cur_ptr, stack)?;
    }
    Ok(())
}
}
#[inline(always)]
/// Hook invoked on every memory read: checks the read against the borrow
/// stacks of all affected locations.
///
/// Takes `&self` only, so the global state is obtained through a (shared)
/// `RefCell` borrow and the per-location work goes through `for_each`.
/// Errors from `Stack::access` (a Stacked Borrows violation) are propagated.
pub fn memory_read<'tcx>(
    &self,
    ptr: Pointer<Tag>,
    size: Size,
    extra: &MemoryExtra,
) -> InterpResult<'tcx> {
    trace!("read access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
    // Borrow the global state once, outside the per-stack closure.
    let global = &*extra.borrow();
    // A read is a `Read` access on every stack in the accessed range.
    self.for_each(ptr, size, move |ptr, stack| stack.access(AccessKind::Read, ptr, global))
}
#[inline(always)]
/// Hook invoked on every memory write: checks the write against the borrow
/// stacks of all affected locations.
///
/// Unlike `memory_read`, this has `&mut self` and `&mut MemoryExtra`, so it
/// can use `get_mut`/`for_each_mut` and avoid the `RefCell` runtime borrow
/// checks on both the global state and the stacks.
pub fn memory_written<'tcx>(
    &mut self,
    ptr: Pointer<Tag>,
    size: Size,
    extra: &mut MemoryExtra,
) -> InterpResult<'tcx> {
    trace!("write access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
    // Exclusive access to the global state -- no dynamic borrow needed.
    let global = extra.get_mut();
    // A write is a `Write` access on every stack in the accessed range.
    self.for_each_mut(ptr, size, move |ptr, stack| stack.access(AccessKind::Write, ptr, global))
}
#[inline(always)]
extra: &mut MemoryExtra,
) -> InterpResult<'tcx> {
trace!("deallocation with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
- self.for_each(ptr, size, extra.get_mut(), |ptr, stack, global| stack.dealloc(ptr, global))
+ let global = extra.get_mut();
+ self.for_each_mut(ptr, size, move |ptr, stack| stack.dealloc(ptr, global))
}
}
new_tag: Tag,
protect: bool,
) -> InterpResult<'tcx> {
+ // Nothing to do for ZSTs.
+ if size == Size::ZERO {
+ trace!(
+ "reborrow of size 0: {} reference {:?} derived from {:?} (pointee {})",
+ kind,
+ new_tag,
+ place.ptr,
+ place.layout.ty,
+ );
+ return Ok(());
+ }
+
let this = self.eval_context_mut();
let protector = if protect { Some(this.frame().extra.call_id) } else { None };
let ptr = place.ptr.assert_ptr();
size.bytes()
);
- // Get the allocation. We need both the allocation and the MemoryExtra, so we cannot use `&mut`.
- // FIXME: make `get_alloc_extra_mut` also return `&mut MemoryExtra`.
- let extra = this.memory.get_alloc_extra(ptr.alloc_id)?;
- let stacked_borrows =
- extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data");
- let global = this.memory.extra.stacked_borrows.as_ref().unwrap().borrow();
// Update the stacks.
// Make sure that raw pointers and mutable shared references are reborrowed "weak":
// There could be existing unique pointers reborrowed from them that should remain valid!
// Shared references and *const are a whole different kind of game, the
// permission is not uniform across the entire range!
// We need a frozen-sensitive reborrow.
+ // We have to use shared references to alloc/memory_extra here since
+ // `visit_freeze_sensitive` needs to access the global state.
+ let extra = this.memory.get_alloc_extra(ptr.alloc_id)?;
+ let stacked_borrows =
+ extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data");
+ let global = this.memory.extra.stacked_borrows.as_ref().unwrap().borrow();
return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
// We are only ever `SharedReadOnly` inside the frozen bits.
let perm = if frozen {
Permission::SharedReadWrite
};
let item = Item { perm, tag: new_tag, protector };
- stacked_borrows.for_each(cur_ptr, size, &*global, |cur_ptr, stack, global| {
- stack.grant(cur_ptr, item, global)
+ stacked_borrows.for_each(cur_ptr, size, |cur_ptr, stack| {
+ stack.grant(cur_ptr, item, &*global)
})
});
}
};
+ // Here we can avoid `borrow()` calls because we have mutable references.
+ // Note that this asserts that the allocation is mutable -- but since we are creating a
+ // mutable pointer, that seems reasonable.
+ let (alloc_extra, memory_extra) = this.memory.get_alloc_extra_mut(ptr.alloc_id)?;
+ let stacked_borrows =
+ alloc_extra.stacked_borrows.as_mut().expect("we should have Stacked Borrows data");
+ let global = memory_extra.stacked_borrows.as_mut().unwrap().get_mut();
let item = Item { perm, tag: new_tag, protector };
- stacked_borrows.for_each(ptr, size, &*global, |ptr, stack, global| stack.grant(ptr, item, global))
+ stacked_borrows.for_each_mut(ptr, size, |ptr, stack| stack.grant(ptr, item, global))
}
/// Retags an indidual pointer, returning the retagged version.
// We can see dangling ptrs in here e.g. after a Box's `Unique` was
// updated using "self.0 = ..." (can happen in Box::from_raw) so we cannot ICE; see miri#1050.
let place = this.mplace_access_checked(place, Some(Align::from_bytes(1).unwrap()))?;
- // Nothing to do for ZSTs. We use `is_bits` here because we *do* need to retag even ZSTs
- // when there actually is a tag (to avoid inheriting a tag that would let us access more
- // than 0 bytes).
- if size == Size::ZERO && place.ptr.is_bits() {
- return Ok(*val);
- }
// Compute new borrow.
let new_tag = {
- let mut mem_extra = this.memory.extra.stacked_borrows.as_ref().unwrap().borrow_mut();
+ let mem_extra = this.memory.extra.stacked_borrows.as_mut().unwrap().get_mut();
match kind {
// Give up tracking for raw pointers.
RefKind::Raw { .. } if !mem_extra.track_raw => Tag::Untagged,