map: BTreeMap<Range, T>,
}
-impl<T> Default for RangeMap<T> {
- #[inline(always)]
- fn default() -> Self {
- RangeMap::new()
- }
-}
-
// The derived `Ord` impl sorts lexicographically: first by the first field, then,
// if the first fields are equal, by the second field.
// This is exactly what we need for our purposes, since a range query on a BTreeSet/BTreeMap
// will give us all ranges whose `start` lies in the queried interval.
}
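// Illustration, a minimal self-contained sketch (the names here are made up, not
// part of this file): with the derived lexicographic `Ord`, `BTreeMap::range`
// yields exactly the entries whose `start` falls in the queried interval, in order.
#[test]
fn derived_ord_range_query_sketch() {
    use std::collections::BTreeMap;
    #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
    struct R { start: u64, end: u64 }
    let mut m = BTreeMap::new();
    m.insert(R { start: 0, end: 4 }, 'a');
    m.insert(R { start: 4, end: 10 }, 'b');
    m.insert(R { start: 10, end: 20 }, 'c');
    // All entries with `start` in 0..10: 'a' and 'b', but not 'c'.
    let hits: Vec<char> = m
        .range(R { start: 0, end: 0 }..R { start: 10, end: 0 })
        .map(|(_, &v)| v)
        .collect();
    assert_eq!(hits, vec!['a', 'b']);
}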
impl<T> RangeMap<T> {
+ /// Create a new RangeMap for the given size, and with the given initial value used for
+ /// the entire range.
#[inline(always)]
- pub fn new() -> RangeMap<T> {
- RangeMap { map: BTreeMap::new() }
+ pub fn new(size: Size, init: T) -> RangeMap<T> {
+ let mut map = RangeMap { map: BTreeMap::new() };
+ if size.bytes() > 0 {
+ map.map.insert(Range { start: 0, end: size.bytes() }, init);
+ }
+ map
}
fn iter_with_range<'a>(
        &'a self,
        offset: u64,
        len: u64,
    ) -> impl Iterator<Item = (&'a Range, &'a T)> + 'a {
}
+ /// Provide read-only iteration over everything in the given range. This does
+ /// *not* split items if they overlap with the edges. Do not use this to mutate
+ /// through interior mutability.
pub fn iter<'a>(&'a self, offset: Size, len: Size) -> impl Iterator<Item = &'a T> + 'a {
self.iter_with_range(offset.bytes(), len.bytes()).map(|(_, data)| data)
}
/// Provide mutable iteration over everything in the given range. As a side-effect,
/// this will split entries in the map that are only partially hit by the given range,
/// to make sure that when they are mutated, the effect is constrained to the given range.
- /// If there are gaps, leave them be.
- pub fn iter_mut_with_gaps<'a>(
+ pub fn iter_mut<'a>(
&'a mut self,
offset: Size,
len: Size,
    ) -> impl Iterator<Item = &'a mut T> + 'a {
}
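// A minimal usage sketch for `iter_mut` (hypothetical values; assumes the API
// above): mutation through the iterator only affects the requested sub-range,
// because entries overlapping its edges are split first.
fn iter_mut_split_sketch() {
    let mut map = RangeMap::new(Size::from_bytes(4), 0u8);
    for v in map.iter_mut(Size::from_bytes(1), Size::from_bytes(2)) {
        // The single initial entry [0,4) is split into [0,1), [1,3), [3,4);
        // we only ever see (and mutate) the middle part.
        *v = 7;
    }
    let snapshot: Vec<u8> = (0..4)
        .map(|i| *map.iter(Size::from_bytes(i), Size::from_bytes(1)).next().unwrap())
        .collect();
    assert_eq!(snapshot, vec![0, 7, 7, 0]);
}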
-
- /// Provide a mutable iterator over everything in the given range, with the same side-effects as
- /// iter_mut_with_gaps. Furthermore, if there are gaps between ranges, fill them with the given default
- /// before yielding them in the iterator.
- /// This is also how you insert.
- pub fn iter_mut<'a>(&'a mut self, offset: Size, len: Size) -> impl Iterator<Item = &'a mut T> + 'a
- where
- T: Clone + Default,
- {
- if len.bytes() > 0 {
- let offset = offset.bytes();
- let len = len.bytes();
-
- // Do a first iteration to collect the gaps
- let mut gaps = Vec::new();
- let mut last_end = offset;
- for (range, _) in self.iter_with_range(offset, len) {
- if last_end < range.start {
- gaps.push(Range {
- start: last_end,
- end: range.start,
- });
- }
- last_end = range.end;
- }
- if last_end < offset + len {
- gaps.push(Range {
- start: last_end,
- end: offset + len,
- });
- }
-
- // Add default for all gaps
- for gap in gaps {
- let old = self.map.insert(gap, Default::default());
- assert!(old.is_none());
- }
- }
-
- // Now provide mutable iteration
- self.iter_mut_with_gaps(offset, len)
- }
-
- pub fn retain<F>(&mut self, mut f: F)
- where
- F: FnMut(&T) -> bool,
- {
- let mut remove = Vec::new();
- for (range, data) in &self.map {
- if !f(data) {
- remove.push(*range);
- }
- }
-
- for range in remove {
- self.map.remove(&range);
- }
- }
}
#[cfg(test)]
mod tests {
    use super::*;
/// Query the map at every offset in the range and collect the results.
- fn to_vec<T: Copy>(map: &RangeMap<T>, offset: u64, len: u64, default: Option<T>) -> Vec<T> {
+ fn to_vec<T: Copy>(map: &RangeMap<T>, offset: u64, len: u64) -> Vec<T> {
(offset..offset + len)
.into_iter()
.map(|i| map
.iter(Size::from_bytes(i), Size::from_bytes(1))
.next()
.map(|&t| t)
- .or(default)
.unwrap()
)
        .collect()
}
#[test]
fn basic_insert() {
- let mut map = RangeMap::<i32>::new();
+ let mut map = RangeMap::<i32>::new(Size::from_bytes(20), -1);
// Insert
for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(1)) {
*x = 42;
}
// Check
- assert_eq!(to_vec(&map, 10, 1, None), vec![42]);
+ assert_eq!(to_vec(&map, 10, 1), vec![42]);
// Insert with size 0
for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(0)) {
    *x = 19;
}
for x in map.iter_mut(Size::from_bytes(11), Size::from_bytes(0)) {
*x = 19;
}
- assert_eq!(to_vec(&map, 10, 2, Some(-1)), vec![42, -1]);
+ assert_eq!(to_vec(&map, 10, 2), vec![42, -1]);
}
#[test]
fn gaps() {
- let mut map = RangeMap::<i32>::new();
+ let mut map = RangeMap::<i32>::new(Size::from_bytes(20), -1);
for x in map.iter_mut(Size::from_bytes(11), Size::from_bytes(1)) {
*x = 42;
}
for x in map.iter_mut(Size::from_bytes(15), Size::from_bytes(1)) {
    *x = 43;
}
assert_eq!(
- to_vec(&map, 10, 10, Some(-1)),
+ to_vec(&map, 10, 10),
vec![-1, 42, -1, -1, -1, 43, -1, -1, -1, -1]
);
- // Now request a range that needs three gaps filled
for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(10)) {
if *x < 42 {
*x = 23;
    }
}
assert_eq!(
- to_vec(&map, 10, 10, None),
+ to_vec(&map, 10, 10),
vec![23, 42, 23, 23, 23, 43, 23, 23, 23, 23]
);
- assert_eq!(to_vec(&map, 13, 5, None), vec![23, 23, 43, 23, 23]);
+ assert_eq!(to_vec(&map, 13, 5), vec![23, 23, 43, 23, 23]);
+
+ // Now request a range that goes beyond the initial size
+ for x in map.iter_mut(Size::from_bytes(15), Size::from_bytes(10)) {
+ *x = 19;
+ }
+ assert_eq!(map.iter(Size::from_bytes(19), Size::from_bytes(1))
+ .map(|&t| t).collect::<Vec<_>>(), vec![19]);
+ assert_eq!(map.iter(Size::from_bytes(20), Size::from_bytes(1))
+ .map(|&t| t).collect::<Vec<_>>(), vec![]);
}
}
use std::cell::RefCell;
+use std::collections::HashSet;
+use std::rc::Rc;
use rustc::ty::{self, layout::Size};
use rustc::hir::{Mutability, MutMutable, MutImmutable};
};
pub type Timestamp = u64;
+pub type CallId = u64;
/// Information about which kind of borrow was used to create the reference this is tagged
/// with.
/// when there is no `UnsafeCell`.
Shr,
-    /// A barrier, tracking the function it belongs to by its index on the call stack
+    /// A barrier, tracking the function it belongs to by its unique call ID
- #[allow(dead_code)] // for future use
- FnBarrier(usize)
-}
-
-impl BorStackItem {
- #[inline(always)]
- pub fn is_fn_barrier(self) -> bool {
- match self {
- BorStackItem::FnBarrier(_) => true,
- _ => false,
- }
- }
+ FnBarrier(CallId)
}
/// Extra per-location state
#[derive(Clone, Debug)]
pub struct Stack {
    borrows: Vec<BorStackItem>, // used as a stack; never empty
    frozen_since: Option<Timestamp>, // virtual frozen "item" on top of the stack
}
-impl Default for Stack {
- fn default() -> Self {
- Stack {
- borrows: vec![BorStackItem::Shr],
- frozen_since: None,
- }
- }
-}
-
impl Stack {
#[inline(always)]
    pub fn is_frozen(&self) -> bool {
        self.frozen_since.is_some()
    }
}
/// Which kind of reference is being created? Reconstructed from its usages below.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum RefKind {
    /// `&mut`.
    Unique,
    /// `&` without interior mutability.
    Frozen,
    /// Raw pointers, and `&` with interior mutability.
    Raw,
}
+/// What kind of access is being performed?
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub enum AccessKind {
+ Read,
+ Write,
+ Dealloc,
+}
+
+/// Extra global state in the memory, available to the memory access hooks
+#[derive(Debug)]
+pub struct BarrierTracking {
+ next_id: CallId,
+ active_calls: HashSet<CallId>,
+}
+pub type MemoryState = Rc<RefCell<BarrierTracking>>;
+
+impl Default for BarrierTracking {
+ fn default() -> Self {
+ BarrierTracking {
+ next_id: 0,
+ active_calls: HashSet::default(),
+ }
+ }
+}
+
+impl BarrierTracking {
+ pub fn new_call(&mut self) -> CallId {
+ let id = self.next_id;
+ trace!("new_call: Assigning ID {}", id);
+ self.active_calls.insert(id);
+ self.next_id += 1;
+ id
+ }
+
+ pub fn end_call(&mut self, id: CallId) {
+ assert!(self.active_calls.remove(&id));
+ }
+
+ fn is_active(&self, id: CallId) -> bool {
+ self.active_calls.contains(&id)
+ }
+}
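// A usage sketch for `BarrierTracking` (hypothetical driver code): a barrier ID is
// only "active" between `new_call` and the matching `end_call`, which is what lets
// function barriers expire once the call they belong to has returned.
fn barrier_lifecycle_sketch() {
    let mut tracking = BarrierTracking::default();
    let call = tracking.new_call(); // entering a function
    assert!(tracking.is_active(call));
    tracking.end_call(call); // the function returned
    assert!(!tracking.is_active(call));
}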
+
/// Extra global machine state
#[derive(Clone, Debug)]
pub struct State {
clock: Timestamp
}
-impl State {
- pub fn new() -> State {
+impl Default for State {
+ fn default() -> Self {
State { clock: 0 }
}
+}
+impl State {
fn increment_clock(&mut self) -> Timestamp {
let val = self.clock;
self.clock = val + 1;
        val
    }
/// Extra per-allocation state
-#[derive(Clone, Debug, Default)]
+#[derive(Clone, Debug)]
pub struct Stacks {
// Even reading memory can have effects on the stack, so we need a `RefCell` here.
stacks: RefCell<RangeMap<Stack>>,
+ barrier_tracking: MemoryState,
}
/// Core per-location operations: deref, access, create.
impl<'tcx> Stack {
    /// Check if `bor` could be dereferenced here. This does *not* constitute
    /// an access; there is no assertion about whether we are
    /// going to read or write.
    /// Returns the index of the item we matched, `None` if it was the frozen one.
    /// `kind` indicates which kind of reference is being dereferenced.
- fn deref(&self, bor: Borrow, kind: RefKind) -> Result<Option<usize>, String> {
+ fn deref(
+ &self,
+ bor: Borrow,
+ kind: RefKind,
+ ) -> Result<Option<usize>, String> {
// Exclude unique ref with frozen tag.
if let (RefKind::Unique, Borrow::Shr(Some(_))) = (kind, bor) {
            return Err(format!("Encountered mutable reference with frozen tag ({:?})", bor));
        }
// If we got here, we have to look for our item in the stack.
for (idx, &itm) in self.borrows.iter().enumerate().rev() {
match (itm, bor) {
- (BorStackItem::FnBarrier(_), _) => break,
(BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => {
// Found matching unique item. This satisfies U3.
                    return Ok(Some(idx))
                }
                (BorStackItem::Shr, Borrow::Shr(_)) => {
                    // Found matching shared/raw item.
return Ok(Some(idx))
}
- // Go on looking.
+ // Go on looking. We ignore barriers! When an `&mut` and an `&` alias,
+ // dereferencing the `&` is still possible (to reborrow), but doing
+ // an access is not.
_ => {}
}
}
// If we got here, we did not find our item. We have to error to satisfy U3.
- Err(format!(
- "Borrow being dereferenced ({:?}) does not exist on the stack, or is guarded by a barrier",
- bor
- ))
+ Err(format!("Borrow being dereferenced ({:?}) does not exist on the stack", bor))
}
/// Perform an actual memory access using `bor`. We do not know any types here
/// or whether things should be frozen, but we *do* know if this is reading
/// or writing.
- fn access(&mut self, bor: Borrow, is_write: bool) -> EvalResult<'tcx> {
+ fn access(
+ &mut self,
+ bor: Borrow,
+ kind: AccessKind,
+ barrier_tracking: &BarrierTracking,
+ ) -> EvalResult<'tcx> {
// Check if we can match the frozen "item".
// Not possible on writes!
if self.is_frozen() {
- if !is_write {
+ if kind == AccessKind::Read {
// When we are frozen, we just accept all reads. No harm in this.
// The deref already checked that `Uniq` items are in the stack, and that
                // the location is frozen if it should be.
                return Ok(())
            }
        }
        // Unfreeze on writes. This ensures F2.
        self.frozen_since = None;
// Pop the stack until we have something matching.
while let Some(&itm) = self.borrows.last() {
match (itm, bor) {
- (BorStackItem::FnBarrier(_), _) => break,
+ (BorStackItem::FnBarrier(call), _) if barrier_tracking.is_active(call) => {
+ return err!(MachineError(format!(
+ "Stopping looking for borrow being accessed ({:?}) because of barrier ({})",
+ bor, call
+ )))
+ }
(BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => {
- // Found matching unique item.
- return Ok(())
+ // Found matching unique item. Continue after the match.
}
- (BorStackItem::Shr, _) if !is_write => {
+ (BorStackItem::Shr, _) if kind == AccessKind::Read => {
// When reading, everything can use a shared item!
// We do not want to do this when writing: Writing to an `&mut`
// should reaffirm its exclusivity (i.e., make sure it is
- // on top of the stack).
- return Ok(())
+ // on top of the stack). Continue after the match.
}
(BorStackItem::Shr, Borrow::Shr(_)) => {
- // Found matching shared item.
- return Ok(())
+ // Found matching shared item. Continue after the match.
}
_ => {
- // Pop this. This ensures U2.
+ // Pop this, go on. This ensures U2.
let itm = self.borrows.pop().unwrap();
trace!("access: Popping {:?}", itm);
+ continue
}
}
+ // If we got here, we found a matching item. Congratulations!
+ // However, we are not done yet: If this access is deallocating, we must make sure
+ // there are no active barriers remaining on the stack.
+ if kind == AccessKind::Dealloc {
+ for &itm in self.borrows.iter().rev() {
+ match itm {
+ BorStackItem::FnBarrier(call) if barrier_tracking.is_active(call) => {
+ return err!(MachineError(format!(
+ "Deallocating with active barrier ({})", call
+ )))
+ }
+ _ => {},
+ }
+ }
+ }
+ // NOW we are done.
+ return Ok(())
}
// If we got here, we did not find our item.
err!(MachineError(format!(
- "Borrow being accessed ({:?}) does not exist on the stack, or is guarded by a barrier",
+ "Borrow being accessed ({:?}) does not exist on the stack",
bor
)))
}
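    // A worked sketch of the discipline above, using hypothetical timestamps and
    // an active call `c` (assumes `Stack` and `BarrierTracking` as in this file):
    fn access_sketch() {
        let mut tracking = BarrierTracking::default();
        let c = tracking.new_call();
        let mut stack = Stack {
            borrows: vec![
                BorStackItem::Shr,
                BorStackItem::Uniq(1),
                BorStackItem::FnBarrier(c),
                BorStackItem::Uniq(2), // top of the stack
            ],
            frozen_since: None,
        };
        // Reading through `Uniq(2)` matches the top item; nothing is popped.
        assert!(stack.access(Borrow::Uniq(2), AccessKind::Read, &tracking).is_ok());
        // Writing through `Uniq(1)` pops `Uniq(2)`, then hits the active barrier: error.
        assert!(stack.access(Borrow::Uniq(1), AccessKind::Write, &tracking).is_err());
    }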
    /// Initiate `bor`; mostly this means pushing. This operation cannot fail;
    /// it is up to the caller to ensure that the precondition
    /// is met: We cannot push `Uniq` onto frozen stacks.
/// `kind` indicates which kind of reference is being created.
fn create(&mut self, bor: Borrow, kind: RefKind) {
- // First, push the item. We do this even if we will later freeze, because we
- // will allow mutation of shared data at the expense of unfreezing.
if self.frozen_since.is_some() {
- // A frozen location, this should be impossible!
- bug!("We should never try pushing to a frozen stack");
+ // A frozen location? Possible if we create a barrier, then push again.
+ assert!(bor.is_shared(), "We should never try creating a unique borrow for a frozen stack");
+ trace!("create: Not doing anything on frozen location");
+ return;
}
- // First, push.
+ // First, push. We do this even if we will later freeze, because we
+ // will allow mutation of shared data at the expense of unfreezing.
let itm = match bor {
Borrow::Uniq(t) => BorStackItem::Uniq(t),
Borrow::Shr(_) => BorStackItem::Shr,
};
if *self.borrows.last().unwrap() == itm {
+ // This is just an optimization, no functional change: Avoid stacking
+ // multiple `Shr` on top of each other.
assert!(bor.is_shared());
trace!("create: Sharing a shared location is a NOP");
        } else {
            // This ensures U1.
            trace!("create: Pushing {:?}", itm);
            self.borrows.push(itm);
        }
        // Then, maybe freeze.
        if let (RefKind::Frozen, Borrow::Shr(Some(bor_t))) = (kind, bor) {
            trace!("create: Freezing");
            self.frozen_since = Some(bor_t);
        }
    }
+
+ /// Add a barrier
+ fn barrier(&mut self, call: CallId) {
+ let itm = BorStackItem::FnBarrier(call);
+ if *self.borrows.last().unwrap() == itm {
+ // This is just an optimization, no functional change: Avoid stacking
+ // multiple identical barriers on top of each other.
+ // This can happen when a function receives several shared references
+ // that overlap.
+ trace!("barrier: Avoiding redundant extra barrier");
+ } else {
+ trace!("barrier: Pushing barrier for call {}", call);
+ self.borrows.push(itm);
+ }
+ }
}
/// Higher-level per-location operations: deref, access, reborrow.
impl<'tcx> Stacks {
    fn deref(
        &self,
        ptr: Pointer<Borrow>,
        size: Size,
        kind: RefKind,
    ) -> EvalResult<'tcx> {
trace!("deref for tag {:?} as {:?}: {:?}, size {}",
ptr.tag, kind, ptr, size.bytes());
- let mut stacks = self.stacks.borrow_mut();
- // We need `iter_mut` because `iter` would skip gaps!
- for stack in stacks.iter_mut(ptr.offset, size) {
+ let stacks = self.stacks.borrow();
+ for stack in stacks.iter(ptr.offset, size) {
stack.deref(ptr.tag, kind).map_err(EvalErrorKind::MachineError)?;
}
Ok(())
    }

    fn access(
        &self,
ptr: Pointer<Borrow>,
size: Size,
- is_write: bool,
+ kind: AccessKind,
) -> EvalResult<'tcx> {
- trace!("{} access of tag {:?}: {:?}, size {}",
- if is_write { "read" } else { "write" },
- ptr.tag, ptr, size.bytes());
+ trace!("{:?} access of tag {:?}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes());
// Even reads can have a side-effect, by invalidating other references.
// This is fundamentally necessary since `&mut` asserts that there
// are no accesses through other references, not even reads.
+ let barrier_tracking = self.barrier_tracking.borrow();
let mut stacks = self.stacks.borrow_mut();
for stack in stacks.iter_mut(ptr.offset, size) {
- stack.access(ptr.tag, is_write)?;
+ stack.access(ptr.tag, kind, &*barrier_tracking)?;
}
Ok(())
}
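    // Sketch of the pattern this catches, in plain Rust (hypothetical function):
    // a read through `x` reasserts `x` by popping everything above it on the
    // per-location stack, which invalidates the outstanding reborrow.
    fn read_side_effect_sketch(x: &mut u8) {
        let _b = &mut *x; // retag pushes Uniq(_b) above Uniq(x)
        let _val = *x;    // a *read* through x pops Uniq(_b) again
        // Using `_b` after this point would be an error: the borrow checker
        // rejects it statically in this simple form, and Stacked Borrows rejects
        // the same pattern dynamically when it is hidden behind raw pointers.
    }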
    fn reborrow(
        &self,
ptr: Pointer<Borrow>,
size: Size,
+ mut barrier: Option<CallId>,
new_bor: Borrow,
new_kind: RefKind,
) -> EvalResult<'tcx> {
assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique);
trace!("reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}",
ptr.tag, new_bor, new_kind, ptr, size.bytes());
+ if new_kind == RefKind::Raw {
+ // No barrier for raw, including `&UnsafeCell`. They can rightfully
+ // alias with `&mut`.
+ // FIXME: This means that the `dereferenceable` attribute on non-frozen shared
+ // references is incorrect! They are dereferenceable when the function is
+ // called, but might become non-dereferenceable during the course of execution.
+ // Also see [1], [2].
+ //
+ // [1]: <https://internals.rust-lang.org/t/
+ // is-it-possible-to-be-memory-safe-with-deallocated-self/8457/8>,
+ // [2]: <https://lists.llvm.org/pipermail/llvm-dev/2018-July/124555.html>
+ barrier = None;
+ }
+ let barrier_tracking = self.barrier_tracking.borrow();
let mut stacks = self.stacks.borrow_mut();
for stack in stacks.iter_mut(ptr.offset, size) {
// Access source `ptr`, create new ref.
            let ptr_idx = stack.deref(ptr.tag, new_kind).map_err(EvalErrorKind::MachineError)?;
            // If we can deref the new tag already, and it lives higher up on
            // the stack than the one we come from, just use that.
// IOW, we check if `new_bor` *already* is "derived from" `ptr.tag`.
// This also checks frozenness, if required.
- let bor_redundant = match (ptr_idx, stack.deref(new_bor, new_kind)) {
- // If the new borrow works with the frozen item, or else if it lives
- // above the old one in the stack, our job here is done.
- (_, Ok(None)) => true,
- (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true,
- // Otherwise we need to create a new borrow.
- _ => false,
- };
+ let bor_redundant = barrier.is_none() &&
+ match (ptr_idx, stack.deref(new_bor, new_kind)) {
+ // If the new borrow works with the frozen item, or else if it lives
+ // above the old one in the stack, our job here is done.
+ (_, Ok(None)) => true,
+ (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true,
+ // Otherwise we need to create a new borrow.
+ _ => false,
+ };
if bor_redundant {
assert!(new_bor.is_shared(), "A unique reborrow can never be redundant");
trace!("reborrow is redundant");
continue;
}
// We need to do some actual work.
- stack.access(ptr.tag, new_kind == RefKind::Unique)?;
+ let access_kind = if new_kind == RefKind::Unique {
+ AccessKind::Write
+ } else {
+ AccessKind::Read
+ };
+ stack.access(ptr.tag, access_kind, &*barrier_tracking)?;
+ if let Some(call) = barrier {
+ stack.barrier(call);
+ }
stack.create(new_bor, new_kind);
}
Ok(())
}
/// Hooks and glue
-impl AllocationExtra<Borrow> for Stacks {
+impl AllocationExtra<Borrow, MemoryState> for Stacks {
+ #[inline(always)]
+ fn memory_allocated<'tcx>(size: Size, extra: &MemoryState) -> Self {
+ let stack = Stack {
+ borrows: vec![BorStackItem::Shr],
+ frozen_since: None,
+ };
+ Stacks {
+ stacks: RefCell::new(RangeMap::new(size, stack)),
+ barrier_tracking: Rc::clone(extra),
+ }
+ }
+
#[inline(always)]
fn memory_read<'tcx>(
alloc: &Allocation<Borrow, Stacks>,
ptr: Pointer<Borrow>,
size: Size,
) -> EvalResult<'tcx> {
- alloc.extra.access(ptr, size, /*is_write*/false)
+ alloc.extra.access(ptr, size, AccessKind::Read)
}
#[inline(always)]
    fn memory_written<'tcx>(
        alloc: &mut Allocation<Borrow, Stacks>,
        ptr: Pointer<Borrow>,
size: Size,
) -> EvalResult<'tcx> {
- alloc.extra.access(ptr, size, /*is_write*/true)
+ alloc.extra.access(ptr, size, AccessKind::Write)
}
#[inline(always)]
    fn memory_deallocated<'tcx>(
        alloc: &mut Allocation<Borrow, Stacks>,
        ptr: Pointer<Borrow>,
size: Size,
) -> EvalResult<'tcx> {
- // This is like mutating
- alloc.extra.access(ptr, size, /*is_write*/true)
- // FIXME: Error out of there are any barriers?
+ alloc.extra.access(ptr, size, AccessKind::Dealloc)
}
}
impl<'tcx> Stacks {
/// Pushes the first item to the stacks.
- pub fn first_item(
+ pub(crate) fn first_item(
&mut self,
itm: BorStackItem,
size: Size
) {
- assert!(!itm.is_fn_barrier());
for stack in self.stacks.get_mut().iter_mut(Size::ZERO, size) {
assert!(stack.borrows.len() == 1);
            assert_eq!(stack.borrows.pop().unwrap(), BorStackItem::Shr);
            stack.borrows.push(itm);
        }
    }
}

pub trait EvalContextExt<'tcx> {
    fn reborrow(
        &mut self,
place: MPlaceTy<'tcx, Borrow>,
size: Size,
+ fn_barrier: bool,
new_bor: Borrow
) -> EvalResult<'tcx, Pointer<Borrow>>;
    fn retag_reference(
        &mut self,
ptr: ImmTy<'tcx, Borrow>,
mutbl: Mutability,
+ fn_barrier: bool,
) -> EvalResult<'tcx, Immediate<Borrow>>;
    fn retag(
        &mut self,
        fn_entry: bool,
        place: PlaceTy<'tcx, Borrow>,
    ) -> EvalResult<'tcx>;
}

impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> {
    fn escape_to_raw(
        &mut self,
        place: MPlaceTy<'tcx, Borrow>,
size: Size,
) -> EvalResult<'tcx> {
- self.reborrow(place, size, Borrow::default())?;
+ self.reborrow(place, size, /*fn_barrier*/ false, Borrow::default())?;
Ok(())
}
    fn reborrow(
        &mut self,
place: MPlaceTy<'tcx, Borrow>,
size: Size,
+ fn_barrier: bool,
new_bor: Borrow
) -> EvalResult<'tcx, Pointer<Borrow>> {
let ptr = place.ptr.to_ptr()?;
let new_ptr = Pointer::new_with_tag(ptr.alloc_id, ptr.offset, new_bor);
+ let barrier = if fn_barrier { Some(self.frame().extra) } else { None };
trace!("reborrow: Creating new reference for {:?} (pointee {}): {:?}",
ptr, place.layout.ty, new_bor);
// Reference that cares about freezing. We need a frozen-sensitive reborrow.
self.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
let kind = if frozen { RefKind::Frozen } else { RefKind::Raw };
- alloc.extra.reborrow(cur_ptr, size, new_bor, kind)
+ alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind)
})?;
} else {
// Just treat this as one big chunk.
let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw };
- alloc.extra.reborrow(ptr, size, new_bor, kind)?;
+ alloc.extra.reborrow(ptr, size, barrier, new_bor, kind)?;
}
Ok(new_ptr)
}
    fn retag_reference(
        &mut self,
val: ImmTy<'tcx, Borrow>,
mutbl: Mutability,
+ fn_barrier: bool,
) -> EvalResult<'tcx, Immediate<Borrow>> {
// We want a place for where the ptr *points to*, so we get one.
let place = self.ref_to_mplace(val)?;
};
// Reborrow.
- let new_ptr = self.reborrow(place, size, new_bor)?;
+ let new_ptr = self.reborrow(place, size, fn_barrier, new_bor)?;
// Return new ptr
let new_place = MemPlace { ptr: Scalar::Ptr(new_ptr), ..*place };
fn retag(
&mut self,
- _fn_entry: bool,
+ fn_entry: bool,
place: PlaceTy<'tcx, Borrow>
) -> EvalResult<'tcx> {
- // TODO: Honor `fn_entry`.
+ // Determine mutability and whether to add a barrier.
+ // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
+ // making it useless.
+ fn qualify(ty: ty::Ty<'_>, fn_entry: bool) -> Option<(Mutability, bool)> {
+ match ty.sty {
+ // References are simple
+ ty::Ref(_, _, mutbl) => Some((mutbl, fn_entry)),
+ // Boxes do not get a barrier: Barriers reflect that references outlive the call
+ // they were passed in to; that's just not the case for boxes.
+ ty::Adt(..) if ty.is_box() => Some((MutMutable, false)),
+ _ => None,
+ }
+ }
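+        // For instance, a sketch of the resulting mapping (not exhaustive):
+        //   `&mut T` at fn entry  => Some((MutMutable, true))    -- unique, with barrier
+        //   `&T`     at fn entry  => Some((MutImmutable, true))  -- shared, with barrier
+        //   `Box<T>` anywhere     => Some((MutMutable, false))   -- unique, no barrier
+        //   raw pointers, others  => None                        -- not retagged here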
// We need a visitor to visit all references. However, that requires
// a `MemPlace`, so we have a fast path for reference types that
// avoids allocating.
- // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
- // making it useless.
- if let Some(mutbl) = match place.layout.ty.sty {
- ty::Ref(_, _, mutbl) => Some(mutbl),
- ty::Adt(..) if place.layout.ty.is_box() => Some(MutMutable),
- _ => None, // handled with the general case below
- } {
+ if let Some((mutbl, barrier)) = qualify(place.layout.ty, fn_entry) {
// fast path
let val = self.read_immediate(self.place_to_op(place)?)?;
- let val = self.retag_reference(val, mutbl)?;
+ let val = self.retag_reference(val, mutbl, barrier)?;
self.write_immediate(val, place)?;
return Ok(());
}
let place = self.force_allocation(place)?;
- let mut visitor = RetagVisitor { ecx: self };
+ let mut visitor = RetagVisitor { ecx: self, fn_entry };
visitor.visit_value(place)?;
// The actual visitor
struct RetagVisitor<'ecx, 'a, 'mir, 'tcx> {
ecx: &'ecx mut MiriEvalContext<'a, 'mir, 'tcx>,
+ fn_entry: bool,
}
impl<'ecx, 'a, 'mir, 'tcx>
    MutValueVisitor<'a, 'mir, 'tcx, Evaluator<'tcx>>
    for RetagVisitor<'ecx, 'a, 'mir, 'tcx>
{
// Cannot use `builtin_deref` because that reports *immutable* for `Box`,
// making it useless.
- let mutbl = match place.layout.ty.sty {
- ty::Ref(_, _, mutbl) => mutbl,
- ty::Adt(..) if place.layout.ty.is_box() => MutMutable,
- _ => return Ok(()), // nothing to do
- };
- let val = self.ecx.read_immediate(place.into())?;
- let val = self.ecx.retag_reference(val, mutbl)?;
- self.ecx.write_immediate(val, place.into())?;
+ if let Some((mutbl, barrier)) = qualify(place.layout.ty, self.fn_entry) {
+ let val = self.ecx.read_immediate(place.into())?;
+ let val = self.ecx.retag_reference(val, mutbl, barrier)?;
+ self.ecx.write_immediate(val, place.into())?;
+ }
Ok(())
}
}