+//! Implements "Stacked Borrows". See <https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md>
+//! for further information.
+
use std::cell::RefCell;
-use std::collections::HashSet;
+use std::fmt;
+use std::num::NonZeroU64;
use std::rc::Rc;
-use rustc::ty::{self, layout::Size};
-use rustc::hir::{Mutability, MutMutable, MutImmutable};
-use rustc::mir::RetagKind;
+use log::trace;
-use crate::{
- EvalResult, InterpError, MiriEvalContext, HelpersEvalContextExt, Evaluator, MutValueVisitor,
- MemoryKind, MiriMemoryKind, RangeMap, AllocId, Allocation, AllocationExtra,
- Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy,
-};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_middle::mir::RetagKind;
+use rustc_middle::ty;
+use rustc_target::abi::{Align, LayoutOf, Size};
+use rustc_hir::Mutability;
-pub type Timestamp = u64;
-pub type CallId = u64;
+use crate::*;
-/// Information about which kind of borrow was used to create the reference this is tagged with.
-#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
-pub enum Borrow {
- /// A unique (mutable) reference.
- Uniq(Timestamp),
- /// An aliasing reference. This is also used by raw pointers, which do not track details
- /// of how or when they were created, hence the timestamp is optional.
- /// `Shr(Some(_))` does *not* mean that the destination of this reference is frozen;
- /// that depends on the type! Only those parts outside of an `UnsafeCell` are actually
- /// frozen.
- Alias(Option<Timestamp>),
-}
+pub type PtrId = NonZeroU64;
+pub type CallId = NonZeroU64;
+pub type AllocExtra = Stacks;
-impl Borrow {
- #[inline(always)]
- pub fn is_aliasing(self) -> bool {
- match self {
- Borrow::Alias(_) => true,
- _ => false,
- }
- }
+/// Tracking pointer provenance
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
+pub enum Tag {
+ Tagged(PtrId),
+ Untagged,
+}
- #[inline(always)]
- pub fn is_unique(self) -> bool {
+impl fmt::Debug for Tag {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
- Borrow::Uniq(_) => true,
- _ => false,
+ Tag::Tagged(id) => write!(f, "<{}>", id),
+ Tag::Untagged => write!(f, "<untagged>"),
}
}
}
-impl Default for Borrow {
- fn default() -> Self {
- Borrow::Alias(None)
- }
+/// Indicates which permission is granted (by this item to some pointers)
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub enum Permission {
+ /// Grants unique mutable access.
+ Unique,
+ /// Grants shared mutable access.
+ SharedReadWrite,
+ /// Grants shared read-only access.
+ SharedReadOnly,
+ /// Grants no access, but separates two groups of SharedReadWrite so they are not
+ /// all considered mutually compatible.
+ Disabled,
}
/// An item in the per-location borrow stack.
-#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
-pub enum BorStackItem {
- /// Indicates the unique reference that may mutate.
- Uniq(Timestamp),
- /// Indicates that the location has been mutably shared. Used for raw pointers as
- /// well as for unfrozen shared references.
- Raw,
- /// A barrier, tracking the function it belongs to by its index on the call stack.
- FnBarrier(CallId)
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Item {
+ /// The permission this item grants.
+ perm: Permission,
+ /// The pointers the permission is granted to.
+ tag: Tag,
+ /// An optional protector, ensuring the item cannot get popped until the call
+ /// with this `CallId` has returned.
+ protector: Option<CallId>,
+}
+
+impl fmt::Debug for Item {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "[{:?} for {:?}", self.perm, self.tag)?;
+ if let Some(call) = self.protector {
+ write!(f, " (call {})", call)?;
+ }
+ write!(f, "]")?;
+ Ok(())
+ }
}
/// Extra per-location state.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Stack {
- /// Used as the stack; never empty.
- borrows: Vec<BorStackItem>,
- /// A virtual frozen "item" on top of the stack.
- frozen_since: Option<Timestamp>,
+ /// Used *mostly* as a stack; never empty.
+ /// Invariants:
+ /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
+ /// * Except for `Untagged`, no tag occurs in the stack more than once.
+ borrows: Vec<Item>,
}
-impl Stack {
- #[inline(always)]
- pub fn is_frozen(&self) -> bool {
- self.frozen_since.is_some()
- }
+/// Extra per-allocation state.
+#[derive(Clone, Debug)]
+pub struct Stacks {
+ // Even reading memory can have effects on the stack, so we need a `RefCell` here.
+ stacks: RefCell<RangeMap<Stack>>,
+ // Pointer to global state
+ global: MemoryExtra,
}
-/// Indicates which kind of reference is being used.
-#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
-pub enum RefKind {
- /// `&mut`.
- Unique,
- /// `&` without interior mutability.
- Frozen,
- /// `*` (raw pointer) or `&` to `UnsafeCell`.
- Raw,
+/// Extra global state, available to the memory access hooks.
+#[derive(Debug)]
+pub struct GlobalState {
+ /// Next unused pointer ID (tag).
+ next_ptr_id: PtrId,
+ /// Table storing the "base" tag for each allocation.
+ /// The base tag is the one used for the initial pointer.
+ /// We need this in a separate table to handle cyclic statics.
+ base_ptr_ids: FxHashMap<AllocId, Tag>,
+ /// Next unused call ID (for protectors).
+ next_call_id: CallId,
+ /// Those call IDs corresponding to functions that are still running.
+ active_calls: FxHashSet<CallId>,
+ /// The pointer ID to trace.
+ tracked_pointer_tag: Option<PtrId>,
+ /// The call ID to trace.
+ tracked_call_id: Option<CallId>,
+ /// Whether to track raw pointers.
+ track_raw: bool,
}
+/// Memory extra state gives us interior mutable access to the global state.
+pub type MemoryExtra = Rc<RefCell<GlobalState>>;
/// Indicates which kind of access is being performed.
-#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub enum AccessKind {
Read,
Write,
- Dealloc,
}
-/// Extra global state in the memory, available to the memory access hooks.
-#[derive(Debug)]
-pub struct BarrierTracking {
- next_id: CallId,
- active_calls: HashSet<CallId>,
+impl fmt::Display for AccessKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ AccessKind::Read => write!(f, "read access"),
+ AccessKind::Write => write!(f, "write access"),
+ }
+ }
+}
+
+/// Indicates which kind of reference is being created.
+/// Used by high-level `reborrow` to compute which permissions to grant to the
+/// new pointer.
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
+pub enum RefKind {
+ /// `&mut` and `Box`.
+ Unique { two_phase: bool },
+ /// `&` with or without interior mutability.
+ Shared,
+ /// `*mut`/`*const` (raw pointers).
+ Raw { mutable: bool },
}
-pub type MemoryState = Rc<RefCell<BarrierTracking>>;
-impl Default for BarrierTracking {
- fn default() -> Self {
- BarrierTracking {
- next_id: 0,
- active_calls: HashSet::default(),
+impl fmt::Display for RefKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ RefKind::Unique { two_phase: false } => write!(f, "unique"),
+ RefKind::Unique { two_phase: true } => write!(f, "unique (two-phase)"),
+ RefKind::Shared => write!(f, "shared"),
+ RefKind::Raw { mutable: true } => write!(f, "raw (mutable)"),
+ RefKind::Raw { mutable: false } => write!(f, "raw (constant)"),
}
}
}
-impl BarrierTracking {
+/// Utilities for initialization and ID generation
+impl GlobalState {
+ pub fn new(tracked_pointer_tag: Option<PtrId>, tracked_call_id: Option<CallId>, track_raw: bool) -> Self {
+ GlobalState {
+ next_ptr_id: NonZeroU64::new(1).unwrap(),
+ base_ptr_ids: FxHashMap::default(),
+ next_call_id: NonZeroU64::new(1).unwrap(),
+ active_calls: FxHashSet::default(),
+ tracked_pointer_tag,
+ tracked_call_id,
+ track_raw,
+ }
+ }
+
+ fn new_ptr(&mut self) -> PtrId {
+ let id = self.next_ptr_id;
+ if Some(id) == self.tracked_pointer_tag {
+ register_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(id));
+ }
+ self.next_ptr_id = NonZeroU64::new(id.get() + 1).unwrap();
+ id
+ }
+
pub fn new_call(&mut self) -> CallId {
- let id = self.next_id;
+ let id = self.next_call_id;
trace!("new_call: Assigning ID {}", id);
- self.active_calls.insert(id);
- self.next_id += 1;
+ if Some(id) == self.tracked_call_id {
+ register_diagnostic(NonHaltingDiagnostic::CreatedCallId(id));
+ }
+ assert!(self.active_calls.insert(id));
+ self.next_call_id = NonZeroU64::new(id.get() + 1).unwrap();
id
}
fn is_active(&self, id: CallId) -> bool {
self.active_calls.contains(&id)
}
-}
-
-/// Extra global machine state.
-#[derive(Clone, Debug)]
-pub struct State {
- clock: Timestamp
-}
-impl Default for State {
- fn default() -> Self {
- State { clock: 0 }
+ pub fn global_base_ptr(&mut self, id: AllocId) -> Tag {
+ self.base_ptr_ids.get(&id).copied().unwrap_or_else(|| {
+ let tag = Tag::Tagged(self.new_ptr());
+ trace!("New allocation {:?} has base tag {:?}", id, tag);
+ self.base_ptr_ids.insert(id, tag).unwrap_none();
+ tag
+ })
}
}
-impl State {
- fn increment_clock(&mut self) -> Timestamp {
- let val = self.clock;
- self.clock = val + 1;
- val
- }
+/// Error reporting
+fn err_sb_ub(msg: String) -> InterpError<'static> {
+ err_machine_stop!(TerminationInfo::ExperimentalUb {
+ msg,
+ url: format!("https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md"),
+ })
}
-/// Extra per-allocation state.
-#[derive(Clone, Debug)]
-pub struct Stacks {
- // Even reading memory can have effects on the stack, so we need a `RefCell` here.
- stacks: RefCell<RangeMap<Stack>>,
- barrier_tracking: MemoryState,
-}
+// # Stacked Borrows Core Begin
-/// Core per-location operations: deref, access, create.
/// We need to make at least the following things true:
///
-/// U1: After creating a `Uniq`, it is at the top (and unfrozen).
-/// U2: If the top is `Uniq` (and unfrozen), accesses must be through that `Uniq` or pop it.
-/// U3: If an access (deref sufficient?) happens with a `Uniq`, it requires the `Uniq` to be in the stack.
+/// U1: After creating a `Uniq`, it is at the top.
+/// U2: If the top is `Uniq`, accesses must be through that `Uniq` or remove it.
+/// U3: If an access happens with a `Uniq`, it requires the `Uniq` to be in the stack.
///
-/// F1: After creating a `&`, the parts outside `UnsafeCell` are frozen.
-/// F2: If a write access happens, it unfreezes.
-/// F3: If an access (well, a deref) happens with an `&` outside `UnsafeCell`,
-/// it requires the location to still be frozen.
+/// F1: After creating a `&`, the parts outside `UnsafeCell` have our `SharedReadOnly` on top.
+/// F2: If a write access happens, it pops the `SharedReadOnly`. This has two pieces:
+/// F2a: If a write happens granted by an item below our `SharedReadOnly`, the `SharedReadOnly`
+/// gets popped.
+/// F2b: No `SharedReadWrite` or `Unique` will ever be added on top of our `SharedReadOnly`.
+/// F3: If an access happens with an `&` outside `UnsafeCell`,
+/// it requires the `SharedReadOnly` to still be in the stack.
+
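+// As a concrete illustration of these rules (a hypothetical example, not a test
+// in this file): the last write below is rejected, because the direct write to
+// the local pops everything above its own `Unique` item -- including the
+// `SharedReadWrite` granted to `raw`:
+//
+//     let mut x = 0u8;
+//     let raw = &mut x as *mut u8; // SharedReadWrite for `raw`, above x's Unique
+//     unsafe { *raw = 1 };         // OK: raw's item grants write access
+//     x = 2;                       // pops everything above the local's Unique
+//     unsafe { *raw = 3 };         // ERROR: no item granting write access to raw's tag
+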
+/// Core relation on `Permission` to define which accesses are allowed
+impl Permission {
+ /// This defines for a given permission, whether it permits the given kind of access.
+ fn grants(self, access: AccessKind) -> bool {
+ // Disabled grants nothing. Otherwise, all items grant read access, and except for SharedReadOnly they grant write access.
+ self != Permission::Disabled
+ && (access == AccessKind::Read || self != Permission::SharedReadOnly)
+ }
+}
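+// Spelled out (a sketch of the relation defined above):
+// assert!(Permission::Unique.grants(AccessKind::Write));
+// assert!(Permission::SharedReadWrite.grants(AccessKind::Write));
+// assert!(!Permission::SharedReadOnly.grants(AccessKind::Write));
+// assert!(Permission::SharedReadOnly.grants(AccessKind::Read));
+// assert!(!Permission::Disabled.grants(AccessKind::Read));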
+
+/// Core per-location operations: access, dealloc, reborrow.
impl<'tcx> Stack {
- /// Deref `bor`: check if the location is frozen and the tag in the stack.
- /// This dos *not* constitute an access! "Deref" refers to the `*` operator
- /// in Rust, and includs cases like `&*x` or `(*x).foo` where no or only part
- /// of the memory actually gets accessed. Also we cannot know if we are
- /// going to read or write.
- /// Returns the index of the item we matched, `None` if it was the frozen one.
- /// `kind` indicates which kind of reference is being dereferenced.
- fn deref(
- &self,
- bor: Borrow,
- kind: RefKind,
- ) -> Result<Option<usize>, String> {
- // Exclude unique ref with frozen tag.
- if let (RefKind::Unique, Borrow::Alias(Some(_))) = (kind, bor) {
- return Err(format!("encountered mutable reference with frozen tag ({:?})", bor));
- }
- // Checks related to freezing.
- match bor {
- Borrow::Alias(Some(bor_t)) if kind == RefKind::Frozen => {
- // We need the location to be frozen. This ensures F3.
- let frozen = self.frozen_since.map_or(false, |itm_t| itm_t <= bor_t);
- return if frozen { Ok(None) } else {
- Err(format!("location is not frozen long enough"))
- }
- }
- Borrow::Alias(_) if self.frozen_since.is_some() => {
- // Shared deref to frozen location; looking good.
- return Ok(None)
- }
- // Not sufficient; go on looking.
- _ => {}
- }
- // If we got here, we have to look for our item in the stack.
- for (idx, &itm) in self.borrows.iter().enumerate().rev() {
- match (itm, bor) {
- (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => {
- // Found matching unique item. This satisfies U3.
- return Ok(Some(idx))
- }
- (BorStackItem::Raw, Borrow::Alias(_)) => {
- // Found matching aliasing/raw item.
- return Ok(Some(idx))
+ /// Find the item granting the given kind of access to the given tag, and return where
+ /// it is on the stack.
+ fn find_granting(&self, access: AccessKind, tag: Tag) -> Option<usize> {
+ self.borrows
+ .iter()
+ .enumerate() // we also need to know *where* in the stack
+ .rev() // search top-to-bottom
+ // Return permission of first item that grants access.
+ // We require a permission with the right tag, ensuring U3 and F3.
+ .find_map(
+ |(idx, item)| {
+ if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None }
+ },
+ )
+ }
+
+ /// Find the first write-incompatible item above the given one --
+/// i.e., find the height to which the stack will be truncated when writing to `granting`.
+ fn find_first_write_incompatible(&self, granting: usize) -> usize {
+ let perm = self.borrows[granting].perm;
+ match perm {
+ Permission::SharedReadOnly => bug!("Cannot use SharedReadOnly for writing"),
+ Permission::Disabled => bug!("Cannot use Disabled for anything"),
+ // On a write, everything above us is incompatible.
+ Permission::Unique => granting + 1,
+ Permission::SharedReadWrite => {
// The SharedReadWrite *just* above us are compatible, so we skip over them.
+ let mut idx = granting + 1;
+ while let Some(item) = self.borrows.get(idx) {
+ if item.perm == Permission::SharedReadWrite {
+ // Go on.
+ idx += 1;
+ } else {
+ // Found first incompatible!
+ break;
+ }
}
- // Go on looking. We ignore barriers! When an `&mut` and an `&` alias,
- // dereferencing the `&` is still possible (to reborrow), but doing
- // an access is not.
- _ => {}
+ idx
}
}
- // If we got here, we did not find our item. We have to error to satisfy U3.
- Err(format!("Borrow being dereferenced ({:?}) does not exist on the borrow stack", bor))
}
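+ // For example (a sketch, abbreviating items to their permission): in the stack
+ // [Unique, SharedReadWrite, SharedReadWrite, SharedReadOnly], writing via the
+ // Unique at index 0 truncates to height 1, while writing via the SharedReadWrite
+ // at index 1 truncates to height 3: the sibling SharedReadWrite directly above it
+ // is write-compatible, but the SharedReadOnly is not.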
- /// Performs an actual memory access using `bor`. We do not know any types here
- /// or whether things should be frozen, but we *do* know if this is reading
- /// or writing.
- fn access(
- &mut self,
- bor: Borrow,
- kind: AccessKind,
- barrier_tracking: &BarrierTracking,
- ) -> EvalResult<'tcx> {
- // Check if we can match the frozen "item".
- // Not possible on writes!
- if self.is_frozen() {
- if kind == AccessKind::Read {
- // When we are frozen, we just accept all reads. No harm in this.
- // The deref already checked that `Uniq` items are in the stack, and that
- // the location is frozen if it should be.
- return Ok(());
+ /// Check if the given item is protected.
+ fn check_protector(item: &Item, tag: Option<Tag>, global: &GlobalState) -> InterpResult<'tcx> {
+ if let Tag::Tagged(id) = item.tag {
+ if Some(id) == global.tracked_pointer_tag {
+ register_diagnostic(NonHaltingDiagnostic::PoppedPointerTag(item.clone()));
}
- trace!("access: unfreezing");
}
- // Unfreeze on writes. This ensures F2.
- self.frozen_since = None;
- // Pop the stack until we have something matching.
- while let Some(&itm) = self.borrows.last() {
- match (itm, bor) {
- (BorStackItem::FnBarrier(call), _) if barrier_tracking.is_active(call) => {
- return err!(MachineError(format!(
- "stopping looking for borrow being accessed ({:?}) because of barrier ({})",
- bor, call
- )))
- }
- (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => {
- // Found matching unique item. Continue after the match.
- }
- (BorStackItem::Raw, _) if kind == AccessKind::Read => {
- // When reading, everything can use a raw item!
- // We do not want to do this when writing: Writing to an `&mut`
- // should reaffirm its exclusivity (i.e., make sure it is
- // on top of the stack). Continue after the match.
- }
- (BorStackItem::Raw, Borrow::Alias(_)) => {
- // Found matching raw item. Continue after the match.
- }
- _ => {
- // Pop this, go on. This ensures U2.
- let itm = self.borrows.pop().unwrap();
- trace!("access: Popping {:?}", itm);
- continue
- }
- }
- // If we got here, we found a matching item. Congratulations!
- // However, we are not done yet: If this access is deallocating, we must make sure
- // there are no active barriers remaining on the stack.
- if kind == AccessKind::Dealloc {
- for &itm in self.borrows.iter().rev() {
- match itm {
- BorStackItem::FnBarrier(call) if barrier_tracking.is_active(call) => {
- return err!(MachineError(format!(
- "deallocating with active barrier ({})", call
- )))
- }
- _ => {},
- }
+ if let Some(call) = item.protector {
+ if global.is_active(call) {
+ if let Some(tag) = tag {
+ Err(err_sb_ub(format!(
+ "not granting access to tag {:?} because incompatible item is protected: {:?}",
+ tag, item
+ )))?
+ } else {
+ Err(err_sb_ub(format!(
+ "deallocating while item is protected: {:?}",
+ item
+ )))?
}
}
- // Now we are done.
- return Ok(())
}
- // If we got here, we did not find our item.
- err!(MachineError(format!(
- "borrow being accessed ({:?}) does not exist on the borrow stack",
- bor
- )))
+ Ok(())
}
- /// Initiate `bor`; mostly this means pushing.
- /// This operation cannot fail; it is up to the caller to ensure that the precondition
- /// is met: We cannot push `Uniq` onto frozen stacks.
- /// `kind` indicates which kind of reference is being created.
- fn create(&mut self, bor: Borrow, kind: RefKind) {
- // When creating a frozen reference, freeze. This ensures F1.
- // We also do *not* push anything else to the stack, making sure that no nother kind
- // of access (like writing through raw pointers) is permitted.
- if kind == RefKind::Frozen {
- let bor_t = match bor {
- Borrow::Alias(Some(t)) => t,
- _ => bug!("Creating illegal borrow {:?} for frozen ref", bor),
- };
- // It is possible that we already are frozen (e.g., if we just pushed a barrier,
- // the redundancy check would not have kicked in).
- match self.frozen_since {
- Some(loc_t) => assert!(
- loc_t <= bor_t,
- "trying to freeze location for longer than it was already frozen"
- ),
- None => {
- trace!("create: Freezing");
- self.frozen_since = Some(bor_t);
+ /// Test if a memory `access` using pointer tagged `tag` is granted.
+ /// If yes, return the index of the item that granted it.
+ fn access(&mut self, access: AccessKind, ptr: Pointer<Tag>, global: &GlobalState) -> InterpResult<'tcx> {
+ // Two main steps: Find granting item, remove incompatible items above.
+
+ // Step 1: Find granting item.
+ let granting_idx = self.find_granting(access, ptr.tag).ok_or_else(|| {
+ err_sb_ub(format!(
+ "no item granting {} to tag {:?} at {} found in borrow stack.",
+ access, ptr.tag, ptr.erase_tag(),
+ ))
+ })?;
+
+ // Step 2: Remove incompatible items above it. Make sure we do not remove protected
+ // items. Behavior differs for reads and writes.
+ if access == AccessKind::Write {
+ // Remove everything above the write-compatible items, like a proper stack. This makes sure read-only and unique
+ // pointers become invalid on write accesses (ensures F2a, and ensures U2 for write accesses).
+ let first_incompatible_idx = self.find_first_write_incompatible(granting_idx);
+ for item in self.borrows.drain(first_incompatible_idx..).rev() {
+ trace!("access: popping item {:?}", item);
+ Stack::check_protector(&item, Some(ptr.tag), global)?;
+ }
+ } else {
+ // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
+ // The reason this is not following the stack discipline (by removing the first Unique and
+ // everything on top of it) is that in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement
+ // would pop the `Unique` from the reborrow of the first statement, and subsequently also pop the
+ // `SharedReadWrite` for `raw`.
+ // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
+ // reference and use that.
+ // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
+ for idx in ((granting_idx + 1)..self.borrows.len()).rev() {
+ let item = &mut self.borrows[idx];
+ if item.perm == Permission::Unique {
+ trace!("access: disabling item {:?}", item);
+ Stack::check_protector(item, Some(ptr.tag), global)?;
+ item.perm = Permission::Disabled;
}
}
- return;
}
- assert!(
- self.frozen_since.is_none(),
- "trying to create non-frozen reference to frozen location"
- );
- // Push new item to the stack.
- let itm = match bor {
- Borrow::Uniq(t) => BorStackItem::Uniq(t),
- Borrow::Alias(_) => BorStackItem::Raw,
- };
- if *self.borrows.last().unwrap() == itm {
- // This is just an optimization, no functional change: Avoid stacking
- // multiple `Shr` on top of each other.
- assert!(bor.is_aliasing());
- trace!("create: sharing a shared location is a NOP");
- } else {
- // This ensures U1.
- trace!("create: pushing {:?}", itm);
- self.borrows.push(itm);
+ // Done.
+ Ok(())
+ }
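+ // To illustrate the read/write asymmetry (a sketch): given the per-location stack
+ // [Unique(x), Unique(r), SharedReadWrite(raw)], a read through x's tag merely
+ // disables Unique(r) and leaves SharedReadWrite(raw) usable, while a write
+ // through x's tag pops both items above the granting Unique.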
+
+ /// Deallocate a location: like a write access, but there must be no active
+ /// protectors at all, because we will remove all items.
+ fn dealloc(&mut self, ptr: Pointer<Tag>, global: &GlobalState) -> InterpResult<'tcx> {
+ // Step 1: Find granting item.
+ self.find_granting(AccessKind::Write, ptr.tag).ok_or_else(|| {
+ err_sb_ub(format!(
+ "no item granting write access for deallocation to tag {:?} at {} found in borrow stack",
+ ptr.tag, ptr.erase_tag(),
+ ))
+ })?;
+
+ // Step 2: Remove all items. Also checks for protectors.
+ for item in self.borrows.drain(..).rev() {
+ Stack::check_protector(&item, None, global)?;
}
+
+ Ok(())
}
- /// Adds a barrier.
- fn barrier(&mut self, call: CallId) {
- let itm = BorStackItem::FnBarrier(call);
- if *self.borrows.last().unwrap() == itm {
- // This is just an optimization, no functional change: Avoid stacking
- // multiple identical barriers on top of each other.
- // This can happen when a function receives several shared references
- // that overlap.
- trace!("barrier: avoiding redundant extra barrier");
+ /// Derive a new pointer from one with the given tag.
+ /// Whether this grant is "weak" or "strong" depends on the new permission: a weak
+ /// grant (`SharedReadWrite`) does not act as an access, and it adds the new item
+ /// directly on top of the one it is derived from instead of all the way at the
+ /// top of the stack.
+ fn grant(&mut self, derived_from: Pointer<Tag>, new: Item, global: &GlobalState) -> InterpResult<'tcx> {
+ // Figure out which access `perm` corresponds to.
+ let access =
+ if new.perm.grants(AccessKind::Write) { AccessKind::Write } else { AccessKind::Read };
+ // Now we figure out which item grants our parent (`derived_from`) this kind of access.
+ // We use that to determine where to put the new item.
+ let granting_idx = self.find_granting(access, derived_from.tag)
+ .ok_or_else(|| err_sb_ub(format!(
+ "trying to reborrow for {:?} at {}, but parent tag {:?} does not have an appropriate item in the borrow stack",
+ new.perm, derived_from.erase_tag(), derived_from.tag,
+ )))?;
+
+ // Compute where to put the new item.
+ // Either way, we ensure that we insert the new item in a way such that between
+ // `derived_from` and the new one, there are only items *compatible with* `derived_from`.
+ let new_idx = if new.perm == Permission::SharedReadWrite {
+ assert!(
+ access == AccessKind::Write,
+ "this case only makes sense for stack-like accesses"
+ );
+ // SharedReadWrite can coexist with "existing loans", meaning they don't act like a write
+ // access. Instead of popping the stack, we insert the item at the place the stack would
+ // be popped to (i.e., we insert it above all the write-compatible items).
+ // This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`.
+ self.find_first_write_incompatible(granting_idx)
+ } else {
+ // A "safe" reborrow for a pointer that actually expects some aliasing guarantees.
+ // Here, creating a reference actually counts as an access.
+ // This ensures F2b for `Unique`, by removing offending `SharedReadOnly`.
+ self.access(access, derived_from, global)?;
+
+ // We insert "as far up as possible": We know only compatible items are remaining
+ // on top of `derived_from`, and we want the new item at the top so that we
+ // get the strongest possible guarantees.
+ // This ensures U1 and F1.
+ self.borrows.len()
+ };
+
+ // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors.
+ if self.borrows[new_idx - 1] == new || self.borrows.get(new_idx) == Some(&new) {
+ // Optimization applies, done.
+ trace!("reborrow: avoiding adding redundant item {:?}", new);
} else {
- trace!("barrier: pushing barrier for call {}", call);
- self.borrows.push(itm);
+ trace!("reborrow: adding item {:?}", new);
+ self.borrows.insert(new_idx, new);
}
+
+ Ok(())
}
}
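+// Worked example for `grant` (a sketch): starting from [Unique(x), SharedReadOnly(s)],
+// granting SharedReadWrite for a pointer derived from x does *not* pop the
+// SharedReadOnly: the new item is inserted at the first write-incompatible position,
+// giving [Unique(x), SharedReadWrite(new), SharedReadOnly(s)] and preserving F2b.
+// Granting Unique instead acts as a write access, popping SharedReadOnly(s) first.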
+// # Stacked Borrows Core End
-/// Higher-level per-location operations: deref, access, reborrow.
+/// Map per-stack operations to higher-level per-location-range operations.
impl<'tcx> Stacks {
- /// Checks that this stack is fine with being dereferenced.
- fn deref(
- &self,
- ptr: Pointer<Borrow>,
- size: Size,
- kind: RefKind,
- ) -> EvalResult<'tcx> {
- trace!("deref for tag {:?} as {:?}: {:?}, size {}",
- ptr.tag, kind, ptr, size.bytes());
- let stacks = self.stacks.borrow();
- for stack in stacks.iter(ptr.offset, size) {
- stack.deref(ptr.tag, kind).map_err(InterpError::MachineError)?;
- }
- Ok(())
- }
+ /// Creates new stacks (one per location) that each hold a single item
+ /// with the given permission and tag.
+ fn new(size: Size, perm: Permission, tag: Tag, extra: MemoryExtra) -> Self {
+ let item = Item { perm, tag, protector: None };
+ let stack = Stack { borrows: vec![item] };
- /// `ptr` got used, reflect that in the stack.
- fn access(
- &self,
- ptr: Pointer<Borrow>,
- size: Size,
- kind: AccessKind,
- ) -> EvalResult<'tcx> {
- trace!("{:?} access of tag {:?}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes());
- // Even reads can have a side-effect, by invalidating other references.
- // This is fundamentally necessary since `&mut` asserts that there
- // are no accesses through other references, not even reads.
- let barrier_tracking = self.barrier_tracking.borrow();
- let mut stacks = self.stacks.borrow_mut();
- for stack in stacks.iter_mut(ptr.offset, size) {
- stack.access(ptr.tag, kind, &*barrier_tracking)?;
- }
- Ok(())
+ Stacks { stacks: RefCell::new(RangeMap::new(size, stack)), global: extra }
}
- /// Reborrow the given pointer to the new tag for the given kind of reference.
- /// This works on `&self` because we might encounter references to constant memory.
- fn reborrow(
+ /// Call `f` on every stack in the range.
+ fn for_each(
&self,
- ptr: Pointer<Borrow>,
+ ptr: Pointer<Tag>,
size: Size,
- mut barrier: Option<CallId>,
- new_bor: Borrow,
- new_kind: RefKind,
- ) -> EvalResult<'tcx> {
- assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique);
- trace!(
- "reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}",
- ptr.tag, new_bor, new_kind, ptr, size.bytes(),
- );
- if new_kind == RefKind::Raw {
- // No barrier for raw, including `&UnsafeCell`. They can rightfully alias with `&mut`.
- // FIXME: This means that the `dereferencable` attribute on non-frozen shared references
- // is incorrect! They are dereferencable when the function is called, but might become
- // non-dereferencable during the course of execution.
- // Also see [1], [2].
- //
- // [1]: <https://internals.rust-lang.org/t/
- // is-it-possible-to-be-memory-safe-with-deallocated-self/8457/8>,
- // [2]: <https://lists.llvm.org/pipermail/llvm-dev/2018-July/124555.html>
- barrier = None;
- }
- let barrier_tracking = self.barrier_tracking.borrow();
+ f: impl Fn(Pointer<Tag>, &mut Stack, &GlobalState) -> InterpResult<'tcx>,
+ ) -> InterpResult<'tcx> {
+ let global = self.global.borrow();
let mut stacks = self.stacks.borrow_mut();
- for stack in stacks.iter_mut(ptr.offset, size) {
- // Access source `ptr`, create new ref.
- let ptr_idx = stack.deref(ptr.tag, new_kind).map_err(InterpError::MachineError)?;
- // If we can deref the new tag already, and if that tag lives higher on
- // the stack than the one we come from, just use that.
- // That is, we check if `new_bor` *already* is "derived from" `ptr.tag`.
- // This also checks frozenness, if required.
- let bor_redundant = barrier.is_none() &&
- match (ptr_idx, stack.deref(new_bor, new_kind)) {
- // If the new borrow works with the frozen item, or else if it lives
- // above the old one in the stack, our job here is done.
- (_, Ok(None)) => true,
- (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true,
- // Otherwise, we need to create a new borrow.
- _ => false,
- };
- if bor_redundant {
- assert!(new_bor.is_aliasing(), "a unique reborrow can never be redundant");
- trace!("reborrow is redundant");
- continue;
- }
- // We need to do some actual work.
- let access_kind = if new_kind == RefKind::Unique {
- AccessKind::Write
- } else {
- AccessKind::Read
- };
- stack.access(ptr.tag, access_kind, &*barrier_tracking)?;
- if let Some(call) = barrier {
- stack.barrier(call);
- }
- stack.create(new_bor, new_kind);
+ for (offset, stack) in stacks.iter_mut(ptr.offset, size) {
+ let mut cur_ptr = ptr;
+ cur_ptr.offset = offset;
+ f(cur_ptr, stack, &*global)?;
}
Ok(())
}
}
-/// Hooks and glue.
-impl AllocationExtra<Borrow, MemoryState> for Stacks {
- #[inline(always)]
- fn memory_allocated<'tcx>(size: Size, extra: &MemoryState) -> Self {
- let stack = Stack {
- borrows: vec![BorStackItem::Raw],
- frozen_since: None,
+/// Glue code to connect with Miri Machine Hooks
+impl Stacks {
+ pub fn new_allocation(
+ id: AllocId,
+ size: Size,
+ extra: MemoryExtra,
+ kind: MemoryKind<MiriMemoryKind>,
+ ) -> (Self, Tag) {
+ let (tag, perm) = match kind {
+ // New unique borrow. This tag is not accessible by the program,
+ // so it will only ever be used when using the local directly (i.e.,
+ // not through a pointer). That is, whenever we directly write to a local, this will pop
+ // everything else off the stack, invalidating all previous pointers,
+ // and in particular, *all* raw pointers.
+ MemoryKind::Stack => (Tag::Tagged(extra.borrow_mut().new_ptr()), Permission::Unique),
+ // `Global` memory can be referenced by global pointers from `tcx`.
+ // Thus we call `global_base_ptr` such that the global pointers get the same tag
+ // as what we use here.
+ // `ExternStatic` is used for extern statics, and thus must also be listed here.
+ // `Env` we list because we can get away with precise tracking there.
+ // The base pointer is not unique, so the base permission is `SharedReadWrite`.
+ MemoryKind::Machine(MiriMemoryKind::Global | MiriMemoryKind::ExternStatic | MiriMemoryKind::Tls | MiriMemoryKind::Env) =>
+ (extra.borrow_mut().global_base_ptr(id), Permission::SharedReadWrite),
+ // Everything else we handle like raw pointers for now.
+ _ => {
+ let mut extra = extra.borrow_mut();
+ let tag = if extra.track_raw { Tag::Tagged(extra.new_ptr()) } else { Tag::Untagged };
+ (tag, Permission::SharedReadWrite)
+ }
};
- Stacks {
- stacks: RefCell::new(RangeMap::new(size, stack)),
- barrier_tracking: Rc::clone(extra),
- }
+ (Stacks::new(size, perm, tag, extra), tag)
}
#[inline(always)]
- fn memory_read<'tcx>(
- alloc: &Allocation<Borrow, Stacks>,
- ptr: Pointer<Borrow>,
- size: Size,
- ) -> EvalResult<'tcx> {
- alloc.extra.access(ptr, size, AccessKind::Read)
+ pub fn memory_read<'tcx>(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
+ trace!("read access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
+ self.for_each(ptr, size, |ptr, stack, global| stack.access(AccessKind::Read, ptr, global))
}
#[inline(always)]
- fn memory_written<'tcx>(
- alloc: &mut Allocation<Borrow, Stacks>,
- ptr: Pointer<Borrow>,
- size: Size,
- ) -> EvalResult<'tcx> {
- alloc.extra.access(ptr, size, AccessKind::Write)
+ pub fn memory_written<'tcx>(&mut self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
+ trace!("write access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
+ self.for_each(ptr, size, |ptr, stack, global| stack.access(AccessKind::Write, ptr, global))
}
#[inline(always)]
- fn memory_deallocated<'tcx>(
- alloc: &mut Allocation<Borrow, Stacks>,
- ptr: Pointer<Borrow>,
- size: Size,
- ) -> EvalResult<'tcx> {
- alloc.extra.access(ptr, size, AccessKind::Dealloc)
- }
-}
-
-impl<'tcx> Stacks {
- /// Pushes the first item to the stacks.
- pub(crate) fn first_item(
+ pub fn memory_deallocated<'tcx>(
&mut self,
- itm: BorStackItem,
- size: Size
- ) {
- for stack in self.stacks.get_mut().iter_mut(Size::ZERO, size) {
- assert!(stack.borrows.len() == 1);
- assert_eq!(stack.borrows.pop().unwrap(), BorStackItem::Raw);
- stack.borrows.push(itm);
- }
+ ptr: Pointer<Tag>,
+ size: Size,
+ ) -> InterpResult<'tcx> {
+ trace!("deallocation with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
+ self.for_each(ptr, size, |ptr, stack, global| stack.dealloc(ptr, global))
}
}
-impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {}
-trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> {
+/// Retagging/reborrowing. There is some policy in here, such as which permissions
+/// to grant for which references, and when to add protectors.
+impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
+trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
fn reborrow(
&mut self,
- place: MPlaceTy<'tcx, Borrow>,
+ place: MPlaceTy<'tcx, Tag>,
size: Size,
- fn_barrier: bool,
- new_bor: Borrow
- ) -> EvalResult<'tcx> {
+ kind: RefKind,
+ new_tag: Tag,
+ protect: bool,
+ ) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
- let ptr = place.ptr.to_ptr()?;
- let barrier = if fn_barrier { Some(this.frame().extra) } else { None };
- trace!("reborrow: creating new reference for {:?} (pointee {}): {:?}",
- ptr, place.layout.ty, new_bor);
+ let protector = if protect { Some(this.frame().extra.call_id) } else { None };
+ let ptr = place.ptr.assert_ptr();
+ trace!(
+ "reborrow: {} reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
+ kind,
+ new_tag,
+ ptr.tag,
+ place.layout.ty,
+ ptr.erase_tag(),
+ size.bytes()
+ );
// Get the allocation. It might not be mutable, so we cannot use `get_mut`.
- let alloc = this.memory().get(ptr.alloc_id)?;
- alloc.check_bounds(this, ptr, size)?;
+ let extra = &this.memory.get_raw(ptr.alloc_id)?.extra;
+ let stacked_borrows =
+ extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data");
// Update the stacks.
- if let Borrow::Alias(Some(_)) = new_bor {
- // Reference that cares about freezing. We need a frozen-sensitive reborrow.
- this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
- let kind = if frozen { RefKind::Frozen } else { RefKind::Raw };
- alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind)
- })?;
- } else {
- // Just treat this as one big chunk.
- let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw };
- alloc.extra.reborrow(ptr, size, barrier, new_bor, kind)?;
- }
- Ok(())
+ // Make sure that raw pointers and mutable shared references are reborrowed "weak":
+ // There could be existing unique pointers reborrowed from them that should remain valid!
+ let perm = match kind {
+ RefKind::Unique { two_phase: false } => Permission::Unique,
+ RefKind::Unique { two_phase: true } => Permission::SharedReadWrite,
+ RefKind::Raw { mutable: true } => Permission::SharedReadWrite,
+ RefKind::Shared | RefKind::Raw { mutable: false } => {
+ // Shared references and *const are a whole different kind of game, the
+ // permission is not uniform across the entire range!
+ // We need a frozen-sensitive reborrow.
+ return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
+ // We are only ever `SharedReadOnly` inside the frozen bits.
+ let perm = if frozen {
+ Permission::SharedReadOnly
+ } else {
+ Permission::SharedReadWrite
+ };
+ let item = Item { perm, tag: new_tag, protector };
+ stacked_borrows.for_each(cur_ptr, size, |cur_ptr, stack, global| {
+ stack.grant(cur_ptr, item, global)
+ })
+ });
+ }
+ };
+ let item = Item { perm, tag: new_tag, protector };
+ stacked_borrows.for_each(ptr, size, |ptr, stack, global| stack.grant(ptr, item, global))
}
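+ // E.g., reborrowing `&(i32, Cell<i32>)` grants `SharedReadOnly` for the bytes of
+ // the `i32` (frozen) and `SharedReadWrite` for the bytes of the `Cell` (unfrozen),
+ // so interior mutation through the `Cell` remains legal while writes to the
+ // frozen part invalidate the new reference.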
/// Retags an individual pointer, returning the retagged version.
/// `kind` indicates which kind of reference is being created.
fn retag_reference(
&mut self,
- val: ImmTy<'tcx, Borrow>,
- mutbl: Option<Mutability>,
- fn_barrier: bool,
- two_phase: bool,
- ) -> EvalResult<'tcx, Immediate<Borrow>> {
+ val: ImmTy<'tcx, Tag>,
+ kind: RefKind,
+ protect: bool,
+ ) -> InterpResult<'tcx, ImmTy<'tcx, Tag>> {
let this = self.eval_context_mut();
// We want a place for where the ptr *points to*, so we get one.
let place = this.ref_to_mplace(val)?;
- let size = this.size_and_align_of_mplace(place)?
+ let size = this
+ .size_and_align_of_mplace(place)?
.map(|(size, _)| size)
.unwrap_or_else(|| place.layout.size);
+ // `reborrow` relies on getting a `Pointer` and everything being in-bounds,
+ // so let's ensure that. However, we do not care about alignment.
+ // We can see dangling ptrs in here e.g. after a Box's `Unique` was
+ // updated using "self.0 = ..." (can happen in Box::from_raw) so we cannot ICE; see miri#1050.
+ let place = this.mplace_access_checked(place, Some(Align::from_bytes(1).unwrap()))?;
+ // Nothing to do for ZSTs.
if size == Size::ZERO {
- // Nothing to do for ZSTs.
- return Ok(*val);
+ return Ok(val);
}
// Compute new borrow.
- let time = this.machine.stacked_borrows.increment_clock();
- let new_bor = match mutbl {
- Some(MutMutable) => Borrow::Uniq(time),
- Some(MutImmutable) => Borrow::Alias(Some(time)),
- None => Borrow::default(),
+ let new_tag = {
+ let mut mem_extra = this.memory.extra.stacked_borrows.as_ref().unwrap().borrow_mut();
+ match kind {
+ // Give up tracking for raw pointers.
+ RefKind::Raw { .. } if !mem_extra.track_raw => Tag::Untagged,
+ // All other pointers are properly tracked.
+ _ => Tag::Tagged(mem_extra.new_ptr()),
+ }
};
// Reborrow.
- this.reborrow(place, size, fn_barrier, new_bor)?;
- let new_place = place.with_tag(new_bor);
- // Handle two-phase borrows.
- if two_phase {
- assert!(mutbl == Some(MutMutable), "two-phase shared borrows make no sense");
- // We immediately share it, to allow read accesses
- let two_phase_time = this.machine.stacked_borrows.increment_clock();
- let two_phase_bor = Borrow::Alias(Some(two_phase_time));
- this.reborrow(new_place, size, false /* fn_barrier */, two_phase_bor)?;
- }
+ this.reborrow(place, size, kind, new_tag, protect)?;
+ let new_place = place.replace_tag(new_tag);
// Return new pointer.
- Ok(new_place.to_ref())
+ Ok(ImmTy::from_immediate(new_place.to_ref(), val.layout))
}
}
-impl<'a, 'mir, 'tcx> EvalContextExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {}
-pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> {
- fn tag_new_allocation(
- &mut self,
- id: AllocId,
- kind: MemoryKind<MiriMemoryKind>,
- ) -> Borrow {
- let this = self.eval_context_mut();
- let time = match kind {
- MemoryKind::Stack => {
- // New unique borrow. This `Uniq` is not accessible by the program,
- // so it will only ever be used when using the local directly (i.e.,
- // not through a pointer). That is, whenever we directly use a local, this will pop
- // everything else off the stack, invalidating all previous pointers,
- // and in particular, *all* raw pointers. This subsumes the explicit
- // `reset` which the blog post [1] says to perform when accessing a local.
- //
- // [1]: <https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html>
- this.machine.stacked_borrows.increment_clock()
- }
- _ => {
- // Nothing to do for everything else.
- return Borrow::default()
- }
- };
- // Make this the active borrow for this allocation.
- let alloc = this
- .memory_mut()
- .get_mut(id)
- .expect("this is a new allocation; it must still exist");
- let size = Size::from_bytes(alloc.bytes.len() as u64);
- alloc.extra.first_item(BorStackItem::Uniq(time), size);
- Borrow::Uniq(time)
- }
-
- /// Called for value-to-place conversion. `mutability` is `None` for raw pointers.
- ///
- /// Note that this does *not* mean that all this memory will actually get accessed/referenced!
- /// We could be in the middle of `&(*var).1`.
- fn ptr_dereference(
- &self,
- place: MPlaceTy<'tcx, Borrow>,
- size: Size,
- mutability: Option<Mutability>,
- ) -> EvalResult<'tcx> {
- let this = self.eval_context_ref();
- trace!(
- "ptr_dereference: Accessing {} reference for {:?} (pointee {})",
- if let Some(mutability) = mutability {
- format!("{:?}", mutability)
- } else {
- format!("raw")
- },
- place.ptr, place.layout.ty
- );
- let ptr = place.ptr.to_ptr()?;
- if mutability.is_none() {
- // No further checks on raw derefs -- only the access itself will be checked.
- return Ok(());
- }
-
- // Get the allocation
- let alloc = this.memory().get(ptr.alloc_id)?;
- alloc.check_bounds(this, ptr, size)?;
- // If we got here, we do some checking, *but* we leave the tag unchanged.
- if let Borrow::Alias(Some(_)) = ptr.tag {
- assert_eq!(mutability, Some(MutImmutable));
- // We need a frozen-sensitive check.
- this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
- let kind = if frozen { RefKind::Frozen } else { RefKind::Raw };
- alloc.extra.deref(cur_ptr, size, kind)
- })?;
- } else {
- // Just treat this as one big chunk.
- let kind = if mutability == Some(MutMutable) { RefKind::Unique } else { RefKind::Raw };
- alloc.extra.deref(ptr, size, kind)?;
- }
-
- // All is good.
- Ok(())
- }
-
- fn retag(
- &mut self,
- kind: RetagKind,
- place: PlaceTy<'tcx, Borrow>
- ) -> EvalResult<'tcx> {
+impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
+pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
+ fn retag(&mut self, kind: RetagKind, place: PlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
- // Determine mutability and whether to add a barrier.
+ // Determine the kind of reference and whether to add a protector.
// Cannot use `builtin_deref` because that reports *immutable* for `Box`,
// making it useless.
- fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(Option<Mutability>, bool)> {
- match ty.sty {
+ fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> {
+ match ty.kind() {
// References are simple.
- ty::Ref(_, _, mutbl) => Some((Some(mutbl), kind == RetagKind::FnEntry)),
+ ty::Ref(_, _, Mutability::Mut) => Some((
+ RefKind::Unique { two_phase: kind == RetagKind::TwoPhase },
+ kind == RetagKind::FnEntry,
+ )),
+ ty::Ref(_, _, Mutability::Not) =>
+ Some((RefKind::Shared, kind == RetagKind::FnEntry)),
// Raw pointers need to be enabled.
- ty::RawPtr(..) if kind == RetagKind::Raw => Some((None, false)),
- // Boxes do not get a barrier: barriers reflect that references outlive the call
+ ty::RawPtr(tym) if kind == RetagKind::Raw =>
+ Some((RefKind::Raw { mutable: tym.mutbl == Mutability::Mut }, false)),
+ // Boxes do not get a protector: protectors reflect that references outlive the call
// they were passed in to; that's just not the case for boxes.
- ty::Adt(..) if ty.is_box() => Some((Some(MutMutable), false)),
+ ty::Adt(..) if ty.is_box() => Some((RefKind::Unique { two_phase: false }, false)),
_ => None,
}
}
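+ // For instance, `qualify` maps `&mut T` during `RetagKind::FnEntry` to
+ // `Some((RefKind::Unique { two_phase: false }, true))` (unique and protected),
+ // `Box<T>` to `Some((RefKind::Unique { two_phase: false }, false))`, and raw
+ // pointers outside `RetagKind::Raw` to `None` (left untouched).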
- // We need a visitor to visit all references. However, that requires
- // a `MemPlace`, so we have a fast path for reference types that
- // avoids allocating.
- if let Some((mutbl, barrier)) = qualify(place.layout.ty, kind) {
+ // We only reborrow "bare" references/boxes.
+ // Not traversing into fields helps with <https://github.com/rust-lang/unsafe-code-guidelines/issues/125>,
+ // but might also cost us optimization and analyses. We will have to experiment more with this.
+ if let Some((ref_kind, protector)) = qualify(place.layout.ty, kind) {
// Fast path.
let val = this.read_immediate(this.place_to_op(place)?)?;
- let val = this.retag_reference(val, mutbl, barrier, kind == RetagKind::TwoPhase)?;
- this.write_immediate(val, place)?;
- return Ok(());
+ let val = this.retag_reference(val, ref_kind, protector)?;
+ this.write_immediate(*val, place)?;
}
- let place = this.force_allocation(place)?;
-
- let mut visitor = RetagVisitor { ecx: this, kind };
- visitor.visit_value(place)?;
- // The actual visitor.
- struct RetagVisitor<'ecx, 'a, 'mir, 'tcx> {
- ecx: &'ecx mut MiriEvalContext<'a, 'mir, 'tcx>,
- kind: RetagKind,
- }
- impl<'ecx, 'a, 'mir, 'tcx>
- MutValueVisitor<'a, 'mir, 'tcx, Evaluator<'tcx>>
- for
- RetagVisitor<'ecx, 'a, 'mir, 'tcx>
- {
- type V = MPlaceTy<'tcx, Borrow>;
-
- #[inline(always)]
- fn ecx(&mut self) -> &mut MiriEvalContext<'a, 'mir, 'tcx> {
- &mut self.ecx
- }
+ Ok(())
+ }
- // Primitives of reference type, that is the one thing we are interested in.
- fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx>
- {
- // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
- // making it useless.
- if let Some((mutbl, barrier)) = qualify(place.layout.ty, self.kind) {
- let val = self.ecx.read_immediate(place.into())?;
- let val = self.ecx.retag_reference(
- val,
- mutbl,
- barrier,
- self.kind == RetagKind::TwoPhase
- )?;
- self.ecx.write_immediate(val, place.into())?;
- }
- Ok(())
- }
+ /// After a stack frame got pushed, retag the return place so that we are sure
+ /// it does not alias with anything.
+ ///
+ /// This is a HACK because there is nothing in MIR that would make the retag
+ /// explicit. Also see https://github.com/rust-lang/rust/issues/71117.
+ fn retag_return_place(&mut self) -> InterpResult<'tcx> {
+ let this = self.eval_context_mut();
+ let return_place = if let Some(return_place) = this.frame_mut().return_place {
+ return_place
+ } else {
+ // No return place, nothing to do.
+ return Ok(());
+ };
+ if return_place.layout.is_zst() {
+ // There may not be any memory here, nothing to do.
+ return Ok(());
}
+ // We need this to be in-memory to use tagged pointers.
+ let return_place = this.force_allocation(return_place)?;
+
+ // We have to turn the place into a pointer to use the existing code.
+ // (The pointer type does not matter, so we use a raw pointer.)
+ let ptr_layout = this.layout_of(this.tcx.mk_mut_ptr(return_place.layout.ty))?;
+ let val = ImmTy::from_immediate(return_place.to_ref(), ptr_layout);
+ // Reborrow it.
+ let val = this.retag_reference(val, RefKind::Unique { two_phase: false }, /*protector*/ true)?;
+ // And use reborrowed pointer for return place.
+ let return_place = this.ref_to_mplace(val)?;
+ this.frame_mut().return_place = Some(return_place.into());
Ok(())
}