//! for further information.
use std::cell::RefCell;
-use std::collections::{HashMap, HashSet};
use std::fmt;
use std::num::NonZeroU64;
use std::rc::Rc;
+use log::trace;
+
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_middle::mir::RetagKind;
+use rustc_middle::ty;
+use rustc_target::abi::{Align, LayoutOf, Size};
use rustc_hir::Mutability;
-use rustc::mir::RetagKind;
-use rustc::ty::{self, layout::Size};
-use rustc_mir::interpret::InterpError;
use crate::*;
/// Table storing the "base" tag for each allocation.
/// The base tag is the one used for the initial pointer.
/// We need this in a separate table to handle cyclic statics.
- base_ptr_ids: HashMap<AllocId, Tag>,
+ base_ptr_ids: FxHashMap<AllocId, Tag>,
/// Next unused call ID (for protectors).
next_call_id: CallId,
/// Those call IDs corresponding to functions that are still running.
- active_calls: HashSet<CallId>,
- /// The id to trace in this execution run
+ active_calls: FxHashSet<CallId>,
+ /// The pointer id to trace
tracked_pointer_tag: Option<PtrId>,
+ /// The call id to trace
+ tracked_call_id: Option<CallId>,
}
/// Memory extra state gives us interior mutable access to the global state.
pub type MemoryExtra = Rc<RefCell<GlobalState>>;
/// Utilities for initialization and ID generation
impl GlobalState {
- pub fn new(tracked_pointer_tag: Option<PtrId>) -> Self {
+ pub fn new(tracked_pointer_tag: Option<PtrId>, tracked_call_id: Option<CallId>) -> Self {
GlobalState {
next_ptr_id: NonZeroU64::new(1).unwrap(),
- base_ptr_ids: HashMap::default(),
+ base_ptr_ids: FxHashMap::default(),
next_call_id: NonZeroU64::new(1).unwrap(),
- active_calls: HashSet::default(),
+ active_calls: FxHashSet::default(),
tracked_pointer_tag,
+ tracked_call_id,
}
}
fn new_ptr(&mut self) -> PtrId {
let id = self.next_ptr_id;
+ if Some(id) == self.tracked_pointer_tag {
+ register_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(id));
+ }
self.next_ptr_id = NonZeroU64::new(id.get() + 1).unwrap();
id
}
pub fn new_call(&mut self) -> CallId {
let id = self.next_call_id;
trace!("new_call: Assigning ID {}", id);
+ if Some(id) == self.tracked_call_id {
+ register_diagnostic(NonHaltingDiagnostic::CreatedCallId(id));
+ }
assert!(self.active_calls.insert(id));
self.next_call_id = NonZeroU64::new(id.get() + 1).unwrap();
id
self.active_calls.contains(&id)
}
- pub fn static_base_ptr(&mut self, id: AllocId) -> Tag {
+ pub fn global_base_ptr(&mut self, id: AllocId) -> Tag {
self.base_ptr_ids.get(&id).copied().unwrap_or_else(|| {
let tag = Tag::Tagged(self.new_ptr());
trace!("New allocation {:?} has base tag {:?}", id, tag);
}
}
+/// Error reporting
+fn err_sb_ub(msg: String) -> InterpError<'static> {
+ err_machine_stop!(TerminationInfo::ExperimentalUb {
+ msg,
+ url: format!("https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md"),
+ })
+}
+
// # Stacked Borrows Core Begin
/// We need to make at least the following things true:
fn check_protector(item: &Item, tag: Option<Tag>, global: &GlobalState) -> InterpResult<'tcx> {
if let Tag::Tagged(id) = item.tag {
if Some(id) == global.tracked_pointer_tag {
- register_err(
- InterpError::MachineStop(Box::new(TerminationInfo::PoppedTrackedPointerTag(
- item.clone(),
- )))
- .into(),
- );
+ register_diagnostic(NonHaltingDiagnostic::PoppedPointerTag(item.clone()));
}
}
if let Some(call) = item.protector {
if global.is_active(call) {
if let Some(tag) = tag {
- throw_ub!(UbExperimental(format!(
+ Err(err_sb_ub(format!(
"not granting access to tag {:?} because incompatible item is protected: {:?}",
tag, item
- )));
+ )))?
} else {
- throw_ub!(UbExperimental(format!(
+ Err(err_sb_ub(format!(
"deallocating while item is protected: {:?}",
item
- )));
+ )))?
}
}
}
// Step 1: Find granting item.
let granting_idx = self.find_granting(access, tag).ok_or_else(|| {
- err_ub!(UbExperimental(format!(
- "no item granting {} to tag {:?} found in borrow stack",
- access, tag,
- )))
+ err_sb_ub(format!(
+ "no item granting {} to tag {:?} found in borrow stack.",
+ access, tag
+ ))
})?;
// Step 2: Remove incompatible items above them. Make sure we do not remove protected
fn dealloc(&mut self, tag: Tag, global: &GlobalState) -> InterpResult<'tcx> {
// Step 1: Find granting item.
self.find_granting(AccessKind::Write, tag).ok_or_else(|| {
- err_ub!(UbExperimental(format!(
+ err_sb_ub(format!(
"no item granting write access for deallocation to tag {:?} found in borrow stack",
tag,
- )))
+ ))
})?;
// Step 2: Remove all items. Also checks for protectors.
// Now we figure out which item grants our parent (`derived_from`) this kind of access.
// We use that to determine where to put the new item.
let granting_idx = self.find_granting(access, derived_from)
- .ok_or_else(|| err_ub!(UbExperimental(format!(
- "trying to reborrow for {:?}, but parent tag {:?} does not have an appropriate item in the borrow stack", new.perm, derived_from,
- ))))?;
+ .ok_or_else(|| err_sb_ub(format!(
+ "trying to reborrow for {:?}, but parent tag {:?} does not have an appropriate item in the borrow stack",
+ new.perm, derived_from,
+ )))?;
// Compute where to put the new item.
// Either way, we ensure that we insert the new item in a way such that between
// everything else off the stack, invalidating all previous pointers,
// and in particular, *all* raw pointers.
MemoryKind::Stack => (Tag::Tagged(extra.borrow_mut().new_ptr()), Permission::Unique),
- // Static memory can be referenced by "global" pointers from `tcx`.
- // Thus we call `static_base_ptr` such that the global pointers get the same tag
+ // `Global` memory can be referenced by global pointers from `tcx`.
+ // Thus we call `global_base_ptr` such that the global pointers get the same tag
// as what we use here.
+ // `ExternStatic` is used for extern statics, and thus must also be listed here.
+ // `Env` we list because we can get away with precise tracking there.
// The base pointer is not unique, so the base permission is `SharedReadWrite`.
- MemoryKind::Machine(MiriMemoryKind::Static) =>
- (extra.borrow_mut().static_base_ptr(id), Permission::SharedReadWrite),
+ MemoryKind::Machine(MiriMemoryKind::Global | MiriMemoryKind::ExternStatic | MiriMemoryKind::Tls | MiriMemoryKind::Env) =>
+ (extra.borrow_mut().global_base_ptr(id), Permission::SharedReadWrite),
// Everything else we handle entirely untagged for now.
// FIXME: experiment with more precise tracking.
_ => (Tag::Untagged, Permission::SharedReadWrite),
/// Retagging/reborrowing. There is some policy in here, such as which permissions
/// to grant for which references, and when to add protectors.
-impl<'mir, 'tcx> EvalContextPrivExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
+impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
fn reborrow(
&mut self,
val: ImmTy<'tcx, Tag>,
kind: RefKind,
protect: bool,
- ) -> InterpResult<'tcx, Immediate<Tag>> {
+ ) -> InterpResult<'tcx, ImmTy<'tcx, Tag>> {
let this = self.eval_context_mut();
// We want a place for where the ptr *points to*, so we get one.
let place = this.ref_to_mplace(val)?;
.size_and_align_of_mplace(place)?
.map(|(size, _)| size)
.unwrap_or_else(|| place.layout.size);
+ // `reborrow` relies on getting a `Pointer` and everything being in-bounds,
+ // so let's ensure that. However, we do not care about alignment.
// We can see dangling ptrs in here e.g. after a Box's `Unique` was
- // updated using "self.0 = ..." (can happen in Box::from_raw); see miri#1050.
- let place = this.mplace_access_checked(place)?;
+ // updated using "self.0 = ..." (can happen in Box::from_raw) so we cannot ICE; see miri#1050.
+ let place = this.mplace_access_checked(place, Some(Align::from_bytes(1).unwrap()))?;
+ // Nothing to do for ZSTs.
if size == Size::ZERO {
- // Nothing to do for ZSTs.
- return Ok(*val);
+ return Ok(val);
}
// Compute new borrow.
// breaking `Rc::from_raw`.
RefKind::Raw { .. } => Tag::Untagged,
// All other pointers are properly tracked.
- _ => Tag::Tagged(this.memory.extra.stacked_borrows.borrow_mut().new_ptr()),
+ _ => Tag::Tagged(
+ this.memory.extra.stacked_borrows.as_ref().unwrap().borrow_mut().new_ptr(),
+ ),
};
// Reborrow.
let new_place = place.replace_tag(new_tag);
// Return new pointer.
- Ok(new_place.to_ref())
+ Ok(ImmTy::from_immediate(new_place.to_ref(), val.layout))
}
}
-impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
+impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
fn retag(&mut self, kind: RetagKind, place: PlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
// Cannot use `builtin_deref` because that reports *immutable* for `Box`,
// making it useless.
fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> {
- match ty.kind {
+ match ty.kind() {
// References are simple.
ty::Ref(_, _, Mutability::Mut) => Some((
RefKind::Unique { two_phase: kind == RetagKind::TwoPhase },
// Fast path.
let val = this.read_immediate(this.place_to_op(place)?)?;
let val = this.retag_reference(val, mutbl, protector)?;
- this.write_immediate(val, place)?;
+ this.write_immediate(*val, place)?;
}
- this.process_errors();
+ Ok(())
+ }
+
+ /// After a stack frame got pushed, retag the return place so that we are sure
+ /// it does not alias with anything.
+ ///
+ /// This is a HACK because there is nothing in MIR that would make the retag
+ /// explicit. Also see https://github.com/rust-lang/rust/issues/71117.
+ fn retag_return_place(&mut self) -> InterpResult<'tcx> {
+ let this = self.eval_context_mut();
+ let return_place = if let Some(return_place) = this.frame_mut().return_place {
+ return_place
+ } else {
+ // No return place, nothing to do.
+ return Ok(());
+ };
+ if return_place.layout.is_zst() {
+ // There may not be any memory here, nothing to do.
+ return Ok(());
+ }
+ // We need this to be in-memory to use tagged pointers.
+ let return_place = this.force_allocation(return_place)?;
+
+ // We have to turn the place into a pointer to use the existing code.
+ // (The pointer type does not matter, so we use a raw pointer.)
+ let ptr_layout = this.layout_of(this.tcx.mk_mut_ptr(return_place.layout.ty))?;
+ let val = ImmTy::from_immediate(return_place.to_ref(), ptr_layout);
+ // Reborrow it.
+ let val = this.retag_reference(val, RefKind::Unique { two_phase: false }, /*protector*/ true)?;
+ // And use reborrowed pointer for return place.
+ let return_place = this.ref_to_mplace(val)?;
+ this.frame_mut().return_place = Some(return_place.into());
Ok(())
}