From 3f0a2a29414a67d9c52241a94ce373546ca2ddcf Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 15 Apr 2019 15:36:09 +0200 Subject: [PATCH] rewrite Stacked Borrows Core. this passes stacked-borrows.rs! --- src/fn_call.rs | 28 +- src/helpers.rs | 24 +- src/intrinsic.rs | 6 +- src/lib.rs | 108 +++-- src/operator.rs | 38 +- src/stacked_borrows.rs | 937 +++++++++++++++++++++-------------------- src/tls.rs | 10 +- 7 files changed, 573 insertions(+), 578 deletions(-) diff --git a/src/fn_call.rs b/src/fn_call.rs index 64dcce161dd..d8794fed469 100644 --- a/src/fn_call.rs +++ b/src/fn_call.rs @@ -13,8 +13,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' fn find_fn( &mut self, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: Option>, + args: &[OpTy<'tcx, Tag>], + dest: Option>, ret: Option, ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> { let this = self.eval_context_mut(); @@ -55,8 +55,8 @@ fn find_fn( fn emulate_foreign_item( &mut self, def_id: DefId, - args: &[OpTy<'tcx, Borrow>], - dest: Option>, + args: &[OpTy<'tcx, Tag>], + dest: Option>, ret: Option, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); @@ -92,7 +92,7 @@ fn emulate_foreign_item( } else { let align = this.tcx.data_layout.pointer_align.abi; let ptr = this.memory_mut().allocate(Size::from_bytes(size), align, MiriMemoryKind::C.into()); - this.write_scalar(Scalar::Ptr(ptr.with_default_tag()), dest)?; + this.write_scalar(Scalar::Ptr(ptr), dest)?; } } "calloc" => { @@ -105,7 +105,7 @@ fn emulate_foreign_item( } else { let size = Size::from_bytes(bytes); let align = this.tcx.data_layout.pointer_align.abi; - let ptr = this.memory_mut().allocate(size, align, MiriMemoryKind::C.into()).with_default_tag(); + let ptr = this.memory_mut().allocate(size, align, MiriMemoryKind::C.into()); this.memory_mut().get_mut(ptr.alloc_id)?.write_repeat(tcx, ptr, 0, size)?; this.write_scalar(Scalar::Ptr(ptr), dest)?; } @@ -132,7 +132,7 @@ fn emulate_foreign_item( Align::from_bytes(align).unwrap(), MiriMemoryKind::C.into() ); - this.write_scalar(Scalar::Ptr(ptr.with_default_tag()), ret.into())?; + this.write_scalar(Scalar::Ptr(ptr), ret.into())?; } this.write_null(dest)?; } @@ -162,8 +162,7 @@ fn emulate_foreign_item( Size::from_bytes(size), Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into() - ) - .with_default_tag(); + ); this.write_scalar(Scalar::Ptr(ptr), dest)?; } "__rust_alloc_zeroed" => { @@ -180,8 +179,7 @@ fn emulate_foreign_item( Size::from_bytes(size), Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into() - ) - .with_default_tag(); + ); this.memory_mut() .get_mut(ptr.alloc_id)? 
.write_repeat(tcx, ptr, 0, Size::from_bytes(size))?; @@ -222,7 +220,7 @@ fn emulate_foreign_item( Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into(), )?; - this.write_scalar(Scalar::Ptr(new_ptr.with_default_tag()), dest)?; + this.write_scalar(Scalar::Ptr(new_ptr), dest)?; } "syscall" => { @@ -428,7 +426,7 @@ fn emulate_foreign_item( Size::from_bytes((value.len() + 1) as u64), Align::from_bytes(1).unwrap(), MiriMemoryKind::Env.into(), - ).with_default_tag(); + ); { let alloc = this.memory_mut().get_mut(value_copy.alloc_id)?; alloc.write_bytes(tcx, value_copy, &value)?; @@ -798,13 +796,13 @@ fn emulate_foreign_item( Ok(()) } - fn write_null(&mut self, dest: PlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> { + fn write_null(&mut self, dest: PlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { self.eval_context_mut().write_scalar(Scalar::from_int(0, dest.layout.size), dest) } /// Evaluates the scalar at the specified path. Returns Some(val) /// if the path could be resolved, and None otherwise - fn eval_path_scalar(&mut self, path: &[&str]) -> EvalResult<'tcx, Option>> { + fn eval_path_scalar(&mut self, path: &[&str]) -> EvalResult<'tcx, Option>> { let this = self.eval_context_mut(); if let Ok(instance) = this.resolve_path(path) { let cid = GlobalId { diff --git a/src/helpers.rs b/src/helpers.rs index 8a4cccc743e..f468d256031 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -47,9 +47,9 @@ fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>> { /// will be true if this is frozen, false if this is in an `UnsafeCell`. fn visit_freeze_sensitive( &self, - place: MPlaceTy<'tcx, Borrow>, + place: MPlaceTy<'tcx, Tag>, size: Size, - mut action: impl FnMut(Pointer, Size, bool) -> EvalResult<'tcx>, + mut action: impl FnMut(Pointer, Size, bool) -> EvalResult<'tcx>, ) -> EvalResult<'tcx> { let this = self.eval_context_ref(); trace!("visit_frozen(place={:?}, size={:?})", *place, size); @@ -64,7 +64,7 @@ fn visit_freeze_sensitive( let mut end_ptr = place.ptr; // Called when we detected an `UnsafeCell` at the given offset and size. // Calls `action` and advances `end_ptr`. - let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar, unsafe_cell_size: Size| { + let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar, unsafe_cell_size: Size| { if unsafe_cell_size != Size::ZERO { debug_assert_eq!(unsafe_cell_ptr.to_ptr().unwrap().alloc_id, end_ptr.to_ptr().unwrap().alloc_id); @@ -120,7 +120,7 @@ fn visit_freeze_sensitive( /// Visiting the memory covered by a `MemPlace`, being aware of /// whether we are inside an `UnsafeCell` or not. struct UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F> - where F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + where F: FnMut(MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { ecx: &'ecx MiriEvalContext<'a, 'mir, 'tcx>, unsafe_cell_action: F, @@ -131,9 +131,9 @@ impl<'ecx, 'a, 'mir, 'tcx, F> for UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F> where - F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + F: FnMut(MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { - type V = MPlaceTy<'tcx, Borrow>; + type V = MPlaceTy<'tcx, Tag>; #[inline(always)] fn ecx(&self) -> &MiriEvalContext<'a, 'mir, 'tcx> { @@ -141,7 +141,7 @@ fn ecx(&self) -> &MiriEvalContext<'a, 'mir, 'tcx> { } // Hook to detect `UnsafeCell`. 
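// The `UnsafeCellVisitor` below is what makes `visit_freeze_sensitive` work: it walks a place and reports every `UnsafeCell` it finds, so that callers can treat the parts of a shared reference outside any `UnsafeCell` as frozen and the parts inside as mutable.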
- fn visit_value(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_value(&mut self, v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty); let is_unsafe_cell = match v.layout.ty.sty { @@ -163,8 +163,8 @@ fn visit_value(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> // Make sure we visit aggregrates in increasing offset order. fn visit_aggregate( &mut self, - place: MPlaceTy<'tcx, Borrow>, - fields: impl Iterator>>, + place: MPlaceTy<'tcx, Tag>, + fields: impl Iterator>>, ) -> EvalResult<'tcx> { match place.layout.fields { layout::FieldPlacement::Array { .. } => { @@ -174,7 +174,7 @@ fn visit_aggregate( } layout::FieldPlacement::Arbitrary { .. } => { // Gather the subplaces and sort them before visiting. - let mut places = fields.collect::>>>()?; + let mut places = fields.collect::>>>()?; places.sort_by_key(|place| place.ptr.get_ptr_offset(self.ecx())); self.walk_aggregate(place, places.into_iter().map(Ok)) } @@ -186,7 +186,7 @@ fn visit_aggregate( } // We have to do *something* for unions. - fn visit_union(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_union(&mut self, v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { // With unions, we fall back to whatever the type says, to hopefully be consistent // with LLVM IR. @@ -200,7 +200,7 @@ fn visit_union(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> } // We should never get to a primitive, but always short-circuit somewhere above. - fn visit_primitive(&mut self, _v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_primitive(&mut self, _v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { bug!("we should always short-circuit before coming to a primitive") } diff --git a/src/intrinsic.rs b/src/intrinsic.rs index bb156c95dfe..a17f576b43b 100644 --- a/src/intrinsic.rs +++ b/src/intrinsic.rs @@ -4,7 +4,7 @@ use rustc::ty; use crate::{ - PlaceTy, OpTy, ImmTy, Immediate, Scalar, ScalarMaybeUndef, Borrow, + PlaceTy, OpTy, ImmTy, Immediate, Scalar, ScalarMaybeUndef, Tag, OperatorEvalContextExt }; @@ -13,8 +13,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, fn call_intrinsic( &mut self, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: PlaceTy<'tcx, Borrow>, + args: &[OpTy<'tcx, Tag>], + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); if this.emulate_intrinsic(instance, args, dest)? { diff --git a/src/lib.rs b/src/lib.rs index 541986de551..3dbe922999d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -23,6 +23,7 @@ use std::collections::HashMap; use std::borrow::Cow; +use std::rc::Rc; use rand::rngs::StdRng; use rand::SeedableRng; @@ -48,7 +49,7 @@ pub use crate::stacked_borrows::{EvalContextExt as StackedBorEvalContextExt}; // Used by priroda. -pub use crate::stacked_borrows::{Borrow, Stack, Stacks, BorStackItem}; +pub use crate::stacked_borrows::{Tag, Permission, Stack, Stacks, Item}; /// Insert rustc arguments at the beginning of the argument list that Miri wants to be /// set per default, for maximal validation power. @@ -155,7 +156,7 @@ pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>( // Don't forget `0` terminator. cmd.push(std::char::from_u32(0).unwrap()); // Collect the pointers to the individual strings. - let mut argvs = Vec::>::new(); + let mut argvs = Vec::>::new(); for arg in config.args { // Add `0` terminator. 
let mut arg = arg.into_bytes(); @@ -187,7 +188,7 @@ pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>( Size::from_bytes(cmd_utf16.len() as u64 * 2), Align::from_bytes(2).unwrap(), MiriMemoryKind::Env.into(), - ).with_default_tag(); + ); ecx.machine.cmd_line = Some(cmd_ptr); // Store the UTF-16 string. let char_size = Size::from_bytes(2); @@ -214,7 +215,13 @@ pub fn eval_main<'a, 'tcx: 'a>( main_id: DefId, config: MiriConfig, ) { - let mut ecx = create_ecx(tcx, main_id, config).expect("couldn't create ecx"); + let mut ecx = match create_ecx(tcx, main_id, config) { + Ok(ecx) => ecx, + Err(mut err) => { + err.print_backtrace(); + panic!("Miri initialization error: {}", err.kind) + } + }; // Perform the main execution. let res: EvalResult = (|| { @@ -310,14 +317,14 @@ fn may_leak(self) -> bool { pub struct Evaluator<'tcx> { /// Environment variables set by `setenv`. /// Miri does not expose env vars from the host to the emulated program. - pub(crate) env_vars: HashMap<Vec<u8>, Pointer>, + pub(crate) env_vars: HashMap<Vec<u8>, Pointer<Tag>>, /// Program arguments (`Option` because we can only initialize them after creating the ecx). /// These are *pointers* to argc/argv because macOS. /// We also need the full command line as one string because of Windows. - pub(crate) argc: Option<Pointer>, - pub(crate) argv: Option<Pointer>, - pub(crate) cmd_line: Option<Pointer>, + pub(crate) argc: Option<Pointer<Tag>>, + pub(crate) argv: Option<Pointer<Tag>>, + pub(crate) cmd_line: Option<Pointer<Tag>>, /// Last OS error. pub(crate) last_error: u32, @@ -328,9 +335,6 @@ pub struct Evaluator<'tcx> { /// Whether to enforce the validity invariant. pub(crate) validate: bool, - /// Stacked Borrows state. - pub(crate) stacked_borrows: stacked_borrows::State, - /// The random number generator to use if Miri /// is running in non-deterministic mode pub(crate) rng: Option<StdRng> @@ -346,7 +350,6 @@ fn new(validate: bool, seed: Option<u64>) -> Self { last_error: 0, tls: TlsData::default(), validate, - stacked_borrows: stacked_borrows::State::default(), rng: seed.map(|s| StdRng::seed_from_u64(s)) } } @@ -378,9 +381,9 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { type FrameExtra = stacked_borrows::CallId; type MemoryExtra = stacked_borrows::MemoryState; type AllocExtra = stacked_borrows::Stacks; - type PointerTag = Borrow; + type PointerTag = Tag; - type MemoryMap = MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Borrow, Self::AllocExtra>)>; + type MemoryMap = MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>; const STATIC_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::MutStatic); @@ -394,8 +397,8 @@ fn enforce_validity(ecx: &InterpretCx<'a, 'mir, 'tcx, Self>) -> bool { fn find_fn( ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: Option<PlaceTy<'tcx, Borrow>>, + args: &[OpTy<'tcx, Tag>], + dest: Option<PlaceTy<'tcx, Tag>>, ret: Option<mir::BasicBlock>, ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> { ecx.find_fn(instance, args, dest, ret) } @@ -405,8 +408,8 @@ fn find_fn( fn call_intrinsic( ecx: &mut rustc_mir::interpret::InterpretCx<'a, 'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: PlaceTy<'tcx, Borrow>, + args: &[OpTy<'tcx, Tag>], + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { ecx.call_intrinsic(instance, args, dest) } @@ -415,15 +418,15 @@ fn call_intrinsic( fn ptr_op( ecx: &rustc_mir::interpret::InterpretCx<'a, 'mir, 'tcx, Self>, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)> { + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar<Tag>, bool)> { ecx.ptr_op(bin_op, left, right) } fn box_alloc( ecx: &mut InterpretCx<'a, 'mir,
'tcx, Self>, - dest: PlaceTy<'tcx, Borrow>, + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { trace!("box_alloc for {:?}", dest.layout.ty); // Call the `exchange_malloc` lang item. @@ -467,7 +470,7 @@ fn find_foreign_static( def_id: DefId, tcx: TyCtxtAt<'a, 'tcx, 'tcx>, memory_extra: &Self::MemoryExtra, - ) -> EvalResult<'tcx, Cow<'tcx, Allocation>> { + ) -> EvalResult<'tcx, Cow<'tcx, Allocation>> { let attrs = tcx.get_attrs(def_id); let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") { Some(name) => name.as_str(), @@ -479,7 +482,7 @@ fn find_foreign_static( // This should be all-zero, pointer-sized. let size = tcx.data_layout.pointer_size; let data = vec![0; size.bytes() as usize]; - let extra = AllocationExtra::memory_allocated(size, memory_extra); + let extra = Stacks::new(size, Tag::default(), Rc::clone(memory_extra)); Allocation::from_bytes(&data, tcx.data_layout.pointer_align.abi, extra) } _ => return err!(Unimplemented( @@ -499,16 +502,17 @@ fn before_terminator(_ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> EvalResult fn adjust_static_allocation<'b>( alloc: &'b Allocation, memory_extra: &Self::MemoryExtra, - ) -> Cow<'b, Allocation> { - let extra = AllocationExtra::memory_allocated( + ) -> Cow<'b, Allocation> { + let extra = Stacks::new( Size::from_bytes(alloc.bytes.len() as u64), - memory_extra, + Tag::default(), + Rc::clone(memory_extra), ); - let alloc: Allocation = Allocation { + let alloc: Allocation = Allocation { bytes: alloc.bytes.clone(), relocations: Relocations::from_presorted( alloc.relocations.iter() - .map(|&(offset, ((), alloc))| (offset, (Borrow::default(), alloc))) + .map(|&(offset, ((), alloc))| (offset, (Tag::default(), alloc))) .collect() ), undef_mask: alloc.undef_mask.clone(), @@ -519,46 +523,30 @@ fn adjust_static_allocation<'b>( Cow::Owned(alloc) } - fn tag_dereference( - ecx: &InterpretCx<'a, 'mir, 'tcx, Self>, - place: MPlaceTy<'tcx, Borrow>, - mutability: Option, - ) -> EvalResult<'tcx, Scalar> { - let size = ecx.size_and_align_of_mplace(place)?.map(|(size, _)| size) - // For extern types, just cover what we can. - .unwrap_or_else(|| place.layout.size); - if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || - !Self::enforce_validity(ecx) || size == Size::ZERO - { - // No tracking. - Ok(place.ptr) - } else { - ecx.ptr_dereference(place, size, mutability.into())?; - // We never change the pointer. - Ok(place.ptr) - } + #[inline(always)] + fn new_allocation( + size: Size, + extra: &Self::MemoryExtra, + kind: MemoryKind, + ) -> (Self::AllocExtra, Self::PointerTag) { + Stacks::new_allocation(size, extra, kind) } #[inline(always)] - fn tag_new_allocation( - ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, - ptr: Pointer, - kind: MemoryKind, - ) -> Pointer { - if !ecx.machine.validate { - // No tracking. - ptr.with_default_tag() - } else { - let tag = ecx.tag_new_allocation(ptr.alloc_id, kind); - Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag) - } + fn tag_dereference( + _ecx: &InterpretCx<'a, 'mir, 'tcx, Self>, + place: MPlaceTy<'tcx, Tag>, + _mutability: Option, + ) -> EvalResult<'tcx, Scalar> { + // Nothing happens. + Ok(place.ptr) } #[inline(always)] fn retag( ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, kind: mir::RetagKind, - place: PlaceTy<'tcx, Borrow>, + place: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || !Self::enforce_validity(ecx) { // No tracking, or no retagging. 
The latter is possible because a dependency of ours diff --git a/src/operator.rs b/src/operator.rs index 45c0e63542d..386fc4307b8 100644 --- a/src/operator.rs +++ b/src/operator.rs @@ -7,39 +7,39 @@ pub trait EvalContextExt<'tcx> { fn ptr_op( &self, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar, bool)>; + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar, bool)>; fn ptr_int_arithmetic( &self, bin_op: mir::BinOp, - left: Pointer, + left: Pointer, right: u128, signed: bool, - ) -> EvalResult<'tcx, (Scalar, bool)>; + ) -> EvalResult<'tcx, (Scalar, bool)>; fn ptr_eq( &self, - left: Scalar, - right: Scalar, + left: Scalar, + right: Scalar, ) -> EvalResult<'tcx, bool>; fn pointer_offset_inbounds( &self, - ptr: Scalar, + ptr: Scalar, pointee_ty: Ty<'tcx>, offset: i64, - ) -> EvalResult<'tcx, Scalar>; + ) -> EvalResult<'tcx, Scalar>; } impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> { fn ptr_op( &self, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar, bool)> { + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar, bool)> { use rustc::mir::BinOp::*; trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right); @@ -136,8 +136,8 @@ fn ptr_op( fn ptr_eq( &self, - left: Scalar, - right: Scalar, + left: Scalar, + right: Scalar, ) -> EvalResult<'tcx, bool> { let size = self.pointer_size(); Ok(match (left, right) { @@ -233,13 +233,13 @@ fn ptr_eq( fn ptr_int_arithmetic( &self, bin_op: mir::BinOp, - left: Pointer, + left: Pointer, right: u128, signed: bool, - ) -> EvalResult<'tcx, (Scalar, bool)> { + ) -> EvalResult<'tcx, (Scalar, bool)> { use rustc::mir::BinOp::*; - fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, bool) { + fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, bool) { (Scalar::Ptr(res), over) } @@ -327,10 +327,10 @@ fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, bool /// allocation, and all the remaining integers pointers their own allocation. fn pointer_offset_inbounds( &self, - ptr: Scalar, + ptr: Scalar, pointee_ty: Ty<'tcx>, offset: i64, - ) -> EvalResult<'tcx, Scalar> { + ) -> EvalResult<'tcx, Scalar> { // FIXME: assuming here that type size is less than `i64::max_value()`. let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64; let offset = offset diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index bea6aaf9cf8..080200b12a4 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -1,6 +1,8 @@ use std::cell::RefCell; use std::collections::HashSet; use std::rc::Rc; +use std::fmt; +use std::num::NonZeroU64; use rustc::ty::{self, layout::Size}; use rustc::hir::{Mutability, MutMutable, MutImmutable}; @@ -8,120 +10,163 @@ use crate::{ EvalResult, InterpError, MiriEvalContext, HelpersEvalContextExt, Evaluator, MutValueVisitor, - MemoryKind, MiriMemoryKind, RangeMap, AllocId, Allocation, AllocationExtra, + MemoryKind, MiriMemoryKind, RangeMap, Allocation, AllocationExtra, Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy, }; -pub type Timestamp = u64; +pub type PtrId = NonZeroU64; pub type CallId = u64; -/// Information about which kind of borrow was used to create the reference this is tagged with. +/// Tracking pointer provenance #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub enum Borrow { - /// A unique (mutable) reference. - Uniq(Timestamp), - /// An aliasing reference. 
This is also used by raw pointers, which do not track details - /// of how or when they were created, hence the timestamp is optional. - /// `Shr(Some(_))` does *not* mean that the destination of this reference is frozen; - /// that depends on the type! Only those parts outside of an `UnsafeCell` are actually - /// frozen. - Alias(Option<Timestamp>), +pub enum Tag { + Tagged(PtrId), + Untagged, } -impl Borrow { - #[inline(always)] - pub fn is_aliasing(self) -> bool { - match self { - Borrow::Alias(_) => true, - _ => false, - } - } - - #[inline(always)] - pub fn is_unique(self) -> bool { +impl fmt::Display for Tag { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Borrow::Uniq(_) => true, - _ => false, + Tag::Tagged(id) => write!(f, "{}", id), + Tag::Untagged => write!(f, ""), } } } -impl Default for Borrow { - fn default() -> Self { - Borrow::Alias(None) - } +/// Indicates which permission is granted (by this item to some pointers) +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum Permission { + /// Grants unique mutable access. + Unique, + /// Grants shared mutable access. + SharedReadWrite, + /// Grants shared read-only access. + SharedReadOnly, } /// An item in the per-location borrow stack. #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub enum BorStackItem { - /// Indicates the unique reference that may mutate. - Uniq(Timestamp), - /// Indicates that the location has been mutably shared. Used for raw pointers as - /// well as for unfrozen shared references. - Raw, +pub enum Item { + /// Grants the given permission for pointers with this tag. + Permission(Permission, Tag), /// A barrier, tracking the function it belongs to by its index on the call stack. - FnBarrier(CallId) + FnBarrier(CallId), +} + +impl fmt::Display for Item { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Item::Permission(perm, tag) => write!(f, "[{:?} for {}]", perm, tag), + Item::FnBarrier(call) => write!(f, "[barrier {}]", call), + } + } } /// Extra per-location state. #[derive(Clone, Debug, PartialEq, Eq)] pub struct Stack { - /// Used as the stack; never empty. - borrows: Vec<BorStackItem>, - /// A virtual frozen "item" on top of the stack. - frozen_since: Option<Timestamp>, + /// Used *mostly* as a stack; never empty. + /// We sometimes push into the middle but never remove from the middle. + /// The same tag may occur multiple times, e.g. from a two-phase borrow. + /// Invariants: + /// * Above a `SharedReadOnly` there can only be barriers and more `SharedReadOnly`. borrows: Vec<Item>, } -impl Stack { - #[inline(always)] - pub fn is_frozen(&self) -> bool { - self.frozen_since.is_some() - } + +/// Extra per-allocation state. +#[derive(Clone, Debug)] +pub struct Stacks { + // Even reading memory can have effects on the stack, so we need a `RefCell` here. + stacks: RefCell<RangeMap<Stack>>, + // Pointer to global state + global: MemoryState, } -/// Indicates which kind of reference is being used. -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub enum RefKind { - /// `&mut`. - Unique, - /// `&` without interior mutability. - Frozen, - /// `*` (raw pointer) or `&` to `UnsafeCell`. - Raw, +/// Extra global state, available to the memory access hooks. +#[derive(Debug)] +pub struct GlobalState { + next_ptr_id: PtrId, + next_call_id: CallId, + active_calls: HashSet<CallId>, } +pub type MemoryState = Rc<RefCell<GlobalState>>; /// Indicates which kind of access is being performed.
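/// Deallocation is folded into `Write` below: like an ordinary write, freeing memory must invalidate all read-only pointers, and it additionally has to respect barriers (see `access`).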
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub enum AccessKind { Read, - Write, - Dealloc, + Write { dealloc: bool }, } -/// Extra global state in the memory, available to the memory access hooks. -#[derive(Debug)] -pub struct BarrierTracking { - next_id: CallId, - active_calls: HashSet, +// "Fake" constructors +impl AccessKind { + fn write() -> AccessKind { + AccessKind::Write { dealloc: false } + } + + fn dealloc() -> AccessKind { + AccessKind::Write { dealloc: true } + } +} + +impl fmt::Display for AccessKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + AccessKind::Read => write!(f, "read"), + AccessKind::Write { dealloc: false } => write!(f, "write"), + AccessKind::Write { dealloc: true } => write!(f, "deallocation"), + } + } +} + +/// Indicates which kind of reference is being created. +/// Used by `reborrow` to compute which permissions to grant to the +/// new pointer. +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum RefKind { + /// `&mut`. + Mutable, + /// `&` with or without interior mutability. + Shared { frozen: bool }, + /// `*` (raw pointer). + Raw, +} + +impl fmt::Display for RefKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + RefKind::Mutable => write!(f, "mutable"), + RefKind::Shared { frozen: true } => write!(f, "shared (frozen)"), + RefKind::Shared { frozen: false } => write!(f, "shared (mutable)"), + RefKind::Raw => write!(f, "raw"), + } + } } -pub type MemoryState = Rc>; -impl Default for BarrierTracking { +/// Utilities for initialization and ID generation +impl Default for GlobalState { fn default() -> Self { - BarrierTracking { - next_id: 0, + GlobalState { + next_ptr_id: NonZeroU64::new(1).unwrap(), + next_call_id: 0, active_calls: HashSet::default(), } } } -impl BarrierTracking { +impl GlobalState { + pub fn new_ptr(&mut self) -> PtrId { + let id = self.next_ptr_id; + self.next_ptr_id = NonZeroU64::new(id.get() + 1).unwrap(); + id + } + pub fn new_call(&mut self) -> CallId { - let id = self.next_id; + let id = self.next_call_id; trace!("new_call: Assigning ID {}", id); self.active_calls.insert(id); - self.next_id += 1; + self.next_call_id = id+1; id } @@ -134,272 +179,354 @@ fn is_active(&self, id: CallId) -> bool { } } -/// Extra global machine state. -#[derive(Clone, Debug)] -pub struct State { - clock: Timestamp -} - -impl Default for State { - fn default() -> Self { - State { clock: 0 } - } -} +// # Stacked Borrows Core Begin -impl State { - fn increment_clock(&mut self) -> Timestamp { - let val = self.clock; - self.clock = val + 1; - val - } -} - -/// Extra per-allocation state. -#[derive(Clone, Debug)] -pub struct Stacks { - // Even reading memory can have effects on the stack, so we need a `RefCell` here. - stacks: RefCell>, - barrier_tracking: MemoryState, -} - -/// Core per-location operations: deref, access, create. /// We need to make at least the following things true: /// /// U1: After creating a `Uniq`, it is at the top (and unfrozen). /// U2: If the top is `Uniq` (and unfrozen), accesses must be through that `Uniq` or pop it. -/// U3: If an access (deref sufficient?) happens with a `Uniq`, it requires the `Uniq` to be in the stack. +/// U3: If an access happens with a `Uniq`, it requires the `Uniq` to be in the stack. /// /// F1: After creating a `&`, the parts outside `UnsafeCell` are frozen. /// F2: If a write access happens, it unfreezes. 
-/// F3: If an access (well, a deref) happens with an `&` outside `UnsafeCell`, +/// F3: If an access happens with an `&` outside `UnsafeCell`, /// it requires the location to still be frozen. -impl<'tcx> Stack { - /// Deref `bor`: check if the location is frozen and the tag in the stack. - /// This dos *not* constitute an access! "Deref" refers to the `*` operator - /// in Rust, and includs cases like `&*x` or `(*x).foo` where no or only part - /// of the memory actually gets accessed. Also we cannot know if we are - /// going to read or write. - /// Returns the index of the item we matched, `None` if it was the frozen one. - /// `kind` indicates which kind of reference is being dereferenced. - fn deref( - &self, - bor: Borrow, - kind: RefKind, - ) -> Result, String> { - // Exclude unique ref with frozen tag. - if let (RefKind::Unique, Borrow::Alias(Some(_))) = (kind, bor) { - return Err(format!("encountered mutable reference with frozen tag ({:?})", bor)); + +impl Default for Tag { + #[inline(always)] + fn default() -> Tag { + Tag::Untagged + } +} + +/// Core relations on `Permission` define which accesses are allowed: +/// On every access, we try to find a *granting* item, and then we remove all +/// *incompatible* items above it. +impl Permission { + /// This defines for a given permission, whether it permits the given kind of access. + fn grants(self, access: AccessKind) -> bool { + match (self, access) { + // Unique and SharedReadWrite allow any kind of access. + (Permission::Unique, _) | + (Permission::SharedReadWrite, _) => + true, + // SharedReadOnly only permits read access. + (Permission::SharedReadOnly, AccessKind::Read) => + true, + (Permission::SharedReadOnly, AccessKind::Write { .. }) => + false, } - // Checks related to freezing. - match bor { - Borrow::Alias(Some(bor_t)) if kind == RefKind::Frozen => { - // We need the location to be frozen. This ensures F3. - let frozen = self.frozen_since.map_or(false, |itm_t| itm_t <= bor_t); - return if frozen { Ok(None) } else { - Err(format!("location is not frozen long enough")) - } - } - Borrow::Alias(_) if self.frozen_since.is_some() => { - // Shared deref to frozen location; looking good. - return Ok(None) - } - // Not sufficient; go on looking. - _ => {} + } + + /// This defines for a given permission, which other items it can tolerate "above" itself + /// for which kinds of accesses. + /// If true, then `other` is allowed to remain on top of `self` when `access` happens. + fn compatible_with(self, access: AccessKind, other: Item) -> bool { + use self::Permission::*; + + let other = match other { + Item::Permission(perm, _) => perm, + Item::FnBarrier(_) => return false, // Remove all barriers -- if they are active, cause UB. + }; + + match (self, access, other) { + // Some cases are impossible. + (SharedReadOnly, _, SharedReadWrite) | + (SharedReadOnly, _, Unique) => + bug!("There can never be a SharedReadWrite or a Unique on top of a SharedReadOnly"), + // When `other` is `SharedReadOnly`, that is NEVER compatible with + // write accesses. + // This makes sure read-only pointers become invalid on write accesses. + (_, AccessKind::Write { .. }, SharedReadOnly) => + false, + // When `other` is `Unique`, that is compatible with nothing. + // This makes sure unique pointers become invalid on incompatible accesses (ensures U2). + (_, _, Unique) => + false, + // When we are unique and this is a write/dealloc, we tolerate nothing. + // This makes sure we re-assert uniqueness on write accesses. 
+ // (This is particularly important such that when a new mutable ref gets created, it gets + // pushed into the right item -- this behaves like a write and we assert uniqueness of the + // pointer from which this comes, *if* it was a unique pointer.) + (Unique, AccessKind::Write { .. }, _) => + false, + // `SharedReadWrite` items can tolerate other `SharedReadWrite` items for any kind of access. + (SharedReadWrite, _, SharedReadWrite) => + true, + // Any item can tolerate read accesses for shared items. + // This includes unique items! Reads from unique pointers do not invalidate + // other pointers. + (_, AccessKind::Read, SharedReadWrite) | + (_, AccessKind::Read, SharedReadOnly) => + true, + // That's it. } - // If we got here, we have to look for our item in the stack. - for (idx, &itm) in self.borrows.iter().enumerate().rev() { - match (itm, bor) { - (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. This satisfies U3. - return Ok(Some(idx)) - } - (BorStackItem::Raw, Borrow::Alias(_)) => { - // Found matching aliasing/raw item. - return Ok(Some(idx)) - } - // Go on looking. We ignore barriers! When an `&mut` and an `&` alias, - // dereferencing the `&` is still possible (to reborrow), but doing - // an access is not. - _ => {} - } + } +} + +impl<'tcx> RefKind { + /// Defines which kind of access the "parent" must grant to create this reference. + fn access(self) -> AccessKind { + match self { + RefKind::Mutable | RefKind::Shared { frozen: false } => AccessKind::write(), + RefKind::Raw | RefKind::Shared { frozen: true } => AccessKind::Read, + // FIXME: Just requiring read-only access for raw means that a raw ptr might not be writeable + // even when we think it should be! Think about this some more. } - // If we got here, we did not find our item. We have to error to satisfy U3. - Err(format!("Borrow being dereferenced ({:?}) does not exist on the borrow stack", bor)) } - /// Performs an actual memory access using `bor`. We do not know any types here - /// or whether things should be frozen, but we *do* know if this is reading - /// or writing. + /// This defines the new permission used when a pointer gets created: For raw pointers, whether these are read-only + /// or read-write depends on the permission from which they derive. + fn new_perm(self, derived_from: Permission) -> EvalResult<'tcx, Permission> { + Ok(match (self, derived_from) { + // Do not derive writable safe pointer from read-only pointer! + (RefKind::Mutable, Permission::SharedReadOnly) => + return err!(MachineError(format!( + "deriving mutable reference from read-only pointer" + ))), + (RefKind::Shared { frozen: false }, Permission::SharedReadOnly) => + return err!(MachineError(format!( + "deriving shared reference with interior mutability from read-only pointer" + ))), + // Safe pointer cases. + (RefKind::Mutable, _) => Permission::Unique, + (RefKind::Shared { frozen: true }, _) => Permission::SharedReadOnly, + (RefKind::Shared { frozen: false }, _) => Permission::SharedReadWrite, + // Raw pointer cases. + (RefKind::Raw, Permission::SharedReadOnly) => Permission::SharedReadOnly, + (RefKind::Raw, _) => Permission::SharedReadWrite, + }) + } +} + +/// Core per-location operations: access, create. +impl<'tcx> Stack { + /// Find the item granting the given kind of access to the given tag, and where that item is in the stack.
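/// For example, take the stack `[Unique(t1), SharedReadWrite(t2), SharedReadOnly(t3)]` (shorthand, not real syntax: `Unique(t1)` stands for `Item::Permission(Permission::Unique, t1)`). A read with tag `t3` is granted by the top item, while a write with tag `t3` finds no granting item at all, since `SharedReadOnly` never grants write access.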
+ fn find_granting(&self, access: AccessKind, tag: Tag) -> Option<(usize, Permission)> { + self.borrows.iter() + .enumerate() // we also need to know *where* in the stack + .rev() // search top-to-bottom + // Return permission of first item that grants access. + .filter_map(|(idx, item)| match item { + &Item::Permission(perm, item_tag) if perm.grants(access) && tag == item_tag => + Some((idx, perm)), + _ => None, + }) + .next() + } + + /// Test if a memory `access` using pointer tagged `tag` is granted. + /// If yes, return the index of the item that granted it. fn access( &mut self, - bor: Borrow, - kind: AccessKind, - barrier_tracking: &BarrierTracking, - ) -> EvalResult<'tcx> { - // Check if we can match the frozen "item". - // Not possible on writes! - if self.is_frozen() { - if kind == AccessKind::Read { - // When we are frozen, we just accept all reads. No harm in this. - // The deref already checked that `Uniq` items are in the stack, and that - // the location is frozen if it should be. - return Ok(()); - } - trace!("access: unfreezing"); - } - // Unfreeze on writes. This ensures F2. - self.frozen_since = None; - // Pop the stack until we have something matching. - while let Some(&itm) = self.borrows.last() { - match (itm, bor) { - (BorStackItem::FnBarrier(call), _) if barrier_tracking.is_active(call) => { - return err!(MachineError(format!( - "stopping looking for borrow being accessed ({:?}) because of barrier ({})", - bor, call - ))) - } - (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. Continue after the match. - } - (BorStackItem::Raw, _) if kind == AccessKind::Read => { - // When reading, everything can use a raw item! - // We do not want to do this when writing: Writing to an `&mut` - // should reaffirm its exclusivity (i.e., make sure it is - // on top of the stack). Continue after the match. - } - (BorStackItem::Raw, Borrow::Alias(_)) => { - // Found matching raw item. Continue after the match. - } - _ => { - // Pop this, go on. This ensures U2. - let itm = self.borrows.pop().unwrap(); - trace!("access: Popping {:?}", itm); - continue - } - } - // If we got here, we found a matching item. Congratulations! - // However, we are not done yet: If this access is deallocating, we must make sure - // there are no active barriers remaining on the stack. - if kind == AccessKind::Dealloc { - for &itm in self.borrows.iter().rev() { - match itm { - BorStackItem::FnBarrier(call) if barrier_tracking.is_active(call) => { + access: AccessKind, + tag: Tag, + global: &GlobalState, + ) -> EvalResult<'tcx, usize> { + // Two main steps: Find granting item, remove all incompatible items above. + // Afterwards we just do some post-processing for deallocation accesses. + + // Step 1: Find granting item. + let (granting_idx, granting_perm) = self.find_granting(access, tag) + .ok_or_else(|| InterpError::MachineError(format!( + "no item granting {} access to tag {} found in borrow stack", + access, tag, + )))?; + + // Step 2: Remove everything incompatible above them. + // Implemented with indices because there does not seem to be a nice iterator and range-based + // API for this. + { + let mut cur = granting_idx + 1; + while let Some(item) = self.borrows.get(cur) { + if granting_perm.compatible_with(access, *item) { + // Keep this, check next. + cur += 1; + } else { + // Aha! This is a bad one, remove it, and if it is an *active* barrier + // we have a problem. 
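+ // (A barrier whose call has already returned is no longer active; it is removed just like any other incompatible item, without raising an error.)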
+ match self.borrows.remove(cur) { Item::FnBarrier(call) if global.is_active(call) => { return err!(MachineError(format!( - "deallocating with active barrier ({})", call - ))) + "not granting access because of barrier ({})", call + ))); } - _ => {}, + _ => {} } } } - // Now we are done. - return Ok(()) } - // If we got here, we did not find our item. - err!(MachineError(format!( - "borrow being accessed ({:?}) does not exist on the borrow stack", - bor - ))) - } - - /// Initiate `bor`; mostly this means pushing. - /// This operation cannot fail; it is up to the caller to ensure that the precondition - /// is met: We cannot push `Uniq` onto frozen stacks. - /// `kind` indicates which kind of reference is being created. - fn create(&mut self, bor: Borrow, kind: RefKind) { - // When creating a frozen reference, freeze. This ensures F1. - // We also do *not* push anything else to the stack, making sure that no nother kind - // of access (like writing through raw pointers) is permitted. - if kind == RefKind::Frozen { - let bor_t = match bor { - Borrow::Alias(Some(t)) => t, - _ => bug!("Creating illegal borrow {:?} for frozen ref", bor), - }; - // It is possible that we already are frozen (e.g., if we just pushed a barrier, - // the redundancy check would not have kicked in). - match self.frozen_since { - Some(loc_t) => assert!( - loc_t <= bor_t, - "trying to freeze location for longer than it was already frozen" - ), - None => { - trace!("create: Freezing"); - self.frozen_since = Some(bor_t); + + // Post-processing. + // If we got here, we found a matching item. Congratulations! + // However, we are not done yet: If this access is deallocating, we must make sure + // there are no active barriers remaining on the stack. + if access == AccessKind::dealloc() { + for &itm in self.borrows.iter().rev() { + match itm { + Item::FnBarrier(call) if global.is_active(call) => { + return err!(MachineError(format!( + "deallocating with active barrier ({})", call + ))) + } + _ => {}, } } - return; } - assert!( - self.frozen_since.is_none(), - "trying to create non-frozen reference to frozen location" - ); - // Push new item to the stack. - let itm = match bor { - Borrow::Uniq(t) => BorStackItem::Uniq(t), - Borrow::Alias(_) => BorStackItem::Raw, - }; - if *self.borrows.last().unwrap() == itm { - // This is just an optimization, no functional change: Avoid stacking - // multiple `Shr` on top of each other. - assert!(bor.is_aliasing()); - trace!("create: sharing a shared location is a NOP"); - } else { - // This ensures U1. - trace!("create: pushing {:?}", itm); - self.borrows.push(itm); + // Done. + return Ok(granting_idx); + } + + /// `reborrow` helper function. + /// Grant `permission` to new pointer tagged `tag`, added at `position` in the stack. + fn grant(&mut self, perm: Permission, tag: Tag, position: usize) { + // Simply add it to the "stack" -- this might add in the middle. + // As an optimization, do nothing if the new item is identical to one of its neighbors. + let item = Item::Permission(perm, tag); + if self.borrows[position-1] == item || self.borrows.get(position) == Some(&item) { + // Optimization applies, done. + trace!("reborrow: avoiding redundant item {}", item); + return; + } + trace!("reborrow: pushing item {}", item); + self.borrows.insert(position, item); } + /// `reborrow` helper function. /// Adds a barrier.
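/// While the barrier's call is active, any access that would have to remove the barrier, i.e. any access whose granting item sits below it, is an error; this is how a function can rely on its reference arguments not being invalidated through other pointers for the duration of the call.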
fn barrier(&mut self, call: CallId) { - let itm = BorStackItem::FnBarrier(call); + let itm = Item::FnBarrier(call); if *self.borrows.last().unwrap() == itm { // This is just an optimization, no functional change: Avoid stacking // multiple identical barriers on top of each other. // This can happen when a function receives several shared references // that overlap. - trace!("barrier: avoiding redundant extra barrier"); + trace!("reborrow: avoiding redundant extra barrier"); } else { - trace!("barrier: pushing barrier for call {}", call); + trace!("reborrow: pushing barrier for call {}", call); self.borrows.push(itm); } } + + /// `reborrow` helper function: test that the stack invariants are still maintained. + fn test_invariants(&self) { + let mut saw_shared_read_only = false; + for item in self.borrows.iter() { + match item { + Item::Permission(Permission::SharedReadOnly, _) => { + saw_shared_read_only = true; + } + Item::Permission(perm, _) if saw_shared_read_only => { + panic!("Found {:?} on top of a SharedReadOnly!", perm); + } + _ => {} + } + } + } + + /// Derives a new pointer from one with the given tag. + fn reborrow( + &mut self, + derived_from: Tag, + barrier: Option<CallId>, + new_kind: RefKind, + new_tag: Tag, + global: &GlobalState, + ) -> EvalResult<'tcx> { + // Find the permission "from which we derive". To this end we first have to decide + // if we derive from a permission that grants writes or just reads. + let access = new_kind.access(); + let (derived_from_idx, derived_from_perm) = self.find_granting(access, derived_from) + .ok_or_else(|| InterpError::MachineError(format!( + "no item to reborrow as {} from tag {} found in borrow stack", new_kind, derived_from, + )))?; + // With this we can compute the permission for the new pointer. + let new_perm = new_kind.new_perm(derived_from_perm)?; + + // We behave very differently for the "unsafe" case of a shared-read-write pointer + // ("unsafe" because this also applies to shared references with interior mutability). + // This is because such pointers may be reborrowed to unique pointers that actually + // remain valid when their "parents" get further reborrows! + if new_perm == Permission::SharedReadWrite { + // A very liberal reborrow because the new pointer does not expect any kind of aliasing guarantee. + // Just insert new permission as child of old permission, and maintain everything else. + // This inserts "as far down as possible", which is good because it makes this pointer as + // long-lived as possible *and* we want all the items that are incompatible with this + // to actually get removed from the stack. If we pushed a `SharedReadWrite` on top of + // a `SharedReadOnly`, we'd violate the invariant that `SharedReadOnly` items are at the top + // and we'd allow write access without invalidating frozen shared references! + self.grant(new_perm, new_tag, derived_from_idx+1); + + // No barrier. They can rightfully alias with `&mut`. + // FIXME: This means that the `dereferenceable` attribute on non-frozen shared references + // is incorrect! They are dereferenceable when the function is called, but might become + // non-dereferenceable during the course of execution. + // Also see [1], [2]. + // + // [1]: , + // [2]: + } else { + // A "safe" reborrow for a pointer that actually expects some aliasing guarantees. + // Here, creating a reference actually counts as an access, and pops incompatible + // stuff off the stack.
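+ // For example, reborrowing `&mut` from `&mut` acts like a write through the parent pointer: everything above the parent's `Unique` item gets popped, and the new `Unique` item then ends up at the top of the stack.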
+ let check_idx = self.access(access, derived_from, global)?; + assert_eq!(check_idx, derived_from_idx, "somehow we saw different items??"); + + // Now is a good time to add the barrier. + if let Some(call) = barrier { + self.barrier(call); + } + + // We insert "as far up as possible": We know only compatible items are remaining + // on top of `derived_from`, and we want the new item at the top so that we + // get the strongest possible guarantees. + self.grant(new_perm, new_tag, self.borrows.len()); + } + + // Make sure that after all this, the stack's invariant is still maintained. + if cfg!(debug_assertions) { + self.test_invariants(); + } + + Ok(()) + } } /// Higher-level per-location operations: deref, access, reborrow. impl<'tcx> Stacks { - /// Checks that this stack is fine with being dereferenced. - fn deref( - &self, - ptr: Pointer, + /// Creates new stack with initial tag. + pub(crate) fn new( size: Size, - kind: RefKind, - ) -> EvalResult<'tcx> { - trace!("deref for tag {:?} as {:?}: {:?}, size {}", - ptr.tag, kind, ptr, size.bytes()); - let stacks = self.stacks.borrow(); - for stack in stacks.iter(ptr.offset, size) { - stack.deref(ptr.tag, kind).map_err(InterpError::MachineError)?; + tag: Tag, + extra: MemoryState, + ) -> Self { + let item = Item::Permission(Permission::Unique, tag); + let stack = Stack { + borrows: vec![item], + }; + Stacks { + stacks: RefCell::new(RangeMap::new(size, stack)), + global: extra, } - Ok(()) } /// `ptr` got used, reflect that in the stack. fn access( &self, - ptr: Pointer, + ptr: Pointer, size: Size, kind: AccessKind, ) -> EvalResult<'tcx> { - trace!("{:?} access of tag {:?}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes()); + trace!("{} access of tag {}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes()); // Even reads can have a side-effect, by invalidating other references. // This is fundamentally necessary since `&mut` asserts that there // are no accesses through other references, not even reads. - let barrier_tracking = self.barrier_tracking.borrow(); + let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.access(ptr.tag, kind, &*barrier_tracking)?; + stack.access(kind, ptr.tag, &*global)?; } Ok(()) } @@ -408,86 +535,61 @@ fn access( /// This works on `&self` because we might encounter references to constant memory. fn reborrow( &self, - ptr: Pointer, + ptr: Pointer, size: Size, - mut barrier: Option, - new_bor: Borrow, + barrier: Option, new_kind: RefKind, + new_tag: Tag, ) -> EvalResult<'tcx> { - assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique); trace!( - "reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}", - ptr.tag, new_bor, new_kind, ptr, size.bytes(), + "{} reborrow for tag {} to {}: {:?}, size {}", + new_kind, ptr.tag, new_tag, ptr, size.bytes(), ); - if new_kind == RefKind::Raw { - // No barrier for raw, including `&UnsafeCell`. They can rightfully alias with `&mut`. - // FIXME: This means that the `dereferencable` attribute on non-frozen shared references - // is incorrect! They are dereferencable when the function is called, but might become - // non-dereferencable during the course of execution. - // Also see [1], [2]. - // - // [1]: , - // [2]: - barrier = None; - } - let barrier_tracking = self.barrier_tracking.borrow(); + let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - // Access source `ptr`, create new ref. 
- let ptr_idx = stack.deref(ptr.tag, new_kind).map_err(InterpError::MachineError)?; - // If we can deref the new tag already, and if that tag lives higher on - // the stack than the one we come from, just use that. - // That is, we check if `new_bor` *already* is "derived from" `ptr.tag`. - // This also checks frozenness, if required. - let bor_redundant = barrier.is_none() && - match (ptr_idx, stack.deref(new_bor, new_kind)) { - // If the new borrow works with the frozen item, or else if it lives - // above the old one in the stack, our job here is done. - (_, Ok(None)) => true, - (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true, - // Otherwise, we need to create a new borrow. - _ => false, - }; - if bor_redundant { - assert!(new_bor.is_aliasing(), "a unique reborrow can never be redundant"); - trace!("reborrow is redundant"); - continue; - } - // We need to do some actual work. - let access_kind = if new_kind == RefKind::Unique { - AccessKind::Write - } else { - AccessKind::Read - }; - stack.access(ptr.tag, access_kind, &*barrier_tracking)?; - if let Some(call) = barrier { - stack.barrier(call); - } - stack.create(new_bor, new_kind); + stack.reborrow(ptr.tag, barrier, new_kind, new_tag, &*global)?; } Ok(()) } } -/// Hooks and glue. -impl AllocationExtra for Stacks { - #[inline(always)] - fn memory_allocated<'tcx>(size: Size, extra: &MemoryState) -> Self { - let stack = Stack { - borrows: vec![BorStackItem::Raw], - frozen_since: None, +// # Stacked Borrows Core End + +// Glue code to connect with Miri Machine Hooks + +impl Stacks { + pub fn new_allocation( + size: Size, + extra: &MemoryState, + kind: MemoryKind, + ) -> (Self, Tag) { + let tag = match kind { + MemoryKind::Stack => { + // New unique borrow. This `Uniq` is not accessible by the program, + // so it will only ever be used when using the local directly (i.e., + // not through a pointer). That is, whenever we directly use a local, this will pop + // everything else off the stack, invalidating all previous pointers, + // and in particular, *all* raw pointers. This subsumes the explicit + // `reset` which the blog post [1] says to perform when accessing a local. + // + // [1]: + Tag::Tagged(extra.borrow_mut().new_ptr()) + } + _ => { + Tag::Untagged + } }; - Stacks { - stacks: RefCell::new(RangeMap::new(size, stack)), - barrier_tracking: Rc::clone(extra), - } + let stack = Stacks::new(size, tag, Rc::clone(extra)); + (stack, tag) } +} +impl AllocationExtra for Stacks { #[inline(always)] fn memory_read<'tcx>( - alloc: &Allocation, - ptr: Pointer, + alloc: &Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { alloc.extra.access(ptr, size, AccessKind::Read) @@ -495,35 +597,20 @@ fn memory_read<'tcx>( #[inline(always)] fn memory_written<'tcx>( - alloc: &mut Allocation, - ptr: Pointer, + alloc: &mut Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Write) + alloc.extra.access(ptr, size, AccessKind::write()) } #[inline(always)] fn memory_deallocated<'tcx>( - alloc: &mut Allocation, - ptr: Pointer, + alloc: &mut Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Dealloc) - } -} - -impl<'tcx> Stacks { - /// Pushes the first item to the stacks. 
- pub(crate) fn first_item( - &mut self, - itm: BorStackItem, - size: Size - ) { - for stack in self.stacks.get_mut().iter_mut(Size::ZERO, size) { - assert!(stack.borrows.len() == 1); - assert_eq!(stack.borrows.pop().unwrap(), BorStackItem::Raw); - stack.borrows.push(itm); - } + alloc.extra.access(ptr, size, AccessKind::dealloc()) } } @@ -531,31 +618,32 @@ impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalConte trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> { fn reborrow( &mut self, - place: MPlaceTy<'tcx, Borrow>, + place: MPlaceTy<'tcx, Tag>, size: Size, + mutbl: Option, + new_tag: Tag, fn_barrier: bool, - new_bor: Borrow ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); - let ptr = place.ptr.to_ptr()?; let barrier = if fn_barrier { Some(this.frame().extra) } else { None }; + let ptr = place.ptr.to_ptr()?; trace!("reborrow: creating new reference for {:?} (pointee {}): {:?}", - ptr, place.layout.ty, new_bor); + ptr, place.layout.ty, new_tag); // Get the allocation. It might not be mutable, so we cannot use `get_mut`. let alloc = this.memory().get(ptr.alloc_id)?; alloc.check_bounds(this, ptr, size)?; // Update the stacks. - if let Borrow::Alias(Some(_)) = new_bor { + if mutbl == Some(MutImmutable) { // Reference that cares about freezing. We need a frozen-sensitive reborrow. this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { - let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; - alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind) + let new_kind = RefKind::Shared { frozen }; + alloc.extra.reborrow(cur_ptr, size, barrier, new_kind, new_tag) })?; } else { // Just treat this as one big chunk. - let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw }; - alloc.extra.reborrow(ptr, size, barrier, new_bor, kind)?; + let new_kind = if mutbl == Some(MutMutable) { RefKind::Mutable } else { RefKind::Raw }; + alloc.extra.reborrow(ptr, size, barrier, new_kind, new_tag)?; } Ok(()) } @@ -564,11 +652,11 @@ fn reborrow( /// `mutbl` can be `None` to make this a raw pointer. fn retag_reference( &mut self, - val: ImmTy<'tcx, Borrow>, + val: ImmTy<'tcx, Tag>, mutbl: Option, fn_barrier: bool, two_phase: bool, - ) -> EvalResult<'tcx, Immediate> { + ) -> EvalResult<'tcx, Immediate> { let this = self.eval_context_mut(); // We want a place for where the ptr *points to*, so we get one. let place = this.ref_to_mplace(val)?; @@ -581,23 +669,24 @@ fn retag_reference( } // Compute new borrow. - let time = this.machine.stacked_borrows.increment_clock(); - let new_bor = match mutbl { - Some(MutMutable) => Borrow::Uniq(time), - Some(MutImmutable) => Borrow::Alias(Some(time)), - None => Borrow::default(), + let new_tag = match mutbl { + Some(_) => Tag::Tagged(this.memory().extra.borrow_mut().new_ptr()), + None => Tag::Untagged, }; // Reborrow. - this.reborrow(place, size, fn_barrier, new_bor)?; - let new_place = place.with_tag(new_bor); + this.reborrow(place, size, mutbl, new_tag, fn_barrier)?; + let new_place = place.replace_tag(new_tag); // Handle two-phase borrows. if two_phase { assert!(mutbl == Some(MutMutable), "two-phase shared borrows make no sense"); - // We immediately share it, to allow read accesses - let two_phase_time = this.machine.stacked_borrows.increment_clock(); - let two_phase_bor = Borrow::Alias(Some(two_phase_time)); - this.reborrow(new_place, size, false /* fn_barrier */, two_phase_bor)?; + // Grant read access *to the parent pointer* with the old tag. 
This means the same pointer + // has multiple items in the stack now! + // FIXME: Think about this some more, in particular about the interaction with cast-to-raw. + // Maybe find a better way to express 2-phase, now that we have a "more expressive language" + // in the stack. + let old_tag = place.ptr.to_ptr().unwrap().tag; + this.reborrow(new_place, size, Some(MutImmutable), old_tag, /* fn_barrier: */ false)?; } // Return new pointer. @@ -607,90 +696,10 @@ fn retag_reference( impl<'a, 'mir, 'tcx> EvalContextExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {} pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> { - fn tag_new_allocation( - &mut self, - id: AllocId, - kind: MemoryKind, - ) -> Borrow { - let this = self.eval_context_mut(); - let time = match kind { - MemoryKind::Stack => { - // New unique borrow. This `Uniq` is not accessible by the program, - // so it will only ever be used when using the local directly (i.e., - // not through a pointer). That is, whenever we directly use a local, this will pop - // everything else off the stack, invalidating all previous pointers, - // and in particular, *all* raw pointers. This subsumes the explicit - // `reset` which the blog post [1] says to perform when accessing a local. - // - // [1]: - this.machine.stacked_borrows.increment_clock() - } - _ => { - // Nothing to do for everything else. - return Borrow::default() - } - }; - // Make this the active borrow for this allocation. - let alloc = this - .memory_mut() - .get_mut(id) - .expect("this is a new allocation; it must still exist"); - let size = Size::from_bytes(alloc.bytes.len() as u64); - alloc.extra.first_item(BorStackItem::Uniq(time), size); - Borrow::Uniq(time) - } - - /// Called for value-to-place conversion. `mutability` is `None` for raw pointers. - /// - /// Note that this does *not* mean that all this memory will actually get accessed/referenced! - /// We could be in the middle of `&(*var).1`. - fn ptr_dereference( - &self, - place: MPlaceTy<'tcx, Borrow>, - size: Size, - mutability: Option, - ) -> EvalResult<'tcx> { - let this = self.eval_context_ref(); - trace!( - "ptr_dereference: Accessing {} reference for {:?} (pointee {})", - if let Some(mutability) = mutability { - format!("{:?}", mutability) - } else { - format!("raw") - }, - place.ptr, place.layout.ty - ); - let ptr = place.ptr.to_ptr()?; - if mutability.is_none() { - // No further checks on raw derefs -- only the access itself will be checked. - return Ok(()); - } - - // Get the allocation - let alloc = this.memory().get(ptr.alloc_id)?; - alloc.check_bounds(this, ptr, size)?; - // If we got here, we do some checking, *but* we leave the tag unchanged. - if let Borrow::Alias(Some(_)) = ptr.tag { - assert_eq!(mutability, Some(MutImmutable)); - // We need a frozen-sensitive check. - this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { - let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; - alloc.extra.deref(cur_ptr, size, kind) - })?; - } else { - // Just treat this as one big chunk. - let kind = if mutability == Some(MutMutable) { RefKind::Unique } else { RefKind::Raw }; - alloc.extra.deref(ptr, size, kind)?; - } - - // All is good. - Ok(()) - } - fn retag( &mut self, kind: RetagKind, - place: PlaceTy<'tcx, Borrow> + place: PlaceTy<'tcx, Tag> ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); // Determine mutability and whether to add a barrier. 
@@ -734,7 +743,7 @@ impl<'ecx, 'a, 'mir, 'tcx> for RetagVisitor<'ecx, 'a, 'mir, 'tcx> { - type V = MPlaceTy<'tcx, Borrow>; + type V = MPlaceTy<'tcx, Tag>; #[inline(always)] fn ecx(&mut self) -> &mut MiriEvalContext<'a, 'mir, 'tcx> { @@ -742,7 +751,7 @@ fn ecx(&mut self) -> &mut MiriEvalContext<'a, 'mir, 'tcx> { } // Primitives of reference type, that is the one thing we are interested in. - fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { // Cannot use `builtin_deref` because that reports *immutable* for `Box`, // making it useless. diff --git a/src/tls.rs b/src/tls.rs index 992e4fd0561..9346fba0dcc 100644 --- a/src/tls.rs +++ b/src/tls.rs @@ -5,14 +5,14 @@ use crate::{ EvalResult, InterpError, StackPopCleanup, - MPlaceTy, Scalar, Borrow, + MPlaceTy, Scalar, Tag, }; pub type TlsKey = u128; #[derive(Copy, Clone, Debug)] pub struct TlsEntry<'tcx> { - pub(crate) data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread. + pub(crate) data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread. pub(crate) dtor: Option>, } @@ -63,7 +63,7 @@ pub fn delete_tls_key(&mut self, key: TlsKey) -> EvalResult<'tcx> { } } - pub fn load_tls(&mut self, key: TlsKey) -> EvalResult<'tcx, Scalar> { + pub fn load_tls(&mut self, key: TlsKey) -> EvalResult<'tcx, Scalar> { match self.keys.get(&key) { Some(&TlsEntry { data, .. }) => { trace!("TLS key {} loaded: {:?}", key, data); @@ -73,7 +73,7 @@ pub fn load_tls(&mut self, key: TlsKey) -> EvalResult<'tcx, Scalar> { } } - pub fn store_tls(&mut self, key: TlsKey, new_data: Scalar) -> EvalResult<'tcx> { + pub fn store_tls(&mut self, key: TlsKey, new_data: Scalar) -> EvalResult<'tcx> { match self.keys.get_mut(&key) { Some(&mut TlsEntry { ref mut data, .. }) => { trace!("TLS key {} stored: {:?}", key, new_data); @@ -106,7 +106,7 @@ fn fetch_tls_dtor( &mut self, key: Option, cx: &impl HasDataLayout, - ) -> Option<(ty::Instance<'tcx>, Scalar, TlsKey)> { + ) -> Option<(ty::Instance<'tcx>, Scalar, TlsKey)> { use std::collections::Bound::*; let thread_local = &mut self.keys; -- 2.44.0