fn write_null(&mut self, dest: PlaceTy<'tcx, Borrow>) -> EvalResult<'tcx>;
}
-impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx, 'mir> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
+impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx, 'mir> for super::MiriEvalContext<'a, 'mir, 'tcx> {
fn find_fn(
&mut self,
instance: ty::Instance<'tcx>,
use rustc::ty;
use rustc::mir::interpret::{EvalResult, PointerArithmetic};
-use rustc_mir::interpret::{EvalContext, PlaceTy, OpTy};
use super::{
- Value, Scalar, ScalarMaybeUndef, Borrow,
+ PlaceTy, OpTy, Value, Scalar, ScalarMaybeUndef, Borrow,
FalibleScalarExt, OperatorEvalContextExt
};
) -> EvalResult<'tcx>;
}
-impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
+impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> {
fn call_intrinsic(
&mut self,
instance: ty::Instance<'tcx>,
use std::collections::HashMap;
use std::borrow::Cow;
-use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
+use rustc::ty::{self, Ty, TyCtxt, query::TyCtxtAt};
use rustc::ty::layout::{TyLayout, LayoutOf, Size};
use rustc::hir::def_id::DefId;
use rustc::mir;
use range_map::RangeMap;
use helpers::FalibleScalarExt;
use mono_hash_map::MonoHashMap;
-use stacked_borrows::Borrow;
+use stacked_borrows::{EvalContextExt as StackedBorEvalContextExt, Borrow};
pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
}
-#[derive(Clone, PartialEq, Eq)]
pub struct Evaluator<'tcx> {
/// Environment variables set by `setenv`
/// Miri does not expose env vars from the host to the emulated program
/// Whether to enforce the validity invariant
pub(crate) validate: bool,
+
+ /// Stacked Borrows state
+ pub(crate) stacked_borrows: stacked_borrows::State,
}
impl<'tcx> Evaluator<'tcx> {
env_vars: HashMap::default(),
tls: TlsData::default(),
validate,
+ stacked_borrows: stacked_borrows::State::new(),
}
}
}
+#[allow(dead_code)] // FIXME https://github.com/rust-lang/rust/issues/47131
+type MiriEvalContext<'a, 'mir, 'tcx> = EvalContext<'a, 'mir, 'tcx, Evaluator<'tcx>>;
+
+
impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> {
type MemoryKinds = MiriMemoryKind;
- type AllocExtra = ();
+ type AllocExtra = stacked_borrows::Stacks;
type PointerTag = Borrow;
type MemoryMap = MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Borrow, Self::AllocExtra>)>;
}
/// Returns Ok() when the function was handled, fail otherwise
+ #[inline(always)]
fn find_fn(
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
ecx.find_fn(instance, args, dest, ret)
}
+ #[inline(always)]
fn call_intrinsic(
ecx: &mut rustc_mir::interpret::EvalContext<'a, 'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
ecx.call_intrinsic(instance, args, dest)
}
+ #[inline(always)]
fn ptr_op(
ecx: &rustc_mir::interpret::EvalContext<'a, 'mir, 'tcx, Self>,
bin_op: mir::BinOp,
Ok(Cow::Owned(alloc))
}
+ #[inline(always)]
fn before_terminator(_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx>
{
// We are not interested in detecting loops
};
Cow::Owned(alloc)
}
+
+ #[inline(always)]
+ fn tag_reference(
+ ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ptr: Pointer<Self::PointerTag>,
+ pointee_ty: Ty<'tcx>,
+ pointee_size: Size,
+ borrow_kind: mir::BorrowKind,
+ ) -> EvalResult<'tcx, Self::PointerTag> {
+ if !ecx.machine.validate {
+ // No tracking
+ Ok(Borrow::default())
+ } else {
+ ecx.tag_reference(ptr, pointee_ty, pointee_size, borrow_kind)
+ }
+ }
+
+ #[inline(always)]
+ fn tag_dereference(
+ ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
+ ptr: Pointer<Self::PointerTag>,
+ ptr_ty: Ty<'tcx>,
+ ) -> EvalResult<'tcx, Self::PointerTag> {
+ if !ecx.machine.validate {
+ // No tracking
+ Ok(Borrow::default())
+ } else {
+ ecx.tag_dereference(ptr, ptr_ty)
+ }
+ }
}
) -> EvalResult<'tcx, Scalar<Borrow>>;
}
-impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
+impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> {
fn ptr_op(
&self,
bin_op: mir::BinOp,
use std::collections::BTreeMap;
use std::ops;
+use rustc::ty::layout::Size;
+
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RangeMap<T> {
map: BTreeMap<Range, T>,
}
+impl<T> Default for RangeMap<T> {
+    /// The default is the empty map (same as `RangeMap::new`).
+    #[inline(always)]
+    fn default() -> Self {
+        RangeMap::new()
+    }
+}
+
// The derived `Ord` impl sorts first by the first field, then, if the fields are the same,
// by the second field.
// This is exactly what we need for our purposes, since a range query on a BTreeSet/BTreeMap will give us all
// At the same time the `end` is irrelevant for the sorting and range searching, but used for the check.
// This kind of search breaks, if `end < start`, so don't do that!
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
-pub struct Range {
+struct Range {
start: u64,
end: u64, // Invariant: end > start
}
impl Range {
+    /// Compute a range of ranges that contains all ranges overlapping with [offset, offset+len)
fn range(offset: u64, len: u64) -> ops::Range<Range> {
- assert!(len > 0);
+ if len == 0 {
+ // We can produce an empty range, nothing overlaps with this.
+ let r = Range { start: 0, end: 1 };
+ return r..r;
+ }
// We select all elements that are within
// the range given by the offset into the allocation and the length.
// This is sound if all ranges that intersect with the argument range, are in the
left..right
}
- /// Tests if all of [offset, offset+len) are contained in this range.
+ /// Tests if any element of [offset, offset+len) is contained in this range.
+ #[inline(always)]
fn overlaps(&self, offset: u64, len: u64) -> bool {
- assert!(len > 0);
- offset < self.end && offset + len >= self.start
+ if len == 0 {
+ // `offset` totally does not matter, we cannot overlap with an empty interval
+ false
+ } else {
+ offset < self.end && offset.checked_add(len).unwrap() >= self.start
+ }
}
}
impl<T> RangeMap<T> {
+ #[inline(always)]
pub fn new() -> RangeMap<T> {
RangeMap { map: BTreeMap::new() }
}
offset: u64,
len: u64,
) -> impl Iterator<Item = (&'a Range, &'a T)> + 'a {
- assert!(len > 0);
self.map.range(Range::range(offset, len)).filter_map(
- move |(range,
- data)| {
+ move |(range, data)| {
+ debug_assert!(len > 0);
if range.overlaps(offset, len) {
Some((range, data))
} else {
)
}
- pub fn iter<'a>(&'a self, offset: u64, len: u64) -> impl Iterator<Item = &'a T> + 'a {
- self.iter_with_range(offset, len).map(|(_, data)| data)
+ pub fn iter<'a>(&'a self, offset: Size, len: Size) -> impl Iterator<Item = &'a T> + 'a {
+ self.iter_with_range(offset.bytes(), len.bytes()).map(|(_, data)| data)
}
fn split_entry_at(&mut self, offset: u64)
}
}
- pub fn iter_mut_all<'a>(&'a mut self) -> impl Iterator<Item = &'a mut T> + 'a {
- self.map.values_mut()
- }
-
/// Provide mutable iteration over everything in the given range. As a side-effect,
/// this will split entries in the map that are only partially hit by the given range,
/// to make sure that when they are mutated, the effect is constrained to the given range.
+ /// If there are gaps, leave them be.
pub fn iter_mut_with_gaps<'a>(
&'a mut self,
- offset: u64,
- len: u64,
+ offset: Size,
+ len: Size,
) -> impl Iterator<Item = &'a mut T> + 'a
where
T: Clone,
{
- assert!(len > 0);
- // Preparation: Split first and last entry as needed.
- self.split_entry_at(offset);
- self.split_entry_at(offset + len);
+ let offset = offset.bytes();
+ let len = len.bytes();
+
+ if len > 0 {
+ // Preparation: Split first and last entry as needed.
+ self.split_entry_at(offset);
+ self.split_entry_at(offset + len);
+ }
// Now we can provide a mutable iterator
self.map.range_mut(Range::range(offset, len)).filter_map(
move |(&range, data)| {
+ debug_assert!(len > 0);
if range.overlaps(offset, len) {
assert!(
offset <= range.start && offset + len >= range.end,
}
/// Provide a mutable iterator over everything in the given range, with the same side-effects as
- /// iter_mut_with_gaps. Furthermore, if there are gaps between ranges, fill them with the given default.
+ /// iter_mut_with_gaps. Furthermore, if there are gaps between ranges, fill them with the given default
+ /// before yielding them in the iterator.
/// This is also how you insert.
- pub fn iter_mut<'a>(&'a mut self, offset: u64, len: u64) -> impl Iterator<Item = &'a mut T> + 'a
+ pub fn iter_mut<'a>(&'a mut self, offset: Size, len: Size) -> impl Iterator<Item = &'a mut T> + 'a
where
T: Clone + Default,
{
- // Do a first iteration to collect the gaps
- let mut gaps = Vec::new();
- let mut last_end = offset;
- for (range, _) in self.iter_with_range(offset, len) {
- if last_end < range.start {
+ if len.bytes() > 0 {
+ let offset = offset.bytes();
+ let len = len.bytes();
+
+ // Do a first iteration to collect the gaps
+ let mut gaps = Vec::new();
+ let mut last_end = offset;
+ for (range, _) in self.iter_with_range(offset, len) {
+ if last_end < range.start {
+ gaps.push(Range {
+ start: last_end,
+ end: range.start,
+ });
+ }
+ last_end = range.end;
+ }
+ if last_end < offset + len {
gaps.push(Range {
start: last_end,
- end: range.start,
+ end: offset + len,
});
}
- last_end = range.end;
- }
- if last_end < offset + len {
- gaps.push(Range {
- start: last_end,
- end: offset + len,
- });
- }
- // Add default for all gaps
- for gap in gaps {
- let old = self.map.insert(gap, Default::default());
- assert!(old.is_none());
+ // Add default for all gaps
+ for gap in gaps {
+ let old = self.map.insert(gap, Default::default());
+ assert!(old.is_none());
+ }
}
// Now provide mutable iteration
use super::*;
/// Query the map at every offset in the range and collect the results.
- fn to_vec<T: Copy>(map: &RangeMap<T>, offset: u64, len: u64) -> Vec<T> {
+ fn to_vec<T: Copy>(map: &RangeMap<T>, offset: u64, len: u64, default: Option<T>) -> Vec<T> {
(offset..offset + len)
.into_iter()
- .map(|i| *map.iter(i, 1).next().unwrap())
+ .map(|i| map
+ .iter(Size::from_bytes(i), Size::from_bytes(1))
+ .next()
+ .map(|&t| t)
+ .or(default)
+ .unwrap()
+ )
.collect()
}
fn basic_insert() {
let mut map = RangeMap::<i32>::new();
// Insert
- for x in map.iter_mut(10, 1) {
+ for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(1)) {
*x = 42;
}
// Check
- assert_eq!(to_vec(&map, 10, 1), vec![42]);
+ assert_eq!(to_vec(&map, 10, 1, None), vec![42]);
+
+ // Insert with size 0
+ for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(0)) {
+ *x = 19;
+ }
+ for x in map.iter_mut(Size::from_bytes(11), Size::from_bytes(0)) {
+ *x = 19;
+ }
+ assert_eq!(to_vec(&map, 10, 2, Some(-1)), vec![42, -1]);
}
#[test]
fn gaps() {
let mut map = RangeMap::<i32>::new();
- for x in map.iter_mut(11, 1) {
+ for x in map.iter_mut(Size::from_bytes(11), Size::from_bytes(1)) {
*x = 42;
}
- for x in map.iter_mut(15, 1) {
- *x = 42;
+ for x in map.iter_mut(Size::from_bytes(15), Size::from_bytes(1)) {
+ *x = 43;
}
+ assert_eq!(
+ to_vec(&map, 10, 10, Some(-1)),
+ vec![-1, 42, -1, -1, -1, 43, -1, -1, -1, -1]
+ );
// Now request a range that needs three gaps filled
- for x in map.iter_mut(10, 10) {
- if *x != 42 {
+ for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(10)) {
+ if *x < 42 {
*x = 23;
}
}
assert_eq!(
- to_vec(&map, 10, 10),
- vec![23, 42, 23, 23, 23, 42, 23, 23, 23, 23]
+ to_vec(&map, 10, 10, None),
+ vec![23, 42, 23, 23, 23, 43, 23, 23, 23, 23]
);
- assert_eq!(to_vec(&map, 13, 5), vec![23, 23, 42, 23, 23]);
+ assert_eq!(to_vec(&map, 13, 5, None), vec![23, 23, 43, 23, 23]);
}
}
-use super::RangeMap;
+use std::cell::RefCell;
+
+use rustc::ty::{Ty, layout::Size};
+use rustc::mir;
+
+use super::{
+ RangeMap, EvalResult,
+ Pointer,
+};
pub type Timestamp = u64;
Raw,
}
+impl Mut {
+    /// Returns `true` if this is `Mut::Raw`.
+    #[inline(always)]
+    fn is_raw(self) -> bool {
+        match self {
+            Mut::Raw => true,
+            _ => false,
+        }
+    }
+
+    /// Returns `true` if this is `Mut::Uniq(_)`.
+    #[inline(always)]
+    fn is_uniq(self) -> bool {
+        match self {
+            Mut::Uniq(_) => true,
+            _ => false,
+        }
+    }
+}
+
/// Information about any kind of borrow
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum Borrow {
Frz(Timestamp)
}
+impl Borrow {
+    /// Returns `true` for any mutable borrow (`Uniq` or `Raw`), `false` for frozen.
+    #[inline(always)]
+    fn is_mut(self) -> bool {
+        match self {
+            Borrow::Mut(_) => true,
+            _ => false,
+        }
+    }
+
+    /// Returns `true` only for unique mutable borrows (`Mut(Uniq(_))`).
+    #[inline(always)]
+    fn is_uniq(self) -> bool {
+        match self {
+            Borrow::Mut(Mut::Uniq(_)) => true,
+            _ => false,
+        }
+    }
+}
+
/// An item in the borrow stack
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum BorStackItem {
Borrow::Mut(Mut::Raw)
}
}
+
+/// Extra global machine state
+#[derive(Clone, Debug)]
+pub struct State {
+    // Monotonic counter used to hand out fresh timestamps for new borrows.
+    clock: Timestamp
+}
+
+impl State {
+    /// Creates the initial state, with the clock starting at 0.
+    pub fn new() -> State {
+        State { clock: 0 }
+    }
+}
+
+/// Extra per-location state
+#[derive(Clone, Debug)]
+struct Stack {
+    borrows: Vec<BorStackItem>, // used as a stack
+    frozen_since: Option<Timestamp>, // `Some(t)` means this location has been frozen since timestamp `t`
+}
+
+impl Default for Stack {
+    /// A fresh location starts with an empty borrow stack and is not frozen.
+    fn default() -> Self {
+        Stack {
+            borrows: Vec::new(),
+            frozen_since: None,
+        }
+    }
+}
+
+/// Extra per-allocation state
+#[derive(Clone, Debug, Default)]
+pub struct Stacks {
+    // `RefCell` provides interior mutability: the stacks get updated even when the
+    // allocation is only reachable through a shared reference (e.g. read-only memory).
+    stacks: RefCell<RangeMap<Stack>>,
+}
+
+/// Core operations
+impl<'tcx> Stack {
+    /// Tests whether an access tagged `bor` is currently permitted on this stack
+    /// without changing it.
+    fn check(&self, bor: Borrow) -> bool {
+        match bor {
+            Borrow::Frz(acc_t) =>
+                // Must be frozen at least as long as the `acc_t` says.
+                self.frozen_since.map_or(false, |loc_t| loc_t <= acc_t),
+            Borrow::Mut(acc_m) =>
+                // Raw pointers are fine with frozen locations. This is important because &Cell is raw!
+                if self.frozen_since.is_some() {
+                    acc_m.is_raw()
+                } else {
+                    // Not frozen: the access is only allowed if its borrow is the topmost item.
+                    self.borrows.last().map_or(false, |&loc_itm| loc_itm == BorStackItem::Mut(acc_m))
+                }
+        }
+    }
+
+    /// Reactivate `bor` for this stack. If `force_mut` is set, we want to aggressively
+    /// unfreeze this location (because we are about to push a `Uniq`).
+    fn reactivate(&mut self, bor: Borrow, force_mut: bool) -> EvalResult<'tcx> {
+        assert!(!force_mut || bor.is_mut()); // if `force_mut` is set, this must be a mutable borrow
+        // Do NOT change anything if `bor` is already active -- in particular, if
+        // it is a `Mut(Raw)` and we are frozen.
+        if !force_mut && self.check(bor) {
+            return Ok(());
+        }
+
+        let acc_m = match bor {
+            Borrow::Frz(_) => return err!(MachineError(format!("Location should be frozen but it is not"))),
+            Borrow::Mut(acc_m) => acc_m,
+        };
+        // We definitely have to unfreeze this, even if we use the topmost item.
+        self.frozen_since = None;
+        // Pop until we see the one we are looking for.
+        while let Some(&itm) = self.borrows.last() {
+            match itm {
+                BorStackItem::FnBarrier(_) => {
+                    // Barriers may never be popped by a reactivation.
+                    return err!(MachineError(format!("Trying to reactivate a borrow that lives behind a barrier")));
+                }
+                BorStackItem::Mut(loc_m) => {
+                    if loc_m == acc_m { return Ok(()); }
+                    self.borrows.pop();
+                }
+            }
+        }
+        // Nothing to be found. Simulate a "virtual raw" element at the bottom of the stack.
+        if acc_m.is_raw() {
+            Ok(())
+        } else {
+            err!(MachineError(format!("Borrow-to-reactivate does not exist on the stack")))
+        }
+    }
+
+    /// Pushes a new borrow on this stack: freezing records (or keeps) the freeze
+    /// timestamp, a mutable borrow is pushed onto the stack -- but only if the
+    /// location is not frozen.
+    fn initiate(&mut self, bor: Borrow) -> EvalResult<'tcx> {
+        match bor {
+            Borrow::Frz(t) => {
+                match self.frozen_since {
+                    None => self.frozen_since = Some(t),
+                    // Already frozen earlier; the earlier (smaller) timestamp stays.
+                    Some(since) => assert!(since <= t),
+                }
+            }
+            Borrow::Mut(m) => {
+                match self.frozen_since {
+                    None => self.borrows.push(BorStackItem::Mut(m)),
+                    Some(_) =>
+                        // FIXME: Do we want an exception for raw borrows?
+                        return err!(MachineError(format!("Trying to mutate frozen location")))
+                }
+            }
+        }
+        Ok(())
+    }
+}
+
+impl State {
+    /// Advances the global clock and returns the fresh timestamp.
+    fn increment_clock(&mut self) -> Timestamp {
+        self.clock += 1;
+        self.clock
+    }
+}
+
+/// Machine hooks
+pub trait EvalContextExt<'tcx> {
+    /// Computes the tag for a new reference (of the given kind) to the memory at `ptr`,
+    /// covering `size` bytes of a pointee of type `pointee_ty`.
+    fn tag_reference(
+        &mut self,
+        ptr: Pointer<Borrow>,
+        pointee_ty: Ty<'tcx>,
+        size: Size,
+        borrow_kind: mir::BorrowKind,
+    ) -> EvalResult<'tcx, Borrow>;
+
+    /// Computes the tag to use when dereferencing `ptr`, whose pointer type is `ptr_ty`.
+    fn tag_dereference(
+        &self,
+        ptr: Pointer<Borrow>,
+        ptr_ty: Ty<'tcx>,
+    ) -> EvalResult<'tcx, Borrow>;
+}
+
+impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> {
+    fn tag_reference(
+        &mut self,
+        ptr: Pointer<Borrow>,
+        pointee_ty: Ty<'tcx>,
+        size: Size,
+        borrow_kind: mir::BorrowKind,
+    ) -> EvalResult<'tcx, Borrow> {
+        let old_bor = ptr.tag;
+        // Every new reference gets a fresh timestamp.
+        let time = self.machine.stacked_borrows.increment_clock();
+        // FIXME This does not do enough checking when only part of the data lacks
+        // interior mutability.
+        let new_bor = match borrow_kind {
+            mir::BorrowKind::Mut { .. } => Borrow::Mut(Mut::Uniq(time)),
+            _ =>
+                if self.type_is_freeze(pointee_ty) {
+                    Borrow::Frz(time)
+                } else {
+                    // Shared reference to non-freeze (interior-mutable) data:
+                    // tagged like a raw pointer.
+                    Borrow::Mut(Mut::Raw)
+                }
+        };
+        trace!("tag_reference: Creating new tag for {:?} (pointee {}, size {}): {:?}", ptr, pointee_ty, size.bytes(), new_bor);
+
+        // Make sure this reference is not dangling or so
+        self.memory.check_bounds(ptr, size, false)?;
+
+        // Update the stacks. We cannot use `get_mut` because this might be immutable
+        // memory.
+        let alloc = self.memory.get(ptr.alloc_id).expect("We checked that the ptr is fine!");
+        let mut stacks = alloc.extra.stacks.borrow_mut();
+        for stack in stacks.iter_mut(ptr.offset, size) {
+            if stack.check(new_bor) {
+                // The new borrow is already active! This can happen when creating multiple
+                // shared references from the same mutable reference. Do nothing.
+            } else {
+                // FIXME: The blog post says we should `reset` if this is a local.
+                stack.reactivate(old_bor, /*force_mut*/new_bor.is_uniq())?;
+                stack.initiate(new_bor)?;
+            }
+        }
+
+        Ok(new_bor)
+    }
+
+    fn tag_dereference(
+        &self,
+        ptr: Pointer<Borrow>,
+        ptr_ty: Ty<'tcx>,
+    ) -> EvalResult<'tcx, Borrow> {
+        // If this is a raw ptr, forget about the tag.
+        Ok(if ptr_ty.is_unsafe_ptr() {
+            trace!("tag_dereference: Erasing tag for {:?} ({})", ptr, ptr_ty);
+            Borrow::Mut(Mut::Raw)
+        } else {
+            // FIXME: Do we want to adjust the tag if it does not match the type?
+            ptr.tag
+        })
+    }
+}
use rustc::{ty, ty::layout::HasDataLayout, mir};
use super::{
- EvalResult, EvalErrorKind, StackPopCleanup, EvalContext, Evaluator,
+ EvalResult, EvalErrorKind, StackPopCleanup,
MPlaceTy, Scalar, Borrow,
};
pub type TlsKey = u128;
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug)]
pub struct TlsEntry<'tcx> {
pub(crate) data: Scalar<Borrow>, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread.
pub(crate) dtor: Option<ty::Instance<'tcx>>,
}
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug)]
pub struct TlsData<'tcx> {
/// The Key to use for the next thread-local allocation.
pub(crate) next_key: TlsKey,
}
}
-impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, Evaluator<'tcx>> {
+impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> {
fn run_tls_dtors(&mut self) -> EvalResult<'tcx> {
let mut dtor = self.machine.tls.fetch_tls_dtor(None, *self.tcx);
// FIXME: replace loop by some structure that works with stepping