-use std::cell::RefCell;
+use std::cell::{Cell, RefCell};
-use rustc::mir::interpret::AllocId;
+use rustc::mir::interpret::{AllocId, Pointer, InterpResult};
+use rustc_mir::interpret::Memory;
+use rustc_target::abi::Size;
-pub type MemoryState = RefCell<GlobalState>;
+use crate::stacked_borrows::Tag;
+use crate::Evaluator;
+
+pub type MemoryExtra = RefCell<GlobalState>;
+
+#[derive(Clone, Debug, Default)]
+pub struct AllocExtra {
+ base_addr: Cell<Option<u64>>
+}
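
// Illustrative sketch (toy types, not the real implementation): the
// `Cell<Option<u64>>` lets a base address be assigned lazily, through a
// shared reference, the first time the allocation is cast to an integer,
// and reused on every later cast.
struct LazyBase { base_addr: Cell<Option<u64>> }

fn get_or_assign(lazy: &LazyBase, next_base_addr: &mut u64, size: u64) -> u64 {
    match lazy.base_addr.get() {
        Some(addr) => addr, // already assigned by an earlier cast
        None => {
            let addr = *next_base_addr; // hand out a fresh address
            *next_base_addr += size;    // never reuse it for another block
            lazy.base_addr.set(Some(addr));
            addr
        }
    }
}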
#[derive(Clone, Debug)]
pub struct GlobalState {
- /// This field is used as a map between the address of each allocation and its `AllocId`
+ /// This is used as a map between the address of each allocation and its `AllocId`.
+ /// It is always sorted by address.
pub int_to_ptr_map: Vec<(u64, AllocId)>,
+ /// This is used as a memory address when a new pointer is cast to an integer. It
+ /// is always larger than any address that was previously handed out to a block.
pub next_base_addr: u64,
}
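
// Illustrative sketch (toy types; `u32` stands in for `AllocId`): the sorted
// invariant holds by construction, because `next_base_addr` only ever grows,
// so every new entry's address is strictly larger than all existing ones.
// That is what makes the `binary_search_by_key` lookup below valid.
fn push_monotonic(map: &mut Vec<(u64, u32)>, base_addr: u64, toy_alloc_id: u32) {
    debug_assert!(map.last().map_or(true, |&(last, _)| last < base_addr));
    map.push((base_addr, toy_alloc_id)); // appending in increasing order keeps `map` sorted
}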
+
+impl<'mir, 'tcx> GlobalState {
+ pub fn int_to_ptr(
+ int: u64,
+ memory: &Memory<'mir, 'tcx, Evaluator<'tcx>>,
+ ) -> InterpResult<'tcx, Pointer<Tag>> {
+ let global_state = memory.extra.intptrcast.borrow();
+
+ match global_state.int_to_ptr_map.binary_search_by_key(&int, |(addr, _)| *addr) {
+ Ok(pos) => {
+ let (_, alloc_id) = global_state.int_to_ptr_map[pos];
+ // `int` is the starting address of an allocation, so the offset is
+ // zero. The pointer is untagged because it was created from a cast.
+ Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(0), Tag::Untagged))
+ },
+ Err(0) => err!(DanglingPointerDeref),
+ Err(pos) => {
+ // This is the largest of the addresses smaller than `int`,
+ // i.e. the greatest lower bound (glb).
+ let (glb, alloc_id) = global_state.int_to_ptr_map[pos - 1];
+ // This never overflows because `int >= glb`.
+ let offset = int - glb;
+ // The cast is valid only if the offset stays within the allocation
+ // (one past the end is allowed, as for any Rust pointer)
+ if offset <= memory.get(alloc_id)?.bytes.len() as u64 {
+ // This pointer is untagged because it was created from a cast
+ Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(offset), Tag::Untagged))
+ } else {
+ err!(DanglingPointerDeref)
+ }
+ }
+ }
+ }
+
+ pub fn ptr_to_int(
+ ptr: Pointer<Tag>,
+ memory: &Memory<'mir, 'tcx, Evaluator<'tcx>>,
+ ) -> InterpResult<'tcx, u64> {
+ let mut global_state = memory.extra.intptrcast.borrow_mut();
+
+ let alloc = memory.get(ptr.alloc_id)?;
+
+ let base_addr = match alloc.extra.intptrcast.base_addr.get() {
+ Some(base_addr) => base_addr,
+ None => {
+ let base_addr = global_state.next_base_addr;
+ global_state.next_base_addr += alloc.bytes.len() as u64;
+
+ alloc.extra.intptrcast.base_addr.set(Some(base_addr));
+
+ let elem = (base_addr, ptr.alloc_id);
+
+ // Since `next_base_addr` increases with each new allocation, pushing the
+ // corresponding tuple keeps `int_to_ptr_map` sorted.
+ global_state.int_to_ptr_map.push(elem);
+
+ base_addr
+ }
+ };
+
+ Ok(base_addr + ptr.offset.bytes())
+ }
+}
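
// A self-contained toy model of the lookup above (illustrative; `u32` stands
// in for `AllocId`). Entries are `(base_addr, alloc_id, size)`, kept sorted
// by base address exactly like `int_to_ptr_map`:
fn toy_int_to_ptr(map: &[(u64, u32, u64)], int: u64) -> Option<(u32, u64)> {
    match map.binary_search_by_key(&int, |&(addr, _, _)| addr) {
        Ok(pos) => Some((map[pos].1, 0)), // exact hit: start of an allocation
        Err(0) => None,                   // below every allocation: dangling
        Err(pos) => {
            let (glb, alloc_id, size) = map[pos - 1];
            let offset = int - glb;
            // one past the end is still a valid pointer
            if offset <= size { Some((alloc_id, offset)) } else { None }
        }
    }
}

fn toy_demo() {
    let map = [(0x100, 1, 16), (0x200, 2, 8)];
    assert_eq!(toy_int_to_ptr(&map, 0x105), Some((1, 5)));  // inside allocation 1
    assert_eq!(toy_int_to_ptr(&map, 0x110), Some((1, 16))); // one past the end
    assert_eq!(toy_int_to_ptr(&map, 0x180), None);          // dangling
    assert_eq!(toy_int_to_ptr(&map, 0x200), Some((2, 0)));  // start of allocation 2
}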
use std::collections::HashMap;
use std::borrow::Cow;
-use std::cell::Cell;
use std::rc::Rc;
use rand::rngs::StdRng;
type MemoryKinds = MiriMemoryKind;
type FrameExtra = stacked_borrows::CallId;
- type MemoryExtra = memory::MemoryState;
+ type MemoryExtra = memory::MemoryExtra;
type AllocExtra = memory::AllocExtra;
type PointerTag = Tag;
) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag) {
let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
let alloc = alloc.into_owned();
- let (extra, base_tag) = Stacks::new_allocation(
+ let (stacks, base_tag) = Stacks::new_allocation(
id,
Size::from_bytes(alloc.bytes.len() as u64),
- Rc::clone(&memory.extra.stacked),
+ Rc::clone(&memory.extra.stacked_borrows),
kind,
);
if kind != MiriMemoryKind::Static.into() {
assert!(alloc.relocations.is_empty(), "Only statics can come initialized with inner pointers");
// Now we can rely on the inner pointers being static, too.
}
- let mut memory_extra = memory.extra.stacked.borrow_mut();
+ let mut memory_extra = memory.extra.stacked_borrows.borrow_mut();
let alloc: Allocation<Tag, Self::AllocExtra> = Allocation {
bytes: alloc.bytes,
relocations: Relocations::from_presorted(
align: alloc.align,
mutability: alloc.mutability,
extra: AllocExtra {
- stacks: extra,
- base_addr: Cell::new(None),
+ stacked_borrows: stacks,
+ intptrcast: Default::default(),
},
};
(Cow::Owned(alloc), base_tag)
id: AllocId,
memory: &Memory<'mir, 'tcx, Self>,
) -> Self::PointerTag {
- memory.extra.stacked.borrow_mut().static_base_ptr(id)
+ memory.extra.stacked_borrows.borrow_mut().static_base_ptr(id)
}
#[inline(always)]
fn stack_push(
ecx: &mut InterpretCx<'mir, 'tcx, Self>,
) -> InterpResult<'tcx, stacked_borrows::CallId> {
- Ok(ecx.memory().extra.stacked.borrow_mut().new_call())
+ Ok(ecx.memory().extra.stacked_borrows.borrow_mut().new_call())
}
#[inline(always)]
ecx: &mut InterpretCx<'mir, 'tcx, Self>,
extra: stacked_borrows::CallId,
) -> InterpResult<'tcx> {
- Ok(ecx.memory().extra.stacked.borrow_mut().end_call(extra))
+ Ok(ecx.memory().extra.stacked_borrows.borrow_mut().end_call(extra))
}
fn int_to_ptr(
memory: &Memory<'mir, 'tcx, Self>,
) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
if int == 0 {
- return err!(InvalidNullPointerUsage);
- }
-
- if memory.extra.rng.is_none() {
- return err!(ReadBytesAsPointer);
- }
-
- let extra = memory.extra.intptrcast.borrow();
-
- match extra.int_to_ptr_map.binary_search_by_key(&int, |(int, _)| *int) {
- Ok(pos) => {
- let (_, alloc_id) = extra.int_to_ptr_map[pos];
- Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(0), Tag::Untagged))
- }
- Err(pos) => {
- if pos > 0 {
- let (glb, alloc_id) = extra.int_to_ptr_map[pos - 1];
- let offset = int - glb;
- if offset <= memory.get(alloc_id)?.bytes.len() as u64 {
- Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(offset), Tag::Untagged))
- } else {
- return err!(DanglingPointerDeref);
- }
- } else {
- return err!(DanglingPointerDeref);
- }
- }
+ err!(InvalidNullPointerUsage)
+ } else if memory.extra.rng.is_none() {
+ err!(ReadBytesAsPointer)
+ } else {
+ intptrcast::GlobalState::int_to_ptr(int, memory)
}
}
memory: &Memory<'mir, 'tcx, Self>,
) -> InterpResult<'tcx, u64> {
if memory.extra.rng.is_none() {
- return err!(ReadPointerAsBytes);
+ err!(ReadPointerAsBytes)
+ } else {
+ intptrcast::GlobalState::ptr_to_int(ptr, memory)
}
-
- let mut extra = memory.extra.intptrcast.borrow_mut();
-
- let alloc = memory.get(ptr.alloc_id)?;
-
- let base_addr = match alloc.extra.base_addr.get() {
- Some(base_addr) => base_addr,
- None => {
- let base_addr = extra.next_base_addr;
- extra.next_base_addr += alloc.bytes.len() as u64;
-
- alloc.extra.base_addr.set(Some(base_addr));
-
- let elem = (base_addr, ptr.alloc_id);
-
- // Given that `next_base_addr` increases in each allocation, pushing the
- // corresponding tuple keeps `int_to_ptr_map` sorted
- extra.int_to_ptr_map.push(elem);
-
- base_addr
- }
- };
-
- Ok(base_addr + ptr.offset.bytes())
}
}
-use std::cell::Cell;
use rand::rngs::StdRng;
use rustc_mir::interpret::{Pointer, Allocation, AllocationExtra, InterpResult};
use rustc_target::abi::Size;
use crate::{stacked_borrows, intptrcast};
-use crate::stacked_borrows::{Tag, AccessKind};
+use crate::stacked_borrows::Tag;
#[derive(Default, Clone, Debug)]
-pub struct MemoryState {
- pub stacked: stacked_borrows::MemoryState,
- pub intptrcast: intptrcast::MemoryState,
- /// The random number generator to use if Miri
- /// is running in non-deterministic mode
+pub struct MemoryExtra {
+ pub stacked_borrows: stacked_borrows::MemoryExtra,
+ pub intptrcast: intptrcast::MemoryExtra,
+ /// The random number generator to use if Miri is running in non-deterministic mode. Its
+ /// presence also enables intptrcast.
pub(crate) rng: Option<StdRng>
}
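
// A sketch of how the flag is meant to be set (illustrative; `seed` is a
// hypothetical parameter, and this would have to live in this module since
// `rng` is `pub(crate)`): seeding the RNG both drives non-deterministic
// choices and opts into intptrcast, since both guards check `rng.is_some()`.
// `seed_from_u64` is assumed from `rand::SeedableRng`.
fn memory_extra_with_seed(seed: u64) -> MemoryExtra {
    MemoryExtra {
        stacked_borrows: Default::default(),
        intptrcast: Default::default(),
        rng: Some(rand::SeedableRng::seed_from_u64(seed)),
    }
}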
-#[derive(Debug, Clone,)]
+#[derive(Debug, Clone)]
pub struct AllocExtra {
- pub stacks: stacked_borrows::Stacks,
- pub base_addr: Cell<Option<u64>>,
+ pub stacked_borrows: stacked_borrows::AllocExtra,
+ pub intptrcast: intptrcast::AllocExtra,
}
impl AllocationExtra<Tag> for AllocExtra {
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
- trace!("read access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
- alloc.extra.stacks.for_each(ptr, size, |stack, global| {
- stack.access(AccessKind::Read, ptr.tag, global)?;
- Ok(())
- })
+ alloc.extra.stacked_borrows.memory_read(ptr, size)
}
#[inline(always)]
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
- trace!("write access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
- alloc.extra.stacks.for_each(ptr, size, |stack, global| {
- stack.access(AccessKind::Write, ptr.tag, global)?;
- Ok(())
- })
+ alloc.extra.stacked_borrows.memory_written(ptr, size)
}
#[inline(always)]
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
- trace!("deallocation with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
- alloc.extra.stacks.for_each(ptr, size, |stack, global| {
- stack.dealloc(ptr.tag, global)
- })
+ alloc.extra.stacked_borrows.memory_deallocated(ptr, size)
}
}
trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);
+ // If intptrcast is enabled and the operation is not `Offset`, we can force
+ // both operands to integer addresses and then dispatch to rustc's binary
+ // operation method.
if self.memory().extra.rng.is_some() && bin_op != Offset {
let l_bits = self.force_bits(left.imm.to_scalar()?, left.layout.size)?;
let r_bits = self.force_bits(right.imm.to_scalar()?, right.layout.size)?;
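
// A toy model of this dispatch (illustrative, not rustc's types): once both
// operands are forced to integer addresses, any comparison can be evaluated
// as plain integer arithmetic.
enum ToyScalar { Ptr { base_addr: u64, offset: u64 }, Bits(u64) }

fn toy_force_bits(s: ToyScalar) -> u64 {
    match s {
        // the intptrcast address of the pointer
        ToyScalar::Ptr { base_addr, offset } => base_addr + offset,
        ToyScalar::Bits(bits) => bits,
    }
}

fn toy_ptr_lt(l: ToyScalar, r: ToyScalar) -> bool {
    toy_force_bits(l) < toy_force_bits(r) // dispatch as an integer comparison
}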
pub type PtrId = NonZeroU64;
pub type CallId = NonZeroU64;
+pub type AllocExtra = Stacks;
/// Tracking pointer provenance
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
// Even reading memory can have effects on the stack, so we need a `RefCell` here.
stacks: RefCell<RangeMap<Stack>>,
// Pointer to global state
- global: MemoryState,
+ global: MemoryExtra,
}
/// Extra global state, available to the memory access hooks.
active_calls: HashSet<CallId>,
}
/// Memory extra state gives us interior mutable access to the global state.
-pub type MemoryState = Rc<RefCell<GlobalState>>;
+pub type MemoryExtra = Rc<RefCell<GlobalState>>;
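
// A minimal sketch of the sharing pattern (illustrative): the memory and
// every allocation hold clones of one `Rc`, so each of them can mutate the
// single shared `GlobalState` through a shared reference.
fn toy_sharing() {
    let global = Rc::new(RefCell::new(0u64)); // stands in for `GlobalState`
    let handle = Rc::clone(&global);          // e.g. the clone stored per allocation
    *handle.borrow_mut() += 1;                // interior-mutable update
    assert_eq!(*global.borrow(), 1);          // visible through every handle
}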
/// Indicates which kind of access is being performed.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
/// Test if a memory `access` using pointer tagged `tag` is granted.
/// If yes, return the index of the item that granted it.
- pub(crate) fn access(
+ fn access(
&mut self,
access: AccessKind,
tag: Tag,
/// Deallocate a location: Like a write access, but also there must be no
/// active protectors at all because we will remove all items.
- pub(crate) fn dealloc(
+ fn dealloc(
&mut self,
tag: Tag,
global: &GlobalState,
size: Size,
perm: Permission,
tag: Tag,
- extra: MemoryState,
+ extra: MemoryExtra,
) -> Self {
let item = Item { perm, tag, protector: None };
let stack = Stack {
}
/// Call `f` on every stack in the range.
- pub(crate) fn for_each(
+ fn for_each(
&self,
ptr: Pointer<Tag>,
size: Size,
pub fn new_allocation(
id: AllocId,
size: Size,
- extra: MemoryState,
+ extra: MemoryExtra,
kind: MemoryKind<MiriMemoryKind>,
) -> (Self, Tag) {
let (tag, perm) = match kind {
let stack = Stacks::new(size, perm, tag, extra);
(stack, tag)
}
+
+ #[inline(always)]
+ pub fn memory_read<'tcx>(
+ &self,
+ ptr: Pointer<Tag>,
+ size: Size,
+ ) -> InterpResult<'tcx> {
+ trace!("read access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
+ self.for_each(ptr, size, |stack, global| {
+ stack.access(AccessKind::Read, ptr.tag, global)?;
+ Ok(())
+ })
+ }
+
+ #[inline(always)]
+ pub fn memory_written<'tcx>(
+ &mut self,
+ ptr: Pointer<Tag>,
+ size: Size,
+ ) -> InterpResult<'tcx> {
+ trace!("write access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
+ self.for_each(ptr, size, |stack, global| {
+ stack.access(AccessKind::Write, ptr.tag, global)?;
+ Ok(())
+ })
+ }
+
+ #[inline(always)]
+ pub fn memory_deallocated<'tcx>(
+ &mut self,
+ ptr: Pointer<Tag>,
+ size: Size,
+ ) -> InterpResult<'tcx> {
+ trace!("deallocation with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
+ self.for_each(ptr, size, |stack, global| {
+ stack.dealloc(ptr.tag, global)
+ })
+ }
}
/// Retagging/reborrowing. There is some policy in here, such as which permissions
// We are only ever `SharedReadOnly` inside the frozen bits.
let perm = if frozen { Permission::SharedReadOnly } else { Permission::SharedReadWrite };
let item = Item { perm, tag: new_tag, protector };
- alloc.extra.stacks.for_each(cur_ptr, size, |stack, global| {
+ alloc.extra.stacked_borrows.for_each(cur_ptr, size, |stack, global| {
stack.grant(cur_ptr.tag, item, global)
})
});
}
};
let item = Item { perm, tag: new_tag, protector };
- alloc.extra.stacks.for_each(ptr, size, |stack, global| {
+ alloc.extra.stacked_borrows.for_each(ptr, size, |stack, global| {
stack.grant(ptr.tag, item, global)
})
}
// Compute new borrow.
let new_tag = match kind {
RefKind::Raw { .. } => Tag::Untagged,
- _ => Tag::Tagged(this.memory().extra.stacked.borrow_mut().new_ptr()),
+ _ => Tag::Tagged(this.memory().extra.stacked_borrows.borrow_mut().new_ptr()),
};
// Reborrow.
ty::RawPtr(tym) if kind == RetagKind::Raw =>
Some((RefKind::Raw { mutable: tym.mutbl == MutMutable }, false)),
// Boxes do not get a protector: protectors reflect that references outlive the call
- // they were passed in to; that's just not the case for boxes.
+ // they were passed in to; that's just not the case for boxes.
ty::Adt(..) if ty.is_box() => Some((RefKind::Unique { two_phase: false }, false)),
_ => None,
}