}
let ptr = dest.to_ptr()?;
- let data = match &mut this.machine.rng {
+ let data = match &mut this.memory_mut().extra.rng {
Some(rng) => {
let mut data = vec![0; len];
rng.fill_bytes(&mut data);
--- /dev/null
+use std::cell::{Cell, RefCell};
+
+use rustc::mir::interpret::{AllocId, Pointer, InterpResult};
+use rustc_mir::interpret::Memory;
+use rustc_target::abi::Size;
+
+use crate::stacked_borrows::Tag;
+use crate::Evaluator;
+
+/// Memory-wide intptrcast state, behind a `RefCell` so the memory hooks
+/// (which only get `&Memory`) can still mutate it.
+pub type MemoryExtra = RefCell<GlobalState>;
+
+/// Per-allocation intptrcast state.
+#[derive(Clone, Debug, Default)]
+pub struct AllocExtra {
+ // Integer base address of this allocation; `None` until the allocation is
+ // first cast to an integer. `Cell` because it is assigned through `&self`.
+ base_addr: Cell<Option<u64>>
+}
+
+/// Global state for mapping between integer addresses and allocations when a
+/// pointer is cast to an integer (or an integer back to a pointer).
+#[derive(Clone, Debug)]
+pub struct GlobalState {
+ /// This is used as a map between the address of each allocation and its `AllocId`.
+ /// It is always sorted by address, so it can be binary-searched.
+ pub int_to_ptr_map: Vec<(u64, AllocId)>,
+ /// This is used as a memory address when a new pointer is casted to an integer. It
+ /// is always larger than any address that was previously made part of a block.
+ pub next_base_addr: u64,
+}
+
+impl Default for GlobalState {
+ // FIXME: Query the page size in the future
+ /// Starts handing out addresses at 2^16, so low addresses (including the
+ /// null page) are never assigned to an allocation.
+ fn default() -> Self {
+ GlobalState {
+ int_to_ptr_map: Vec::default(),
+ next_base_addr: 2u64.pow(16)
+ }
+ }
+}
+
+impl<'mir, 'tcx> GlobalState {
+ /// Turns an integer into a pointer by locating the allocation whose address
+ /// range contains `int`. Errors with `DanglingPointerDeref` when `int` does
+ /// not fall inside (or one-past-the-end of) any cast allocation.
+ pub fn int_to_ptr(
+ int: u64,
+ memory: &Memory<'mir, 'tcx, Evaluator<'tcx>>,
+ ) -> InterpResult<'tcx, Pointer<Tag>> {
+ let global_state = memory.extra.intptrcast.borrow();
+
+ match global_state.int_to_ptr_map.binary_search_by_key(&int, |(addr, _)| *addr) {
+ Ok(pos) => {
+ let (_, alloc_id) = global_state.int_to_ptr_map[pos];
+ // `int` is equal to the starting address for an allocation, the offset should be
+ // zero. The pointer is untagged because it was created from a cast
+ Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(0), Tag::Untagged))
+ },
+ // `int` is smaller than every recorded base address, so it cannot
+ // point into any allocation.
+ Err(0) => err!(DanglingPointerDeref),
+ Err(pos) => {
+ // This is the largest of the addresses smaller than `int`,
+ // i.e. the greatest lower bound (glb)
+ let (glb, alloc_id) = global_state.int_to_ptr_map[pos - 1];
+ // This never overflows because `int >= glb`
+ let offset = int - glb;
+ // If the offset exceeds the size of the allocation, this access is illegal
+ if offset <= memory.get(alloc_id)?.bytes.len() as u64 {
+ // This pointer is untagged because it was created from a cast
+ Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(offset), Tag::Untagged))
+ } else {
+ err!(DanglingPointerDeref)
+ }
+ }
+ }
+ }
+
+ /// Turns a pointer into its integer address, lazily assigning a base
+ /// address to the pointer's allocation on its first cast and recording it
+ /// in `int_to_ptr_map` for the reverse direction.
+ pub fn ptr_to_int(
+ ptr: Pointer<Tag>,
+ memory: &Memory<'mir, 'tcx, Evaluator<'tcx>>,
+ ) -> InterpResult<'tcx, u64> {
+ let mut global_state = memory.extra.intptrcast.borrow_mut();
+
+ let alloc = memory.get(ptr.alloc_id)?;
+
+ let base_addr = match alloc.extra.intptrcast.base_addr.get() {
+ Some(base_addr) => base_addr,
+ None => {
+ // This allocation does not have a base address yet, pick one.
+ let base_addr = Self::align_addr(global_state.next_base_addr, alloc.align.bytes());
+ global_state.next_base_addr = base_addr + alloc.bytes.len() as u64;
+ alloc.extra.intptrcast.base_addr.set(Some(base_addr));
+ // Given that `next_base_addr` increases in each allocation, pushing the
+ // corresponding tuple keeps `int_to_ptr_map` sorted
+ global_state.int_to_ptr_map.push((base_addr, ptr.alloc_id));
+
+ base_addr
+ }
+ };
+
+ // The allocation's base address plus the pointer's offset within it.
+ Ok(base_addr + ptr.offset.bytes())
+ }
+
+ /// Shifts `addr` to make it aligned with `align` by rounding `addr` to the smallest multiple
+ /// of `align` that is strictly larger than `addr`. The "strictly larger" is deliberate: even
+ /// an already-aligned (or zero-sized) allocation advances the address, so every allocation
+ /// gets a distinct base address.
+ fn align_addr(addr: u64, align: u64) -> u64 {
+ addr + align - addr % align
+ }
+}
mod range_map;
mod mono_hash_map;
mod stacked_borrows;
+mod intptrcast;
+mod memory;
use std::collections::HashMap;
use std::borrow::Cow;
pub use crate::helpers::{EvalContextExt as HelpersEvalContextExt};
use crate::mono_hash_map::MonoHashMap;
pub use crate::stacked_borrows::{EvalContextExt as StackedBorEvalContextExt};
+use crate::memory::AllocExtra;
// Used by priroda.
pub use crate::stacked_borrows::{Tag, Permission, Stack, Stacks, Item};
let mut ecx = InterpretCx::new(
tcx.at(syntax::source_map::DUMMY_SP),
ty::ParamEnv::reveal_all(),
- Evaluator::new(config.validate, config.seed),
+ Evaluator::new(config.validate),
);
+ // FIXME: InterpretCx::new should take an initial MemoryExtra
+ ecx.memory_mut().extra.rng = config.seed.map(StdRng::seed_from_u64);
+
let main_instance = ty::Instance::mono(ecx.tcx.tcx, main_id);
let main_mir = ecx.load_mir(main_instance.def)?;
cur_ptr = cur_ptr.offset(char_size, tcx)?;
}
}
-
+
assert!(args.next().is_none(), "start lang item has more arguments than expected");
Ok(ecx)
/// Whether to enforce the validity invariant.
pub(crate) validate: bool,
-
- /// The random number generator to use if Miri
- /// is running in non-deterministic mode
- pub(crate) rng: Option<StdRng>
}
impl<'tcx> Evaluator<'tcx> {
- fn new(validate: bool, seed: Option<u64>) -> Self {
+ fn new(validate: bool) -> Self {
Evaluator {
env_vars: HashMap::default(),
argc: None,
last_error: 0,
tls: TlsData::default(),
validate,
- rng: seed.map(|s| StdRng::seed_from_u64(s))
}
}
}
type MemoryKinds = MiriMemoryKind;
type FrameExtra = stacked_borrows::CallId;
- type MemoryExtra = stacked_borrows::MemoryState;
- type AllocExtra = stacked_borrows::Stacks;
+ type MemoryExtra = memory::MemoryExtra;
+ type AllocExtra = memory::AllocExtra;
type PointerTag = Tag;
type MemoryMap = MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag) {
let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
let alloc = alloc.into_owned();
- let (extra, base_tag) = Stacks::new_allocation(
+ let (stacks, base_tag) = Stacks::new_allocation(
id,
Size::from_bytes(alloc.bytes.len() as u64),
- Rc::clone(&memory.extra),
+ Rc::clone(&memory.extra.stacked_borrows),
kind,
);
if kind != MiriMemoryKind::Static.into() {
assert!(alloc.relocations.is_empty(), "Only statics can come initialized with inner pointers");
// Now we can rely on the inner pointers being static, too.
}
- let mut memory_extra = memory.extra.borrow_mut();
+ let mut memory_extra = memory.extra.stacked_borrows.borrow_mut();
let alloc: Allocation<Tag, Self::AllocExtra> = Allocation {
bytes: alloc.bytes,
relocations: Relocations::from_presorted(
undef_mask: alloc.undef_mask,
align: alloc.align,
mutability: alloc.mutability,
- extra,
+ extra: AllocExtra {
+ stacked_borrows: stacks,
+ intptrcast: Default::default(),
+ },
};
(Cow::Owned(alloc), base_tag)
}
id: AllocId,
memory: &Memory<'mir, 'tcx, Self>,
) -> Self::PointerTag {
- memory.extra.borrow_mut().static_base_ptr(id)
+ memory.extra.stacked_borrows.borrow_mut().static_base_ptr(id)
}
#[inline(always)]
fn stack_push(
ecx: &mut InterpretCx<'mir, 'tcx, Self>,
) -> InterpResult<'tcx, stacked_borrows::CallId> {
- Ok(ecx.memory().extra.borrow_mut().new_call())
+ Ok(ecx.memory().extra.stacked_borrows.borrow_mut().new_call())
}
#[inline(always)]
ecx: &mut InterpretCx<'mir, 'tcx, Self>,
extra: stacked_borrows::CallId,
) -> InterpResult<'tcx> {
- Ok(ecx.memory().extra.borrow_mut().end_call(extra))
+ Ok(ecx.memory().extra.stacked_borrows.borrow_mut().end_call(extra))
+ }
+
+ /// Machine hook: casts an integer to a pointer. The null address is always
+ /// rejected; any other cast is only supported when an RNG/seed was
+ /// configured (which is what enables intptrcast mode).
+ fn int_to_ptr(
+ int: u64,
+ memory: &Memory<'mir, 'tcx, Self>,
+ ) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
+ if int == 0 {
+ err!(InvalidNullPointerUsage)
+ } else if memory.extra.rng.is_none() {
+ // No seed means no intptrcast support: reject the cast.
+ err!(ReadBytesAsPointer)
+ } else {
+ intptrcast::GlobalState::int_to_ptr(int, memory)
+ }
+ }
+
+ /// Machine hook: casts a pointer to its integer address. Only supported
+ /// when an RNG/seed was configured (intptrcast mode).
+ fn ptr_to_int(
+ ptr: Pointer<Self::PointerTag>,
+ memory: &Memory<'mir, 'tcx, Self>,
+ ) -> InterpResult<'tcx, u64> {
+ if memory.extra.rng.is_none() {
+ // No seed means no intptrcast support: reject the cast.
+ err!(ReadPointerAsBytes)
+ } else {
+ intptrcast::GlobalState::ptr_to_int(ptr, memory)
+ }
}
}
--- /dev/null
+use rand::rngs::StdRng;
+
+use rustc_mir::interpret::{Pointer, Allocation, AllocationExtra, InterpResult};
+use rustc_target::abi::Size;
+
+use crate::{stacked_borrows, intptrcast};
+use crate::stacked_borrows::Tag;
+
+/// Extra per-`Memory` state, aggregating the sub-state of each machine
+/// component (Stacked Borrows, intptrcast) plus the optional RNG.
+#[derive(Default, Clone, Debug)]
+pub struct MemoryExtra {
+ pub stacked_borrows: stacked_borrows::MemoryExtra,
+ pub intptrcast: intptrcast::MemoryExtra,
+ /// The random number generator to use if Miri is running in non-deterministic mode and to
+ /// enable intptrcast
+ pub(crate) rng: Option<StdRng>
+}
+
+/// Extra per-allocation state, aggregating the sub-state of each machine
+/// component (Stacked Borrows, intptrcast).
+#[derive(Debug, Clone)]
+pub struct AllocExtra {
+ pub stacked_borrows: stacked_borrows::AllocExtra,
+ pub intptrcast: intptrcast::AllocExtra,
+}
+
+// Forward each memory-access hook to the Stacked Borrows sub-state; the
+// intptrcast sub-state does not need to observe accesses.
+impl AllocationExtra<Tag> for AllocExtra {
+ #[inline(always)]
+ fn memory_read<'tcx>(
+ alloc: &Allocation<Tag, AllocExtra>,
+ ptr: Pointer<Tag>,
+ size: Size,
+ ) -> InterpResult<'tcx> {
+ alloc.extra.stacked_borrows.memory_read(ptr, size)
+ }
+
+ #[inline(always)]
+ fn memory_written<'tcx>(
+ alloc: &mut Allocation<Tag, AllocExtra>,
+ ptr: Pointer<Tag>,
+ size: Size,
+ ) -> InterpResult<'tcx> {
+ alloc.extra.stacked_borrows.memory_written(ptr, size)
+ }
+
+ #[inline(always)]
+ fn memory_deallocated<'tcx>(
+ alloc: &mut Allocation<Tag, AllocExtra>,
+ ptr: Pointer<Tag>,
+ size: Size,
+ ) -> InterpResult<'tcx> {
+ alloc.extra.stacked_borrows.memory_deallocated(ptr, size)
+ }
+}
trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);
+ // If intptrcast is enabled and the operation is not an offset
+ // we can force the cast from pointers to integer addresses and
+ // then dispatch to rustc binary operation method
+ if self.memory().extra.rng.is_some() && bin_op != Offset {
+ let l_bits = self.force_bits(left.imm.to_scalar()?, left.layout.size)?;
+ let r_bits = self.force_bits(right.imm.to_scalar()?, right.layout.size)?;
+
+ let left = ImmTy::from_scalar(Scalar::from_uint(l_bits, left.layout.size), left.layout);
+ let right = ImmTy::from_scalar(Scalar::from_uint(r_bits, left.layout.size), right.layout);
+
+ return self.binary_op(bin_op, left, right);
+ }
+
// Operations that support fat pointers
match bin_op {
Eq | Ne => {
use crate::{
InterpResult, InterpError, MiriEvalContext, HelpersEvalContextExt, Evaluator, MutValueVisitor,
- MemoryKind, MiriMemoryKind, RangeMap, Allocation, AllocationExtra, AllocId,
- Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy,
+ MemoryKind, MiriMemoryKind, RangeMap, AllocId, Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy,
};
pub type PtrId = NonZeroU64;
pub type CallId = NonZeroU64;
+pub type AllocExtra = Stacks;
/// Tracking pointer provenance
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
// Even reading memory can have effects on the stack, so we need a `RefCell` here.
stacks: RefCell<RangeMap<Stack>>,
// Pointer to global state
- global: MemoryState,
+ global: MemoryExtra,
}
/// Extra global state, available to the memory access hooks.
active_calls: HashSet<CallId>,
}
/// Memory extra state gives us interior mutable access to the global state.
-pub type MemoryState = Rc<RefCell<GlobalState>>;
+pub type MemoryExtra = Rc<RefCell<GlobalState>>;
/// Indicates which kind of access is being performed.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
size: Size,
perm: Permission,
tag: Tag,
- extra: MemoryState,
+ extra: MemoryExtra,
) -> Self {
let item = Item { perm, tag, protector: None };
let stack = Stack {
borrows: vec![item],
};
+
Stacks {
stacks: RefCell::new(RangeMap::new(size, stack)),
- global: extra,
+ global: extra,
}
}
pub fn new_allocation(
id: AllocId,
size: Size,
- extra: MemoryState,
+ extra: MemoryExtra,
kind: MemoryKind<MiriMemoryKind>,
) -> (Self, Tag) {
let (tag, perm) = match kind {
let stack = Stacks::new(size, perm, tag, extra);
(stack, tag)
}
-}
-impl AllocationExtra<Tag> for Stacks {
#[inline(always)]
- fn memory_read<'tcx>(
- alloc: &Allocation<Tag, Stacks>,
+ pub fn memory_read<'tcx>(
+ &self,
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
trace!("read access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
- alloc.extra.for_each(ptr, size, |stack, global| {
+ self.for_each(ptr, size, |stack, global| {
stack.access(AccessKind::Read, ptr.tag, global)?;
Ok(())
})
}
#[inline(always)]
- fn memory_written<'tcx>(
- alloc: &mut Allocation<Tag, Stacks>,
+ pub fn memory_written<'tcx>(
+ &mut self,
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
trace!("write access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
- alloc.extra.for_each(ptr, size, |stack, global| {
+ self.for_each(ptr, size, |stack, global| {
stack.access(AccessKind::Write, ptr.tag, global)?;
Ok(())
})
}
#[inline(always)]
- fn memory_deallocated<'tcx>(
- alloc: &mut Allocation<Tag, Stacks>,
+ pub fn memory_deallocated<'tcx>(
+ &mut self,
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
trace!("deallocation with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
- alloc.extra.for_each(ptr, size, |stack, global| {
+ self.for_each(ptr, size, |stack, global| {
stack.dealloc(ptr.tag, global)
})
}
// We are only ever `SharedReadOnly` inside the frozen bits.
let perm = if frozen { Permission::SharedReadOnly } else { Permission::SharedReadWrite };
let item = Item { perm, tag: new_tag, protector };
- alloc.extra.for_each(cur_ptr, size, |stack, global| {
+ alloc.extra.stacked_borrows.for_each(cur_ptr, size, |stack, global| {
stack.grant(cur_ptr.tag, item, global)
})
});
}
};
let item = Item { perm, tag: new_tag, protector };
- alloc.extra.for_each(ptr, size, |stack, global| {
+ alloc.extra.stacked_borrows.for_each(ptr, size, |stack, global| {
stack.grant(ptr.tag, item, global)
})
}
// Compute new borrow.
let new_tag = match kind {
RefKind::Raw { .. } => Tag::Untagged,
- _ => Tag::Tagged(this.memory().extra.borrow_mut().new_ptr()),
+ _ => Tag::Tagged(this.memory().extra.stacked_borrows.borrow_mut().new_ptr()),
};
// Reborrow.
--- /dev/null
+// Validation makes this fail in the wrong place
+// compile-flags: -Zmiri-disable-validation -Zmiri-seed=0000000000000000
+
+fn main() {
+ // Forge a function pointer from a plain integer address.
+ let g = unsafe {
+ std::mem::transmute::<usize, fn(i32)>(42)
+ };
+
+ // Calling it dereferences an address no allocation owns.
+ g(42) //~ ERROR dangling pointer was dereferenced
+}
--- /dev/null
+// compile-flags: -Zmiri-seed=0000000000000000
+
+fn main() {
+ // Reading through NULL must still be detected with intptrcast enabled.
+ let x: i32 = unsafe { *std::ptr::null() }; //~ ERROR invalid use of NULL pointer
+ panic!("this should never print: {}", x);
+}
--- /dev/null
+// compile-flags: -Zmiri-seed=0000000000000000
+
+fn main() {
+ // A small integer address does not belong to any allocation.
+ let p = 44 as *const i32;
+ let x = unsafe { *p }; //~ ERROR dangling pointer was dereferenced
+ panic!("this should never print: {}", x);
+}
--- /dev/null
+// compile-flags: -Zmiri-seed=0000000000000000
+
+fn main() {
+ // Cast a real pointer to an integer and check that ordinary integer
+ // arithmetic on the resulting address behaves consistently.
+ let x = &42 as *const i32 as usize;
+ let y = x * 2;
+ assert_eq!(y, x + x);
+ let z = y as u8 as usize;
+ assert_eq!(z, y % 256);
+}