use std::borrow::Cow;
use std::cell::RefCell;
+use std::collections::HashSet;
use std::fmt;
use std::num::NonZeroU64;
use std::time::Instant;
use rustc_ast::ast::Mutability;
use rustc_data_structures::fx::FxHashMap;
+#[allow(unused)]
+use rustc_data_structures::static_assert_size;
use rustc_middle::{
mir,
ty::{
},
};
use rustc_span::def_id::{CrateNum, DefId};
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::Symbol;
use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
-use crate::*;
+use crate::{
+ concurrency::{data_race, weak_memory},
+ shims::unix::FileHandler,
+ *,
+};
// Some global facts about the emulated machine.
pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
Tls,
}
-impl Into<MemoryKind<MiriMemoryKind>> for MiriMemoryKind {
+impl From<MiriMemoryKind> for MemoryKind<MiriMemoryKind> {
#[inline(always)]
- fn into(self) -> MemoryKind<MiriMemoryKind> {
- MemoryKind::Machine(self)
+ fn from(kind: MiriMemoryKind) -> MemoryKind<MiriMemoryKind> {
+ MemoryKind::Machine(kind)
}
}
/// Pointer provenance (tag).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct Tag {
+pub enum Tag {
+ Concrete(ConcreteTag),
+ Wildcard,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ConcreteTag {
pub alloc_id: AllocId,
/// Stacked Borrows tag.
pub sb: SbTag,
}
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+static_assert_size!(Pointer<Tag>, 24);
+// #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+// static_assert_size!(Pointer<Option<Tag>>, 24);
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+static_assert_size!(ScalarMaybeUninit<Tag>, 32);
+
impl Provenance for Tag {
/// We use absolute addresses in the `offset` of a `Pointer<Tag>`.
const OFFSET_IS_ADDR: bool = true;
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let (tag, addr) = ptr.into_parts(); // address is absolute
write!(f, "0x{:x}", addr.bytes())?;
- // Forward `alternate` flag to `alloc_id` printing.
- if f.alternate() {
- write!(f, "[{:#?}]", tag.alloc_id)?;
- } else {
- write!(f, "[{:?}]", tag.alloc_id)?;
+
+ match tag {
+ Tag::Concrete(tag) => {
+ // Forward `alternate` flag to `alloc_id` printing.
+ if f.alternate() {
+ write!(f, "[{:#?}]", tag.alloc_id)?;
+ } else {
+ write!(f, "[{:?}]", tag.alloc_id)?;
+ }
+ // Print Stacked Borrows tag.
+ write!(f, "{:?}", tag.sb)?;
+ }
+ Tag::Wildcard => {
+ write!(f, "[Wildcard]")?;
+ }
}
- // Print Stacked Borrows tag.
- write!(f, "{:?}", tag.sb)
+
+ Ok(())
}
- fn get_alloc_id(self) -> AllocId {
- self.alloc_id
+ fn get_alloc_id(self) -> Option<AllocId> {
+ match self {
+ Tag::Concrete(concrete) => Some(concrete.alloc_id),
+ Tag::Wildcard => None,
+ }
}
}
/// Data race detection via the use of a vector-clock,
/// this is only added if it is enabled.
pub data_race: Option<data_race::AllocExtra>,
+ /// Weak memory emulation via the use of store buffers,
+ /// this is only added if it is enabled.
+ pub weak_memory: Option<weak_memory::AllocExtra>,
}
/// Precomputed layouts of primitive types
pub struct PrimitiveLayouts<'tcx> {
pub unit: TyAndLayout<'tcx>,
pub i8: TyAndLayout<'tcx>,
+ pub i16: TyAndLayout<'tcx>,
pub i32: TyAndLayout<'tcx>,
pub isize: TyAndLayout<'tcx>,
pub u8: TyAndLayout<'tcx>,
+ pub u16: TyAndLayout<'tcx>,
pub u32: TyAndLayout<'tcx>,
pub usize: TyAndLayout<'tcx>,
pub bool: TyAndLayout<'tcx>,
Ok(Self {
unit: layout_cx.layout_of(tcx.mk_unit())?,
i8: layout_cx.layout_of(tcx.types.i8)?,
+ i16: layout_cx.layout_of(tcx.types.i16)?,
i32: layout_cx.layout_of(tcx.types.i32)?,
isize: layout_cx.layout_of(tcx.types.isize)?,
u8: layout_cx.layout_of(tcx.types.u8)?,
+ u16: layout_cx.layout_of(tcx.types.u16)?,
u32: layout_cx.layout_of(tcx.types.u32)?,
usize: layout_cx.layout_of(tcx.types.usize)?,
bool: layout_cx.layout_of(tcx.types.bool)?,
/// Whether to enforce the validity invariant.
pub(crate) validate: bool,
- /// Whether to enforce validity (e.g., initialization) of integers and floats.
- pub(crate) enforce_number_validity: bool,
+ /// Whether to allow uninitialized numbers (integers and floats).
+ pub(crate) allow_uninit_numbers: bool,
+
+ /// Whether to allow ptr2int transmutes, and whether to allow *dereferencing* the result of an
+ /// int2ptr transmute.
+ pub(crate) allow_ptr_int_transmute: bool,
/// Whether to enforce [ABI](Abi) of function calls.
pub(crate) enforce_abi: bool,
- pub(crate) file_handler: shims::posix::FileHandler,
- pub(crate) dir_handler: shims::posix::DirHandler,
+ /// The table of file descriptors.
+ pub(crate) file_handler: shims::unix::FileHandler,
+ /// The table of directory descriptors.
+ pub(crate) dir_handler: shims::unix::DirHandler,
/// The "time anchor" for this machine's monotone clock (for `Instant` simulation).
pub(crate) time_anchor: Instant,
/// Needs to be queried by ptr_to_int, hence needs interior mutability.
pub(crate) rng: RefCell<StdRng>,
- /// An allocation ID to report when it is being allocated
+ /// The allocation IDs to report when they are being allocated
/// (helps for debugging memory leaks and use after free bugs).
- tracked_alloc_id: Option<AllocId>,
+ tracked_alloc_ids: HashSet<AllocId>,
/// Controls whether alignment of memory accesses is being checked.
pub(crate) check_alignment: AlignmentCheck,
/// Failure rate of compare_exchange_weak, between 0.0 and 1.0
pub(crate) cmpxchg_weak_failure_rate: f64,
+
+ /// Corresponds to -Zmiri-mute-stdout-stderr and doesn't write the output but acts as if it succeeded.
+ pub(crate) mute_stdout_stderr: bool,
+
+ /// Whether weak memory emulation is enabled
+ pub(crate) weak_memory: bool,
}
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Self {
- let local_crates = helpers::get_local_crates(&layout_cx.tcx);
+ let local_crates = helpers::get_local_crates(layout_cx.tcx);
let layouts =
PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
let profiler = config.measureme_out.as_ref().map(|out| {
let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
let stacked_borrows = if config.stacked_borrows {
Some(RefCell::new(stacked_borrows::GlobalStateInner::new(
- config.tracked_pointer_tag,
- config.tracked_call_id,
+ config.tracked_pointer_tags.clone(),
+ config.tracked_call_ids.clone(),
config.tag_raw,
)))
} else {
tls: TlsData::default(),
isolated_op: config.isolated_op,
validate: config.validate,
- enforce_number_validity: config.check_number_validity,
+ allow_uninit_numbers: config.allow_uninit_numbers,
+ allow_ptr_int_transmute: config.allow_ptr_int_transmute,
enforce_abi: config.check_abi,
- file_handler: Default::default(),
+ file_handler: FileHandler::new(config.mute_stdout_stderr),
dir_handler: Default::default(),
time_anchor: Instant::now(),
layouts,
local_crates,
extern_statics: FxHashMap::default(),
rng: RefCell::new(rng),
- tracked_alloc_id: config.tracked_alloc_id,
+ tracked_alloc_ids: config.tracked_alloc_ids.clone(),
check_alignment: config.check_alignment,
cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
+ mute_stdout_stderr: config.mute_stdout_stderr,
+ weak_memory: config.weak_memory_emulation,
}
}
name: &str,
ptr: Pointer<Option<Tag>>,
) {
+ // This got just allocated, so there definitely is a pointer here.
let ptr = ptr.into_pointer_or_addr().unwrap();
this.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
}
// Most of them are for weak symbols, which we all set to null (indicating that the
// symbol is not supported, and triggering fallback code which ends up calling a
// syscall that we do support).
- for name in &["__cxa_thread_atexit_impl", "getrandom", "statx"] {
+ for name in &["__cxa_thread_atexit_impl", "getrandom", "statx", "__clock_gettime64"]
+ {
let layout = this.machine.layouts.usize;
let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into())?;
this.write_scalar(Scalar::from_machine_usize(0, this), &place.into())?;
/// Machine hook implementations.
impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
type MemoryKind = MiriMemoryKind;
+ type ExtraFnVal = Dlsym;
type FrameExtra = FrameData<'tcx>;
type AllocExtra = AllocExtra;
+
type PointerTag = Tag;
- type ExtraFnVal = Dlsym;
+ type TagExtra = SbTag;
type MemoryMap =
MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
}
#[inline(always)]
- fn enforce_number_validity(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
- ecx.machine.enforce_number_validity
+ fn enforce_number_init(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
+ !ecx.machine.allow_uninit_numbers
+ }
+
+ #[inline(always)]
+ fn enforce_number_no_provenance(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
+ !ecx.machine.allow_ptr_int_transmute
}
#[inline(always)]
instance: ty::Instance<'tcx>,
abi: Abi,
args: &[OpTy<'tcx, Tag>],
- ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
+ dest: &PlaceTy<'tcx, Tag>,
+ ret: Option<mir::BasicBlock>,
unwind: StackPopUnwind,
) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
- ecx.find_mir_or_eval_fn(instance, abi, args, ret, unwind)
+ ecx.find_mir_or_eval_fn(instance, abi, args, dest, ret, unwind)
}
#[inline(always)]
fn_val: Dlsym,
abi: Abi,
args: &[OpTy<'tcx, Tag>],
- ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
+ dest: &PlaceTy<'tcx, Tag>,
+ ret: Option<mir::BasicBlock>,
_unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
- ecx.call_dlsym(fn_val, abi, args, ret)
+ ecx.call_dlsym(fn_val, abi, args, dest, ret)
}
#[inline(always)]
ecx: &mut MiriEvalContext<'mir, 'tcx>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Tag>],
- ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
+ dest: &PlaceTy<'tcx, Tag>,
+ ret: Option<mir::BasicBlock>,
unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
- ecx.call_intrinsic(instance, args, ret, unwind)
+ ecx.call_intrinsic(instance, args, dest, ret, unwind)
}
#[inline(always)]
ecx: &MiriEvalContext<'mir, 'tcx>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Tag>> {
- let attrs = ecx.tcx.get_attrs(def_id);
- let link_name = match ecx.tcx.sess.first_attr_value_str_by_name(&attrs, sym::link_name) {
- Some(name) => name,
- None => ecx.tcx.item_name(def_id),
- };
+ let link_name = ecx.item_link_name(def_id);
if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
Ok(ptr)
} else {
- throw_unsup_format!("`extern` static {:?} is not supported by Miri", def_id)
+ throw_unsup_format!(
+ "`extern` static `{}` from crate `{}` is not supported by Miri",
+ ecx.tcx.def_path_str(def_id),
+ ecx.tcx.crate_name(def_id.krate),
+ )
}
}
alloc: Cow<'b, Allocation>,
kind: Option<MemoryKind<Self::MemoryKind>>,
) -> Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>> {
- if Some(id) == ecx.machine.tracked_alloc_id {
+ if ecx.machine.tracked_alloc_ids.contains(&id) {
register_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id));
}
let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
let alloc = alloc.into_owned();
let stacks = if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
- Some(Stacks::new_allocation(id, alloc.size(), stacked_borrows, kind))
+ Some(Stacks::new_allocation(
+ id,
+ alloc.size(),
+ stacked_borrows,
+ kind,
+ ecx.machine.current_span(),
+ ))
} else {
None
};
let race_alloc = if let Some(data_race) = &ecx.machine.data_race {
- Some(data_race::AllocExtra::new_allocation(&data_race, alloc.size(), kind))
+ Some(data_race::AllocExtra::new_allocation(data_race, alloc.size(), kind))
+ } else {
+ None
+ };
+ let buffer_alloc = if ecx.machine.weak_memory {
+        // FIXME: if this is an atomic object, we want to supply its initial value
+ // while allocating the store buffer here.
+ Some(weak_memory::AllocExtra::new_allocation())
} else {
None
};
let alloc: Allocation<Tag, Self::AllocExtra> = alloc.convert_tag_add_extra(
&ecx.tcx,
- AllocExtra { stacked_borrows: stacks, data_race: race_alloc },
+ AllocExtra {
+ stacked_borrows: stacks,
+ data_race: race_alloc,
+ weak_memory: buffer_alloc,
+ },
|ptr| Evaluator::tag_alloc_base_pointer(ecx, ptr),
);
Cow::Owned(alloc)
} else {
SbTag::Untagged
};
- Pointer::new(Tag { alloc_id: ptr.provenance, sb: sb_tag }, Size::from_bytes(absolute_addr))
+ Pointer::new(
+ Tag::Concrete(ConcreteTag { alloc_id: ptr.provenance, sb: sb_tag }),
+ Size::from_bytes(absolute_addr),
+ )
}
#[inline(always)]
- fn ptr_from_addr(
+ fn ptr_from_addr_cast(
ecx: &MiriEvalContext<'mir, 'tcx>,
addr: u64,
) -> Pointer<Option<Self::PointerTag>> {
- intptrcast::GlobalStateInner::ptr_from_addr(addr, ecx)
+ intptrcast::GlobalStateInner::ptr_from_addr_cast(ecx, addr)
+ }
+
+ #[inline(always)]
+ fn ptr_from_addr_transmute(
+ ecx: &MiriEvalContext<'mir, 'tcx>,
+ addr: u64,
+ ) -> Pointer<Option<Self::PointerTag>> {
+ intptrcast::GlobalStateInner::ptr_from_addr_transmute(ecx, addr)
+ }
+
+ fn expose_ptr(
+ ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ ptr: Pointer<Self::PointerTag>,
+ ) -> InterpResult<'tcx> {
+ match ptr.provenance {
+ Tag::Concrete(concrete) =>
+ intptrcast::GlobalStateInner::expose_addr(ecx, concrete.alloc_id),
+ Tag::Wildcard => {
+                // No need to do anything for wildcard pointers, as
+                // their provenance has already been exposed.
+ }
+ }
+ Ok(())
}
/// Convert a pointer with provenance into an allocation-offset pair,
fn ptr_get_alloc(
ecx: &MiriEvalContext<'mir, 'tcx>,
ptr: Pointer<Self::PointerTag>,
- ) -> (AllocId, Size) {
+ ) -> Option<(AllocId, Size, Self::TagExtra)> {
let rel = intptrcast::GlobalStateInner::abs_ptr_to_rel(ecx, ptr);
- (ptr.provenance.alloc_id, rel)
+
+ rel.map(|(alloc_id, size)| {
+ let sb = match ptr.provenance {
+ Tag::Concrete(ConcreteTag { sb, .. }) => sb,
+ Tag::Wildcard => SbTag::Untagged,
+ };
+ (alloc_id, size, sb)
+ })
}
#[inline(always)]
fn memory_read(
+ _tcx: TyCtxt<'tcx>,
machine: &Self,
alloc_extra: &AllocExtra,
- tag: Tag,
+ (alloc_id, tag): (AllocId, Self::TagExtra),
range: AllocRange,
) -> InterpResult<'tcx> {
if let Some(data_race) = &alloc_extra.data_race {
- data_race.read(tag.alloc_id, range, machine.data_race.as_ref().unwrap())?;
+ data_race.read(alloc_id, range, machine.data_race.as_ref().unwrap())?;
}
if let Some(stacked_borrows) = &alloc_extra.stacked_borrows {
stacked_borrows.memory_read(
- tag.alloc_id,
- tag.sb,
+ alloc_id,
+ tag,
range,
machine.stacked_borrows.as_ref().unwrap(),
- )
- } else {
- Ok(())
+ machine.current_span(),
+ )?;
}
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ // This is a non-atomic access. And if we are accessing a previously atomically
+ // accessed location without racing with them, then the location no longer needs
+ // to exhibit weak-memory behaviours until a fresh atomic access happens
+ weak_memory.destroy_atomicity(range);
+ }
+ }
+ Ok(())
}
#[inline(always)]
fn memory_written(
+ _tcx: TyCtxt<'tcx>,
machine: &mut Self,
alloc_extra: &mut AllocExtra,
- tag: Tag,
+ (alloc_id, tag): (AllocId, Self::TagExtra),
range: AllocRange,
) -> InterpResult<'tcx> {
if let Some(data_race) = &mut alloc_extra.data_race {
- data_race.write(tag.alloc_id, range, machine.data_race.as_mut().unwrap())?;
+ data_race.write(alloc_id, range, machine.data_race.as_mut().unwrap())?;
}
if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
stacked_borrows.memory_written(
- tag.alloc_id,
- tag.sb,
+ alloc_id,
+ tag,
range,
- machine.stacked_borrows.as_mut().unwrap(),
- )
- } else {
- Ok(())
+ machine.stacked_borrows.as_ref().unwrap(),
+ machine.current_span(),
+ )?;
}
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ weak_memory.destroy_atomicity(range);
+ }
+ }
+ Ok(())
}
#[inline(always)]
fn memory_deallocated(
+ _tcx: TyCtxt<'tcx>,
machine: &mut Self,
alloc_extra: &mut AllocExtra,
- tag: Tag,
+ (alloc_id, tag): (AllocId, Self::TagExtra),
range: AllocRange,
) -> InterpResult<'tcx> {
- if Some(tag.alloc_id) == machine.tracked_alloc_id {
- register_diagnostic(NonHaltingDiagnostic::FreedAlloc(tag.alloc_id));
+ if machine.tracked_alloc_ids.contains(&alloc_id) {
+ register_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
}
if let Some(data_race) = &mut alloc_extra.data_race {
- data_race.deallocate(tag.alloc_id, range, machine.data_race.as_mut().unwrap())?;
+ data_race.deallocate(alloc_id, range, machine.data_race.as_mut().unwrap())?;
}
if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
stacked_borrows.memory_deallocated(
- tag.alloc_id,
- tag.sb,
+ alloc_id,
+ tag,
range,
- machine.stacked_borrows.as_mut().unwrap(),
+ machine.stacked_borrows.as_ref().unwrap(),
)
} else {
Ok(())