use std::collections::HashSet;
use std::fmt;
use std::num::NonZeroU64;
-use std::rc::Rc;
use std::time::Instant;
use rand::rngs::StdRng;
use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
-use crate::{shims::posix::FileHandler, *};
+use crate::{
+ concurrency::{data_race, weak_memory},
+ shims::unix::FileHandler,
+ *,
+};
// Some global facts about the emulated machine.
pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
Tls,
}
-impl Into<MemoryKind<MiriMemoryKind>> for MiriMemoryKind {
+impl From<MiriMemoryKind> for MemoryKind<MiriMemoryKind> {
#[inline(always)]
- fn into(self) -> MemoryKind<MiriMemoryKind> {
- MemoryKind::Machine(self)
+ fn from(kind: MiriMemoryKind) -> MemoryKind<MiriMemoryKind> {
+ MemoryKind::Machine(kind)
}
}
/// Pointer provenance (tag).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct Tag {
+pub enum Tag {
+ Concrete(ConcreteTag),
+ Wildcard,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ConcreteTag {
pub alloc_id: AllocId,
/// Stacked Borrows tag.
pub sb: SbTag,
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(Pointer<Tag>, 24);
-#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-static_assert_size!(Pointer<Option<Tag>>, 24);
+// #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+// static_assert_size!(Pointer<Option<Tag>>, 24);
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(ScalarMaybeUninit<Tag>, 32);
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let (tag, addr) = ptr.into_parts(); // address is absolute
write!(f, "0x{:x}", addr.bytes())?;
- // Forward `alternate` flag to `alloc_id` printing.
- if f.alternate() {
- write!(f, "[{:#?}]", tag.alloc_id)?;
- } else {
- write!(f, "[{:?}]", tag.alloc_id)?;
+
+ match tag {
+ Tag::Concrete(tag) => {
+ // Forward `alternate` flag to `alloc_id` printing.
+ if f.alternate() {
+ write!(f, "[{:#?}]", tag.alloc_id)?;
+ } else {
+ write!(f, "[{:?}]", tag.alloc_id)?;
+ }
+ // Print Stacked Borrows tag.
+ write!(f, "{:?}", tag.sb)?;
+ }
+ Tag::Wildcard => {
+ write!(f, "[Wildcard]")?;
+ }
}
- // Print Stacked Borrows tag.
- write!(f, "{:?}", tag.sb)
+
+ Ok(())
}
fn get_alloc_id(self) -> Option<AllocId> {
- Some(self.alloc_id)
+ match self {
+ Tag::Concrete(concrete) => Some(concrete.alloc_id),
+ Tag::Wildcard => None,
+ }
}
}
/// Data race detection via the use of a vector-clock,
/// this is only added if it is enabled.
pub data_race: Option<data_race::AllocExtra>,
+ /// Weak memory emulation via the use of store buffers,
+ /// this is only added if it is enabled.
+ pub weak_memory: Option<weak_memory::AllocExtra>,
}
/// Precomputed layouts of primitive types
pub struct PrimitiveLayouts<'tcx> {
pub unit: TyAndLayout<'tcx>,
pub i8: TyAndLayout<'tcx>,
+ pub i16: TyAndLayout<'tcx>,
pub i32: TyAndLayout<'tcx>,
pub isize: TyAndLayout<'tcx>,
pub u8: TyAndLayout<'tcx>,
+ pub u16: TyAndLayout<'tcx>,
pub u32: TyAndLayout<'tcx>,
pub usize: TyAndLayout<'tcx>,
pub bool: TyAndLayout<'tcx>,
Ok(Self {
unit: layout_cx.layout_of(tcx.mk_unit())?,
i8: layout_cx.layout_of(tcx.types.i8)?,
+ i16: layout_cx.layout_of(tcx.types.i16)?,
i32: layout_cx.layout_of(tcx.types.i32)?,
isize: layout_cx.layout_of(tcx.types.isize)?,
u8: layout_cx.layout_of(tcx.types.u8)?,
+ u16: layout_cx.layout_of(tcx.types.u16)?,
u32: layout_cx.layout_of(tcx.types.u32)?,
usize: layout_cx.layout_of(tcx.types.usize)?,
bool: layout_cx.layout_of(tcx.types.bool)?,
/// Whether to enforce the validity invariant.
pub(crate) validate: bool,
- /// Whether to enforce validity (e.g., initialization) of integers and floats.
- pub(crate) enforce_number_validity: bool,
+ /// Whether to allow uninitialized numbers (integers and floats).
+ pub(crate) allow_uninit_numbers: bool,
+
+ /// Whether to allow ptr2int transmutes, and whether to allow *dereferencing* the result of an
+ /// int2ptr transmute.
+ pub(crate) allow_ptr_int_transmute: bool,
/// Whether to enforce [ABI](Abi) of function calls.
pub(crate) enforce_abi: bool,
- pub(crate) file_handler: shims::posix::FileHandler,
- pub(crate) dir_handler: shims::posix::DirHandler,
+ /// The table of file descriptors.
+ pub(crate) file_handler: shims::unix::FileHandler,
+ /// The table of directory descriptors.
+ pub(crate) dir_handler: shims::unix::DirHandler,
/// The "time anchor" for this machine's monotone clock (for `Instant` simulation).
pub(crate) time_anchor: Instant,
pub(crate) backtrace_style: BacktraceStyle,
/// Crates which are considered local for the purposes of error reporting.
- pub(crate) local_crates: Rc<[CrateNum]>,
+ pub(crate) local_crates: Vec<CrateNum>,
/// Mapping extern static names to their base pointer.
extern_statics: FxHashMap<Symbol, Pointer<Tag>>,
/// Corresponds to -Zmiri-mute-stdout-stderr and doesn't write the output but acts as if it succeeded.
pub(crate) mute_stdout_stderr: bool,
+
+ /// Whether weak memory emulation is enabled
+ pub(crate) weak_memory: bool,
}
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Self {
- let local_crates = helpers::get_local_crates(&layout_cx.tcx);
+ let local_crates = helpers::get_local_crates(layout_cx.tcx);
let layouts =
PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
let profiler = config.measureme_out.as_ref().map(|out| {
tls: TlsData::default(),
isolated_op: config.isolated_op,
validate: config.validate,
- enforce_number_validity: config.check_number_validity,
+ allow_uninit_numbers: config.allow_uninit_numbers,
+ allow_ptr_int_transmute: config.allow_ptr_int_transmute,
enforce_abi: config.check_abi,
file_handler: FileHandler::new(config.mute_stdout_stderr),
dir_handler: Default::default(),
check_alignment: config.check_alignment,
cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
mute_stdout_stderr: config.mute_stdout_stderr,
+ weak_memory: config.weak_memory_emulation,
}
}
}
#[inline(always)]
- fn enforce_number_validity(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
- ecx.machine.enforce_number_validity
+ fn enforce_number_init(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
+ !ecx.machine.allow_uninit_numbers
+ }
+
+ #[inline(always)]
+ fn enforce_number_no_provenance(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
+ !ecx.machine.allow_ptr_int_transmute
}
#[inline(always)]
instance: ty::Instance<'tcx>,
abi: Abi,
args: &[OpTy<'tcx, Tag>],
- ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
+ dest: &PlaceTy<'tcx, Tag>,
+ ret: Option<mir::BasicBlock>,
unwind: StackPopUnwind,
) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
- ecx.find_mir_or_eval_fn(instance, abi, args, ret, unwind)
+ ecx.find_mir_or_eval_fn(instance, abi, args, dest, ret, unwind)
}
#[inline(always)]
fn_val: Dlsym,
abi: Abi,
args: &[OpTy<'tcx, Tag>],
- ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
+ dest: &PlaceTy<'tcx, Tag>,
+ ret: Option<mir::BasicBlock>,
_unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
- ecx.call_dlsym(fn_val, abi, args, ret)
+ ecx.call_dlsym(fn_val, abi, args, dest, ret)
}
#[inline(always)]
ecx: &mut MiriEvalContext<'mir, 'tcx>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Tag>],
- ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
+ dest: &PlaceTy<'tcx, Tag>,
+ ret: Option<mir::BasicBlock>,
unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
- ecx.call_intrinsic(instance, args, ret, unwind)
+ ecx.call_intrinsic(instance, args, dest, ret, unwind)
}
#[inline(always)]
if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
Ok(ptr)
} else {
- throw_unsup_format!("`extern` static {:?} is not supported by Miri", def_id)
+ throw_unsup_format!(
+ "`extern` static `{}` from crate `{}` is not supported by Miri",
+ ecx.tcx.def_path_str(def_id),
+ ecx.tcx.crate_name(def_id.krate),
+ )
}
}
alloc.size(),
stacked_borrows,
kind,
- &ecx.machine.threads,
- ecx.machine.local_crates.clone(),
+ ecx.machine.current_span(),
))
} else {
None
} else {
None
};
+ let buffer_alloc = if ecx.machine.weak_memory {
+ // FIXME: if this is an atomic object, we want to supply its initial value
+ // while allocating the store buffer here.
+ Some(weak_memory::AllocExtra::new_allocation())
+ } else {
+ None
+ };
let alloc: Allocation<Tag, Self::AllocExtra> = alloc.convert_tag_add_extra(
&ecx.tcx,
- AllocExtra { stacked_borrows: stacks, data_race: race_alloc },
+ AllocExtra {
+ stacked_borrows: stacks,
+ data_race: race_alloc,
+ weak_memory: buffer_alloc,
+ },
|ptr| Evaluator::tag_alloc_base_pointer(ecx, ptr),
);
Cow::Owned(alloc)
} else {
SbTag::Untagged
};
- Pointer::new(Tag { alloc_id: ptr.provenance, sb: sb_tag }, Size::from_bytes(absolute_addr))
+ Pointer::new(
+ Tag::Concrete(ConcreteTag { alloc_id: ptr.provenance, sb: sb_tag }),
+ Size::from_bytes(absolute_addr),
+ )
}
#[inline(always)]
ecx: &MiriEvalContext<'mir, 'tcx>,
addr: u64,
) -> Pointer<Option<Self::PointerTag>> {
- intptrcast::GlobalStateInner::ptr_from_addr(addr, ecx)
+ intptrcast::GlobalStateInner::ptr_from_addr_cast(ecx, addr)
}
#[inline(always)]
ecx: &MiriEvalContext<'mir, 'tcx>,
addr: u64,
) -> Pointer<Option<Self::PointerTag>> {
- Self::ptr_from_addr_cast(ecx, addr)
+ intptrcast::GlobalStateInner::ptr_from_addr_transmute(ecx, addr)
}
- #[inline(always)]
fn expose_ptr(
- _ecx: &mut InterpCx<'mir, 'tcx, Self>,
- _ptr: Pointer<Self::PointerTag>,
+ ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ ptr: Pointer<Self::PointerTag>,
) -> InterpResult<'tcx> {
+ match ptr.provenance {
+ Tag::Concrete(concrete) =>
+ intptrcast::GlobalStateInner::expose_addr(ecx, concrete.alloc_id),
+ Tag::Wildcard => {
+ // No need to do anything for wildcard pointers as
+ // their provenances have already been previously exposed.
+ }
+ }
Ok(())
}
ptr: Pointer<Self::PointerTag>,
) -> Option<(AllocId, Size, Self::TagExtra)> {
let rel = intptrcast::GlobalStateInner::abs_ptr_to_rel(ecx, ptr);
- Some((ptr.provenance.alloc_id, rel, ptr.provenance.sb))
+
+ rel.map(|(alloc_id, size)| {
+ let sb = match ptr.provenance {
+ Tag::Concrete(ConcreteTag { sb, .. }) => sb,
+ Tag::Wildcard => SbTag::Untagged,
+ };
+ (alloc_id, size, sb)
+ })
}
#[inline(always)]
tag,
range,
machine.stacked_borrows.as_ref().unwrap(),
- &machine.threads,
- )
- } else {
- Ok(())
+ machine.current_span(),
+ )?;
+ }
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ // This is a non-atomic access. And if we are accessing a previously atomically
+ // accessed location without racing with them, then the location no longer needs
+ // to exhibit weak-memory behaviours until a fresh atomic access happens
+ weak_memory.destroy_atomicity(range);
+ }
}
+ Ok(())
}
#[inline(always)]
tag,
range,
machine.stacked_borrows.as_ref().unwrap(),
- &machine.threads,
- )
- } else {
- Ok(())
+ machine.current_span(),
+ )?;
}
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ weak_memory.destroy_atomicity(range);
+ }
+ }
+ Ok(())
}
#[inline(always)]