use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
-use crate::{shims::posix::FileHandler, *};
+use crate::{
+ concurrency::{data_race, weak_memory},
+ shims::unix::FileHandler,
+ *,
+};
// Some global facts about the emulated machine.
pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
Tls,
}
-impl Into<MemoryKind<MiriMemoryKind>> for MiriMemoryKind {
+// Implement `From` rather than `Into`: the std blanket impl then provides
+// `Into<MemoryKind<MiriMemoryKind>> for MiriMemoryKind` for free, and Clippy
+// lints against hand-written `Into` impls (`clippy::from_over_into`).
+impl From<MiriMemoryKind> for MemoryKind<MiriMemoryKind> {
#[inline(always)]
- fn into(self) -> MemoryKind<MiriMemoryKind> {
- MemoryKind::Machine(self)
+ fn from(kind: MiriMemoryKind) -> MemoryKind<MiriMemoryKind> {
+ MemoryKind::Machine(kind)
}
}
/// Data race detection via the use of a vector-clock,
/// this is only added if it is enabled.
pub data_race: Option<data_race::AllocExtra>,
+ /// Weak memory emulation via the use of store buffers,
+ /// this is only added if it is enabled.
+ pub weak_memory: Option<weak_memory::AllocExtra>,
}
/// Precomputed layouts of primitive types
/// Whether to enforce the validity invariant.
pub(crate) validate: bool,
- /// Whether to enforce validity (e.g., initialization) of integers and floats.
- pub(crate) enforce_number_validity: bool,
+ /// Whether to allow uninitialized numbers (integers and floats).
+ pub(crate) allow_uninit_numbers: bool,
+
+ /// Whether to allow ptr2int transmutes, and whether to allow *dereferencing* the result of an
+ /// int2ptr transmute.
+ pub(crate) allow_ptr_int_transmute: bool,
/// Whether to enforce [ABI](Abi) of function calls.
pub(crate) enforce_abi: bool,
- pub(crate) file_handler: shims::posix::FileHandler,
- pub(crate) dir_handler: shims::posix::DirHandler,
+ /// The table of file descriptors.
+ pub(crate) file_handler: shims::unix::FileHandler,
+ /// The table of directory descriptors.
+ pub(crate) dir_handler: shims::unix::DirHandler,
/// The "time anchor" for this machine's monotone clock (for `Instant` simulation).
pub(crate) time_anchor: Instant,
/// Corresponds to -Zmiri-mute-stdout-stderr and doesn't write the output but acts as if it succeeded.
pub(crate) mute_stdout_stderr: bool,
+
+ /// Whether weak memory emulation is enabled
+ pub(crate) weak_memory: bool,
}
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Self {
- let local_crates = helpers::get_local_crates(&layout_cx.tcx);
+ let local_crates = helpers::get_local_crates(layout_cx.tcx);
let layouts =
PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
let profiler = config.measureme_out.as_ref().map(|out| {
tls: TlsData::default(),
isolated_op: config.isolated_op,
validate: config.validate,
- enforce_number_validity: config.check_number_validity,
+ allow_uninit_numbers: config.allow_uninit_numbers,
+ allow_ptr_int_transmute: config.allow_ptr_int_transmute,
enforce_abi: config.check_abi,
file_handler: FileHandler::new(config.mute_stdout_stderr),
dir_handler: Default::default(),
check_alignment: config.check_alignment,
cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
mute_stdout_stderr: config.mute_stdout_stderr,
+ weak_memory: config.weak_memory_emulation,
}
}
#[inline(always)]
fn enforce_number_init(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
- ecx.machine.enforce_number_validity
+ !ecx.machine.allow_uninit_numbers
}
#[inline(always)]
fn enforce_number_no_provenance(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
- ecx.machine.enforce_number_validity
+ !ecx.machine.allow_ptr_int_transmute
}
#[inline(always)]
} else {
None
};
+ let buffer_alloc = if ecx.machine.weak_memory {
+ // FIXME: if this is an atomic object, we want to supply its initial value
+ // while allocating the store buffer here.
+ Some(weak_memory::AllocExtra::new_allocation())
+ } else {
+ None
+ };
let alloc: Allocation<Tag, Self::AllocExtra> = alloc.convert_tag_add_extra(
&ecx.tcx,
- AllocExtra { stacked_borrows: stacks, data_race: race_alloc },
+ AllocExtra {
+ stacked_borrows: stacks,
+ data_race: race_alloc,
+ weak_memory: buffer_alloc,
+ },
|ptr| Evaluator::tag_alloc_base_pointer(ecx, ptr),
);
Cow::Owned(alloc)
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(),
- )
- } else {
- Ok(())
+ )?;
}
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ // This is a non-atomic access. And if we are accessing a previously atomically
+ // accessed location without racing with them, then the location no longer needs
+ // to exhibit weak-memory behaviours until a fresh atomic access happens
+ weak_memory.destroy_atomicity(range);
+ }
+ }
+ Ok(())
}
#[inline(always)]
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(),
- )
- } else {
- Ok(())
+ )?;
+ }
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ weak_memory.destroy_atomicity(range);
+ }
}
+ Ok(())
}
#[inline(always)]