use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
-use crate::{shims::unix::FileHandler, *};
+use crate::{
+ concurrency::{data_race, weak_memory},
+ shims::unix::FileHandler,
+ *,
+};
// Some global facts about the emulated machine.
pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
/// Data race detection via the use of a vector clock;
/// this is only added if it is enabled.
pub data_race: Option<data_race::AllocExtra>,
+ /// Weak memory emulation via the use of store buffers;
+ /// this is only added if it is enabled.
+ pub weak_memory: Option<weak_memory::AllocExtra>,
}
/// Precomputed layouts of primitive types
/// Corresponds to -Zmiri-mute-stdout-stderr and doesn't write the output but acts as if it succeeded.
pub(crate) mute_stdout_stderr: bool,
+
+ /// Whether weak memory emulation is enabled
+ pub(crate) weak_memory: bool,
}
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
check_alignment: config.check_alignment,
cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
mute_stdout_stderr: config.mute_stdout_stderr,
+ weak_memory: config.weak_memory_emulation,
}
}
} else {
None
};
+ let buffer_alloc = if ecx.machine.weak_memory {
+ // FIXME: if this is an atomic object, we want to supply its initial value
+ // while allocating the store buffer here.
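+ // For now this starts out with empty store buffers; a per-location buffer
+ // is only created once that location is actually accessed atomically.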
+ Some(weak_memory::AllocExtra::new_allocation())
+ } else {
+ None
+ };
let alloc: Allocation<Tag, Self::AllocExtra> = alloc.convert_tag_add_extra(
&ecx.tcx,
- AllocExtra { stacked_borrows: stacks, data_race: race_alloc },
+ AllocExtra {
+ stacked_borrows: stacks,
+ data_race: race_alloc,
+ weak_memory: buffer_alloc,
+ },
|ptr| Evaluator::tag_alloc_base_pointer(ecx, ptr),
);
Cow::Owned(alloc)
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(),
- )
- } else {
- Ok(())
+ )?;
}
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ // This is a non-atomic access. If it touches a location that was previously
+ // accessed atomically, and it does not race with those atomic accesses, then
+ // the location no longer needs to exhibit weak-memory behaviours until a
+ // fresh atomic access happens.
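+ // For example: one thread stores to an atomic, and another thread later
+ // overwrites the same location with a plain write, synchronised so that the
+ // two accesses do not race. Readers must then see the plain write rather
+ // than a stale buffered value, so the store buffers are discarded here.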
+ weak_memory.destroy_atomicity(range);
+ }
+ }
+ Ok(())
}
#[inline(always)]
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(),
- )
- } else {
- Ok(())
+ )?;
+ }
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
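+ // As above: a non-atomic access that does not race with earlier atomic
+ // accesses ends weak-memory behaviour for this range, so discard its
+ // store buffers.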
+ weak_memory.destroy_atomicity(range);
+ }
}
+ Ok(())
}
#[inline(always)]