use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
-use crate::{shims::unix::FileHandler, *};
+use crate::{
+ concurrency::{data_race, weak_memory},
+ shims::unix::FileHandler,
+ *,
+};
// Some global facts about the emulated machine.
// Page size (in bytes) that the emulated machine reports to programs.
// Hard-coded to 4 KiB; the FIXME below tracks making this target-dependent.
pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
let buffer_alloc = if ecx.machine.weak_memory {
// FIXME: if this is an atomic object, we want to supply its initial value
// while allocating the store buffer here.
- Some(weak_memory::AllocExtra::new_allocation(alloc.size()))
+ Some(weak_memory::AllocExtra::new_allocation())
} else {
None
};
let alloc: Allocation<Tag, Self::AllocExtra> = alloc.convert_tag_add_extra(
&ecx.tcx,
- AllocExtra { stacked_borrows: stacks, data_race: race_alloc, weak_memory: buffer_alloc },
+ AllocExtra {
+ stacked_borrows: stacks,
+ data_race: race_alloc,
+ weak_memory: buffer_alloc,
+ },
|ptr| Evaluator::tag_alloc_base_pointer(ecx, ptr),
);
Cow::Owned(alloc)
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(),
- )
- } else {
- Ok(())
+ )?;
}
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ // This is a non-atomic access. If we access a previously atomically
+ // accessed location without racing with those accesses, then the location
+ // no longer needs to exhibit weak-memory behaviours until a fresh atomic access happens.
+ weak_memory.destroy_atomicity(range);
+ }
+ }
+ Ok(())
}
#[inline(always)]
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(),
- )
- } else {
- Ok(())
+ )?;
}
+ if let Some(weak_memory) = &alloc_extra.weak_memory {
+ if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
+ weak_memory.destroy_atomicity(range);
+ }
+ }
+ Ok(())
}
#[inline(always)]