/// The Miri/CTFE core engine `memory` module provides higher-level access.
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable)]
-pub struct Allocation<Tag = (), Extra = ()> {
+pub struct Allocation<Tag = AllocId, Extra = ()> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer.
bytes: Vec<u8>,
}
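// With the new default, plain `Allocation` means `Allocation<AllocId, ()>`:
// relocations carry only the `AllocId` of the allocation they point to.
// A machine that tracks richer provenance picks the tag itself (sketch;
// `MachineTag` is a hypothetical machine-defined tag type):
//
//     type UntaggedAlloc = Allocation;           // = Allocation<AllocId, ()>
//     type TaggedAlloc = Allocation<MachineTag>; // machine-chosen provenance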
-impl Allocation<()> {
- /// Add Tag and Extra fields
- pub fn with_tags_and_extra<T, E>(
+impl Allocation {
+ /// Convert Tag and add Extra fields
+ pub fn with_prov_and_extra<Tag, Extra>(
self,
- mut tagger: impl FnMut(AllocId) -> T,
- extra: E,
- ) -> Allocation<T, E> {
+ mut tagger: impl FnMut(AllocId) -> Tag,
+ extra: Extra,
+ ) -> Allocation<Tag, Extra> {
Allocation {
bytes: self.bytes,
relocations: Relocations::from_presorted(
- self.relocations
- .iter()
- // The allocations in the relocations (pointers stored *inside* this allocation)
- // all get the base pointer tag.
- .map(|&(offset, ((), alloc))| {
- let tag = tagger(alloc);
- (offset, (tag, alloc))
- })
- .collect(),
+            self.relocations.iter().map(|&(offset, alloc_id)| (offset, tagger(alloc_id))).collect(),
),
init_mask: self.init_mask,
align: self.align,
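// Usage sketch (hypothetical names): a machine lifts an untagged allocation
// into its own provenance domain by supplying a tagger for each `AllocId`
// together with its per-allocation extra state:
//
//     let tagged: Allocation<MachineTag, MachineExtra> = alloc
//         .with_prov_and_extra(|alloc_id| MachineTag::base(alloc_id), MachineExtra::default());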
self.check_relocations(cx, range)?;
} else {
// Maybe a pointer.
- if let Some(&(tag, alloc_id)) = self.relocations.get(&range.start) {
- let ptr = Pointer::new_with_tag(alloc_id, Size::from_bytes(bits), tag);
+ if let Some(&prov) = self.relocations.get(&range.start) {
+ let ptr = Pointer::new(prov, Size::from_bytes(bits));
return Ok(ScalarMaybeUninit::Scalar(ptr.into()));
}
}
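// Reading a pointer-sized scalar thus reassembles the pointer from two places:
// the offset is decoded from the raw data bytes (`bits`), while the provenance
// comes from the relocation entry keyed at `range.start`. For example, a pointer
// stored with offset 16 and provenance `p` reads back as
// `Pointer::new(p, Size::from_bytes(16))`.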
range: AllocRange,
val: ScalarMaybeUninit<Tag>,
) -> AllocResult {
+ assert!(self.mutability == Mutability::Mut);
+
let val = match val {
ScalarMaybeUninit::Scalar(scalar) => scalar,
ScalarMaybeUninit::Uninit => {
}
};
- let bytes = match val.to_bits_or_ptr(range.size, cx) {
- Err(val) => u128::from(val.offset.bytes()),
- Ok(data) => data,
+ let (bytes, provenance) = match val.to_bits_or_ptr(range.size, cx) {
+ Err(val) => {
+ let (provenance, offset) = val.into_parts();
+ (u128::from(offset.bytes()), Some(provenance))
+ }
+ Ok(data) => (data, None),
};
let endian = cx.data_layout().endian;
write_target_uint(endian, dst, bytes).unwrap();
// See if we have to also write a relocation.
- if let Scalar::Ptr(val) = val {
- self.relocations.insert(range.start, (val.tag, val.alloc_id));
+ if let Some(provenance) = provenance {
+ self.relocations.insert(range.start, provenance);
}
Ok(())
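// The write path mirrors the read path: `into_parts` splits a pointer scalar
// into (provenance, offset); the offset is serialized into the data bytes like
// any other integer, while the provenance, if present, lands in the relocation
// side table. Writing a pointer with offset 16 therefore stores the integer 16
// in `self.bytes` plus one relocation entry keyed at `range.start`.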
/// Relocations.
impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Returns all relocations overlapping with the given pointer-offset pair.
- pub fn get_relocations(
- &self,
- cx: &impl HasDataLayout,
- range: AllocRange,
- ) -> &[(Size, (Tag, AllocId))] {
+ pub fn get_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Tag)] {
// We have to go back `pointer_size - 1` bytes, as that one would still overlap with
// the beginning of this range.
let start = range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1);
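// Worked example: with 8-byte pointers, a relocation recorded at offset 24
// covers bytes 24..32 and hence overlaps an access starting at offset 30.
// Scanning keys from 30 - (8 - 1) = 23 onwards finds it; `saturating_sub`
// clamps the scan start to 0 for accesses near the start of the allocation.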
if range.size.bytes() == 0 {
return;
}
+ assert!(self.mutability == Mutability::Mut);
self.init_mask.set_range(range.start, range.end(), is_init);
}
}
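// Both mutating entry points now assert `Mutability::Mut`: an internal sanity
// check that the engine never writes through an allocation that has been
// marked immutable.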
/// Transferring the initialization mask to other allocations.
impl<Tag, Extra> Allocation<Tag, Extra> {
/// Creates a run-length encoding of the initialization mask.
- pub fn compress_uninit_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
+ pub fn compress_uninit_range(&self, range: AllocRange) -> InitMaskCompressed {
// Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
// a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
// the source and write it to the destination. Even if we optimized the memory accesses,
// we would still redo all of that work `repeat` times. Instead, we precompute a
// run-length encoding of the mask, where each element toggles the state.
let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
- let initial = self.init_mask.get(src.offset);
+ let initial = self.init_mask.get(range.start);
let mut cur_len = 1;
let mut cur = initial;
- for i in 1..size.bytes() {
+ for i in 1..range.size.bytes() {
// FIXME: optimize to bitshift the current uninitialized block's bits and read the top bit.
- if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
+ if self.init_mask.get(range.start + Size::from_bytes(i)) == cur {
cur_len += 1;
} else {
ranges.push(cur_len);
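// Worked example: an init mask of `1 1 1 0 0 1 1` compresses to
// `initial = true` with `ranges = [3, 2, 2]`; each run length toggles the
// state, so the encoding decodes back to init/uninit/init runs of those sizes.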
pub fn mark_compressed_init_range(
&mut self,
defined: &InitMaskCompressed,
- dest: Pointer<Tag>,
- size: Size,
+ range: AllocRange,
repeat: u64,
) {
// An optimization where we can just overwrite an entire range of initialization
// bits if they are going to be uniformly `1` or `0`.
if defined.ranges.len() <= 1 {
self.init_mask.set_range_inbounds(
- dest.offset,
- dest.offset + size * repeat, // `Size` operations
+ range.start,
+ range.start + range.size * repeat, // `Size` operations
defined.initial,
);
return;
}
for mut j in 0..repeat {
- j *= size.bytes();
- j += dest.offset.bytes();
+ j *= range.size.bytes();
+ j += range.start.bytes();
let mut cur = defined.initial;
for range in &defined.ranges {
let old_j = j;
}
}
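// Decompression sketch: applying `initial = true`, `ranges = [3, 2, 2]`
// (total size 7) at `range.start = 0` with `repeat = 2` marks bytes 0..3,
// 5..10, and 12..14 as initialized and bytes 3..5 and 10..12 as uninitialized,
// toggling the state after each run.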
-/// Relocations.
+/// "Relocations" stores the provenance information of pointers stored in memory.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
-pub struct Relocations<Tag = (), Id = AllocId>(SortedMap<Size, (Tag, Id)>);
+pub struct Relocations<Tag = AllocId>(SortedMap<Size, Tag>);
-impl<Tag, Id> Relocations<Tag, Id> {
+impl<Tag> Relocations<Tag> {
pub fn new() -> Self {
Relocations(SortedMap::new())
}
// The caller must guarantee that the given relocations are already sorted
// by address and contain no duplicates.
- pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
+ pub fn from_presorted(r: Vec<(Size, Tag)>) -> Self {
Relocations(SortedMap::from_presorted_elements(r))
}
}
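// After this change the side table maps plain offsets to provenance values.
// Sketch of what gets stored for two pointers at offsets 0 and 8 (assuming
// 8-byte pointers; `p` and `q` are hypothetical tags):
//
//     Relocations::from_presorted(vec![
//         (Size::from_bytes(0), p),
//         (Size::from_bytes(8), q),
//     ])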
impl<Tag> Deref for Relocations<Tag> {
- type Target = SortedMap<Size, (Tag, AllocId)>;
+ type Target = SortedMap<Size, Tag>;
fn deref(&self) -> &Self::Target {
&self.0
/// A partial, owned list of relocations to transfer into another allocation.
pub struct AllocationRelocations<Tag> {
- relative_relocations: Vec<(Size, (Tag, AllocId))>,
+ relative_relocations: Vec<(Size, Tag)>,
}
impl<Tag: Copy, Extra> Allocation<Tag, Extra> {