1 //! Store the provenance for each byte in the range, with a more efficient
2 //! representation for the common case where PTR_SIZE consecutive bytes have the same provenance.
use std::cmp;

use rustc_data_structures::sorted_map::SortedMap;
use rustc_target::abi::{HasDataLayout, Size};

use super::{alloc_range, AllocError, AllocId, AllocRange, AllocResult, Provenance};
11 /// Stores the provenance information of pointers stored in memory.
12 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
14 pub struct ProvenanceMap<Prov = AllocId> {
15 /// Provenance in this map applies from the given offset for an entire pointer-size worth of
16 /// bytes. Two entires in this map are always at least a pointer size apart.
17 ptrs: SortedMap<Size, Prov>,
18 /// Provenance in this map only applies to the given single byte.
19 /// This map is disjoint from the previous.
20 bytes: SortedMap<Size, Prov>,
23 impl<Prov> ProvenanceMap<Prov> {
24 pub fn new() -> Self {
25 ProvenanceMap { ptrs: SortedMap::new(), bytes: SortedMap::new() }
28 /// The caller must guarantee that the given provenance list is already sorted
29 /// by address and contain no duplicates.
30 pub fn from_presorted_ptrs(r: Vec<(Size, Prov)>) -> Self {
31 ProvenanceMap { ptrs: SortedMap::from_presorted_elements(r), bytes: SortedMap::new() }
36 /// Give access to the ptr-sized provenances (which can also be thought of as relocations, and
37 /// indeed that is how codegen treats them).
39 /// Only exposed with `AllocId` provenance, since it panics if there is bytewise provenance.
40 pub fn ptrs(&self) -> &SortedMap<Size, AllocId> {
41 assert!(self.bytes.is_empty());
46 impl<Prov: Provenance> ProvenanceMap<Prov> {
47 /// Returns all ptr-sized provenance in the given range.
48 /// If the range has length 0, returns provenance that crosses the edge between `start-1` and
50 fn range_get_ptrs(&self, range: AllocRange, cx: &impl HasDataLayout) -> &[(Size, Prov)] {
51 // We have to go back `pointer_size - 1` bytes, as that one would still overlap with
52 // the beginning of this range.
53 let adjusted_start = Size::from_bytes(
54 range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1),
56 self.ptrs.range(adjusted_start..range.end())
59 /// Returns all byte-wise provenance in the given range.
60 fn range_get_bytes(&self, range: AllocRange) -> &[(Size, Prov)] {
61 self.bytes.range(range.start..range.end())
64 /// Get the provenance of a single byte.
65 pub fn get(&self, offset: Size, cx: &impl HasDataLayout) -> Option<Prov> {
66 let prov = self.range_get_ptrs(alloc_range(offset, Size::from_bytes(1)), cx);
67 debug_assert!(prov.len() <= 1);
68 if let Some(entry) = prov.first() {
69 // If it overlaps with this byte, it is on this byte.
70 debug_assert!(self.bytes.get(&offset).is_none());
73 // Look up per-byte provenance.
74 self.bytes.get(&offset).copied()
78 /// Check if here is ptr-sized provenance at the given index.
79 /// Does not mean anything for bytewise provenance! But can be useful as an optimization.
80 pub fn get_ptr(&self, offset: Size) -> Option<Prov> {
81 self.ptrs.get(&offset).copied()
84 /// Returns whether this allocation has provenance overlapping with the given range.
86 /// Note: this function exists to allow `range_get_provenance` to be private, in order to somewhat
87 /// limit access to provenance outside of the `Allocation` abstraction.
89 pub fn range_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
90 self.range_get_ptrs(range, cx).is_empty() && self.range_get_bytes(range).is_empty()
93 /// Yields all the provenances stored in this map.
94 pub fn provenances(&self) -> impl Iterator<Item = Prov> + '_ {
95 self.ptrs.values().chain(self.bytes.values()).copied()
98 pub fn insert_ptr(&mut self, offset: Size, prov: Prov, cx: &impl HasDataLayout) {
99 debug_assert!(self.range_empty(alloc_range(offset, cx.data_layout().pointer_size), cx));
100 self.ptrs.insert(offset, prov);
103 /// Removes all provenance inside the given range.
104 /// If there is provenance overlapping with the edges, might result in an error.
105 pub fn clear(&mut self, range: AllocRange, cx: &impl HasDataLayout) -> AllocResult {
106 let start = range.start;
107 let end = range.end();
108 // Clear the bytewise part -- this is easy.
109 self.bytes.remove_range(start..end);
111 // For the ptr-sized part, find the first (inclusive) and last (exclusive) byte of
112 // provenance that overlaps with the given range.
113 let (first, last) = {
114 // Find all provenance overlapping the given range.
115 let provenance = self.range_get_ptrs(range, cx);
116 if provenance.is_empty() {
117 // No provenance in this range, we are done.
122 provenance.first().unwrap().0,
123 provenance.last().unwrap().0 + cx.data_layout().pointer_size,
127 // We need to handle clearing the provenance from parts of a pointer.
129 if !Prov::OFFSET_IS_ADDR {
130 // We can't split up the provenance into less than a pointer.
131 return Err(AllocError::PartialPointerOverwrite(first));
133 // Insert the remaining part in the bytewise provenance.
134 let prov = self.ptrs[&first];
135 for offset in first..start {
136 self.bytes.insert(offset, prov);
140 let begin_of_last = last - cx.data_layout().pointer_size;
141 if !Prov::OFFSET_IS_ADDR {
142 // We can't split up the provenance into less than a pointer.
143 return Err(AllocError::PartialPointerOverwrite(begin_of_last));
145 // Insert the remaining part in the bytewise provenance.
146 let prov = self.ptrs[&begin_of_last];
147 for offset in end..last {
148 self.bytes.insert(offset, prov);
152 // Forget all the provenance.
153 // Since provenance do not overlap, we know that removing until `last` (exclusive) is fine,
154 // i.e., this will not remove any other provenance just after the ones we care about.
155 self.ptrs.remove_range(first..last);
161 /// A partial, owned list of provenance to transfer into another allocation.
163 /// Offsets are already adjusted to the destination allocation.
164 pub struct ProvenanceCopy<Prov> {
165 dest_ptrs: Vec<(Size, Prov)>,
166 dest_bytes: Vec<(Size, Prov)>,
169 impl<Prov: Provenance> ProvenanceMap<Prov> {
170 #[instrument(skip(self, cx), level = "debug")]
176 cx: &impl HasDataLayout,
177 ) -> AllocResult<ProvenanceCopy<Prov>> {
178 let shift_offset = move |idx, offset| {
179 // compute offset for current repetition
180 let dest_offset = dest + src.size * idx; // `Size` operations
181 // shift offsets from source allocation to destination allocation
182 (offset - src.start) + dest_offset // `Size` operations
184 let ptr_size = cx.data_layout().pointer_size;
186 // # Pointer-sized provenances
187 // Get the provenances that are entirely within this range.
188 // (Different from `range_get_ptrs` which asks if they overlap the range.)
189 let ptrs = if src.size < ptr_size {
190 // This isn't even large enough to contain a pointer.
194 Size::from_bytes(src.end().bytes().saturating_sub(ptr_size.bytes() - 1));
195 self.ptrs.range(src.start..adjusted_end)
198 // Buffer for the new list.
199 let mut dest_ptrs = Vec::with_capacity(ptrs.len() * (count as usize));
200 // If `count` is large, this is rather wasteful -- we are allocating a big array here, which
201 // is mostly filled with redundant information since it's just N copies of the same `Prov`s
202 // at slightly adjusted offsets. The reason we do this is so that in `mark_provenance_range`
203 // we can use `insert_presorted`. That wouldn't work with an `Iterator` that just produces
204 // the right sequence of provenance for all N copies.
205 // Basically, this large array would have to be created anyway in the target allocation.
207 dest_ptrs.extend(ptrs.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
210 // # Byte-sized provenances
211 let mut bytes = Vec::new();
212 // First, if there is a part of a pointer at the start, add that.
213 if let Some(entry) = self.range_get_ptrs(alloc_range(src.start, Size::ZERO), cx).first() {
214 if !Prov::OFFSET_IS_ADDR {
215 // We can't split up the provenance into less than a pointer.
216 return Err(AllocError::PartialPointerCopy(entry.0));
218 trace!("start overlapping entry: {entry:?}");
219 // For really small copies, make sure we don't run off the end of the `src` range.
220 let entry_end = cmp::min(entry.0 + ptr_size, src.end());
221 for offset in src.start..entry_end {
222 bytes.push((offset, entry.1));
225 trace!("no start overlapping entry");
227 // Then the main part, bytewise provenance from `self.bytes`.
228 bytes.extend(self.bytes.range(src.start..src.end()));
229 // And finally possibly parts of a pointer at the end.
230 if let Some(entry) = self.range_get_ptrs(alloc_range(src.end(), Size::ZERO), cx).first() {
231 if !Prov::OFFSET_IS_ADDR {
232 // We can't split up the provenance into less than a pointer.
233 return Err(AllocError::PartialPointerCopy(entry.0));
235 trace!("end overlapping entry: {entry:?}");
236 // For really small copies, make sure we don't start before `src` does.
237 let entry_start = cmp::max(entry.0, src.start);
238 for offset in entry_start..src.end() {
239 if bytes.last().map_or(true, |bytes_entry| bytes_entry.0 < offset) {
240 // The last entry, if it exists, has a lower offset than us.
241 bytes.push((offset, entry.1));
243 // There already is an entry for this offset in there! This can happen when the
244 // start and end range checks actually end up hitting the same pointer, so we
245 // already added this in the "pointer at the start" part above.
246 assert!(entry.0 <= src.start);
250 trace!("no end overlapping entry");
252 trace!("byte provenances: {bytes:?}");
254 // And again a buffer for the new list on the target side.
255 let mut dest_bytes = Vec::with_capacity(bytes.len() * (count as usize));
258 .extend(bytes.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
261 Ok(ProvenanceCopy { dest_ptrs, dest_bytes })
264 /// Applies a provenance copy.
265 /// The affected range, as defined in the parameters to `prepare_copy` is expected
266 /// to be clear of provenance.
268 /// This is dangerous to use as it can violate internal `Allocation` invariants!
269 /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
270 pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
271 self.ptrs.insert_presorted(copy.dest_ptrs);
272 self.bytes.insert_presorted(copy.dest_bytes);