1 use std::cell::RefCell;
3 use std::num::NonZeroU64;
6 use smallvec::SmallVec;
8 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
9 use rustc_middle::mir::RetagKind;
10 use rustc_target::abi::Size;
13 pub mod stacked_borrows;
14 use stacked_borrows::diagnostics::RetagCause;
16 pub type CallId = NonZeroU64;
/// Tracking pointer provenance: the tag attached to every pointer,
/// identifying the borrow it stems from.
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct BorTag(NonZeroU64);

impl BorTag {
    /// Creates a tag from a raw integer; returns `None` for 0, which is not a
    /// valid tag (the niche is used so `Option<BorTag>` is free).
    pub fn new(i: u64) -> Option<Self> {
        NonZeroU64::new(i).map(BorTag)
    }

    /// The raw integer value of this tag.
    pub fn get(&self) -> u64 {
        self.0.get()
    }

    /// The underlying `NonZeroU64`.
    pub fn inner(&self) -> NonZeroU64 {
        self.0
    }

    /// The next tag, or `None` on overflow of `u64`.
    pub fn succ(self) -> Option<Self> {
        self.0.checked_add(1).map(Self)
    }

    /// The minimum representable tag
    pub fn one() -> Self {
        // 1 is trivially non-zero, so this cannot fail.
        Self::new(1).unwrap()
    }
}
45 impl std::default::Default for BorTag {
46 /// The default to be used when borrow tracking is disabled
47 fn default() -> Self {
52 impl fmt::Debug for BorTag {
53 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
54 write!(f, "<{}>", self.0)
58 /// Per-call-stack-frame data for borrow tracking
60 pub struct FrameState {
61 /// The ID of the call this frame corresponds to.
64 /// If this frame is protecting any tags, they are listed here. We use this list to do
65 /// incremental updates of the global list of protected tags stored in the
66 /// `stacked_borrows::GlobalState` upon function return, and if we attempt to pop a protected
67 /// tag, to identify which call is responsible for protecting the tag.
68 /// See `Stack::item_popped` for more explanation.
70 /// This will contain one tag per reference passed to the function, so
71 /// a size of 2 is enough for the vast majority of functions.
72 pub protected_tags: SmallVec<[BorTag; 2]>,
75 impl VisitTags for FrameState {
76 fn visit_tags(&self, _visit: &mut dyn FnMut(BorTag)) {
77 // `protected_tags` are fine to GC.
81 /// Extra global state, available to the memory access hooks.
83 pub struct GlobalStateInner {
84 /// Borrow tracker method currently in use.
85 pub borrow_tracker_method: BorrowTrackerMethod,
86 /// Next unused pointer ID (tag).
87 pub next_ptr_tag: BorTag,
88 /// Table storing the "base" tag for each allocation.
89 /// The base tag is the one used for the initial pointer.
90 /// We need this in a separate table to handle cyclic statics.
91 pub base_ptr_tags: FxHashMap<AllocId, BorTag>,
92 /// Next unused call ID (for protectors).
93 pub next_call_id: CallId,
94 /// All currently protected tags.
95 /// An item is protected if its tag is in this set, *and* it has the "protected" bit set.
96 /// We add tags to this when they are created with a protector in `reborrow`, and
97 /// we remove tags from this when the call which is protecting them returns, in
98 /// `GlobalStateInner::end_call`. See `Stack::item_popped` for more details.
99 pub protected_tags: FxHashMap<BorTag, ProtectorKind>,
100 /// The pointer ids to trace
101 pub tracked_pointer_tags: FxHashSet<BorTag>,
102 /// The call ids to trace
103 pub tracked_call_ids: FxHashSet<CallId>,
104 /// Whether to recurse into datatypes when searching for pointers to retag.
105 pub retag_fields: RetagFields,
108 impl VisitTags for GlobalStateInner {
109 fn visit_tags(&self, _visit: &mut dyn FnMut(BorTag)) {
110 // The only candidate is base_ptr_tags, and that does not need visiting since we don't ever
111 // GC the bottommost tag.
/// We need interior mutable access to the global state.
// NOTE(review): `RefCell` (not a `Mutex`) presumably because the interpreter
// runs single-threaded — confirm against the machine's threading model.
pub type GlobalState = RefCell<GlobalStateInner>;
/// Indicates which kind of access is being performed.
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
pub enum AccessKind {
    /// A read (load) of memory.
    Read,
    /// A write (store) to memory.
    Write,
}

impl fmt::Display for AccessKind {
    /// Human-readable form used in diagnostics ("read access" / "write access").
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AccessKind::Read => write!(f, "read access"),
            AccessKind::Write => write!(f, "write access"),
        }
    }
}
/// Policy on whether to recurse into fields to retag
// NOTE(review): variant names reconstructed from the visible doc comments —
// confirm against the config parsing that produces this enum.
#[derive(Copy, Clone, Debug)]
pub enum RetagFields {
    /// Don't retag any fields.
    No,
    /// Retag all fields.
    Yes,
    /// Only retag fields of types with Scalar and ScalarPair layout,
    /// to match the LLVM `noalias` we generate.
    OnlyScalar,
}
/// The flavor of the protector.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ProtectorKind {
    /// Protected against aliasing violations from other pointers.
    ///
    /// Items protected like this cause UB when they are invalidated, *but* the pointer itself may
    /// still be used to issue a deallocation.
    ///
    /// This is required for LLVM IR pointers that are `noalias` but *not* `dereferenceable`.
    WeakProtector,

    /// Protected against any kind of invalidation.
    ///
    /// Items protected like this cause UB when they are invalidated or the memory is deallocated.
    /// This is strictly stronger protection than `WeakProtector`.
    ///
    /// This is required for LLVM IR pointers that are `dereferenceable` (and also allows `noalias`).
    StrongProtector,
}
166 /// Utilities for initialization and ID generation
167 impl GlobalStateInner {
169 borrow_tracker_method: BorrowTrackerMethod,
170 tracked_pointer_tags: FxHashSet<BorTag>,
171 tracked_call_ids: FxHashSet<CallId>,
172 retag_fields: RetagFields,
175 borrow_tracker_method,
176 next_ptr_tag: BorTag::one(),
177 base_ptr_tags: FxHashMap::default(),
178 next_call_id: NonZeroU64::new(1).unwrap(),
179 protected_tags: FxHashMap::default(),
180 tracked_pointer_tags,
186 /// Generates a new pointer tag. Remember to also check track_pointer_tags and log its creation!
187 pub fn new_ptr(&mut self) -> BorTag {
188 let id = self.next_ptr_tag;
189 self.next_ptr_tag = id.succ().unwrap();
193 pub fn new_frame(&mut self, machine: &MiriMachine<'_, '_>) -> FrameState {
194 let call_id = self.next_call_id;
195 trace!("new_frame: Assigning call ID {}", call_id);
196 if self.tracked_call_ids.contains(&call_id) {
197 machine.emit_diagnostic(NonHaltingDiagnostic::CreatedCallId(call_id));
199 self.next_call_id = NonZeroU64::new(call_id.get() + 1).unwrap();
200 FrameState { call_id, protected_tags: SmallVec::new() }
203 pub fn end_call(&mut self, frame: &machine::FrameExtra<'_>) {
207 .expect("we should have borrow tracking data")
210 self.protected_tags.remove(tag);
214 pub fn base_ptr_tag(&mut self, id: AllocId, machine: &MiriMachine<'_, '_>) -> BorTag {
215 self.base_ptr_tags.get(&id).copied().unwrap_or_else(|| {
216 let tag = self.new_ptr();
217 if self.tracked_pointer_tags.contains(&tag) {
218 machine.emit_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(
224 trace!("New allocation {:?} has base tag {:?}", id, tag);
225 self.base_ptr_tags.try_insert(id, tag).unwrap();
/// Which borrow tracking method to use
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BorrowTrackerMethod {
    /// Stacked Borrows, as implemented in borrow_tracker/stacked_borrows
    StackedBorrows,
}
238 impl BorrowTrackerMethod {
239 pub fn instanciate_global_state(self, config: &MiriConfig) -> GlobalState {
240 RefCell::new(GlobalStateInner::new(
242 config.tracked_pointer_tags.clone(),
243 config.tracked_call_ids.clone(),
249 impl GlobalStateInner {
250 pub fn new_allocation(
254 kind: MemoryKind<machine::MiriMemoryKind>,
255 machine: &MiriMachine<'_, '_>,
257 match self.borrow_tracker_method {
258 BorrowTrackerMethod::StackedBorrows =>
259 AllocState::StackedBorrows(Box::new(RefCell::new(Stacks::new_allocation(
260 id, alloc_size, self, kind, machine,
266 impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
267 pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
268 fn retag(&mut self, kind: RetagKind, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
269 let this = self.eval_context_mut();
270 let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method;
272 BorrowTrackerMethod::StackedBorrows => this.sb_retag(kind, place),
276 fn retag_return_place(&mut self) -> InterpResult<'tcx> {
277 let this = self.eval_context_mut();
278 let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method;
280 BorrowTrackerMethod::StackedBorrows => this.sb_retag_return_place(),
284 fn expose_tag(&mut self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
285 let this = self.eval_context_mut();
286 let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method;
288 BorrowTrackerMethod::StackedBorrows => this.sb_expose_tag(alloc_id, tag),
293 /// Extra per-allocation data for borrow tracking
294 #[derive(Debug, Clone)]
295 pub enum AllocState {
296 /// Data corresponding to Stacked Borrows
297 StackedBorrows(Box<RefCell<stacked_borrows::AllocState>>),
300 impl machine::AllocExtra {
302 pub fn borrow_tracker_sb(&self) -> &RefCell<stacked_borrows::AllocState> {
303 match self.borrow_tracker {
304 Some(AllocState::StackedBorrows(ref sb)) => sb,
305 _ => panic!("expected Stacked Borrows borrow tracking, got something else"),
310 pub fn borrow_tracker_sb_mut(&mut self) -> &mut RefCell<stacked_borrows::AllocState> {
311 match self.borrow_tracker {
312 Some(AllocState::StackedBorrows(ref mut sb)) => sb,
313 _ => panic!("expected Stacked Borrows borrow tracking, got something else"),
319 pub fn before_memory_read<'tcx>(
322 prov_extra: ProvenanceExtra,
324 machine: &MiriMachine<'_, 'tcx>,
325 ) -> InterpResult<'tcx> {
327 AllocState::StackedBorrows(sb) =>
328 sb.borrow_mut().before_memory_read(alloc_id, prov_extra, range, machine),
332 pub fn before_memory_write<'tcx>(
335 prov_extra: ProvenanceExtra,
337 machine: &mut MiriMachine<'_, 'tcx>,
338 ) -> InterpResult<'tcx> {
340 AllocState::StackedBorrows(sb) =>
341 sb.get_mut().before_memory_write(alloc_id, prov_extra, range, machine),
345 pub fn before_memory_deallocation<'tcx>(
348 prov_extra: ProvenanceExtra,
350 machine: &mut MiriMachine<'_, 'tcx>,
351 ) -> InterpResult<'tcx> {
353 AllocState::StackedBorrows(sb) =>
354 sb.get_mut().before_memory_deallocation(alloc_id, prov_extra, range, machine),
358 pub fn remove_unreachable_tags(&self, tags: &FxHashSet<BorTag>) {
360 AllocState::StackedBorrows(sb) => sb.borrow_mut().remove_unreachable_tags(tags),
365 impl VisitTags for AllocState {
366 fn visit_tags(&self, visit: &mut dyn FnMut(BorTag)) {
368 AllocState::StackedBorrows(sb) => sb.visit_tags(visit),