Ok(())
}
- /// Executes a retagging operation.
+ /// Executes a retagging operation for a single pointer.
+ /// Returns the possibly adjusted pointer.
#[inline]
- fn retag(
+ fn retag_ptr_value(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _kind: mir::RetagKind,
+ val: &ImmTy<'tcx, Self::Provenance>,
+ ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
+ Ok(val.clone())
+ }
+
+ /// Executes a retagging operation on a compound value.
+ /// Replaces all pointers stored in the given place.
+ #[inline]
+ fn retag_place_contents(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_kind: mir::RetagKind,
_place: &PlaceTy<'tcx, Self::Provenance>,
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::ty::layout::LayoutOf;
-use super::{InterpCx, Machine};
+use super::{ImmTy, InterpCx, Machine};
/// Classify whether an operator is "left-homogeneous", i.e., the LHS has the
/// same type as the result.
// Stacked Borrows.
Retag(kind, place) => {
let dest = self.eval_place(**place)?;
- M::retag(self, *kind, &dest)?;
+ M::retag_place_contents(self, *kind, &dest)?;
}
Intrinsic(box ref intrinsic) => self.emulate_nondiverging_intrinsic(intrinsic)?,
self.write_scalar(Scalar::from_machine_usize(len, self), &dest)?;
}
- AddressOf(_, place) | Ref(_, _, place) => {
+ Ref(_, borrow_kind, place) => {
let src = self.eval_place(place)?;
let place = self.force_allocation(&src)?;
- self.write_immediate(place.to_ref(self), &dest)?;
+ let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
+ // A fresh reference was created; make sure it gets retagged.
+ let val = M::retag_ptr_value(
+ self,
+ if borrow_kind.allows_two_phase_borrow() {
+ mir::RetagKind::TwoPhase
+ } else {
+ mir::RetagKind::Default
+ },
+ &val,
+ )?;
+ self.write_immediate(*val, &dest)?;
+ }
+
+ AddressOf(_, place) => {
+ // Figure out whether this is an addr_of of an already raw place.
+ let place_base_raw = if place.has_deref() {
+ let ty = self.frame().body.local_decls[place.local].ty;
+ ty.is_unsafe_ptr()
+ } else {
+ // Not a deref, and thus not raw.
+ false
+ };
+
+ let src = self.eval_place(place)?;
+ let place = self.force_allocation(&src)?;
+ let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
+ if !place_base_raw {
+ // If this was not already raw, it needs retagging.
+ val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
+ }
+ self.write_immediate(*val, &dest)?;
}
NullaryOp(null_op, ty) => {
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub enum RetagKind {
- /// The initial retag when entering a function.
+ /// The initial retag of arguments when entering a function.
FnEntry,
/// Retag preparing for a two-phase borrow.
TwoPhase,
pub struct AddRetag;
-/// Determines whether this place is "stable": Whether, if we evaluate it again
-/// after the assignment, we can be sure to obtain the same place value.
-/// (Concurrent accesses by other threads are no problem as these are anyway non-atomic
-/// copies. Data races are UB.)
-fn is_stable(place: PlaceRef<'_>) -> bool {
- // Which place this evaluates to can change with any memory write,
- // so cannot assume deref to be stable.
- !place.has_deref()
-}
-
/// Determine whether this type may contain a reference (or box), and thus needs retagging.
/// We will only recurse `depth` times into Tuples/ADTs to bound the cost of this.
fn may_contain_reference<'tcx>(ty: Ty<'tcx>, depth: u32, tcx: TyCtxt<'tcx>) -> bool {
let basic_blocks = body.basic_blocks.as_mut();
let local_decls = &body.local_decls;
let needs_retag = |place: &Place<'tcx>| {
- // FIXME: Instead of giving up for unstable places, we should introduce
- // a temporary and retag on that.
- is_stable(place.as_ref())
+ !place.has_deref() // we're not really interested in stores to "outside" locations, they are hard to keep track of anyway
&& may_contain_reference(place.ty(&*local_decls, tcx).ty, /*depth*/ 3, tcx)
&& !local_decls[place.local].is_deref_temp()
};
- let place_base_raw = |place: &Place<'tcx>| {
- // If this is a `Deref`, get the type of what we are deref'ing.
- if place.has_deref() {
- let ty = &local_decls[place.local].ty;
- ty.is_unsafe_ptr()
- } else {
- // Not a deref, and thus not raw.
- false
- }
- };
// PART 1
// Retag arguments at the beginning of the start block.
}
// PART 2
- // Retag return values of functions. Also escape-to-raw the argument of `drop`.
+ // Retag return values of functions.
// We collect the return destinations because we cannot mutate while iterating.
let returns = basic_blocks
.iter_mut()
}
// PART 3
- // Add retag after assignment.
+ // Add retag after assignments where data "enters" this function: the RHS is behind a deref and the LHS is not.
for block_data in basic_blocks {
// We want to insert statements as we iterate. To this end, we
// iterate backwards using indices.
for i in (0..block_data.statements.len()).rev() {
let (retag_kind, place) = match block_data.statements[i].kind {
- // Retag-as-raw after escaping to a raw pointer, if the referent
- // is not already a raw pointer.
- StatementKind::Assign(box (lplace, Rvalue::AddressOf(_, ref rplace)))
- if !place_base_raw(rplace) =>
- {
- (RetagKind::Raw, lplace)
- }
// Retag after assignments of reference type.
StatementKind::Assign(box (ref place, ref rvalue)) if needs_retag(place) => {
- let kind = match rvalue {
- Rvalue::Ref(_, borrow_kind, _)
- if borrow_kind.allows_two_phase_borrow() =>
- {
- RetagKind::TwoPhase
- }
- _ => RetagKind::Default,
+ let add_retag = match rvalue {
+ // Ptr-creating operations already do their own internal retagging, no
+ // need to also add a retag statement.
+ Rvalue::Ref(..) | Rvalue::AddressOf(..) => false,
+ _ => true,
};
- (kind, *place)
+ if add_retag {
+ (RetagKind::Default, *place)
+ } else {
+ continue;
+ }
}
// Do nothing for the rest
_ => continue,
tcx.mk_ptr(ty::TypeAndMut { ty: gen_ty, mutbl: hir::Mutability::Mut }),
source_info,
);
- if tcx.sess.opts.unstable_opts.mir_emit_retag {
- // Alias tracking must know we changed the type
- body.basic_blocks_mut()[START_BLOCK].statements.insert(
- 0,
- Statement {
- source_info,
- kind: StatementKind::Retag(RetagKind::Raw, Box::new(Place::from(SELF_ARG))),
- },
- )
- }
// Make sure we remove dead blocks to remove
// unrelated code from the resume part of the function
if ty.is_some() {
// The first argument (index 0), but add 1 for the return value.
let dropee_ptr = Place::from(Local::new(1 + 0));
- if tcx.sess.opts.unstable_opts.mir_emit_retag {
- // Function arguments should be retagged, and we make this one raw.
- body.basic_blocks_mut()[START_BLOCK].statements.insert(
- 0,
- Statement {
- source_info,
- kind: StatementKind::Retag(RetagKind::Raw, Box::new(dropee_ptr)),
- },
- );
- }
let patch = {
let param_env = tcx.param_env_reveal_all_normalized(def_id);
let mut elaborator =
// + literal: Const { ty: &i32, val: Unevaluated(bar, [], Some(promoted[1])) }
Retag(_10); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
_4 = &(*_10); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
- Retag(_4); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
_3 = &(*_4); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
- Retag(_3); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
StorageLive(_6); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
StorageLive(_7); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
_9 = const _; // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
// + literal: Const { ty: &i32, val: Unevaluated(bar, [], Some(promoted[0])) }
Retag(_9); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
_7 = &(*_9); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
- Retag(_7); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
_6 = &(*_7); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
- Retag(_6); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
Retag(_3); // scope 2 at $DIR/inline_retag.rs:16:8: 16:9
Retag(_6); // scope 2 at $DIR/inline_retag.rs:16:17: 16:18
StorageLive(_11); // scope 2 at $DIR/inline_retag.rs:17:5: 17:7
StorageLive(_3); // scope 1 at $DIR/retag.rs:+2:13: +2:19
StorageLive(_4); // scope 1 at $DIR/retag.rs:+2:13: +2:19
_4 = &mut _1; // scope 1 at $DIR/retag.rs:+2:13: +2:19
- Retag(_4); // scope 1 at $DIR/retag.rs:+2:13: +2:19
_3 = &raw mut (*_4); // scope 1 at $DIR/retag.rs:+2:13: +2:19
- Retag([raw] _3); // scope 1 at $DIR/retag.rs:+2:13: +2:19
_2 = move _3 as *mut usize (Pointer(ArrayToPointer)); // scope 1 at $DIR/retag.rs:+2:13: +2:33
StorageDead(_3); // scope 1 at $DIR/retag.rs:+2:32: +2:33
StorageDead(_4); // scope 1 at $DIR/retag.rs:+2:33: +2:34
StorageLive(_10); // scope 4 at $DIR/retag.rs:+6:13: +6:15
StorageLive(_11); // scope 4 at $DIR/retag.rs:+6:13: +6:15
_11 = &_8; // scope 4 at $DIR/retag.rs:+6:13: +6:15
- Retag(_11); // scope 4 at $DIR/retag.rs:+6:13: +6:15
_10 = &raw const (*_11); // scope 4 at $DIR/retag.rs:+6:13: +6:15
- Retag([raw] _10); // scope 4 at $DIR/retag.rs:+6:13: +6:15
_9 = move _10 as *const usize (Pointer(ArrayToPointer)); // scope 4 at $DIR/retag.rs:+6:13: +6:31
StorageDead(_10); // scope 4 at $DIR/retag.rs:+6:30: +6:31
StorageDead(_11); // scope 4 at $DIR/retag.rs:+6:31: +6:32
StorageDead(_17); // scope 6 at $DIR/retag.rs:+7:33: +7:34
_15 = (*_16); // scope 6 at $DIR/retag.rs:+7:25: +7:34
_14 = &_15; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
- Retag(_14); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_18); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_35 = const _; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
// + literal: Const { ty: &usize, val: Unevaluated(array_casts, [], Some(promoted[0])) }
Retag(_35); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_18 = &(*_35); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
- Retag(_18); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_13); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
(_13.0: &usize) = move _14; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
(_13.1: &usize) = move _18; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_30); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_31); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_31 = &(*_20); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
- Retag(_31); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_30 = &(*_31); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
- Retag(_30); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_32); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_33); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_33 = &(*_21); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
- Retag(_33); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_32 = &(*_33); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
- Retag(_32); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_34); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_34); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
discriminant(_34) = 0; // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _3: (); // in scope 0 at $SRC_DIR/core/src/ptr/mod.rs:+0:1: +0:56
bb0: {
- Retag([raw] _1); // scope 0 at $SRC_DIR/core/src/ptr/mod.rs:+0:1: +0:56
_2 = &mut (*_1); // scope 0 at $SRC_DIR/core/src/ptr/mod.rs:+0:1: +0:56
_3 = <Test as Drop>::drop(move _2) -> bb1; // scope 0 at $SRC_DIR/core/src/ptr/mod.rs:+0:1: +0:56
// mir::Constant
_3 = _2; // scope 0 at $DIR/retag.rs:+1:18: +1:19
Retag(_3); // scope 0 at $DIR/retag.rs:+1:18: +1:19
_0 = &(*_2); // scope 1 at $DIR/retag.rs:+2:9: +2:10
- Retag(_0); // scope 1 at $DIR/retag.rs:+2:9: +2:10
StorageDead(_3); // scope 0 at $DIR/retag.rs:+3:5: +3:6
return; // scope 0 at $DIR/retag.rs:+3:6: +3:6
}
Deinit(_5); // scope 1 at $DIR/retag.rs:+3:17: +3:24
(_5.0: i32) = const 0_i32; // scope 1 at $DIR/retag.rs:+3:17: +3:24
_4 = &_5; // scope 1 at $DIR/retag.rs:+3:17: +3:36
- Retag(_4); // scope 1 at $DIR/retag.rs:+3:17: +3:36
StorageLive(_6); // scope 1 at $DIR/retag.rs:+3:29: +3:35
StorageLive(_7); // scope 1 at $DIR/retag.rs:+3:29: +3:35
_7 = &mut _1; // scope 1 at $DIR/retag.rs:+3:29: +3:35
- Retag(_7); // scope 1 at $DIR/retag.rs:+3:29: +3:35
_6 = &mut (*_7); // scope 1 at $DIR/retag.rs:+3:29: +3:35
- Retag([2phase] _6); // scope 1 at $DIR/retag.rs:+3:29: +3:35
_3 = Test::foo(move _4, move _6) -> [return: bb1, unwind: bb8]; // scope 1 at $DIR/retag.rs:+3:17: +3:36
// mir::Constant
// + span: $DIR/retag.rs:33:25: 33:28
_9 = move _3; // scope 2 at $DIR/retag.rs:+4:19: +4:20
Retag(_9); // scope 2 at $DIR/retag.rs:+4:19: +4:20
_8 = &mut (*_9); // scope 2 at $DIR/retag.rs:+4:19: +4:20
- Retag(_8); // scope 2 at $DIR/retag.rs:+4:19: +4:20
StorageDead(_9); // scope 2 at $DIR/retag.rs:+4:22: +4:23
StorageLive(_10); // scope 3 at $DIR/retag.rs:+5:13: +5:14
_10 = move _8; // scope 3 at $DIR/retag.rs:+5:17: +5:18
StorageLive(_11); // scope 4 at $DIR/retag.rs:+7:13: +7:15
StorageLive(_12); // scope 4 at $DIR/retag.rs:+7:18: +7:29
_12 = &raw mut (*_10); // scope 4 at $DIR/retag.rs:+7:18: +7:19
- Retag([raw] _12); // scope 4 at $DIR/retag.rs:+7:18: +7:19
_11 = _12; // scope 4 at $DIR/retag.rs:+7:18: +7:29
StorageDead(_12); // scope 4 at $DIR/retag.rs:+7:29: +7:30
_2 = const (); // scope 1 at $DIR/retag.rs:+2:5: +8:6
StorageLive(_17); // scope 6 at $DIR/retag.rs:+15:16: +15:18
StorageLive(_18); // scope 6 at $DIR/retag.rs:+15:16: +15:18
_18 = &_1; // scope 6 at $DIR/retag.rs:+15:16: +15:18
- Retag(_18); // scope 6 at $DIR/retag.rs:+15:16: +15:18
_17 = &(*_18); // scope 6 at $DIR/retag.rs:+15:16: +15:18
- Retag(_17); // scope 6 at $DIR/retag.rs:+15:16: +15:18
_15 = move _16(move _17) -> bb3; // scope 6 at $DIR/retag.rs:+15:14: +15:19
}
Deinit(_21); // scope 7 at $DIR/retag.rs:+18:5: +18:12
(_21.0: i32) = const 0_i32; // scope 7 at $DIR/retag.rs:+18:5: +18:12
_20 = &_21; // scope 7 at $DIR/retag.rs:+18:5: +18:24
- Retag(_20); // scope 7 at $DIR/retag.rs:+18:5: +18:24
StorageLive(_22); // scope 7 at $DIR/retag.rs:+18:21: +18:23
StorageLive(_23); // scope 7 at $DIR/retag.rs:+18:21: +18:23
_28 = const _; // scope 7 at $DIR/retag.rs:+18:21: +18:23
// + literal: Const { ty: &i32, val: Unevaluated(main, [], Some(promoted[0])) }
Retag(_28); // scope 7 at $DIR/retag.rs:+18:21: +18:23
_23 = &(*_28); // scope 7 at $DIR/retag.rs:+18:21: +18:23
- Retag(_23); // scope 7 at $DIR/retag.rs:+18:21: +18:23
_22 = &(*_23); // scope 7 at $DIR/retag.rs:+18:21: +18:23
- Retag(_22); // scope 7 at $DIR/retag.rs:+18:21: +18:23
_19 = Test::foo_shr(move _20, move _22) -> [return: bb4, unwind: bb7]; // scope 7 at $DIR/retag.rs:+18:5: +18:24
// mir::Constant
// + span: $DIR/retag.rs:48:13: 48:20
StorageLive(_25); // scope 7 at $DIR/retag.rs:+21:9: +21:11
StorageLive(_26); // scope 7 at $DIR/retag.rs:+21:14: +21:28
_26 = &raw const (*_15); // scope 7 at $DIR/retag.rs:+21:14: +21:16
- Retag([raw] _26); // scope 7 at $DIR/retag.rs:+21:14: +21:16
_25 = _26; // scope 7 at $DIR/retag.rs:+21:14: +21:28
StorageDead(_26); // scope 7 at $DIR/retag.rs:+21:28: +21:29
StorageLive(_27); // scope 8 at $DIR/retag.rs:+23:5: +23:18
Retag([fn entry] _2); // scope 0 at $DIR/retag.rs:+0:23: +0:24
StorageLive(_3); // scope 0 at $DIR/retag.rs:+1:9: +1:10
_3 = &mut (*_2); // scope 0 at $DIR/retag.rs:+1:9: +1:10
- Retag(_3); // scope 0 at $DIR/retag.rs:+1:9: +1:10
_0 = &mut (*_3); // scope 0 at $DIR/retag.rs:+1:9: +1:10
- Retag(_0); // scope 0 at $DIR/retag.rs:+1:9: +1:10
StorageDead(_3); // scope 0 at $DIR/retag.rs:+2:5: +2:6
return; // scope 0 at $DIR/retag.rs:+2:6: +2:6
}
use crate::*;
pub mod stacked_borrows;
-use stacked_borrows::diagnostics::RetagCause;
pub type CallId = NonZeroU64;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
- fn retag(&mut self, kind: RetagKind, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
+ fn retag_ptr_value(&mut self, kind: RetagKind, val: &ImmTy<'tcx, Provenance>) -> InterpResult<'tcx, ImmTy<'tcx, Provenance>> {
let this = self.eval_context_mut();
let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method;
match method {
- BorrowTrackerMethod::StackedBorrows => this.sb_retag(kind, place),
+ BorrowTrackerMethod::StackedBorrows => this.sb_retag_ptr_value(kind, val),
+ }
+ }
+
+ fn retag_place_contents(&mut self, kind: RetagKind, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
+ let this = self.eval_context_mut();
+ let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method;
+ match method {
+ BorrowTrackerMethod::StackedBorrows => this.sb_retag_place_contents(kind, place),
}
}
//! Implements "Stacked Borrows". See <https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md>
//! for further information.
+mod item;
+mod stack;
+pub mod diagnostics;
+
use log::trace;
use std::cmp;
use std::fmt::{self, Write};
use crate::borrow_tracker::{
stacked_borrows::diagnostics::{AllocHistory, DiagnosticCx, DiagnosticCxBuilder, TagHistory},
- AccessKind, GlobalStateInner, ProtectorKind, RetagCause, RetagFields,
+ AccessKind, GlobalStateInner, ProtectorKind, RetagFields,
};
use crate::*;
-mod item;
pub use item::{Item, Permission};
-mod stack;
pub use stack::Stack;
-pub mod diagnostics;
+use diagnostics::RetagCause;
pub type AllocState = Stacks;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
- fn sb_retag(
+ fn sb_retag_ptr_value(
+ &mut self,
+ kind: RetagKind,
+ val: &ImmTy<'tcx, Provenance>,
+ ) -> InterpResult<'tcx, ImmTy<'tcx, Provenance>> {
+ let this = self.eval_context_mut();
+ let ref_kind = match val.layout.ty.kind() {
+ ty::Ref(_, _, mutbl) => {
+ match mutbl {
+ Mutability::Mut =>
+ RefKind::Unique { two_phase: kind == RetagKind::TwoPhase },
+ Mutability::Not => RefKind::Shared,
+ }
+ }
+ ty::RawPtr(tym) => {
+ RefKind::Raw { mutable: tym.mutbl == Mutability::Mut }
+ }
+ _ => unreachable!(),
+ };
+ let retag_cause = match kind {
+ RetagKind::TwoPhase { .. } => RetagCause::TwoPhase,
+ RetagKind::FnEntry => unreachable!(),
+ RetagKind::Raw | RetagKind::Default => RetagCause::Normal,
+ };
+ this.sb_retag_reference(&val, ref_kind, retag_cause, None)
+ }
+
+ fn sb_retag_place_contents(
&mut self,
kind: RetagKind,
place: &PlaceTy<'tcx, Provenance>,
let this = self.eval_context_mut();
let retag_fields = this.machine.borrow_tracker.as_mut().unwrap().get_mut().retag_fields;
let retag_cause = match kind {
- RetagKind::TwoPhase { .. } => RetagCause::TwoPhase,
+ RetagKind::Raw | RetagKind::TwoPhase { .. } => unreachable!(),
RetagKind::FnEntry => RetagCause::FnEntry,
- RetagKind::Raw | RetagKind::Default => RetagCause::Normal,
+ RetagKind::Default => RetagCause::Normal,
};
let mut visitor = RetagVisitor { ecx: this, kind, retag_cause, retag_fields };
return visitor.visit_value(place);
}
impl<'ecx, 'mir, 'tcx> RetagVisitor<'ecx, 'mir, 'tcx> {
#[inline(always)] // yes this helps in our benchmarks
- fn retag_place(
+ fn retag_ptr_inplace(
&mut self,
place: &PlaceTy<'tcx, Provenance>,
ref_kind: RefKind,
fn visit_box(&mut self, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
// Boxes get a weak protector, since they may be deallocated.
- self.retag_place(
+ self.retag_ptr_inplace(
place,
RefKind::Box,
self.retag_cause,
ty::Ref(_, _, mutbl) => {
let ref_kind = match mutbl {
Mutability::Mut =>
- RefKind::Unique { two_phase: self.kind == RetagKind::TwoPhase },
+ RefKind::Unique { two_phase: false },
Mutability::Not => RefKind::Shared,
};
- self.retag_place(
+ self.retag_ptr_inplace(
place,
ref_kind,
self.retag_cause,
.then_some(ProtectorKind::StrongProtector),
)?;
}
- ty::RawPtr(tym) => {
- // We definitely do *not* want to recurse into raw pointers -- wide raw
- // pointers have fields, and for dyn Trait pointees those can have reference
- // type!
- if self.kind == RetagKind::Raw {
- // Raw pointers need to be enabled.
- self.retag_place(
- place,
- RefKind::Raw { mutable: tym.mutbl == Mutability::Mut },
- self.retag_cause,
- /*protector*/ None,
- )?;
- }
+ ty::RawPtr(..) => {
+ // We do *not* want to recurse into raw pointers -- wide raw pointers have
+ // fields, and for dyn Trait pointees those can have reference type!
}
- _ if place.layout.ty.ty_adt_def().is_some_and(|adt| adt.is_box()) => {
+ ty::Adt(adt, _) if adt.is_box() => {
// Recurse for boxes, they require some tricky handling and will end up in `visit_box` above.
// (Yes this means we technically also recursively retag the allocator itself
// even if field retagging is not enabled. *shrug*)
ptr: Pointer<Self::Provenance>,
) -> InterpResult<'tcx> {
match ptr.provenance {
- Provenance::Concrete { alloc_id, tag } =>
- intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, tag),
+ Provenance::Concrete { alloc_id, tag } => {
+ intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, tag)
+ }
Provenance::Wildcard => {
// No need to do anything for wildcard pointers as
// their provenances have already been previously exposed.
}
#[inline(always)]
- fn retag(
+ fn retag_ptr_value(
+ ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ kind: mir::RetagKind,
+ val: &ImmTy<'tcx, Provenance>,
+ ) -> InterpResult<'tcx, ImmTy<'tcx, Provenance>> {
+ if ecx.machine.borrow_tracker.is_some() {
+ ecx.retag_ptr_value(kind, val)
+ } else {
+ Ok(val.clone())
+ }
+ }
+
+ #[inline(always)]
+ fn retag_place_contents(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
kind: mir::RetagKind,
place: &PlaceTy<'tcx, Provenance>,
) -> InterpResult<'tcx> {
if ecx.machine.borrow_tracker.is_some() {
- ecx.retag(kind, place)?;
+ ecx.retag_place_contents(kind, place)?;
}
Ok(())
}