1 //! Global machine state as well as implementation of the interpreter engine
5 use std::cell::RefCell;
6 use std::collections::HashSet;
8 use std::num::NonZeroU64;
9 use std::time::Instant;
11 use rand::rngs::StdRng;
12 use rand::SeedableRng;
14 use rustc_ast::ast::Mutability;
15 use rustc_data_structures::fx::FxHashMap;
17 use rustc_data_structures::static_assert_size;
22 layout::{LayoutCx, LayoutError, LayoutOf, TyAndLayout},
23 Instance, TyCtxt, TypeAndMut,
26 use rustc_span::def_id::{CrateNum, DefId};
27 use rustc_span::symbol::{sym, Symbol};
28 use rustc_span::DUMMY_SP;
29 use rustc_target::abi::Size;
30 use rustc_target::spec::abi::Abi;
34 // Some global facts about the emulated machine.
// Page size the emulated machine presents; currently a fixed value for every target.
35 pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
36 pub const STACK_ADDR: u64 = 32 * PAGE_SIZE; // not really about the "stack", but where we start assigning integer addresses to allocations
37 pub const STACK_SIZE: u64 = 16 * PAGE_SIZE; // arbitrary size
// CPU count reported to the emulated program; presumably surfaced via shims — TODO confirm at use sites.
38 pub const NUM_CPUS: u64 = 1;
40 /// Extra data stored with each stack frame
41 pub struct FrameData<'tcx> {
42 /// Extra data for Stacked Borrows.
// Assigned from `stacked_borrows::new_call()` when the frame is pushed
// (see `init_frame_extra` further down in this file).
43 pub call_id: stacked_borrows::CallId,
45 /// If this is Some(), then this is a special "catch unwind" frame (the frame of `try_fn`
46 /// called by `try`). When this frame is popped during unwinding a panic,
47 /// we stop unwinding, use the `CatchUnwindData` to handle catching.
48 pub catch_unwind: Option<CatchUnwindData<'tcx>>,
50 /// If `measureme` profiling is enabled, holds timing information
51 /// for the start of this frame. When we finish executing this frame,
52 /// we use this to register a completed event with `measureme`.
53 pub timing: Option<measureme::DetachedTiming>,
56 impl<'tcx> std::fmt::Debug for FrameData<'tcx> {
57 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
58 // Omitting `timing`, it does not support `Debug`.
59 let FrameData { call_id, catch_unwind, timing: _ } = self;
60 f.debug_struct("FrameData")
61 .field("call_id", call_id)
62 .field("catch_unwind", catch_unwind)
67 /// Extra memory kinds
68 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
69 pub enum MiriMemoryKind {
70 /// `__rust_alloc` memory.
74 /// Windows `HeapAlloc` memory.
76 /// Memory for args, errno, and other parts of the machine-managed environment.
77 /// This memory may leak.
79 /// Memory allocated by the runtime (e.g. env vars). Separate from `Machine`
80 /// because we clean it up and leak-check it.
82 /// Globals copied from `tcx`.
83 /// This memory may leak.
85 /// Memory for extern statics.
86 /// This memory may leak.
88 /// Memory for thread-local statics.
89 /// This memory may leak.
93 impl Into<MemoryKind<MiriMemoryKind>> for MiriMemoryKind {
95 fn into(self) -> MemoryKind<MiriMemoryKind> {
96 MemoryKind::Machine(self)
100 impl MayLeak for MiriMemoryKind {
102 fn may_leak(self) -> bool {
103 use self::MiriMemoryKind::*;
// Heap-style allocations (Rust/C/WinHeap) and runtime memory must be freed and are
// leak-checked; machine-managed, global, extern-static, and TLS memory may live forever.
105 Rust | C | WinHeap | Runtime => false,
106 Machine | Global | ExternStatic | Tls => true,
111 impl fmt::Display for MiriMemoryKind {
112 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
113 use self::MiriMemoryKind::*;
115 Rust => write!(f, "Rust heap"),
116 C => write!(f, "C heap"),
117 WinHeap => write!(f, "Windows heap"),
118 Machine => write!(f, "machine-managed memory"),
119 Runtime => write!(f, "language runtime memory"),
120 Global => write!(f, "global (static or const)"),
121 ExternStatic => write!(f, "extern static"),
122 Tls => write!(f, "thread-local static"),
127 /// Pointer provenance (tag).
128 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
130 pub alloc_id: AllocId,
131 /// Stacked Borrows tag.
135 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
136 static_assert_size!(Pointer<Tag>, 24);
137 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
138 static_assert_size!(Pointer<Option<Tag>>, 24);
139 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
140 static_assert_size!(ScalarMaybeUninit<Tag>, 32);
142 impl Provenance for Tag {
143 /// We use absolute addresses in the `offset` of a `Pointer<Tag>`.
144 const OFFSET_IS_ADDR: bool = true;
146 /// We cannot err on partial overwrites, it happens too often in practice (due to unions).
147 const ERR_ON_PARTIAL_PTR_OVERWRITE: bool = false;
149 fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
150 let (tag, addr) = ptr.into_parts(); // address is absolute
151 write!(f, "0x{:x}", addr.bytes())?;
152 // Forward `alternate` flag to `alloc_id` printing.
154 write!(f, "[{:#?}]", tag.alloc_id)?;
156 write!(f, "[{:?}]", tag.alloc_id)?;
158 // Print Stacked Borrows tag.
159 write!(f, "{:?}", tag.sb)
162 fn get_alloc_id(self) -> AllocId {
167 /// Extra per-allocation data
168 #[derive(Debug, Clone)]
169 pub struct AllocExtra {
170 /// Stacked Borrows state is only added if it is enabled.
171 pub stacked_borrows: Option<stacked_borrows::AllocExtra>,
172 /// Data race detection via the use of a vector-clock,
173 /// this is only added if it is enabled.
174 pub data_race: Option<data_race::AllocExtra>,
177 /// Precomputed layouts of primitive types
// Caching these up front avoids repeated (fallible) `layout_of` queries during interpretation.
178 pub struct PrimitiveLayouts<'tcx> {
179 pub unit: TyAndLayout<'tcx>,
180 pub i8: TyAndLayout<'tcx>,
181 pub i32: TyAndLayout<'tcx>,
182 pub isize: TyAndLayout<'tcx>,
183 pub u8: TyAndLayout<'tcx>,
184 pub u32: TyAndLayout<'tcx>,
185 pub usize: TyAndLayout<'tcx>,
186 pub bool: TyAndLayout<'tcx>,
// Layout of `*mut ()` (constructed in `PrimitiveLayouts::new` via `tcx.mk_ptr`).
187 pub mut_raw_ptr: TyAndLayout<'tcx>,
190 impl<'mir, 'tcx: 'mir> PrimitiveLayouts<'tcx> {
/// Computes the layout of every primitive type cached in this struct.
/// Returns a `LayoutError` if any individual layout query fails.
191 fn new(layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Result<Self, LayoutError<'tcx>> {
192 let tcx = layout_cx.tcx;
// `*mut ()` -- a mutable raw pointer to the unit type.
193 let mut_raw_ptr = tcx.mk_ptr(TypeAndMut { ty: tcx.types.unit, mutbl: Mutability::Mut });
195 unit: layout_cx.layout_of(tcx.mk_unit())?,
196 i8: layout_cx.layout_of(tcx.types.i8)?,
197 i32: layout_cx.layout_of(tcx.types.i32)?,
198 isize: layout_cx.layout_of(tcx.types.isize)?,
199 u8: layout_cx.layout_of(tcx.types.u8)?,
200 u32: layout_cx.layout_of(tcx.types.u32)?,
201 usize: layout_cx.layout_of(tcx.types.usize)?,
202 bool: layout_cx.layout_of(tcx.types.bool)?,
203 mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
208 /// The machine itself.
209 pub struct Evaluator<'mir, 'tcx> {
210 pub stacked_borrows: Option<stacked_borrows::GlobalState>,
211 pub data_race: Option<data_race::GlobalState>,
212 pub intptrcast: intptrcast::GlobalState,
214 /// Environment variables set by `setenv`.
215 /// Miri does not expose env vars from the host to the emulated program.
216 pub(crate) env_vars: EnvVars<'tcx>,
218 /// Program arguments (`Option` because we can only initialize them after creating the ecx).
219 /// These are *pointers* to argc/argv because macOS.
220 /// We also need the full command line as one string because of Windows.
221 pub(crate) argc: Option<MemPlace<Tag>>,
222 pub(crate) argv: Option<MemPlace<Tag>>,
223 pub(crate) cmd_line: Option<MemPlace<Tag>>,
226 pub(crate) tls: TlsData<'tcx>,
228 /// What should Miri do when an op requires communicating with the host,
229 /// such as accessing host env vars, random number generation, and
230 /// file system access.
231 pub(crate) isolated_op: IsolatedOp,
233 /// Whether to enforce the validity invariant.
234 pub(crate) validate: bool,
236 /// Whether to enforce validity (e.g., initialization) of integers and floats.
237 pub(crate) enforce_number_validity: bool,
239 /// Whether to enforce [ABI](Abi) of function calls.
240 pub(crate) enforce_abi: bool,
242 pub(crate) file_handler: shims::posix::FileHandler,
243 pub(crate) dir_handler: shims::posix::DirHandler,
245 /// The "time anchor" for this machine's monotone clock (for `Instant` simulation).
246 pub(crate) time_anchor: Instant,
248 /// The set of threads.
249 pub(crate) threads: ThreadManager<'mir, 'tcx>,
251 /// Precomputed `TyLayout`s for primitive data types that are commonly used inside Miri.
252 pub(crate) layouts: PrimitiveLayouts<'tcx>,
254 /// Allocations that are considered roots of static memory (that may leak).
255 pub(crate) static_roots: Vec<AllocId>,
257 /// The `measureme` profiler used to record timing information about
258 /// the emulated program.
259 profiler: Option<measureme::Profiler>,
260 /// Used with `profiler` to cache the `StringId`s for event names
261 /// used with `measureme`.
262 string_cache: FxHashMap<String, measureme::StringId>,
264 /// Cache of `Instance` exported under the given `Symbol` name.
265 /// `None` means no `Instance` exported under the given name is found.
266 pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,
268 /// Whether to raise a panic in the context of the evaluated process when unsupported
269 /// functionality is encountered. If `false`, an error is propagated in the Miri application context
270 /// instead (default behavior)
271 pub(crate) panic_on_unsupported: bool,
273 /// Equivalent setting as RUST_BACKTRACE on encountering an error.
274 pub(crate) backtrace_style: BacktraceStyle,
276 /// Crates which are considered local for the purposes of error reporting.
277 pub(crate) local_crates: Vec<CrateNum>,
279 /// Mapping extern static names to their base pointer.
280 extern_statics: FxHashMap<Symbol, Pointer<Tag>>,
282 /// The random number generator used for resolving non-determinism.
283 /// Needs to be queried by ptr_to_int, hence needs interior mutability.
284 pub(crate) rng: RefCell<StdRng>,
286 /// The allocation IDs to report when they are being allocated
287 /// (helps for debugging memory leaks and use after free bugs).
288 tracked_alloc_ids: HashSet<AllocId>,
290 /// Controls whether alignment of memory accesses is being checked.
291 pub(crate) check_alignment: AlignmentCheck,
293 /// Failure rate of compare_exchange_weak, between 0.0 and 1.0
294 pub(crate) cmpxchg_weak_failure_rate: f64,
297 impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
298 pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Self {
299 let local_crates = helpers::get_local_crates(&layout_cx.tcx);
301 PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
302 let profiler = config.measureme_out.as_ref().map(|out| {
303 measureme::Profiler::new(out).expect("Couldn't create `measureme` profiler")
305 let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
306 let stacked_borrows = if config.stacked_borrows {
307 Some(RefCell::new(stacked_borrows::GlobalStateInner::new(
308 config.tracked_pointer_tags.clone(),
309 config.tracked_call_ids.clone(),
316 if config.data_race_detector { Some(data_race::GlobalState::new()) } else { None };
320 intptrcast: RefCell::new(intptrcast::GlobalStateInner::new(config)),
321 // `env_vars` depends on a full interpreter so we cannot properly initialize it yet.
322 env_vars: EnvVars::default(),
326 tls: TlsData::default(),
327 isolated_op: config.isolated_op,
328 validate: config.validate,
329 enforce_number_validity: config.check_number_validity,
330 enforce_abi: config.check_abi,
331 file_handler: Default::default(),
332 dir_handler: Default::default(),
333 time_anchor: Instant::now(),
335 threads: ThreadManager::default(),
336 static_roots: Vec::new(),
338 string_cache: Default::default(),
339 exported_symbols_cache: FxHashMap::default(),
340 panic_on_unsupported: config.panic_on_unsupported,
341 backtrace_style: config.backtrace_style,
343 extern_statics: FxHashMap::default(),
344 rng: RefCell::new(rng),
345 tracked_alloc_ids: config.tracked_alloc_ids.clone(),
346 check_alignment: config.check_alignment,
347 cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
351 pub(crate) fn late_init(
352 this: &mut MiriEvalContext<'mir, 'tcx>,
354 ) -> InterpResult<'tcx> {
355 EnvVars::init(this, config)?;
356 Evaluator::init_extern_statics(this)?;
// Registers the base pointer of one "extern static" under its symbol name.
360 fn add_extern_static(
361 this: &mut MiriEvalContext<'mir, 'tcx>,
363 ptr: Pointer<Option<Tag>>,
365 // This got just allocated, so there definitely is a pointer here.
366 let ptr = ptr.into_pointer_or_addr().unwrap();
// `try_insert(..).unwrap()` panics if the name was already registered --
// each extern static must be set up exactly once.
367 this.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
370 /// Sets up the "extern statics" for this machine.
371 fn init_extern_statics(this: &mut MiriEvalContext<'mir, 'tcx>) -> InterpResult<'tcx> {
372 match this.tcx.sess.target.os.as_ref() {
375 Self::add_extern_static(
378 this.machine.env_vars.environ.unwrap().ptr,
380 // A couple zero-initialized pointer-sized extern statics.
381 // Most of them are for weak symbols, which we all set to null (indicating that the
382 // symbol is not supported, and triggering fallback code which ends up calling a
383 // syscall that we do support).
384 for name in &["__cxa_thread_atexit_impl", "getrandom", "statx"] {
385 let layout = this.machine.layouts.usize;
386 let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into())?;
387 this.write_scalar(Scalar::from_machine_usize(0, this), &place.into())?;
388 Self::add_extern_static(this, name, place.ptr);
393 // This is some obscure hack that is part of the Windows TLS story. It's a `u8`.
394 let layout = this.machine.layouts.u8;
395 let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into())?;
396 this.write_scalar(Scalar::from_u8(0), &place.into())?;
397 Self::add_extern_static(this, "_tls_used", place.ptr);
399 _ => {} // No "extern statics" supported on this target
/// Returns `true` if the emulated program is allowed to communicate with the host
/// (env vars, random numbers, file system -- see the `isolated_op` field docs).
404 pub(crate) fn communicate(&self) -> bool {
405 self.isolated_op == IsolatedOp::Allow
408 /// Check whether the stack frame that this `FrameInfo` refers to is part of a local crate.
409 pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
410 let def_id = frame.instance.def_id();
// "Local" means either the crate currently being compiled, or one of the crates the
// user marked as local for error-reporting purposes (the `local_crates` field).
411 def_id.is_local() || self.local_crates.contains(&def_id.krate)
415 /// A rustc InterpCx for Miri.
416 pub type MiriEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>;
418 /// A little trait that's useful to be inherited by extension traits.
419 pub trait MiriEvalContextExt<'mir, 'tcx> {
420 fn eval_context_ref<'a>(&'a self) -> &'a MiriEvalContext<'mir, 'tcx>;
421 fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriEvalContext<'mir, 'tcx>;
423 impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
425 fn eval_context_ref(&self) -> &MiriEvalContext<'mir, 'tcx> {
429 fn eval_context_mut(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
434 /// Machine hook implementations.
435 impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
436 type MemoryKind = MiriMemoryKind;
437 type ExtraFnVal = Dlsym;
439 type FrameExtra = FrameData<'tcx>;
440 type AllocExtra = AllocExtra;
442 type PointerTag = Tag;
443 type TagExtra = SbTag;
446 MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
448 const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);
450 const PANIC_ON_ALLOC_FAIL: bool = false;
453 fn enforce_alignment(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
454 ecx.machine.check_alignment != AlignmentCheck::None
458 fn force_int_for_alignment_check(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
459 ecx.machine.check_alignment == AlignmentCheck::Int
463 fn enforce_validity(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
468 fn enforce_number_validity(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
469 ecx.machine.enforce_number_validity
473 fn enforce_abi(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
474 ecx.machine.enforce_abi
478 fn find_mir_or_eval_fn(
479 ecx: &mut MiriEvalContext<'mir, 'tcx>,
480 instance: ty::Instance<'tcx>,
482 args: &[OpTy<'tcx, Tag>],
483 ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
484 unwind: StackPopUnwind,
485 ) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
486 ecx.find_mir_or_eval_fn(instance, abi, args, ret, unwind)
491 ecx: &mut MiriEvalContext<'mir, 'tcx>,
494 args: &[OpTy<'tcx, Tag>],
495 ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
496 _unwind: StackPopUnwind,
497 ) -> InterpResult<'tcx> {
498 ecx.call_dlsym(fn_val, abi, args, ret)
503 ecx: &mut MiriEvalContext<'mir, 'tcx>,
504 instance: ty::Instance<'tcx>,
505 args: &[OpTy<'tcx, Tag>],
506 ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
507 unwind: StackPopUnwind,
508 ) -> InterpResult<'tcx> {
509 ecx.call_intrinsic(instance, args, ret, unwind)
514 ecx: &mut MiriEvalContext<'mir, 'tcx>,
515 msg: &mir::AssertMessage<'tcx>,
516 unwind: Option<mir::BasicBlock>,
517 ) -> InterpResult<'tcx> {
518 ecx.assert_panic(msg, unwind)
522 fn abort(_ecx: &mut MiriEvalContext<'mir, 'tcx>, msg: String) -> InterpResult<'tcx, !> {
523 throw_machine_stop!(TerminationInfo::Abort(msg))
528 ecx: &MiriEvalContext<'mir, 'tcx>,
530 left: &ImmTy<'tcx, Tag>,
531 right: &ImmTy<'tcx, Tag>,
532 ) -> InterpResult<'tcx, (Scalar<Tag>, bool, ty::Ty<'tcx>)> {
533 ecx.binary_ptr_op(bin_op, left, right)
536 fn thread_local_static_base_pointer(
537 ecx: &mut MiriEvalContext<'mir, 'tcx>,
539 ) -> InterpResult<'tcx, Pointer<Tag>> {
540 ecx.get_or_create_thread_local_alloc(def_id)
543 fn extern_static_base_pointer(
544 ecx: &MiriEvalContext<'mir, 'tcx>,
546 ) -> InterpResult<'tcx, Pointer<Tag>> {
547 let attrs = ecx.tcx.get_attrs(def_id);
548 let link_name = match ecx.tcx.sess.first_attr_value_str_by_name(attrs, sym::link_name) {
550 None => ecx.tcx.item_name(def_id),
552 if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
555 throw_unsup_format!("`extern` static {:?} is not supported by Miri", def_id)
559 fn init_allocation_extra<'b>(
560 ecx: &MiriEvalContext<'mir, 'tcx>,
562 alloc: Cow<'b, Allocation>,
563 kind: Option<MemoryKind<Self::MemoryKind>>,
564 ) -> Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>> {
565 set_current_span(&ecx.machine);
566 if ecx.machine.tracked_alloc_ids.contains(&id) {
567 register_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id));
570 let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
571 let alloc = alloc.into_owned();
572 let stacks = if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
573 Some(Stacks::new_allocation(id, alloc.size(), stacked_borrows, kind))
577 let race_alloc = if let Some(data_race) = &ecx.machine.data_race {
578 Some(data_race::AllocExtra::new_allocation(data_race, alloc.size(), kind))
582 let alloc: Allocation<Tag, Self::AllocExtra> = alloc.convert_tag_add_extra(
584 AllocExtra { stacked_borrows: stacks, data_race: race_alloc },
585 |ptr| Evaluator::tag_alloc_base_pointer(ecx, ptr),
590 fn tag_alloc_base_pointer(
591 ecx: &MiriEvalContext<'mir, 'tcx>,
592 ptr: Pointer<AllocId>,
594 set_current_span(&ecx.machine);
595 let absolute_addr = intptrcast::GlobalStateInner::rel_ptr_to_addr(ecx, ptr);
596 let sb_tag = if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
597 stacked_borrows.borrow_mut().base_tag(ptr.provenance)
601 Pointer::new(Tag { alloc_id: ptr.provenance, sb: sb_tag }, Size::from_bytes(absolute_addr))
606 ecx: &MiriEvalContext<'mir, 'tcx>,
608 ) -> Pointer<Option<Self::PointerTag>> {
609 intptrcast::GlobalStateInner::ptr_from_addr(addr, ecx)
612 /// Convert a pointer with provenance into an allocation-offset pair,
613 /// or a `None` with an absolute address if that conversion is not possible.
615 ecx: &MiriEvalContext<'mir, 'tcx>,
616 ptr: Pointer<Self::PointerTag>,
617 ) -> (AllocId, Size, Self::TagExtra) {
618 let rel = intptrcast::GlobalStateInner::abs_ptr_to_rel(ecx, ptr);
619 (ptr.provenance.alloc_id, rel, ptr.provenance.sb)
626 alloc_extra: &AllocExtra,
627 (alloc_id, tag): (AllocId, Self::TagExtra),
629 ) -> InterpResult<'tcx> {
630 set_current_span(&machine);
631 if let Some(data_race) = &alloc_extra.data_race {
632 data_race.read(alloc_id, range, machine.data_race.as_ref().unwrap())?;
634 if let Some(stacked_borrows) = &alloc_extra.stacked_borrows {
635 stacked_borrows.memory_read(
639 machine.stacked_borrows.as_ref().unwrap(),
650 alloc_extra: &mut AllocExtra,
651 (alloc_id, tag): (AllocId, Self::TagExtra),
653 ) -> InterpResult<'tcx> {
654 set_current_span(&machine);
655 if let Some(data_race) = &mut alloc_extra.data_race {
656 data_race.write(alloc_id, range, machine.data_race.as_mut().unwrap())?;
658 if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
659 stacked_borrows.memory_written(
663 machine.stacked_borrows.as_ref().unwrap(),
671 fn memory_deallocated(
674 alloc_extra: &mut AllocExtra,
675 (alloc_id, tag): (AllocId, Self::TagExtra),
677 ) -> InterpResult<'tcx> {
678 set_current_span(&machine);
679 if machine.tracked_alloc_ids.contains(&alloc_id) {
680 register_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
682 if let Some(data_race) = &mut alloc_extra.data_race {
683 data_race.deallocate(alloc_id, range, machine.data_race.as_mut().unwrap())?;
685 if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
686 stacked_borrows.memory_deallocated(
690 machine.stacked_borrows.as_ref().unwrap(),
699 ecx: &mut InterpCx<'mir, 'tcx, Self>,
700 kind: mir::RetagKind,
701 place: &PlaceTy<'tcx, Tag>,
702 ) -> InterpResult<'tcx> {
703 if ecx.machine.stacked_borrows.is_some() {
704 set_current_span(&ecx.machine);
705 ecx.retag(kind, place)
713 ecx: &mut InterpCx<'mir, 'tcx, Self>,
714 frame: Frame<'mir, 'tcx, Tag>,
715 ) -> InterpResult<'tcx, Frame<'mir, 'tcx, Tag, FrameData<'tcx>>> {
716 // Start recording our event before doing anything else
717 let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
718 let fn_name = frame.instance.to_string();
719 let entry = ecx.machine.string_cache.entry(fn_name.clone());
720 let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));
722 Some(profiler.start_recording_interval_event_detached(
724 measureme::EventId::from_label(*name),
725 ecx.get_active_thread().to_u32(),
731 let stacked_borrows = ecx.machine.stacked_borrows.as_ref();
732 let call_id = stacked_borrows.map_or(NonZeroU64::new(1).unwrap(), |stacked_borrows| {
733 stacked_borrows.borrow_mut().new_call()
736 let extra = FrameData { call_id, catch_unwind: None, timing };
737 Ok(frame.with_extra(extra))
741 ecx: &'a InterpCx<'mir, 'tcx, Self>,
742 ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
743 ecx.active_thread_stack()
747 ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
748 ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
749 ecx.active_thread_stack_mut()
753 fn after_stack_push(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
754 if ecx.machine.stacked_borrows.is_some() {
755 set_current_span(&ecx.machine);
756 ecx.retag_return_place()
764 ecx: &mut InterpCx<'mir, 'tcx, Self>,
765 mut frame: Frame<'mir, 'tcx, Tag, FrameData<'tcx>>,
767 ) -> InterpResult<'tcx, StackPopJump> {
768 let timing = frame.extra.timing.take();
769 let res = ecx.handle_stack_pop(frame.extra, unwinding);
770 if let Some(profiler) = ecx.machine.profiler.as_ref() {
771 profiler.finish_recording_interval_event(timing.unwrap());
777 // This is potentially a performance hazard.
778 // Factoring it into its own function lets us keep an eye on how much it shows up in a profile.
// Refreshes the span cached in the Stacked Borrows state, presumably so diagnostics can
// report where an event happened. Does nothing when Stacked Borrows is disabled.
779 fn set_current_span<'mir, 'tcx: 'mir>(machine: &Evaluator<'mir, 'tcx>) {
780 if let Some(sb) = machine.stacked_borrows.as_ref() {
// NOTE(review): a non-DUMMY span looks like "cache already populated" -- presumably an
// early return follows in the elided lines; confirm against the full source.
781 if sb.borrow().current_span != DUMMY_SP {
// Walk the active thread's stack and pick the span of a frame that `is_local`
// considers part of a local crate; fall back to DUMMY_SP if none qualifies.
784 let current_span = machine
786 .active_thread_stack()
790 let info = FrameInfo {
791 instance: frame.instance,
792 span: frame.current_span(),
795 machine.is_local(&info)
797 .map(|frame| frame.current_span())
798 .unwrap_or(rustc_span::DUMMY_SP);
799 sb.borrow_mut().current_span = current_span;