1 //! Global machine state as well as implementation of the interpreter engine
5 use std::cell::RefCell;
6 use std::collections::HashSet;
8 use std::num::NonZeroU64;
9 use std::time::Instant;
11 use rand::rngs::StdRng;
12 use rand::SeedableRng;
14 use rustc_ast::ast::Mutability;
15 use rustc_data_structures::fx::FxHashMap;
17 use rustc_data_structures::static_assert_size;
22 layout::{LayoutCx, LayoutError, LayoutOf, TyAndLayout},
23 Instance, TyCtxt, TypeAndMut,
26 use rustc_span::def_id::{CrateNum, DefId};
27 use rustc_span::Symbol;
28 use rustc_target::abi::Size;
29 use rustc_target::spec::abi::Abi;
32 concurrency::{data_race, weak_memory},
33 shims::unix::FileHandler,
// Some global facts about the emulated machine.
pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
pub const STACK_ADDR: u64 = 32 * PAGE_SIZE; // not really about the "stack", but where we start assigning integer addresses to allocations
pub const STACK_SIZE: u64 = 16 * PAGE_SIZE; // whatever
// The emulated machine reports itself as having exactly one CPU.
pub const NUM_CPUS: u64 = 1;
/// Extra data stored with each stack frame
pub struct FrameData<'tcx> {
    /// Extra data for Stacked Borrows.
    pub call_id: stacked_borrows::CallId,

    /// If this is Some(), then this is a special "catch unwind" frame (the frame of `try_fn`
    /// called by `try`). When this frame is popped during unwinding a panic,
    /// we stop unwinding, use the `CatchUnwindData` to handle catching.
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// If `measureme` profiling is enabled, holds timing information
    /// for the start of this frame. When we finish executing this frame,
    /// we use this to register a completed event with `measureme`.
    pub timing: Option<measureme::DetachedTiming>,
impl<'tcx> std::fmt::Debug for FrameData<'tcx> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Omitting `timing`, it does not support `Debug`.
        // Exhaustive destructuring: adding a field to `FrameData` will fail to compile here,
        // which serves as a reminder to update this impl.
        let FrameData { call_id, catch_unwind, timing: _ } = self;
        f.debug_struct("FrameData")
            .field("call_id", call_id)
            .field("catch_unwind", catch_unwind)
/// Extra memory kinds
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// `__rust_alloc` memory.
    /// Windows `HeapAlloc` memory.
    /// Memory for args, errno, and other parts of the machine-managed environment.
    /// This memory may leak.
    /// Memory allocated by the runtime (e.g. env vars). Separate from `Machine`
    /// because we clean it up and leak-check it.
    /// Globals copied from `tcx`.
    /// This memory may leak.
    /// Memory for extern statics.
    /// This memory may leak.
    /// Memory for thread-local statics.
    /// This memory may leak.
impl From<MiriMemoryKind> for MemoryKind<MiriMemoryKind> {
    fn from(kind: MiriMemoryKind) -> MemoryKind<MiriMemoryKind> {
        // Miri's extra kinds are the `Machine` variant of the interpreter's generic kind.
        MemoryKind::Machine(kind)
impl MayLeak for MiriMemoryKind {
    fn may_leak(self) -> bool {
        use self::MiriMemoryKind::*;
        // Heap-like kinds must be freed by the program; machine-managed kinds are
        // allowed to leak (matching the "This memory may leak" notes on the variants).
        Rust | C | WinHeap | Runtime => false,
        Machine | Global | ExternStatic | Tls => true,
impl fmt::Display for MiriMemoryKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use self::MiriMemoryKind::*;
        // Human-readable names used in diagnostics (e.g. leak reports).
        Rust => write!(f, "Rust heap"),
        C => write!(f, "C heap"),
        WinHeap => write!(f, "Windows heap"),
        Machine => write!(f, "machine-managed memory"),
        Runtime => write!(f, "language runtime memory"),
        Global => write!(f, "global (static or const)"),
        ExternStatic => write!(f, "extern static"),
        Tls => write!(f, "thread-local static"),
/// Pointer provenance (tag).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
/// A pointer with fully-known provenance: which allocation it points into.
Concrete(ConcreteTag),

/// The concrete part of a pointer tag: the allocation it belongs to
/// plus its Stacked Borrows tag.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConcreteTag {
    pub alloc_id: AllocId,
    /// Stacked Borrows tag.
// Guard against accidental size growth of these very hot interpreter types.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(Pointer<Tag>, 24);
// #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
// static_assert_size!(Pointer<Option<Tag>>, 24);
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(ScalarMaybeUninit<Tag>, 32);
impl Provenance for Tag {
    /// We use absolute addresses in the `offset` of a `Pointer<Tag>`.
    const OFFSET_IS_ADDR: bool = true;

    /// We cannot err on partial overwrites, it happens too often in practice (due to unions).
    const ERR_ON_PARTIAL_PTR_OVERWRITE: bool = false;

    fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (tag, addr) = ptr.into_parts(); // address is absolute
        write!(f, "0x{:x}", addr.bytes())?;
        Tag::Concrete(tag) => {
            // Forward `alternate` flag to `alloc_id` printing.
            write!(f, "[{:#?}]", tag.alloc_id)?;
            write!(f, "[{:?}]", tag.alloc_id)?;
            // Print Stacked Borrows tag.
            write!(f, "{:?}", tag.sb)?;
        // Wildcard provenance: allocation unknown, so there is nothing more to print.
        write!(f, "[Wildcard]")?;

    fn get_alloc_id(self) -> Option<AllocId> {
        // Only concrete tags know which allocation they belong to.
        Tag::Concrete(concrete) => Some(concrete.alloc_id),
        Tag::Wildcard => None,
/// Extra per-allocation data
#[derive(Debug, Clone)]
pub struct AllocExtra {
    /// Stacked Borrows state is only added if it is enabled.
    pub stacked_borrows: Option<stacked_borrows::AllocExtra>,
    /// Data race detection via the use of a vector-clock,
    /// this is only added if it is enabled.
    pub data_race: Option<data_race::AllocExtra>,
    /// Weak memory emulation via the use of store buffers,
    /// this is only added if it is enabled.
    pub weak_memory: Option<weak_memory::AllocExtra>,
/// Precomputed layouts of primitive types
/// (cached once at machine creation so hot paths avoid repeated layout queries).
pub struct PrimitiveLayouts<'tcx> {
    pub unit: TyAndLayout<'tcx>,
    pub i8: TyAndLayout<'tcx>,
    pub i16: TyAndLayout<'tcx>,
    pub i32: TyAndLayout<'tcx>,
    pub isize: TyAndLayout<'tcx>,
    pub u8: TyAndLayout<'tcx>,
    pub u16: TyAndLayout<'tcx>,
    pub u32: TyAndLayout<'tcx>,
    pub usize: TyAndLayout<'tcx>,
    pub bool: TyAndLayout<'tcx>,
    pub mut_raw_ptr: TyAndLayout<'tcx>,
impl<'mir, 'tcx: 'mir> PrimitiveLayouts<'tcx> {
    /// Query the layout of each cached primitive type once, up front.
    /// Fails only if the layout computation itself fails.
    fn new(layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Result<Self, LayoutError<'tcx>> {
        let tcx = layout_cx.tcx;
        // `*mut ()` stands in for "some mutable raw pointer" layout.
        let mut_raw_ptr = tcx.mk_ptr(TypeAndMut { ty: tcx.types.unit, mutbl: Mutability::Mut });
        unit: layout_cx.layout_of(tcx.mk_unit())?,
        i8: layout_cx.layout_of(tcx.types.i8)?,
        i16: layout_cx.layout_of(tcx.types.i16)?,
        i32: layout_cx.layout_of(tcx.types.i32)?,
        isize: layout_cx.layout_of(tcx.types.isize)?,
        u8: layout_cx.layout_of(tcx.types.u8)?,
        u16: layout_cx.layout_of(tcx.types.u16)?,
        u32: layout_cx.layout_of(tcx.types.u32)?,
        usize: layout_cx.layout_of(tcx.types.usize)?,
        bool: layout_cx.layout_of(tcx.types.bool)?,
        mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
/// The machine itself.
pub struct Evaluator<'mir, 'tcx> {
    pub stacked_borrows: Option<stacked_borrows::GlobalState>,
    pub data_race: Option<data_race::GlobalState>,
    pub intptrcast: intptrcast::GlobalState,

    /// Environment variables set by `setenv`.
    /// Miri does not expose env vars from the host to the emulated program.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Program arguments (`Option` because we can only initialize them after creating the ecx).
    /// These are *pointers* to argc/argv because macOS.
    /// We also need the full command line as one string because of Windows.
    pub(crate) argc: Option<MemPlace<Tag>>,
    pub(crate) argv: Option<MemPlace<Tag>>,
    pub(crate) cmd_line: Option<MemPlace<Tag>>,

    pub(crate) tls: TlsData<'tcx>,

    /// What should Miri do when an op requires communicating with the host,
    /// such as accessing host env vars, random number generation, and
    /// file system access.
    pub(crate) isolated_op: IsolatedOp,

    /// Whether to enforce the validity invariant.
    pub(crate) validate: bool,

    /// Whether to allow uninitialized numbers (integers and floats).
    pub(crate) allow_uninit_numbers: bool,

    /// Whether to allow ptr2int transmutes, and whether to allow *dereferencing* the result of an
    /// int2ptr transmute.
    pub(crate) allow_ptr_int_transmute: bool,

    /// Whether to enforce [ABI](Abi) of function calls.
    pub(crate) enforce_abi: bool,

    /// The table of file descriptors.
    pub(crate) file_handler: shims::unix::FileHandler,
    /// The table of directory descriptors.
    pub(crate) dir_handler: shims::unix::DirHandler,

    /// The "time anchor" for this machine's monotone clock (for `Instant` simulation).
    pub(crate) time_anchor: Instant,

    /// The set of threads.
    pub(crate) threads: ThreadManager<'mir, 'tcx>,

    /// Precomputed `TyLayout`s for primitive data types that are commonly used inside Miri.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations that are considered roots of static memory (that may leak).
    pub(crate) static_roots: Vec<AllocId>,

    /// The `measureme` profiler used to record timing information about
    /// the emulated program.
    profiler: Option<measureme::Profiler>,
    /// Used with `profiler` to cache the `StringId`s for event names
    /// used with `measureme`.
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of `Instance` exported under the given `Symbol` name.
    /// `None` means no `Instance` exported under the given name is found.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// Whether to raise a panic in the context of the evaluated process when unsupported
    /// functionality is encountered. If `false`, an error is propagated in the Miri application context
    /// instead (default behavior)
    pub(crate) panic_on_unsupported: bool,

    /// Equivalent setting as RUST_BACKTRACE on encountering an error.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates which are considered local for the purposes of error reporting.
    pub(crate) local_crates: Vec<CrateNum>,

    /// Mapping extern static names to their base pointer.
    extern_statics: FxHashMap<Symbol, Pointer<Tag>>,

    /// The random number generator used for resolving non-determinism.
    /// Needs to be queried by ptr_to_int, hence needs interior mutability.
    pub(crate) rng: RefCell<StdRng>,

    /// The allocation IDs to report when they are being allocated
    /// (helps for debugging memory leaks and use after free bugs).
    tracked_alloc_ids: HashSet<AllocId>,

    /// Controls whether alignment of memory accesses is being checked.
    pub(crate) check_alignment: AlignmentCheck,

    /// Failure rate of compare_exchange_weak, between 0.0 and 1.0
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// Corresponds to -Zmiri-mute-stdout-stderr and doesn't write the output but acts as if it succeeded.
    pub(crate) mute_stdout_stderr: bool,

    /// Whether weak memory emulation is enabled
    pub(crate) weak_memory: bool,

    /// The probability of the active thread being preempted at the end of each basic block.
    pub(crate) preemption_rate: f64,
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
    /// Construct the machine state from the user-provided `MiriConfig`.
    /// Fields that need a full interpreter (env vars, argc/argv) are filled in by `late_init`.
    pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Self {
        let local_crates = helpers::get_local_crates(layout_cx.tcx);
        PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        let profiler = config.measureme_out.as_ref().map(|out| {
            measureme::Profiler::new(out).expect("Couldn't create `measureme` profiler")
        // Deterministic by default: an unset seed means seed 0.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let stacked_borrows = if config.stacked_borrows {
            Some(RefCell::new(stacked_borrows::GlobalStateInner::new(
                config.tracked_pointer_tags.clone(),
                config.tracked_call_ids.clone(),
            if config.data_race_detector { Some(data_race::GlobalState::new()) } else { None };
            intptrcast: RefCell::new(intptrcast::GlobalStateInner::new(config)),
            // `env_vars` depends on a full interpreter so we cannot properly initialize it yet.
            env_vars: EnvVars::default(),
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validate: config.validate,
            allow_uninit_numbers: config.allow_uninit_numbers,
            allow_ptr_int_transmute: config.allow_ptr_int_transmute,
            enforce_abi: config.check_abi,
            file_handler: FileHandler::new(config.mute_stdout_stderr),
            dir_handler: Default::default(),
            time_anchor: Instant::now(),
            threads: ThreadManager::default(),
            static_roots: Vec::new(),
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            panic_on_unsupported: config.panic_on_unsupported,
            backtrace_style: config.backtrace_style,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            mute_stdout_stderr: config.mute_stdout_stderr,
            weak_memory: config.weak_memory_emulation,
            preemption_rate: config.preemption_rate,

    /// Second initialization phase, run once a full interpreter (`ecx`) exists.
    pub(crate) fn late_init(
        this: &mut MiriEvalContext<'mir, 'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(this, config)?;
        Evaluator::init_extern_statics(this)?;

    /// Register `ptr` as the base pointer of the extern static `name`.
    fn add_extern_static(
        this: &mut MiriEvalContext<'mir, 'tcx>,
        ptr: Pointer<Option<Tag>>,
        // This got just allocated, so there definitely is a pointer here.
        let ptr = ptr.into_pointer_or_addr().unwrap();
        // `try_insert` + `unwrap`: registering the same name twice is a bug.
        this.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();

    /// Sets up the "extern statics" for this machine.
    fn init_extern_statics(this: &mut MiriEvalContext<'mir, 'tcx>) -> InterpResult<'tcx> {
        match this.tcx.sess.target.os.as_ref() {
            Self::add_extern_static(
                this.machine.env_vars.environ.unwrap().ptr,
            // A couple zero-initialized pointer-sized extern statics.
            // Most of them are for weak symbols, which we all set to null (indicating that the
            // symbol is not supported, and triggering fallback code which ends up calling a
            // syscall that we do support).
            for name in &["__cxa_thread_atexit_impl", "getrandom", "statx", "__clock_gettime64"]
                let layout = this.machine.layouts.usize;
                let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into())?;
                this.write_scalar(Scalar::from_machine_usize(0, this), &place.into())?;
                Self::add_extern_static(this, name, place.ptr);
            // This is some obscure hack that is part of the Windows TLS story. It's a `u8`.
            let layout = this.machine.layouts.u8;
            let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into())?;
            this.write_scalar(Scalar::from_u8(0), &place.into())?;
            Self::add_extern_static(this, "_tls_used", place.ptr);
            _ => {} // No "extern statics" supported on this target

    /// Whether the machine is allowed to talk to the host (env vars, file system, ...).
    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow

    /// Check whether the stack frame that this `FrameInfo` refers to is part of a local crate.
    pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
        let def_id = frame.instance.def_id();
        def_id.is_local() || self.local_crates.contains(&def_id.krate)
/// A rustc InterpCx for Miri.
pub type MiriEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>;
/// A little trait that's useful to be inherited by extension traits.
pub trait MiriEvalContextExt<'mir, 'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriEvalContext<'mir, 'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriEvalContext<'mir, 'tcx>;
// Trivial impl: a `MiriEvalContext` is its own evaluation context.
impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
    fn eval_context_ref(&self) -> &MiriEvalContext<'mir, 'tcx> {
    fn eval_context_mut(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
/// Machine hook implementations.
impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
    type MemoryKind = MiriMemoryKind;
    type ExtraFnVal = Dlsym;

    type FrameExtra = FrameData<'tcx>;
    type AllocExtra = AllocExtra;

    type PointerTag = Tag;
    type TagExtra = SbTag;

    MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;

    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    const PANIC_ON_ALLOC_FAIL: bool = false;

    // The `enforce_*` hooks below simply consult the per-machine configuration flags
    // that were set from `MiriConfig` in `Evaluator::new`.
    fn enforce_alignment(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        ecx.machine.check_alignment != AlignmentCheck::None
    fn force_int_for_alignment_check(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        ecx.machine.check_alignment == AlignmentCheck::Int
    fn enforce_validity(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
    fn enforce_number_init(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        !ecx.machine.allow_uninit_numbers
    fn enforce_number_no_provenance(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        !ecx.machine.allow_ptr_int_transmute
    fn enforce_abi(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        ecx.machine.enforce_abi
    // Function-call hooks: all of these forward to inherent helpers on the
    // evaluation context (defined elsewhere in this crate).
    fn find_mir_or_eval_fn(
        ecx: &mut MiriEvalContext<'mir, 'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Tag>],
        dest: &PlaceTy<'tcx, Tag>,
        ret: Option<mir::BasicBlock>,
        unwind: StackPopUnwind,
    ) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        ecx.find_mir_or_eval_fn(instance, abi, args, dest, ret, unwind)

        ecx: &mut MiriEvalContext<'mir, 'tcx>,
        args: &[OpTy<'tcx, Tag>],
        dest: &PlaceTy<'tcx, Tag>,
        ret: Option<mir::BasicBlock>,
        // NOTE(review): `_unwind` is unused — dlsym shims apparently cannot unwind; confirm.
        _unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        ecx.call_dlsym(fn_val, abi, args, dest, ret)

        ecx: &mut MiriEvalContext<'mir, 'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Tag>],
        dest: &PlaceTy<'tcx, Tag>,
        ret: Option<mir::BasicBlock>,
        unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)

        ecx: &mut MiriEvalContext<'mir, 'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)

    // `abort` never returns (note the `!` return type): it stops the machine.
    fn abort(_ecx: &mut MiriEvalContext<'mir, 'tcx>, msg: String) -> InterpResult<'tcx, !> {
        throw_machine_stop!(TerminationInfo::Abort(msg))

        ecx: &MiriEvalContext<'mir, 'tcx>,
        left: &ImmTy<'tcx, Tag>,
        right: &ImmTy<'tcx, Tag>,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool, ty::Ty<'tcx>)> {
        ecx.binary_ptr_op(bin_op, left, right)
    fn thread_local_static_base_pointer(
        ecx: &mut MiriEvalContext<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Pointer<Tag>> {
        // Thread-local statics get a fresh allocation per thread, created lazily.
        ecx.get_or_create_thread_local_alloc(def_id)

    fn extern_static_base_pointer(
        ecx: &MiriEvalContext<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Pointer<Tag>> {
        // Look up the pointer registered by `add_extern_static`; unsupported
        // extern statics produce a hard error naming the static and its crate.
        let link_name = ecx.item_link_name(def_id);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            "`extern` static `{}` from crate `{}` is not supported by Miri",
            ecx.tcx.def_path_str(def_id),
            ecx.tcx.crate_name(def_id.krate),

    /// Attach Miri's per-allocation extra state (Stacked Borrows stacks, data-race
    /// clocks, weak-memory store buffers — each only if the corresponding feature
    /// is enabled) and convert the allocation's tags.
    fn init_allocation_extra<'b>(
        ecx: &MiriEvalContext<'mir, 'tcx>,
        alloc: Cow<'b, Allocation>,
        kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>> {
        // Emit a diagnostic if the user asked to track this allocation ID.
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            register_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id));
        let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
        let alloc = alloc.into_owned();
        let stacks = if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
            Some(Stacks::new_allocation(
                ecx.machine.current_span(),
        let race_alloc = if let Some(data_race) = &ecx.machine.data_race {
            Some(data_race::AllocExtra::new_allocation(data_race, alloc.size(), kind))
        let buffer_alloc = if ecx.machine.weak_memory {
            Some(weak_memory::AllocExtra::new_allocation())
        let alloc: Allocation<Tag, Self::AllocExtra> = alloc.convert_tag_add_extra(
            stacked_borrows: stacks,
            data_race: race_alloc,
            weak_memory: buffer_alloc,
            |ptr| Evaluator::tag_alloc_base_pointer(ecx, ptr),

    fn tag_alloc_base_pointer(
        ecx: &MiriEvalContext<'mir, 'tcx>,
        ptr: Pointer<AllocId>,
        // Turn the relative (alloc_id, offset) pointer into an absolute address,
        // and attach a Stacked Borrows base tag if that feature is enabled.
        let absolute_addr = intptrcast::GlobalStateInner::rel_ptr_to_addr(ecx, ptr);
        let sb_tag = if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
            stacked_borrows.borrow_mut().base_tag(ptr.provenance)
        Tag::Concrete(ConcreteTag { alloc_id: ptr.provenance, sb: sb_tag }),
        Size::from_bytes(absolute_addr),

    fn ptr_from_addr_cast(
        ecx: &MiriEvalContext<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Pointer<Option<Self::PointerTag>>> {
        Ok(intptrcast::GlobalStateInner::ptr_from_addr_cast(ecx, addr))

    fn ptr_from_addr_transmute(
        ecx: &MiriEvalContext<'mir, 'tcx>,
    ) -> Pointer<Option<Self::PointerTag>> {
        intptrcast::GlobalStateInner::ptr_from_addr_transmute(ecx, addr)

        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        ptr: Pointer<Self::PointerTag>,
    ) -> InterpResult<'tcx> {
        match ptr.provenance {
            Tag::Concrete(concrete) =>
                intptrcast::GlobalStateInner::expose_addr(ecx, concrete.alloc_id),
            // No need to do anything for wildcard pointers as
            // their provenances have already been previously exposed.
    /// Convert a pointer with provenance into an allocation-offset pair,
    /// or a `None` with an absolute address if that conversion is not possible.
        ecx: &MiriEvalContext<'mir, 'tcx>,
        ptr: Pointer<Self::PointerTag>,
    ) -> Option<(AllocId, Size, Self::TagExtra)> {
        let rel = intptrcast::GlobalStateInner::abs_ptr_to_rel(ecx, ptr);
        rel.map(|(alloc_id, size)| {
            // Wildcard pointers carry no Stacked Borrows tag of their own.
            let sb = match ptr.provenance {
                Tag::Concrete(ConcreteTag { sb, .. }) => sb,
                Tag::Wildcard => SbTag::Untagged,

    // Memory-access hooks: forward each access to whichever of the optional
    // per-allocation checkers (data race, Stacked Borrows, weak memory) is enabled.
        alloc_extra: &AllocExtra,
        (alloc_id, tag): (AllocId, Self::TagExtra),
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &alloc_extra.data_race {
            data_race.read(alloc_id, range, machine.data_race.as_ref().unwrap())?;
        if let Some(stacked_borrows) = &alloc_extra.stacked_borrows {
            stacked_borrows.memory_read(
                machine.stacked_borrows.as_ref().unwrap(),
                machine.current_span(),
        if let Some(weak_memory) = &alloc_extra.weak_memory {
            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());

        alloc_extra: &mut AllocExtra,
        (alloc_id, tag): (AllocId, Self::TagExtra),
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &mut alloc_extra.data_race {
            data_race.write(alloc_id, range, machine.data_race.as_mut().unwrap())?;
        if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
            stacked_borrows.memory_written(
                machine.stacked_borrows.as_ref().unwrap(),
                machine.current_span(),
        if let Some(weak_memory) = &alloc_extra.weak_memory {
            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());

    fn memory_deallocated(
        alloc_extra: &mut AllocExtra,
        (alloc_id, tag): (AllocId, Self::TagExtra),
    ) -> InterpResult<'tcx> {
        // Emit a diagnostic if the user asked to track this allocation ID.
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            register_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        if let Some(data_race) = &mut alloc_extra.data_race {
            data_race.deallocate(alloc_id, range, machine.data_race.as_mut().unwrap())?;
        if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
            stacked_borrows.memory_deallocated(
                machine.stacked_borrows.as_ref().unwrap(),
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        kind: mir::RetagKind,
        place: &PlaceTy<'tcx, Tag>,
    ) -> InterpResult<'tcx> {
        // Retagging is a Stacked Borrows concept; it is a no-op when SB is disabled.
        if ecx.machine.stacked_borrows.is_some() { ecx.retag(kind, place) } else { Ok(()) }

        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx, Tag>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx, Tag, FrameData<'tcx>>> {
        // Start recording our event before doing anything else
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance.to_string();
            // Cache the interned event-name string so repeated calls to the same
            // function do not re-allocate it in the profiler.
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));
            Some(profiler.start_recording_interval_event_detached(
                measureme::EventId::from_label(*name),
                ecx.get_active_thread().to_u32(),
        let stacked_borrows = ecx.machine.stacked_borrows.as_ref();
        // Without Stacked Borrows there are no real call IDs; use a fixed dummy (1).
        let call_id = stacked_borrows.map_or(NonZeroU64::new(1).unwrap(), |stacked_borrows| {
            stacked_borrows.borrow_mut().new_call()
        let extra = FrameData { call_id, catch_unwind: None, timing };
        Ok(frame.with_extra(extra))

        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
        // The machine is multi-threaded: the "stack" is the active thread's stack.
        ecx.active_thread_stack()

        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()

    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // Possibly switch threads, according to the configured preemption rate.
        ecx.maybe_preempt_active_thread();

    fn after_stack_push(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // Retag the return place of the new frame (Stacked Borrows only).
        if ecx.machine.stacked_borrows.is_some() { ecx.retag_return_place() } else { Ok(()) }
865 ecx: &mut InterpCx<'mir, 'tcx, Self>,
866 mut frame: Frame<'mir, 'tcx, Tag, FrameData<'tcx>>,
868 ) -> InterpResult<'tcx, StackPopJump> {
869 let timing = frame.extra.timing.take();
870 let res = ecx.handle_stack_pop(frame.extra, unwinding);
871 if let Some(profiler) = ecx.machine.profiler.as_ref() {
872 profiler.finish_recording_interval_event(timing.unwrap());