1 //! Global machine state as well as implementation of the interpreter engine
5 use std::cell::RefCell;
6 use std::collections::HashSet;
8 use std::time::Instant;
10 use rand::rngs::StdRng;
11 use rand::SeedableRng;
13 use rustc_ast::ast::Mutability;
14 use rustc_data_structures::fx::FxHashMap;
16 use rustc_data_structures::static_assert_size;
21 layout::{LayoutCx, LayoutError, LayoutOf, TyAndLayout},
22 Instance, TyCtxt, TypeAndMut,
25 use rustc_span::def_id::{CrateNum, DefId};
26 use rustc_span::Symbol;
27 use rustc_target::abi::Size;
28 use rustc_target::spec::abi::Abi;
31 concurrency::{data_race, weak_memory},
32 shims::unix::FileHandler,
// Some global facts about the emulated machine.

/// Size of an emulated page, in bytes. FIXME: adjust to target architecture.
pub const PAGE_SIZE: u64 = 1 << 12;
/// Not really about the "stack": this is where we start handing out integer
/// addresses to allocations.
pub const STACK_ADDR: u64 = PAGE_SIZE * 32;
/// Size reserved for the emulated stack; the exact value is arbitrary ("whatever").
pub const STACK_SIZE: u64 = PAGE_SIZE * 16;
/// The emulated machine reports a single CPU.
pub const NUM_CPUS: u64 = 1;
/// Extra data stored with each stack frame.
/// Attached to every interpreter frame via `Machine::FrameExtra` (see the
/// `Machine` impl below).
pub struct FrameData<'tcx> {
    /// Extra data for Stacked Borrows.
    pub stacked_borrows: Option<stacked_borrows::FrameExtra>,
    /// If this is Some(), then this is a special "catch unwind" frame (the frame of `try_fn`
    /// called by `try`). When this frame is popped during unwinding a panic,
    /// we stop unwinding, use the `CatchUnwindData` to handle catching.
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,
    /// If `measureme` profiling is enabled, holds timing information
    /// for the start of this frame. When we finish executing this frame,
    /// we use this to register a completed event with `measureme`.
    pub timing: Option<measureme::DetachedTiming>,
// Manual `Debug` impl: `measureme::DetachedTiming` does not implement `Debug`,
// so we cannot simply derive it.
impl<'tcx> std::fmt::Debug for FrameData<'tcx> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Omitting `timing`, it does not support `Debug`.
        // Destructuring (rather than field access) makes this fail to compile
        // if a new field is added and not considered here.
        let FrameData { stacked_borrows, catch_unwind, timing: _ } = self;
        f.debug_struct("FrameData")
            .field("stacked_borrows", stacked_borrows)
            .field("catch_unwind", catch_unwind)
/// Extra memory kinds. Wrapped into the interpreter's memory-kind type via the
/// `From<MiriMemoryKind> for MemoryKind<MiriMemoryKind>` impl below.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// `__rust_alloc` memory.
    /// Windows `HeapAlloc` memory.
    /// Memory for args, errno, and other parts of the machine-managed environment.
    /// This memory may leak.
    /// Memory allocated by the runtime (e.g. env vars). Separate from `Machine`
    /// because we clean it up and leak-check it.
    /// Globals copied from `tcx`.
    /// This memory may leak.
    /// Memory for extern statics.
    /// This memory may leak.
    /// Memory for thread-local statics.
    /// This memory may leak.
impl From<MiriMemoryKind> for MemoryKind<MiriMemoryKind> {
    // Wrap a Miri-specific kind into the interpreter's machine-defined variant.
    fn from(kind: MiriMemoryKind) -> MemoryKind<MiriMemoryKind> {
        MemoryKind::Machine(kind)
impl MayLeak for MiriMemoryKind {
    fn may_leak(self) -> bool {
        use self::MiriMemoryKind::*;
        // Heap-like kinds must be freed by the program; the machine-managed
        // kinds are allowed to leak (matching the doc comments on the enum).
        Rust | C | WinHeap | Runtime => false,
        Machine | Global | ExternStatic | Tls => true,
// Human-readable description of each memory kind.
impl fmt::Display for MiriMemoryKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use self::MiriMemoryKind::*;
        Rust => write!(f, "Rust heap"),
        C => write!(f, "C heap"),
        WinHeap => write!(f, "Windows heap"),
        Machine => write!(f, "machine-managed memory"),
        Runtime => write!(f, "language runtime memory"),
        Global => write!(f, "global (static or const)"),
        ExternStatic => write!(f, "extern static"),
        Tls => write!(f, "thread-local static"),
/// Pointer provenance. Either a concrete allocation (carrying an `AllocId` and
/// a Stacked Borrows tag, see `Provenance::Concrete` uses below) or a wildcard
/// for pointers whose provenance has been exposed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Stacked Borrows tag.
/// The "extra" information a pointer has over a regular AllocId.
/// Mirrors `Provenance` (concrete tag vs. wildcard), but without the `AllocId`.
#[derive(Copy, Clone)]
pub enum ProvenanceExtra {
// Guard against accidental size regressions of these hot interpreter types.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(Pointer<Provenance>, 24);
// FIXME: this would fit in 24 bytes, but layout optimizations are not smart enough
// #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
//static_assert_size!(Pointer<Option<Provenance>>, 24);
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(ScalarMaybeUninit<Provenance>, 32);
impl interpret::Provenance for Provenance {
    /// We use absolute addresses in the `offset` of a `Pointer<Provenance>`.
    const OFFSET_IS_ADDR: bool = true;
    /// We cannot err on partial overwrites, it happens too often in practice (due to unions).
    const ERR_ON_PARTIAL_PTR_OVERWRITE: bool = false;
    // Prints the absolute address, followed by allocation id and (for concrete
    // provenance) the Stacked Borrows tag.
    fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (prov, addr) = ptr.into_parts(); // address is absolute
        write!(f, "{:#x}", addr.bytes())?;
        Provenance::Concrete { alloc_id, sb } => {
            // Forward `alternate` flag to `alloc_id` printing.
            write!(f, "[{:#?}]", alloc_id)?;
            write!(f, "[{:?}]", alloc_id)?;
            // Print Stacked Borrows tag.
            write!(f, "{:?}", sb)?;
        Provenance::Wildcard => {
            write!(f, "[wildcard]")?;
    // Wildcard provenance has no single associated allocation.
    fn get_alloc_id(self) -> Option<AllocId> {
        Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
        Provenance::Wildcard => None,
impl fmt::Debug for ProvenanceExtra {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Concrete tags use their own `Debug`; wildcards get a fixed marker.
        ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
        ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
impl ProvenanceExtra {
    /// Apply `f` to the Stacked Borrows tag if this is concrete provenance;
    /// wildcard provenance always yields `None`.
    pub fn and_then<T>(self, f: impl FnOnce(SbTag) -> Option<T>) -> Option<T> {
        ProvenanceExtra::Concrete(pid) => f(pid),
        ProvenanceExtra::Wildcard => None,
/// Extra per-allocation data.
/// Attached to every allocation via `Machine::AllocExtra` (see the `Machine`
/// impl below); populated in `adjust_allocation`.
#[derive(Debug, Clone)]
pub struct AllocExtra {
    /// Stacked Borrows state is only added if it is enabled.
    pub stacked_borrows: Option<stacked_borrows::AllocExtra>,
    /// Data race detection via the use of a vector-clock,
    /// this is only added if it is enabled.
    pub data_race: Option<data_race::AllocExtra>,
    /// Weak memory emulation via the use of store buffers,
    /// this is only added if it is enabled.
    pub weak_memory: Option<weak_memory::AllocExtra>,
/// Precomputed layouts of primitive types, to avoid repeated `layout_of`
/// queries for types that are used all over the machine.
pub struct PrimitiveLayouts<'tcx> {
    pub unit: TyAndLayout<'tcx>,
    pub i8: TyAndLayout<'tcx>,
    pub i16: TyAndLayout<'tcx>,
    pub i32: TyAndLayout<'tcx>,
    pub isize: TyAndLayout<'tcx>,
    pub u8: TyAndLayout<'tcx>,
    pub u16: TyAndLayout<'tcx>,
    pub u32: TyAndLayout<'tcx>,
    pub usize: TyAndLayout<'tcx>,
    pub bool: TyAndLayout<'tcx>,
    /// Layout of `*mut ()` (see `new` below, which constructs exactly that type).
    pub mut_raw_ptr: TyAndLayout<'tcx>,
impl<'mir, 'tcx: 'mir> PrimitiveLayouts<'tcx> {
    // Compute all the cached layouts once; any `LayoutError` is propagated to
    // the caller (`Evaluator::new` unwraps it with an `expect`).
    fn new(layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Result<Self, LayoutError<'tcx>> {
        let tcx = layout_cx.tcx;
        // `*mut ()`: a mutable raw pointer to the unit type.
        let mut_raw_ptr = tcx.mk_ptr(TypeAndMut { ty: tcx.types.unit, mutbl: Mutability::Mut });
        unit: layout_cx.layout_of(tcx.mk_unit())?,
        i8: layout_cx.layout_of(tcx.types.i8)?,
        i16: layout_cx.layout_of(tcx.types.i16)?,
        i32: layout_cx.layout_of(tcx.types.i32)?,
        isize: layout_cx.layout_of(tcx.types.isize)?,
        u8: layout_cx.layout_of(tcx.types.u8)?,
        u16: layout_cx.layout_of(tcx.types.u16)?,
        u32: layout_cx.layout_of(tcx.types.u32)?,
        usize: layout_cx.layout_of(tcx.types.usize)?,
        bool: layout_cx.layout_of(tcx.types.bool)?,
        mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
/// The machine itself.
/// Holds all global (non-per-frame, non-per-allocation) interpreter state.
pub struct Evaluator<'mir, 'tcx> {
    pub stacked_borrows: Option<stacked_borrows::GlobalState>,
    pub data_race: Option<data_race::GlobalState>,
    pub intptrcast: intptrcast::GlobalState,
    /// Environment variables set by `setenv`.
    /// Miri does not expose env vars from the host to the emulated program.
    pub(crate) env_vars: EnvVars<'tcx>,
    /// Program arguments (`Option` because we can only initialize them after creating the ecx).
    /// These are *pointers* to argc/argv because macOS.
    /// We also need the full command line as one string because of Windows.
    pub(crate) argc: Option<MemPlace<Provenance>>,
    pub(crate) argv: Option<MemPlace<Provenance>>,
    pub(crate) cmd_line: Option<MemPlace<Provenance>>,
    /// Thread-local storage state.
    pub(crate) tls: TlsData<'tcx>,
    /// What should Miri do when an op requires communicating with the host,
    /// such as accessing host env vars, random number generation, and
    /// file system access.
    pub(crate) isolated_op: IsolatedOp,
    /// Whether to enforce the validity invariant.
    pub(crate) validate: bool,
    /// Whether to enforce [ABI](Abi) of function calls.
    pub(crate) enforce_abi: bool,
    /// The table of file descriptors.
    pub(crate) file_handler: shims::unix::FileHandler,
    /// The table of directory descriptors.
    pub(crate) dir_handler: shims::unix::DirHandler,
    /// The "time anchor" for this machine's monotonic clock (for `Instant` simulation).
    pub(crate) time_anchor: Instant,
    /// The set of threads.
    pub(crate) threads: ThreadManager<'mir, 'tcx>,
    /// Precomputed `TyLayout`s for primitive data types that are commonly used inside Miri.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,
    /// Allocations that are considered roots of static memory (that may leak).
    pub(crate) static_roots: Vec<AllocId>,
    /// The `measureme` profiler used to record timing information about
    /// the emulated program.
    profiler: Option<measureme::Profiler>,
    /// Used with `profiler` to cache the `StringId`s for event names
    /// used with `measureme`.
    string_cache: FxHashMap<String, measureme::StringId>,
    /// Cache of `Instance` exported under the given `Symbol` name.
    /// `None` means no `Instance` exported under the given name is found.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,
    /// Whether to raise a panic in the context of the evaluated process when unsupported
    /// functionality is encountered. If `false`, an error is propagated in the Miri application context
    /// instead (default behavior)
    pub(crate) panic_on_unsupported: bool,
    /// Equivalent setting as RUST_BACKTRACE on encountering an error.
    pub(crate) backtrace_style: BacktraceStyle,
    /// Crates which are considered local for the purposes of error reporting.
    pub(crate) local_crates: Vec<CrateNum>,
    /// Mapping extern static names to their base pointer.
    extern_statics: FxHashMap<Symbol, Pointer<Provenance>>,
    /// The random number generator used for resolving non-determinism.
    /// Needs to be queried by ptr_to_int, hence needs interior mutability.
    pub(crate) rng: RefCell<StdRng>,
    /// The allocation IDs to report when they are being allocated
    /// (helps for debugging memory leaks and use after free bugs).
    tracked_alloc_ids: HashSet<AllocId>,
    /// Controls whether alignment of memory accesses is being checked.
    pub(crate) check_alignment: AlignmentCheck,
    /// Failure rate of compare_exchange_weak, between 0.0 and 1.0
    pub(crate) cmpxchg_weak_failure_rate: f64,
    /// Corresponds to -Zmiri-mute-stdout-stderr and doesn't write the output but acts as if it succeeded.
    pub(crate) mute_stdout_stderr: bool,
    /// Whether weak memory emulation is enabled
    pub(crate) weak_memory: bool,
    /// The probability of the active thread being preempted at the end of each basic block.
    pub(crate) preemption_rate: f64,
    /// If `Some`, we will report the current stack every N basic blocks.
    pub(crate) report_progress: Option<u32>,
    /// The number of blocks that passed since the last progress report.
    pub(crate) since_progress_report: u32,
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
    /// Build a fresh machine from the command-line configuration.
    /// Some fields (env vars, argc/argv) cannot be filled in yet; see `late_init`.
    pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Self {
        let local_crates = helpers::get_local_crates(layout_cx.tcx);
        PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        let profiler = config.measureme_out.as_ref().map(|out| {
            measureme::Profiler::new(out).expect("Couldn't create `measureme` profiler")
        // The RNG is seeded with the user-provided seed, or 0 by default.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        // Optional tools are only instantiated when the respective flag is set.
        let stacked_borrows = if config.stacked_borrows {
            Some(RefCell::new(stacked_borrows::GlobalStateInner::new(
                config.tracked_pointer_tags.clone(),
                config.tracked_call_ids.clone(),
        let data_race = if config.data_race_detector {
            Some(data_race::GlobalState::new(config))
        intptrcast: RefCell::new(intptrcast::GlobalStateInner::new(config)),
        // `env_vars` depends on a full interpreter so we cannot properly initialize it yet.
        env_vars: EnvVars::default(),
        tls: TlsData::default(),
        isolated_op: config.isolated_op,
        validate: config.validate,
        enforce_abi: config.check_abi,
        file_handler: FileHandler::new(config.mute_stdout_stderr),
        dir_handler: Default::default(),
        time_anchor: Instant::now(),
        threads: ThreadManager::default(),
        static_roots: Vec::new(),
        string_cache: Default::default(),
        exported_symbols_cache: FxHashMap::default(),
        panic_on_unsupported: config.panic_on_unsupported,
        backtrace_style: config.backtrace_style,
        extern_statics: FxHashMap::default(),
        rng: RefCell::new(rng),
        tracked_alloc_ids: config.tracked_alloc_ids.clone(),
        check_alignment: config.check_alignment,
        cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
        mute_stdout_stderr: config.mute_stdout_stderr,
        weak_memory: config.weak_memory_emulation,
        preemption_rate: config.preemption_rate,
        report_progress: config.report_progress,
        since_progress_report: 0,
/// Initialization that requires a full interpreter (`this`), not just the
/// machine: env vars and extern statics are set up here.
pub(crate) fn late_init(
    this: &mut MiriEvalContext<'mir, 'tcx>,
) -> InterpResult<'tcx> {
    EnvVars::init(this, config)?;
    Evaluator::init_extern_statics(this)?;
// Register `ptr` as the base pointer for the extern static with the given name.
fn add_extern_static(
    this: &mut MiriEvalContext<'mir, 'tcx>,
    ptr: Pointer<Option<Provenance>>,
    // This got just allocated, so there definitely is a pointer here.
    let ptr = ptr.into_pointer_or_addr().unwrap();
    // `try_insert(...).unwrap()` panics if the same name is registered twice.
    this.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
/// Sets up the "extern statics" for this machine.
/// Which statics exist depends on the target OS (matched on below).
fn init_extern_statics(this: &mut MiriEvalContext<'mir, 'tcx>) -> InterpResult<'tcx> {
    match this.tcx.sess.target.os.as_ref() {
        // Expose a pointer to the machine-managed `environ` static.
        Self::add_extern_static(
            this.machine.env_vars.environ.unwrap().ptr,
        // A couple zero-initialized pointer-sized extern statics.
        // Most of them are for weak symbols, which we all set to null (indicating that the
        // symbol is not supported, and triggering fallback code which ends up calling a
        // syscall that we do support).
        for name in &["__cxa_thread_atexit_impl", "getrandom", "statx", "__clock_gettime64"]
            let layout = this.machine.layouts.usize;
            let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into())?;
            this.write_scalar(Scalar::from_machine_usize(0, this), &place.into())?;
            Self::add_extern_static(this, name, place.ptr);
        // Another target arm also exposing `environ`.
        Self::add_extern_static(
            this.machine.env_vars.environ.unwrap().ptr,
        // This is some obscure hack that is part of the Windows TLS story. It's a `u8`.
        let layout = this.machine.layouts.u8;
        let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into())?;
        this.write_scalar(Scalar::from_u8(0), &place.into())?;
        Self::add_extern_static(this, "_tls_used", place.ptr);
        _ => {} // No "extern statics" supported on this target
/// Whether the program may communicate with the host (i.e., isolation is disabled).
pub(crate) fn communicate(&self) -> bool {
    self.isolated_op == IsolatedOp::Allow
/// Check whether the stack frame that this `FrameInfo` refers to is part of a local crate.
pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
    let def_id = frame.instance.def_id();
    def_id.is_local() || self.local_crates.contains(&def_id.krate)
/// A rustc InterpCx for Miri.
pub type MiriEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>;
/// A little trait that's useful to be inherited by extension traits.
/// Lets extension traits get at the underlying `InterpCx` by reference or mutably.
pub trait MiriEvalContextExt<'mir, 'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriEvalContext<'mir, 'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriEvalContext<'mir, 'tcx>;
// The trivial impl: a `MiriEvalContext` is its own eval context.
impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
    fn eval_context_ref(&self) -> &MiriEvalContext<'mir, 'tcx> {
    fn eval_context_mut(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
/// Machine hook implementations.
impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
    // Associated types plugging Miri's data into the generic interpreter.
    type MemoryKind = MiriMemoryKind;
    type ExtraFnVal = Dlsym;
    type FrameExtra = FrameData<'tcx>;
    type AllocExtra = AllocExtra;
    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    type MemoryMap = MonoHashMap<
        (MemoryKind<MiriMemoryKind>, Allocation<Provenance, Self::AllocExtra>),
    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);
    // Allocation failure is reported as an error, not a panic.
    const PANIC_ON_ALLOC_FAIL: bool = false;
    // The following hooks forward configuration flags stored in the machine.
    fn enforce_alignment(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        ecx.machine.check_alignment != AlignmentCheck::None
    fn force_int_for_alignment_check(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        ecx.machine.check_alignment == AlignmentCheck::Int
    fn enforce_validity(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
    fn enforce_number_init(_ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
    fn enforce_number_no_provenance(_ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
    fn enforce_abi(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        ecx.machine.enforce_abi
    // Overflow checking follows the rustc session setting.
    fn checked_binop_checks_overflow(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
        ecx.tcx.sess.overflow_checks()
    // Entry point for function calls: either returns MIR to execute or lets a
    // shim handle the call. Forwards to the inherent method of the same name.
    fn find_mir_or_eval_fn(
        ecx: &mut MiriEvalContext<'mir, 'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Provenance>],
        dest: &PlaceTy<'tcx, Provenance>,
        ret: Option<mir::BasicBlock>,
        unwind: StackPopUnwind,
    ) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        ecx.find_mir_or_eval_fn(instance, abi, args, dest, ret, unwind)
        // Calls to `Dlsym` function values; unwinding is not forwarded
        // (`_unwind` is ignored, `call_dlsym` does not take it).
        ecx: &mut MiriEvalContext<'mir, 'tcx>,
        args: &[OpTy<'tcx, Provenance>],
        dest: &PlaceTy<'tcx, Provenance>,
        ret: Option<mir::BasicBlock>,
        _unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        ecx.call_dlsym(fn_val, abi, args, dest, ret)
        // Intrinsic calls are forwarded to Miri's intrinsic shims.
        ecx: &mut MiriEvalContext<'mir, 'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Provenance>],
        dest: &PlaceTy<'tcx, Provenance>,
        ret: Option<mir::BasicBlock>,
        unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
        // MIR `Assert` terminator failures (e.g. overflow checks).
        ecx: &mut MiriEvalContext<'mir, 'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    // `abort` stops the machine with a dedicated termination info.
    fn abort(_ecx: &mut MiriEvalContext<'mir, 'tcx>, msg: String) -> InterpResult<'tcx, !> {
        throw_machine_stop!(TerminationInfo::Abort(msg))
        // Binary operations on pointer values.
        ecx: &MiriEvalContext<'mir, 'tcx>,
        left: &ImmTy<'tcx, Provenance>,
        right: &ImmTy<'tcx, Provenance>,
    ) -> InterpResult<'tcx, (Scalar<Provenance>, bool, ty::Ty<'tcx>)> {
        ecx.binary_ptr_op(bin_op, left, right)
    // Thread-local statics get one allocation per thread, created on demand.
    fn thread_local_static_base_pointer(
        ecx: &mut MiriEvalContext<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Pointer<Provenance>> {
        ecx.get_or_create_thread_local_alloc(def_id)
    // Resolve an extern static to the shim allocation registered in
    // `extern_statics` (see `add_extern_static`), with a size/alignment check.
    fn extern_static_base_pointer(
        ecx: &MiriEvalContext<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Pointer<Provenance>> {
        let link_name = ecx.item_link_name(def_id);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            // Various parts of the engine rely on `get_alloc_info` for size and alignment
            // information. That uses the type information of this static.
            // Make sure it matches the Miri allocation for this.
            let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
                panic!("extern_statics cannot contain wildcards")
            let (shim_size, shim_align, _kind) = ecx.get_alloc_info(alloc_id);
            let extern_decl_layout =
                ecx.tcx.layout_of(ty::ParamEnv::empty().and(ecx.tcx.type_of(def_id))).unwrap();
            // Mismatch between declared layout and our shim is a hard error.
            if extern_decl_layout.size != shim_size || extern_decl_layout.align.abi != shim_align {
                "`extern` static `{name}` from crate `{krate}` has been declared \
                with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
                but Miri emulates it via an extern static shim \
                with a size of {shim_size} bytes and alignment of {shim_align} bytes",
                name = ecx.tcx.def_path_str(def_id),
                krate = ecx.tcx.crate_name(def_id.krate),
                decl_size = extern_decl_layout.size.bytes(),
                decl_align = extern_decl_layout.align.abi.bytes(),
                shim_size = shim_size.bytes(),
                shim_align = shim_align.bytes(),
                // Unknown extern statics are unsupported.
                "`extern` static `{name}` from crate `{krate}` is not supported by Miri",
                name = ecx.tcx.def_path_str(def_id),
                krate = ecx.tcx.crate_name(def_id.krate),
    // Attach Miri's per-allocation extra state (Stacked Borrows stacks,
    // data-race clocks, weak-memory store buffers) to a new allocation.
    fn adjust_allocation<'b>(
        ecx: &MiriEvalContext<'mir, 'tcx>,
        alloc: Cow<'b, Allocation>,
        kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra>>> {
        let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
        // Report allocations the user asked to track (-Zmiri-track-alloc-id).
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            register_diagnostic(NonHaltingDiagnostic::CreatedAlloc(
        let alloc = alloc.into_owned();
        // Each tool's extra state is only created if the tool is enabled.
        let stacks = if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
            Some(Stacks::new_allocation(
                ecx.machine.current_span(),
        let race_alloc = if let Some(data_race) = &ecx.machine.data_race {
            Some(data_race::AllocExtra::new_allocation(
                &ecx.machine.threads,
        let buffer_alloc = if ecx.machine.weak_memory {
            Some(weak_memory::AllocExtra::new_allocation())
        let alloc: Allocation<Provenance, Self::AllocExtra> = alloc.adjust_from_tcx(
            stacked_borrows: stacks.map(RefCell::new),
            data_race: race_alloc,
            weak_memory: buffer_alloc,
            |ptr| ecx.global_base_pointer(ptr),
        Ok(Cow::Owned(alloc))
    // Turn an `AllocId`-based pointer into a Miri pointer: absolute address
    // plus concrete provenance (alloc id and a Stacked Borrows base tag).
    fn adjust_alloc_base_pointer(
        ecx: &MiriEvalContext<'mir, 'tcx>,
        ptr: Pointer<AllocId>,
    ) -> Pointer<Provenance> {
        if cfg!(debug_assertions) {
            // The machine promises to never call us on thread-local or extern statics.
            let alloc_id = ptr.provenance;
            match ecx.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
                    panic!("adjust_alloc_base_pointer called on thread-local static")
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
                    panic!("adjust_alloc_base_pointer called on extern static")
        // intptrcast assigns the absolute address for this allocation.
        let absolute_addr = intptrcast::GlobalStateInner::rel_ptr_to_addr(ecx, ptr);
        let sb_tag = if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
            stacked_borrows.borrow_mut().base_ptr_tag(ptr.provenance)
            // Value does not matter, SB is disabled
        Provenance::Concrete { alloc_id: ptr.provenance, sb: sb_tag },
        Size::from_bytes(absolute_addr),
    // Integer-to-pointer conversions are all delegated to the intptrcast module.
    fn ptr_from_addr_cast(
        ecx: &MiriEvalContext<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>> {
        intptrcast::GlobalStateInner::ptr_from_addr_cast(ecx, addr)
    // Same, but for transmutes (no explicit cast in the source).
    fn ptr_from_addr_transmute(
        ecx: &MiriEvalContext<'mir, 'tcx>,
    ) -> Pointer<Option<Self::Provenance>> {
        intptrcast::GlobalStateInner::ptr_from_addr_transmute(ecx, addr)
        // Record that this pointer's provenance has been exposed.
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        ptr: Pointer<Self::Provenance>,
    ) -> InterpResult<'tcx> {
        match ptr.provenance {
            Provenance::Concrete { alloc_id, sb } =>
                intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, sb),
            Provenance::Wildcard => {
                // No need to do anything for wildcard pointers as
                // their provenances have already been previously exposed.
    /// Convert a pointer with provenance into an allocation-offset pair,
    /// or a `None` with an absolute address if that conversion is not possible.
        ecx: &MiriEvalContext<'mir, 'tcx>,
        ptr: Pointer<Self::Provenance>,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
        let rel = intptrcast::GlobalStateInner::abs_ptr_to_rel(ecx, ptr);
        rel.map(|(alloc_id, size)| {
            // Carry the tag (or wildcard) along as `ProvenanceExtra`.
            let sb = match ptr.provenance {
                Provenance::Concrete { sb, .. } => ProvenanceExtra::Concrete(sb),
                Provenance::Wildcard => ProvenanceExtra::Wildcard,
        // Read hook: notify each enabled tool, in order: data race detector,
        // Stacked Borrows, weak-memory store buffers.
        alloc_extra: &AllocExtra,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &alloc_extra.data_race {
            machine.data_race.as_ref().unwrap(),
        if let Some(stacked_borrows) = &alloc_extra.stacked_borrows {
            stacked_borrows.borrow_mut().memory_read(
                machine.stacked_borrows.as_ref().unwrap(),
                machine.current_span(),
        if let Some(weak_memory) = &alloc_extra.weak_memory {
            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());
        // Write hook: same tool order as reads, but with mutable access
        // (`get_mut` instead of `borrow_mut` for the SB stacks).
        alloc_extra: &mut AllocExtra,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
    ) -> InterpResult<'tcx> {
        if let Some(data_race) = &mut alloc_extra.data_race {
            machine.data_race.as_mut().unwrap(),
        if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
            stacked_borrows.get_mut().memory_written(
                machine.stacked_borrows.as_ref().unwrap(),
                machine.current_span(),
        if let Some(weak_memory) = &alloc_extra.weak_memory {
            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());
    // Deallocation hook: report tracked allocations, then notify the tools.
    fn memory_deallocated(
        alloc_extra: &mut AllocExtra,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            register_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        if let Some(data_race) = &mut alloc_extra.data_race {
            data_race.deallocate(
                machine.data_race.as_mut().unwrap(),
        if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
            stacked_borrows.get_mut().memory_deallocated(
                machine.stacked_borrows.as_ref().unwrap(),
        // Retagging is a no-op unless Stacked Borrows is enabled.
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        kind: mir::RetagKind,
        place: &PlaceTy<'tcx, Provenance>,
    ) -> InterpResult<'tcx> {
        if ecx.machine.stacked_borrows.is_some() { ecx.retag(kind, place) } else { Ok(()) }
        // Create the per-frame extra data (`FrameData`) when a frame is pushed.
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx, Provenance, FrameData<'tcx>>> {
        // Start recording our event before doing anything else
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance.to_string();
            // Cache the `measureme` string id per function name so we only
            // allocate each profiler string once.
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));
            Some(profiler.start_recording_interval_event_detached(
                measureme::EventId::from_label(*name),
                ecx.get_active_thread().to_u32(),
        let stacked_borrows = ecx.machine.stacked_borrows.as_ref();
        let extra = FrameData {
            stacked_borrows: stacked_borrows.map(|sb| sb.borrow_mut().new_frame()),
        Ok(frame.with_extra(extra))
        // The interpreter operates on the *active thread's* stack.
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>] {
        ecx.active_thread_stack()
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()
    // Runs before every terminator: progress reporting and thread preemption.
    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // Possibly report our progress.
        if let Some(report_progress) = ecx.machine.report_progress {
            if ecx.machine.since_progress_report >= report_progress {
                register_diagnostic(NonHaltingDiagnostic::ProgressReport);
                ecx.machine.since_progress_report = 0;
            // Cannot overflow, since it is strictly less than `report_progress`.
            ecx.machine.since_progress_report += 1;
        // These are our preemption points.
        ecx.maybe_preempt_active_thread();
    // After a frame is pushed, retag the return place (Stacked Borrows only).
    fn after_stack_push(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        if ecx.machine.stacked_borrows.is_some() { ecx.retag_return_place() } else { Ok(()) }
979 ecx: &mut InterpCx<'mir, 'tcx, Self>,
980 mut frame: Frame<'mir, 'tcx, Provenance, FrameData<'tcx>>,
982 ) -> InterpResult<'tcx, StackPopJump> {
983 let timing = frame.extra.timing.take();
984 if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
985 stacked_borrows.borrow_mut().end_call(&frame.extra);
987 let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
988 if let Some(profiler) = ecx.machine.profiler.as_ref() {
989 profiler.finish_recording_interval_event(timing.unwrap());