1 //! Global machine state as well as implementation of the interpreter engine
5 use std::cell::RefCell;
7 use std::num::NonZeroU64;
8 use std::time::Instant;
11 use rand::rngs::StdRng;
12 use rand::SeedableRng;
14 use rustc_data_structures::fx::FxHashMap;
19 layout::{LayoutCx, LayoutError, TyAndLayout},
23 use rustc_span::def_id::DefId;
24 use rustc_span::symbol::{sym, Symbol};
25 use rustc_target::abi::{LayoutOf, Size};
26 use rustc_target::spec::abi::Abi;
30 // Some global facts about the emulated machine.
31 pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
32 pub const STACK_ADDR: u64 = 32 * PAGE_SIZE; // not really about the "stack", but where we start assigning integer addresses to allocations
33 pub const STACK_SIZE: u64 = 16 * PAGE_SIZE; // whatever
// Number of CPUs the emulated machine claims to have.
// NOTE(review): presumably surfaced through shims that report CPU count — confirm at use sites.
34 pub const NUM_CPUS: u64 = 1;
36 /// Extra data stored with each stack frame
38 pub struct FrameData<'tcx> {
39 /// Extra data for Stacked Borrows.
// Allocated via `stacked_borrows::GlobalState::new_call` when the frame is pushed
// (see `init_frame_extra` below); a dummy id of 1 is used when Stacked Borrows is disabled.
40 pub call_id: stacked_borrows::CallId,
42 /// If this is Some(), then this is a special "catch unwind" frame (the frame of `try_fn`
43 /// called by `try`). When this frame is popped during unwinding a panic,
44 /// we stop unwinding, use the `CatchUnwindData` to handle catching.
45 pub catch_unwind: Option<CatchUnwindData<'tcx>>,
48 /// Extra memory kinds
// The "This memory may leak" annotations below are what the `MayLeak` impl further
// down in this file encodes: those kinds are exempt from leak checking.
49 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
50 pub enum MiriMemoryKind {
51 /// `__rust_alloc` memory.
55 /// Windows `HeapAlloc` memory.
57 /// Memory for args, errno, and other parts of the machine-managed environment.
58 /// This memory may leak.
60 /// Memory for env vars. Separate from `Machine` because we clean it up and leak-check it.
62 /// Globals copied from `tcx`.
63 /// This memory may leak.
65 /// Memory for extern statics.
66 /// This memory may leak.
68 /// Memory for thread-local statics.
69 /// This memory may leak.
// Convert a `MiriMemoryKind` into the interpreter's general `MemoryKind` by wrapping
// it in the machine-specific `Machine` variant.
// NOTE(review): implementing `From<MiriMemoryKind> for MemoryKind<MiriMemoryKind>` would be
// the more idiomatic direction (the blanket impl then provides `Into` for free) — confirm
// this compiles under the orphan rules before changing.
73 impl Into<MemoryKind<MiriMemoryKind>> for MiriMemoryKind {
75 fn into(self) -> MemoryKind<MiriMemoryKind> {
76 MemoryKind::Machine(self)
// Leak-check policy for each memory kind: kinds whose enum doc comments say
// "This memory may leak" return `true` here and are thus exempt from the leak checker.
80 impl MayLeak for MiriMemoryKind {
82 fn may_leak(self) -> bool {
83 use self::MiriMemoryKind::*;
85 Rust | C | WinHeap | Env => false,
86 Machine | Global | ExternStatic | Tls => true,
// Human-readable name for each memory kind.
91 impl fmt::Display for MiriMemoryKind {
92 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
93 use self::MiriMemoryKind::*;
95 Rust => write!(f, "Rust heap"),
96 C => write!(f, "C heap"),
97 WinHeap => write!(f, "Windows heap"),
98 Machine => write!(f, "machine-managed memory"),
99 Env => write!(f, "environment variable"),
100 Global => write!(f, "global (static or const)"),
101 ExternStatic => write!(f, "extern static"),
102 Tls => write!(f, "thread-local static"),
107 /// Extra per-allocation data
// Populated by `Machine::init_allocation_extra` (below) when an allocation is created.
108 #[derive(Debug, Clone)]
109 pub struct AllocExtra {
110 /// Stacked Borrows state is only added if it is enabled.
111 pub stacked_borrows: Option<stacked_borrows::AllocExtra>,
112 /// Data race detection via the use of a vector-clock,
113 /// this is only added if it is enabled.
114 pub data_race: Option<data_race::AllocExtra>,
117 /// Extra global memory data
// Machine-wide memory state; per-allocation state lives in `AllocExtra` above.
119 pub struct MemoryExtra {
// Global state of the optional checkers; `None` when the feature is disabled in the config.
120 pub stacked_borrows: Option<stacked_borrows::MemoryExtra>,
121 pub data_race: Option<data_race::MemoryExtra>,
122 pub intptrcast: intptrcast::MemoryExtra,
124 /// Mapping extern static names to their canonical allocation.
125 extern_statics: FxHashMap<Symbol, AllocId>,
127 /// The random number generator used for resolving non-determinism.
128 /// Needs to be queried by ptr_to_int, hence needs interior mutability.
129 pub(crate) rng: RefCell<StdRng>,
131 /// An allocation ID to report when it is being allocated
132 /// (helps for debugging memory leaks and use after free bugs).
133 tracked_alloc_id: Option<AllocId>,
135 /// Controls whether alignment of memory accesses is being checked.
136 pub(crate) check_alignment: AlignmentCheck,
138 /// Failure rate of compare_exchange_weak, between 0.0 and 1.0
139 pub(crate) cmpxchg_weak_failure_rate: f64,
// Construct the global memory state from the user-supplied configuration.
143 pub fn new(config: &MiriConfig) -> Self {
// Deterministic by default: an unset seed falls back to 0.
144 let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
// Each checker's global state is only created when its config flag is set.
145 let stacked_borrows = if config.stacked_borrows {
146 Some(RefCell::new(stacked_borrows::GlobalState::new(
147 config.tracked_pointer_tag,
148 config.tracked_call_id,
154 let data_race = if config.data_race_detector {
155 Some(data_race::GlobalState::new())
162 intptrcast: Default::default(),
163 extern_statics: FxHashMap::default(),
164 rng: RefCell::new(rng),
165 tracked_alloc_id: config.tracked_alloc_id,
166 check_alignment: config.check_alignment,
167 cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
// Register the allocation backing an "extern static" under the given name.
171 fn add_extern_static<'tcx, 'mir>(
172 this: &mut MiriEvalContext<'mir, 'tcx>,
// The static must be backed by a real allocation, and the pointer must point
// at its very start (offset zero).
176 let ptr = ptr.assert_ptr();
177 assert_eq!(ptr.offset, Size::ZERO);
// `try_insert(..).unwrap()`: registering the same name twice is a bug, so panic on duplicates.
178 this.memory.extra.extern_statics.try_insert(Symbol::intern(name), ptr.alloc_id).unwrap();
181 /// Sets up the "extern statics" for this machine.
182 pub fn init_extern_statics<'tcx, 'mir>(
183 this: &mut MiriEvalContext<'mir, 'tcx>,
184 ) -> InterpResult<'tcx> {
// Which extern statics exist depends on the target OS.
185 match this.tcx.sess.target.os.as_str() {
// NOTE(review): arm label elided in this listing — presumably the "linux" arm,
// given the glibc symbol below; confirm against the full file.
187 // "__cxa_thread_atexit_impl"
188 // This should be all-zero, pointer-sized.
189 let layout = this.machine.layouts.usize;
190 let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into());
191 this.write_scalar(Scalar::from_machine_usize(0, this), &place.into())?;
192 Self::add_extern_static(this, "__cxa_thread_atexit_impl", place.ptr);
// Expose the pointer stored in the machine's env-vars state (must be initialized by now,
// hence the `unwrap`).
194 Self::add_extern_static(
197 this.machine.env_vars.environ.unwrap().ptr,
202 // This is some obscure hack that is part of the Windows TLS story. It's a `u8`.
203 let layout = this.machine.layouts.u8;
204 let place = this.allocate(layout, MiriMemoryKind::ExternStatic.into());
205 this.write_scalar(Scalar::from_u8(0), &place.into())?;
206 Self::add_extern_static(this, "_tls_used", place.ptr);
208 _ => {} // No "extern statics" supported on this target
214 /// Precomputed layouts of primitive types
// Cached once (see `PrimitiveLayouts::new` below) so commonly used layouts do not
// have to be re-queried from `layout_of` on every use.
215 pub struct PrimitiveLayouts<'tcx> {
216 pub unit: TyAndLayout<'tcx>,
217 pub i8: TyAndLayout<'tcx>,
218 pub i32: TyAndLayout<'tcx>,
219 pub isize: TyAndLayout<'tcx>,
220 pub u8: TyAndLayout<'tcx>,
221 pub u32: TyAndLayout<'tcx>,
222 pub usize: TyAndLayout<'tcx>,
// Query `layout_of` for each cached primitive type; `?` propagates any `LayoutError`.
225 impl<'mir, 'tcx: 'mir> PrimitiveLayouts<'tcx> {
226 fn new(layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Result<Self, LayoutError<'tcx>> {
228 unit: layout_cx.layout_of(layout_cx.tcx.mk_unit())?,
229 i8: layout_cx.layout_of(layout_cx.tcx.types.i8)?,
230 i32: layout_cx.layout_of(layout_cx.tcx.types.i32)?,
231 isize: layout_cx.layout_of(layout_cx.tcx.types.isize)?,
232 u8: layout_cx.layout_of(layout_cx.tcx.types.u8)?,
233 u32: layout_cx.layout_of(layout_cx.tcx.types.u32)?,
234 usize: layout_cx.layout_of(layout_cx.tcx.types.usize)?,
239 /// The machine itself.
// This is the `Machine` implementation plugged into rustc's `InterpCx`
// (see the `impl Machine for Evaluator` further down).
240 pub struct Evaluator<'mir, 'tcx> {
241 /// Environment variables set by `setenv`.
242 /// Miri does not expose env vars from the host to the emulated program.
243 pub(crate) env_vars: EnvVars<'tcx>,
245 /// Program arguments (`Option` because we can only initialize them after creating the ecx).
246 /// These are *pointers* to argc/argv because macOS.
247 /// We also need the full command line as one string because of Windows.
248 pub(crate) argc: Option<Scalar<Tag>>,
249 pub(crate) argv: Option<Scalar<Tag>>,
250 pub(crate) cmd_line: Option<Scalar<Tag>>,
// Thread-local storage bookkeeping (keys and per-thread values).
253 pub(crate) tls: TlsData<'tcx>,
255 /// If enabled, the `env_vars` field is populated with the host env vars during initialization
256 /// and random number generation is delegated to the host.
257 pub(crate) communicate: bool,
259 /// Whether to enforce the validity invariant.
260 pub(crate) validate: bool,
// Emulated file descriptors and open directory iterators for the POSIX shims.
262 pub(crate) file_handler: shims::posix::FileHandler,
263 pub(crate) dir_handler: shims::posix::DirHandler,
265 /// The "time anchor" for this machine's monotone clock (for `Instant` simulation).
266 pub(crate) time_anchor: Instant,
268 /// The set of threads.
269 pub(crate) threads: ThreadManager<'mir, 'tcx>,
271 /// Precomputed `TyLayout`s for primitive data types that are commonly used inside Miri.
272 pub(crate) layouts: PrimitiveLayouts<'tcx>,
274 /// Allocations that are considered roots of static memory (that may leak).
275 pub(crate) static_roots: Vec<AllocId>,
278 impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
// Build a fresh machine; most fields start at their defaults and are
// filled in later once the `InterpCx` / `Memory` exist.
282 layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>,
// Failing to lay out a primitive type is unrecoverable, hence `expect`.
285 PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
287 // `env_vars` could be initialized properly here if `Memory` were available before
288 // calling this method.
289 env_vars: EnvVars::default(),
293 tls: TlsData::default(),
296 file_handler: Default::default(),
297 dir_handler: Default::default(),
// Anchor the emulated monotone clock at machine creation time.
298 time_anchor: Instant::now(),
300 threads: ThreadManager::default(),
301 static_roots: Vec::new(),
306 /// A rustc InterpCx for Miri.
// `InterpCx` instantiated with Miri's `Evaluator` as the machine.
307 pub type MiriEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>;
309 /// A little trait that's useful to be inherited by extension traits.
// Extension traits bound on this can uniformly reach the underlying `MiriEvalContext`
// (shared or mutable) regardless of what type they are implemented on.
310 pub trait MiriEvalContextExt<'mir, 'tcx> {
311 fn eval_context_ref<'a>(&'a self) -> &'a MiriEvalContext<'mir, 'tcx>;
312 fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriEvalContext<'mir, 'tcx>;
// The identity implementation: a `MiriEvalContext` trivially is its own eval context.
314 impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
316 fn eval_context_ref(&self) -> &MiriEvalContext<'mir, 'tcx> {
320 fn eval_context_mut(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
325 /// Machine hook implementations.
326 impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
// Plug Miri's own types into the interpreter's extension points.
327 type MemoryKind = MiriMemoryKind;
329 type FrameExtra = FrameData<'tcx>;
330 type MemoryExtra = MemoryExtra;
331 type AllocExtra = AllocExtra;
332 type PointerTag = Tag;
333 type ExtraFnVal = Dlsym;
336 MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
// Globals copied out of `tcx` are given their own memory kind (see `MiriMemoryKind::Global`).
338 const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);
// Alignment is checked unless the config disabled it entirely.
341 fn enforce_alignment(memory_extra: &MemoryExtra) -> bool {
342 memory_extra.check_alignment != AlignmentCheck::None
// In `Int` mode, force the alignment check to use the concrete integer address.
346 fn force_int_for_alignment_check(memory_extra: &Self::MemoryExtra) -> bool {
347 memory_extra.check_alignment == AlignmentCheck::Int
// Whether the validity invariant is enforced (body elided in this listing;
// presumably reads the machine's `validate` flag — confirm against the full file).
351 fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
// Function-call hook: either return MIR for the interpreter to execute,
// or emulate the call via Miri's shims (delegated to the inherent helper).
356 fn find_mir_or_eval_fn(
357 ecx: &mut InterpCx<'mir, 'tcx, Self>,
358 instance: ty::Instance<'tcx>,
360 args: &[OpTy<'tcx, Tag>],
361 ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
362 unwind: Option<mir::BasicBlock>,
363 ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
364 ecx.find_mir_or_eval_fn(instance, abi, args, ret, unwind)
// Calls to `Dlsym` function values (the `ExtraFnVal` type above) are dispatched here.
// `_unwind` is deliberately ignored — presumably these shims never unwind; confirm
// in `call_dlsym`.
369 ecx: &mut InterpCx<'mir, 'tcx, Self>,
372 args: &[OpTy<'tcx, Tag>],
373 ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
374 _unwind: Option<mir::BasicBlock>,
375 ) -> InterpResult<'tcx> {
376 ecx.call_dlsym(fn_val, abi, args, ret)
// Intrinsic-call hook: delegate to Miri's intrinsic shims.
381 ecx: &mut rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
382 instance: ty::Instance<'tcx>,
383 args: &[OpTy<'tcx, Tag>],
384 ret: Option<(&PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
385 unwind: Option<mir::BasicBlock>,
386 ) -> InterpResult<'tcx> {
387 ecx.call_intrinsic(instance, args, ret, unwind)
// MIR `Assert` terminator failures are handled here; `unwind` is the cleanup target.
392 ecx: &mut InterpCx<'mir, 'tcx, Self>,
393 msg: &mir::AssertMessage<'tcx>,
394 unwind: Option<mir::BasicBlock>,
395 ) -> InterpResult<'tcx> {
396 ecx.assert_panic(msg, unwind)
// `abort` terminates the interpreted program: surface it as a machine-stop
// (the `!` return type shows execution never continues past this).
400 fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: String) -> InterpResult<'tcx, !> {
401 throw_machine_stop!(TerminationInfo::Abort(msg))
// Binary operations involving pointers need machine help; the returned triple is
// (result value, overflow flag, result type).
406 ecx: &rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
408 left: &ImmTy<'tcx, Tag>,
409 right: &ImmTy<'tcx, Tag>,
410 ) -> InterpResult<'tcx, (Scalar<Tag>, bool, ty::Ty<'tcx>)> {
411 ecx.binary_ptr_op(bin_op, left, right)
// `Box` allocation hook: emulated by pushing a call to the `exchange_malloc`
// lang item with the pointee's size and alignment as arguments.
415 ecx: &mut InterpCx<'mir, 'tcx, Self>,
416 dest: &PlaceTy<'tcx, Tag>,
417 ) -> InterpResult<'tcx> {
418 trace!("box_alloc for {:?}", dest.layout.ty);
// `dest` holds the `Box` itself; dereference its type once to get the pointee layout.
419 let layout = ecx.layout_of(dest.layout.ty.builtin_deref(false).unwrap().ty)?;
420 // First argument: `size`.
421 // (`0` is allowed here -- this is expected to be handled by the lang item).
422 let size = Scalar::from_machine_usize(layout.size.bytes(), ecx);
424 // Second argument: `align`.
425 let align = Scalar::from_machine_usize(layout.align.abi.bytes(), ecx);
427 // Call the `exchange_malloc` lang item.
428 let malloc = ecx.tcx.lang_items().exchange_malloc_fn().unwrap();
429 let malloc = ty::Instance::mono(ecx.tcx.tcx, malloc);
433 &[size.into(), align.into()],
435 // Don't do anything when we are done. The `statement()` function will increment
436 // the old stack frame's stmt counter to the next statement, which means that when
437 // `exchange_malloc` returns, we go on evaluating exactly where we want to be.
438 StackPopCleanup::None { cleanup: true },
// Resolve a `thread_local` static to the allocation for the *current* thread,
// creating it on first access.
443 fn thread_local_static_alloc_id(
444 ecx: &mut InterpCx<'mir, 'tcx, Self>,
446 ) -> InterpResult<'tcx, AllocId> {
447 ecx.get_or_create_thread_local_alloc_id(def_id)
// Resolve an `extern` static to the canonical allocation registered by
// `init_extern_statics` / `add_extern_static` above.
450 fn extern_static_alloc_id(
451 memory: &Memory<'mir, 'tcx, Self>,
453 ) -> InterpResult<'tcx, AllocId> {
// Prefer the `#[link_name = "..."]` attribute over the item's own name, if present.
454 let attrs = memory.tcx.get_attrs(def_id);
455 let link_name = match memory.tcx.sess.first_attr_value_str_by_name(&attrs, sym::link_name) {
457 None => memory.tcx.item_name(def_id),
459 if let Some(&id) = memory.extra.extern_statics.get(&link_name) {
// An unregistered extern static is a hard "unsupported" error, not UB.
462 throw_unsup_format!("`extern` static {:?} is not supported by Miri", def_id)
// Attach Miri's per-allocation state (Stacked Borrows stacks, data-race clocks) to a
// new allocation, and pick the base tag for pointers into it.
466 fn init_allocation_extra<'b>(
467 memory_extra: &MemoryExtra,
469 alloc: Cow<'b, Allocation>,
470 kind: Option<MemoryKind<Self::MemoryKind>>,
471 ) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag) {
// Emit a diagnostic if this is the allocation the user asked to track.
472 if Some(id) == memory_extra.tracked_alloc_id {
473 register_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id));
476 let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
477 let alloc = alloc.into_owned();
// Per-allocation checker state mirrors the global state: created iff the checker is enabled.
478 let (stacks, base_tag) = if let Some(stacked_borrows) = &memory_extra.stacked_borrows {
479 let (stacks, base_tag) =
480 Stacks::new_allocation(id, alloc.size(), stacked_borrows, kind);
481 (Some(stacks), base_tag)
483 // No stacks, no tag.
484 (None, Tag::Untagged)
486 let race_alloc = if let Some(data_race) = &memory_extra.data_race {
487 Some(data_race::AllocExtra::new_allocation(&data_race, alloc.size(), kind))
// Tag any pointers already stored inside the allocation's bytes.
491 let mut stacked_borrows = memory_extra.stacked_borrows.as_ref().map(|sb| sb.borrow_mut());
492 let alloc: Allocation<Tag, Self::AllocExtra> = alloc.with_tags_and_extra(
494 if let Some(stacked_borrows) = &mut stacked_borrows {
495 // Only globals may already contain pointers at this point
496 assert_eq!(kind, MiriMemoryKind::Global.into());
497 stacked_borrows.global_base_ptr(alloc)
502 AllocExtra { stacked_borrows: stacks, data_race: race_alloc },
504 (Cow::Owned(alloc), base_tag)
// Hook run on every memory read: inform the data-race detector first, then Stacked Borrows.
// The per-allocation state is `Some` iff the corresponding global state is enabled
// (see `init_allocation_extra`), hence the `unwrap`s on the global side.
509 memory_extra: &Self::MemoryExtra,
510 alloc_extra: &AllocExtra,
513 ) -> InterpResult<'tcx> {
514 if let Some(data_race) = &alloc_extra.data_race {
515 data_race.read(ptr, size, memory_extra.data_race.as_ref().unwrap())?;
517 if let Some(stacked_borrows) = &alloc_extra.stacked_borrows {
518 stacked_borrows.memory_read(ptr, size, memory_extra.stacked_borrows.as_ref().unwrap())
// Hook run on every memory write; same structure as the read hook, with mutable access.
526 memory_extra: &mut Self::MemoryExtra,
527 alloc_extra: &mut AllocExtra,
530 ) -> InterpResult<'tcx> {
531 if let Some(data_race) = &mut alloc_extra.data_race {
532 data_race.write(ptr, size, memory_extra.data_race.as_mut().unwrap())?;
534 if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
535 stacked_borrows.memory_written(ptr, size, memory_extra.stacked_borrows.as_mut().unwrap())
// Hook run on deallocation: report tracked allocations, then notify both checkers.
542 fn memory_deallocated(
543 memory_extra: &mut Self::MemoryExtra,
544 alloc_extra: &mut AllocExtra,
547 ) -> InterpResult<'tcx> {
548 if Some(ptr.alloc_id) == memory_extra.tracked_alloc_id {
549 register_diagnostic(NonHaltingDiagnostic::FreedAlloc(ptr.alloc_id));
551 if let Some(data_race) = &mut alloc_extra.data_race {
552 data_race.deallocate(ptr, size, memory_extra.data_race.as_mut().unwrap())?;
554 if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
555 stacked_borrows.memory_deallocated(ptr, size, memory_extra.stacked_borrows.as_mut().unwrap())
// After static memory is initialized, reset its data-race vector clocks (if the
// detector is on) — presumably so the initialization writes themselves are not
// flagged as races; confirm in `reset_vector_clocks`.
561 fn after_static_mem_initialized(
562 ecx: &mut InterpCx<'mir, 'tcx, Self>,
563 ptr: Pointer<Self::PointerTag>,
565 ) -> InterpResult<'tcx> {
566 if ecx.memory.extra.data_race.is_some() {
567 ecx.reset_vector_clocks(ptr, size)?;
// Tag for pointers to globals: ask Stacked Borrows for the global base tag, if enabled.
573 fn tag_global_base_pointer(memory_extra: &MemoryExtra, id: AllocId) -> Self::PointerTag {
574 if let Some(stacked_borrows) = &memory_extra.stacked_borrows {
575 stacked_borrows.borrow_mut().global_base_ptr(id)
// Retag hook: a no-op unless Stacked Borrows is enabled.
583 ecx: &mut InterpCx<'mir, 'tcx, Self>,
584 kind: mir::RetagKind,
585 place: &PlaceTy<'tcx, Tag>,
586 ) -> InterpResult<'tcx> {
587 if ecx.memory.extra.stacked_borrows.is_some() { ecx.retag(kind, place) } else { Ok(()) }
// Build the Miri-specific `FrameData` for a frame being pushed.
592 ecx: &mut InterpCx<'mir, 'tcx, Self>,
593 frame: Frame<'mir, 'tcx, Tag>,
594 ) -> InterpResult<'tcx, Frame<'mir, 'tcx, Tag, FrameData<'tcx>>> {
595 let stacked_borrows = ecx.memory.extra.stacked_borrows.as_ref();
// With Stacked Borrows enabled, allocate a fresh call id; otherwise use the dummy id 1.
596 let call_id = stacked_borrows.map_or(NonZeroU64::new(1).unwrap(), |stacked_borrows| {
597 stacked_borrows.borrow_mut().new_call()
// No catch-unwind data yet; it is attached later if this turns out to be a `try` frame.
599 let extra = FrameData { call_id, catch_unwind: None };
600 Ok(frame.with_extra(extra))
// The interpreter operates on the call stack of the currently active thread.
604 ecx: &'a InterpCx<'mir, 'tcx, Self>,
605 ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
606 ecx.active_thread_stack()
610 ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
611 ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
612 ecx.active_thread_stack_mut()
// Retag the return place right after a frame is pushed (Stacked Borrows only).
616 fn after_stack_push(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
617 if ecx.memory.extra.stacked_borrows.is_some() { ecx.retag_return_place() } else { Ok(()) }
// Frame-pop hook: delegate to `handle_stack_pop` with the frame's extra data
// (which contains the `catch_unwind` state) and the unwinding flag.
622 ecx: &mut InterpCx<'mir, 'tcx, Self>,
623 frame: Frame<'mir, 'tcx, Tag, FrameData<'tcx>>,
625 ) -> InterpResult<'tcx, StackPopJump> {
626 ecx.handle_stack_pop(frame.extra, unwinding)
// Integer<->pointer casts are resolved by the `intptrcast` module (which also owns the
// machine's address assignment; note `MemoryExtra::rng` is documented as queried by
// `ptr_to_int`).
631 memory: &Memory<'mir, 'tcx, Self>,
633 ) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
634 intptrcast::GlobalState::int_to_ptr(int, memory)
639 memory: &Memory<'mir, 'tcx, Self>,
640 ptr: Pointer<Self::PointerTag>,
641 ) -> InterpResult<'tcx, u64> {
642 intptrcast::GlobalState::ptr_to_int(ptr, memory)