1 //! Global machine state as well as implementation of the interpreter engine
5 use std::cell::RefCell;
6 use std::collections::HashMap;
7 use std::num::NonZeroU64;
10 use rand::rngs::StdRng;
15 layout::{LayoutOf, Size},
19 use rustc_span::{source_map::Span, symbol::{sym, Symbol}};
23 // Some global facts about the emulated machine.
// NOTE(review): these values are fixed for every target; PAGE_SIZE in particular
// does not track the real target's page size (see the FIXME below).
24 pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
25 pub const STACK_ADDR: u64 = 32 * PAGE_SIZE; // not really about the "stack", but where we start assigning integer addresses to allocations
26 pub const STACK_SIZE: u64 = 16 * PAGE_SIZE; // whatever
// CPU count exposed to the interpreted program — presumably 1 because the
// interpreter is single-threaded; confirm at the use sites (e.g. sysconf shims).
27 pub const NUM_CPUS: u64 = 1;
29 /// Extra data stored with each stack frame
// NOTE(review): this listing is elided (embedded line numbers jump), so any derives
// and the struct's closing brace are not visible here.
31 pub struct FrameData<'tcx> {
32 /// Extra data for Stacked Borrows.
// Identifies the function call this frame belongs to; handed out by the Stacked
// Borrows global state in `stack_push` below.
33 pub call_id: stacked_borrows::CallId,
35 /// If this is Some(), then this is a special "catch unwind" frame (the frame of the closure
36 /// called by `__rustc_maybe_catch_panic`). When this frame is popped during unwinding a panic,
37 /// we stop unwinding, use the `CatchUnwindData` to
38 /// store the panic payload, and continue execution in the parent frame.
39 pub catch_panic: Option<CatchUnwindData<'tcx>>,
42 /// Extra memory kinds
43 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
44 pub enum MiriMemoryKind {
// NOTE(review): the variant names are elided from this listing; later code in this
// file references `Rust`, `C`, `WinHeap`, `Machine` and `Static`. Only the doc
// comments for some variants remain visible below.
45 /// `__rust_alloc` memory.
49 /// Windows `HeapAlloc` memory.
51 /// Memory for env vars and args, errno, extern statics and other parts of the machine-managed environment.
// Conversion into the interpreter engine's generic `MemoryKind` wrapper.
// NOTE(review): idiomatic Rust implements `From<MiriMemoryKind> for
// MemoryKind<MiriMemoryKind>` instead — the blanket impl then provides `Into` for
// free (clippy `from_over_into`). Left unchanged here because the impl's closing
// braces are elided from this listing.
57 impl Into<MemoryKind<MiriMemoryKind>> for MiriMemoryKind {
59 fn into(self) -> MemoryKind<MiriMemoryKind> {
60 MemoryKind::Machine(self)
64 /// Extra per-allocation data
65 #[derive(Debug, Clone)]
66 pub struct AllocExtra {
67 /// Stacked Borrows state is only added if it is enabled.
// `None` keeps per-allocation overhead at zero when Stacked Borrows is disabled.
68 pub stacked_borrows: Option<stacked_borrows::AllocExtra>,
71 /// Extra global memory data
72 #[derive(Clone, Debug)]
73 pub struct MemoryExtra {
// Global Stacked Borrows state; `None` when the check is disabled (see `new` below).
74 pub stacked_borrows: Option<stacked_borrows::MemoryExtra>,
// State for the pointer<->integer cast model — presumably assigns concrete
// addresses to allocations; confirm in the `intptrcast` module.
75 pub intptrcast: intptrcast::MemoryExtra,
77 /// Mapping extern static names to their canonical allocation.
78 pub(crate) extern_statics: HashMap<Symbol, AllocId>,
80 /// The random number generator used for resolving non-determinism.
81 /// Needs to be queried by ptr_to_int, hence needs interior mutability.
82 pub(crate) rng: RefCell<StdRng>,
// Constructor: wires up the RNG and (optionally) the Stacked Borrows global state.
// NOTE(review): the enclosing `impl MemoryExtra` header, the `else` branch of the
// `if`, and the struct-literal return are elided from this listing.
86 pub fn new(rng: StdRng, stacked_borrows: bool, tracked_pointer_tag: Option<PtrId>) -> Self {
87 let stacked_borrows = if stacked_borrows {
// Shared (`Rc`) + interior-mutable (`RefCell`) because the SB state is consulted
// and mutated from many machine hooks that only have shared access.
88 Some(Rc::new(RefCell::new(stacked_borrows::GlobalState::new(tracked_pointer_tag))))
94 intptrcast: Default::default(),
95 extern_statics: HashMap::default(),
96 rng: RefCell::new(rng),
100 /// Sets up the "extern statics" for this machine.
101 pub fn init_extern_statics<'mir, 'tcx>(
102 this: &mut MiriEvalContext<'mir, 'tcx>,
103 ) -> InterpResult<'tcx> {
// Dispatch on the target OS; the arm's pattern line (presumably "linux") is
// elided from this listing.
104 match this.tcx.sess.target.target.target_os.as_str() {
106 // "__cxa_thread_atexit_impl"
107 // This should be all-zero, pointer-sized.
// Allocate a pointer-sized machine-managed static and zero it, so the program
// observes a NULL `__cxa_thread_atexit_impl`.
108 let layout = this.layout_of(this.tcx.types.usize)?;
109 let place = this.allocate(layout, MiriMemoryKind::Machine.into());
110 this.write_scalar(Scalar::from_machine_usize(0, &*this.tcx), place.into())?;
// Register the allocation under its extern name so `canonical_alloc_id` below can
// redirect all uses of the symbol to this one allocation.
114 .insert(Symbol::intern("__cxa_thread_atexit_impl"), place.ptr.assert_ptr().alloc_id)
117 _ => {} // No "extern statics" supported on this platform
123 /// The machine itself.
124 pub struct Evaluator<'tcx> {
125 /// Environment variables set by `setenv`.
126 /// Miri does not expose env vars from the host to the emulated program.
127 pub(crate) env_vars: EnvVars,
129 /// Program arguments (`Option` because we can only initialize them after creating the ecx).
130 /// These are *pointers* to argc/argv because macOS.
131 /// We also need the full command line as one string because of Windows.
132 pub(crate) argc: Option<Scalar<Tag>>,
133 pub(crate) argv: Option<Scalar<Tag>>,
134 pub(crate) cmd_line: Option<Scalar<Tag>>,
136 /// Last OS error location in memory. It is a 32-bit integer.
137 pub(crate) last_error: Option<MPlaceTy<'tcx, Tag>>,
// Thread-local storage state — presumably TLS keys/destructors for the TLS shims;
// confirm in the `tls` module.
140 pub(crate) tls: TlsData<'tcx>,
142 /// If enabled, the `env_vars` field is populated with the host env vars during initialization
143 /// and random number generation is delegated to the host.
144 pub(crate) communicate: bool,
146 /// Whether to enforce the validity invariant.
147 pub(crate) validate: bool,
// Tables of open file / directory handles for the file-system shims — presumably
// mapping emulated descriptors to host handles; confirm in the shims.
149 pub(crate) file_handler: FileHandler,
150 pub(crate) dir_handler: DirHandler,
152 /// The temporary used for storing the argument of
153 /// the call to `miri_start_panic` (the panic payload) when unwinding.
154 pub(crate) panic_payload: Option<ImmTy<'tcx, Tag>>,
157 impl<'tcx> Evaluator<'tcx> {
// Construct a fresh machine; fields start empty/default and are filled in later.
// NOTE(review): several field initializers and the closing braces of the struct
// literal are elided from this listing.
158 pub(crate) fn new(communicate: bool, validate: bool) -> Self {
160 // `env_vars` could be initialized properly here if `Memory` were available before
161 // calling this method.
162 env_vars: EnvVars::default(),
167 tls: TlsData::default(),
170 file_handler: Default::default(),
171 dir_handler: Default::default(),
177 /// A rustc InterpCx for Miri.
178 pub type MiriEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, Evaluator<'tcx>>;
180 /// A little trait that's useful to be inherited by extension traits.
181 pub trait MiriEvalContextExt<'mir, 'tcx> {
182 fn eval_context_ref<'a>(&'a self) -> &'a MiriEvalContext<'mir, 'tcx>;
183 fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriEvalContext<'mir, 'tcx>;
// Identity impl: an eval context trivially yields itself.
// NOTE(review): the method bodies (presumably just `self`) are elided from this listing.
185 impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
187 fn eval_context_ref(&self) -> &MiriEvalContext<'mir, 'tcx> {
191 fn eval_context_mut(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
196 /// Machine hook implementations.
197 impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
// Plug Miri's own types into the generic interpreter engine.
198 type MemoryKinds = MiriMemoryKind;
200 type FrameExtra = FrameData<'tcx>;
201 type MemoryExtra = MemoryExtra;
202 type AllocExtra = AllocExtra;
203 type PointerTag = Tag;
204 type ExtraFnVal = Dlsym;
// NOTE(review): this is the right-hand side of a `type MemoryMap = ...;` item whose
// left-hand side line is elided from this listing.
207 MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
// Statics are placed in Miri's own `Static` memory kind.
209 const STATIC_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Static);
// Alignment checking is always on.
211 const CHECK_ALIGN: bool = true;
// Whether to enforce the validity invariant — presumably returns
// `ecx.machine.validate`; the body is elided from this listing.
214 fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
// Function-call hook: return MIR to execute, or `None` once the call has been
// emulated as a shim; delegates to the `find_mir_or_eval_fn` extension method.
219 fn find_mir_or_eval_fn(
220 ecx: &mut InterpCx<'mir, 'tcx, Self>,
222 instance: ty::Instance<'tcx>,
223 args: &[OpTy<'tcx, Tag>],
224 ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
225 unwind: Option<mir::BasicBlock>,
226 ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
227 ecx.find_mir_or_eval_fn(instance, args, ret, unwind)
// `call_extra_fn` hook (the `fn` line and the `fn_val: Dlsym` parameter are elided
// from this listing): emulates a function resolved via `dlsym`. Note the unwind
// target is deliberately ignored (`_unwind`).
232 ecx: &mut InterpCx<'mir, 'tcx, Self>,
234 args: &[OpTy<'tcx, Tag>],
235 ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
236 _unwind: Option<mir::BasicBlock>,
237 ) -> InterpResult<'tcx> {
238 ecx.call_dlsym(fn_val, args, ret)
// `call_intrinsic` hook (the `fn` line and the `span` parameter are elided from
// this listing): delegates intrinsic emulation to the extension method.
243 ecx: &mut rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
245 instance: ty::Instance<'tcx>,
246 args: &[OpTy<'tcx, Tag>],
247 ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
248 unwind: Option<mir::BasicBlock>,
249 ) -> InterpResult<'tcx> {
250 ecx.call_intrinsic(span, instance, args, ret, unwind)
// `assert_panic` hook (the `fn` line and the `span` parameter are elided from this
// listing): handles a failing MIR `Assert` terminator via the panic machinery.
255 ecx: &mut InterpCx<'mir, 'tcx, Self>,
257 msg: &mir::AssertMessage<'tcx>,
258 unwind: Option<mir::BasicBlock>,
259 ) -> InterpResult<'tcx> {
260 ecx.assert_panic(span, msg, unwind)
// `binary_ptr_op` hook (the `fn` line and the `bin_op` parameter are elided from
// this listing): pointer arithmetic/comparison, delegated to the extension method.
// Returns (result value, overflow flag, result type).
265 ecx: &rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
267 left: ImmTy<'tcx, Tag>,
268 right: ImmTy<'tcx, Tag>,
269 ) -> InterpResult<'tcx, (Scalar<Tag>, bool, Ty<'tcx>)> {
270 ecx.binary_ptr_op(bin_op, left, right)
// `box_alloc` hook (the `fn` line is elided from this listing): allocates backing
// memory for `Box<T>` by pushing a call to the `exchange_malloc` lang item with
// the size and alignment of `T`.
274 ecx: &mut InterpCx<'mir, 'tcx, Self>,
275 dest: PlaceTy<'tcx, Tag>,
276 ) -> InterpResult<'tcx> {
277 trace!("box_alloc for {:?}", dest.layout.ty);
// `dest` has type `Box<T>`; deref the type once to get the layout of `T` itself.
278 let layout = ecx.layout_of(dest.layout.ty.builtin_deref(false).unwrap().ty)?;
279 // First argument: `size`.
280 // (`0` is allowed here -- this is expected to be handled by the lang item).
281 let size = Scalar::from_uint(layout.size.bytes(), ecx.pointer_size());
283 // Second argument: `align`.
284 let align = Scalar::from_uint(layout.align.abi.bytes(), ecx.pointer_size());
286 // Call the `exchange_malloc` lang item.
287 let malloc = ecx.tcx.lang_items().exchange_malloc_fn().unwrap();
288 let malloc = ty::Instance::mono(ecx.tcx.tcx, malloc);
// NOTE(review): the surrounding call (presumably `ecx.call_function(malloc, ...)`)
// that these argument lines belong to is elided from this listing.
291 &[size.into(), align.into()],
293 // Don't do anything when we are done. The `statement()` function will increment
294 // the old stack frame's stmt counter to the next statement, which means that when
295 // `exchange_malloc` returns, we go on evaluating exactly where we want to be.
296 StackPopCleanup::None { cleanup: true },
// Map an extern-static allocation id to the machine's canonical allocation for
// that symbol, so all uses of e.g. `__cxa_thread_atexit_impl` share one allocation.
// NOTE(review): several lines (the `tcx` binding, some match arms, the return
// expressions) are elided from this listing.
301 fn canonical_alloc_id(mem: &Memory<'mir, 'tcx, Self>, id: AllocId) -> AllocId {
303 // Figure out if this is an extern static, and if yes, which one.
304 let def_id = match tcx.alloc_map.lock().get(id) {
305 Some(GlobalAlloc::Static(def_id)) if tcx.is_foreign_item(def_id) => def_id,
307 // No need to canonicalize anything.
// Prefer the `#[link_name]` attribute if present, falling back to the item's name.
311 let attrs = tcx.get_attrs(def_id);
312 let link_name = match attr::first_attr_value_str_by_name(&attrs, sym::link_name) {
314 None => tcx.item_name(def_id),
316 // Check if we know this one.
317 if let Some(canonical_id) = mem.extra.extern_statics.get(&link_name) {
318 trace!("canonical_alloc_id: {:?} ({}) -> {:?}", id, link_name, canonical_id);
321 // Return original id; `Memory::get_static_alloc` will throw an error.
// Attach Miri's extra state (Stacked Borrows stacks) to a newly interned
// allocation and tag the pointers it contains; returns the adjusted allocation
// together with the base tag for pointers to it.
// NOTE(review): some parameter lines (`id`), `else` keywords and closure scaffolding
// are elided from this listing.
326 fn init_allocation_extra<'b>(
327 memory_extra: &MemoryExtra,
329 alloc: Cow<'b, Allocation>,
330 kind: Option<MemoryKind<Self::MemoryKinds>>,
331 ) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag) {
332 let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
333 let alloc = alloc.into_owned();
// With SB enabled, build fresh borrow stacks for the allocation; otherwise no
// stacks and the untagged pointer tag.
334 let (stacks, base_tag) =
335 if let Some(stacked_borrows) = memory_extra.stacked_borrows.as_ref() {
336 let (stacks, base_tag) =
337 Stacks::new_allocation(id, alloc.size, Rc::clone(stacked_borrows), kind);
338 (Some(stacks), base_tag)
340 // No stacks, no tag.
341 (None, Tag::Untagged)
// Borrow the SB state mutably so relocations inside the allocation can be tagged.
343 let mut stacked_borrows = memory_extra.stacked_borrows.as_ref().map(|sb| sb.borrow_mut());
344 let alloc: Allocation<Tag, Self::AllocExtra> = alloc.with_tags_and_extra(
346 if let Some(stacked_borrows) = stacked_borrows.as_mut() {
347 // Only statics may already contain pointers at this point
348 assert_eq!(kind, MiriMemoryKind::Static.into());
349 stacked_borrows.static_base_ptr(alloc)
354 AllocExtra { stacked_borrows: stacks },
356 (Cow::Owned(alloc), base_tag)
// Choose the tag for a pointer to a static: a Stacked Borrows base tag when SB is
// enabled. NOTE(review): the `else` branch (presumably `Tag::Untagged`) is elided
// from this listing.
360 fn tag_static_base_pointer(memory_extra: &MemoryExtra, id: AllocId) -> Self::PointerTag {
361 if let Some(stacked_borrows) = memory_extra.stacked_borrows.as_ref() {
362 stacked_borrows.borrow_mut().static_base_ptr(id)
// Retag hook (the `fn retag` line is elided from this listing before these
// parameters): re-tags a place after a reference-producing operation.
370 ecx: &mut InterpCx<'mir, 'tcx, Self>,
371 kind: mir::RetagKind,
372 place: PlaceTy<'tcx, Tag>,
373 ) -> InterpResult<'tcx> {
// Fast path: retagging is a no-op when Stacked Borrows is disabled.
374 if ecx.memory.extra.stacked_borrows.is_none() {
378 ecx.retag(kind, place)
// New-frame hook: allocate a Stacked Borrows call id for the pushed frame.
383 fn stack_push(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx, FrameData<'tcx>> {
384 let stacked_borrows = ecx.memory.extra.stacked_borrows.as_ref();
// With SB disabled, use a dummy (but still NonZero) id of 1 for every frame.
385 let call_id = stacked_borrows.map_or(NonZeroU64::new(1).unwrap(), |stacked_borrows| {
386 stacked_borrows.borrow_mut().new_call()
388 Ok(FrameData { call_id, catch_panic: None })
// Frame-pop hook (the `fn stack_pop` line and the `unwinding` parameter are elided
// from this listing): delegates unwinding / catch-panic handling to the extension
// method `handle_stack_pop`.
393 ecx: &mut InterpCx<'mir, 'tcx, Self>,
394 extra: FrameData<'tcx>,
396 ) -> InterpResult<'tcx, StackPopInfo> {
397 ecx.handle_stack_pop(extra, unwinding)
// Pointer <-> integer cast hooks, both delegated to the intptrcast model.
// NOTE(review): the `fn int_to_ptr` / `fn ptr_to_int` lines and the `int` parameter
// are elided from this listing.
402 memory: &Memory<'mir, 'tcx, Self>,
404 ) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
405 intptrcast::GlobalState::int_to_ptr(int, memory)
410 memory: &Memory<'mir, 'tcx, Self>,
411 ptr: Pointer<Self::PointerTag>,
412 ) -> InterpResult<'tcx, u64> {
413 intptrcast::GlobalState::ptr_to_int(ptr, memory)
// Per-access hooks: forward reads, writes and deallocations to this allocation's
// Stacked Borrows stacks when SB is enabled for it.
// NOTE(review): the `ptr`/`size` parameter lines and the `else` branches
// (presumably `Ok(())`) are elided from this listing.
417 impl AllocationExtra<Tag> for AllocExtra {
419 fn memory_read<'tcx>(
420 alloc: &Allocation<Tag, AllocExtra>,
423 ) -> InterpResult<'tcx> {
424 if let Some(ref stacked_borrows) = alloc.extra.stacked_borrows {
425 stacked_borrows.memory_read(ptr, size)
432 fn memory_written<'tcx>(
// Writes need `&mut` access to the allocation, hence the mutable borrow below.
433 alloc: &mut Allocation<Tag, AllocExtra>,
436 ) -> InterpResult<'tcx> {
437 if let Some(ref mut stacked_borrows) = alloc.extra.stacked_borrows {
438 stacked_borrows.memory_written(ptr, size)
445 fn memory_deallocated<'tcx>(
446 alloc: &mut Allocation<Tag, AllocExtra>,
449 ) -> InterpResult<'tcx> {
450 if let Some(ref mut stacked_borrows) = alloc.extra.stacked_borrows {
451 stacked_borrows.memory_deallocated(ptr, size)
// Leak-check policy: program-requested heap memory (Rust / C / Windows heap) must
// be freed before exit, while machine-managed memory and statics may leak.
// NOTE(review): the `match self {` line and closing braces are elided from this listing.
458 impl MayLeak for MiriMemoryKind {
460 fn may_leak(self) -> bool {
461 use self::MiriMemoryKind::*;
463 Rust | C | WinHeap => false,
464 Machine | Static => true,