use std::cell::Cell;
use std::fmt::Write;
use std::mem;

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_index::vec::IndexVec;
use rustc_macros::HashStable;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{
    sign_extend, truncate, FrameInfo, GlobalId, InterpResult, Pointer, Scalar,
};
use rustc_middle::ty::layout::{self, TyAndLayout};
use rustc_middle::ty::{
    self, query::TyCtxtAt, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable,
};
use rustc_span::{source_map::DUMMY_SP, Span};
use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};
use super::{
    Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, OpTy, Operand, Place, PlaceTy,
    ScalarMaybeUninit, StackPopJump,
};
use crate::transform::validate::equal_up_to_regions;
use crate::util::storage::AlwaysLiveLocals;

pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    ///
    /// Note: the stack is provided by the machine.
    pub machine: M,

    /// The results of the type checker, from rustc.
    /// The span in this is the "root" of the evaluation, i.e., the const
    /// we are evaluating (if this is CTFE).
    pub tcx: TyCtxtAt<'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub(crate) param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'mir, 'tcx, M>,

    /// A cache for deduplicating vtables.
    pub(super) vtables:
        FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer<M::PointerTag>>,
}

/// A stack frame.
pub struct Frame<'mir, 'tcx, Tag = (), Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub body: &'mir mir::Body<'tcx>,

    /// The def_id and substs of the current function.
    pub instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Work to perform when returning from this function.
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller.
    pub return_place: Option<PlaceTy<'tcx, Tag>>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// Each local is stored as a `LocalState`: it is either dead, live but
    /// uninitialized, or live with a value that either directly contains a
    /// `Scalar` or refers to some part of an `Allocation`.
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Tag>>,
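
    // For example (illustrative): for `fn add(a: i32, b: i32) -> i32 { let s = a + b; s }`,
    // MIR numbers the locals `_0` (the return place), `_1` and `_2` (the arguments
    // `a` and `b`), and then the user variable `s` and any temporaries, so this
    // vector holds one `LocalState` per MIR local in exactly that order.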

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// If this is `None`, we are unwinding and this function doesn't need any clean-up.
    /// Just continue the same as with `Resume`.
    pub loc: Option<mir::Location>,
}

#[derive(Clone, Eq, PartialEq, Debug, HashStable)] // Miri debug-prints these
pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if `ret` is `None` (that's a
    /// function that must never return). Also store the layout of the return place so
    /// we can validate it at that layout.
    /// `ret` stores the block we jump to on a normal return, while `unwind`
    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: Option<mir::BasicBlock> },
    /// Just do nothing: used by `main` and for the `box_alloc` hook in Miri.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throws away
    /// the entire `ecx` when it is done).
    None { cleanup: bool },
}
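
// For example (illustrative): an ordinary `Call` terminator pushes its callee frame with
// `StackPopCleanup::Goto { ret: Some(return_block), unwind: cleanup_block }`, while a CTFE
// root frame is pushed with `StackPopCleanup::None { cleanup: false }` so that the result's
// allocations survive to be interned. See the callers of `push_stack_frame` for the real uses.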

/// State of a local variable including a memoized layout.
#[derive(Clone, PartialEq, Eq, HashStable)]
pub struct LocalState<'tcx, Tag = ()> {
    pub value: LocalValue<Tag>,
    /// Don't modify if `Some`; this is only used to prevent computing the layout twice.
    #[stable_hasher(ignore)]
    pub layout: Cell<Option<TyAndLayout<'tcx>>>,
}

/// Current value of a local variable.
#[derive(Copy, Clone, PartialEq, Eq, Debug, HashStable)] // Miri debug-prints these
pub enum LocalValue<Tag = ()> {
    /// This local is not currently alive, and cannot be used at all.
    Dead,
    /// This local is alive but not yet initialized. It can be written to
    /// but not read from or have its address taken. Locals get initialized on
    /// first write because for unsized locals, we do not know their size
    /// before that.
    Uninitialized,
    /// A normal, live local.
    /// Mostly for convenience, we re-use the `Operand` type here.
    /// This is an optimization over just always having a pointer here;
    /// we can thus avoid doing an allocation when the local just stores
    /// immediate values *and* never has its address taken.
    Live(Operand<Tag>),
}

impl<'tcx, Tag: Copy + 'static> LocalState<'tcx, Tag> {
    pub fn access(&self) -> InterpResult<'tcx, Operand<Tag>> {
        match self.value {
            LocalValue::Dead => throw_ub!(DeadLocal),
            LocalValue::Uninitialized => {
                bug!("The type checker should prevent reading from a never-written local")
            }
            LocalValue::Live(val) => Ok(val),
        }
    }

    /// Overwrite the local. If the local can be overwritten in place, return a reference
    /// to do so; otherwise return the `MemPlace` to consult instead.
    pub fn access_mut(
        &mut self,
    ) -> InterpResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> {
        match self.value {
            LocalValue::Dead => throw_ub!(DeadLocal),
            LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
            ref mut local @ (LocalValue::Live(Operand::Immediate(_)) | LocalValue::Uninitialized) => {
                Ok(Ok(local))
            }
        }
    }
}
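
// Usage sketch (illustrative): a write to a local first calls `access_mut`;
// `Ok(Ok(local))` means the value can be updated in place (the local is stored
// as an `Operand::Immediate`), while `Ok(Err(mplace))` means the local lives in
// memory and the write has to go through that `MemPlace` instead.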

impl<'mir, 'tcx, Tag> Frame<'mir, 'tcx, Tag> {
    pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'mir, 'tcx, Tag, Extra> {
        Frame {
            body: self.body,
            instance: self.instance,
            return_to_block: self.return_to_block,
            return_place: self.return_place,
            locals: self.locals,
            loc: self.loc,
            extra,
        }
    }
}

impl<'mir, 'tcx, Tag, Extra> Frame<'mir, 'tcx, Tag, Extra> {
    /// Return the `SourceInfo` of the current instruction.
    pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
        self.loc.map(|loc| self.body.source_info(loc))
    }
}

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for InterpCx<'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        *self.tcx
    }
}

impl<'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.param_env
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> LayoutOf for InterpCx<'mir, 'tcx, M> {
    type Ty = Ty<'tcx>;
    type TyAndLayout = InterpResult<'tcx, TyAndLayout<'tcx>>;

    #[inline]
    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyAndLayout {
        self.tcx
            .layout_of(self.param_env.and(ty))
            .map_err(|layout| err_inval!(Layout(layout)).into())
    }
}

/// Test if it is valid for a MIR assignment to assign `src`-typed place to `dest`-typed value.
/// This test should be symmetric, as it is primarily about layout compatibility.
pub(super) fn mir_assign_valid_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    src: TyAndLayout<'tcx>,
    dest: TyAndLayout<'tcx>,
) -> bool {
    // Type-changing assignments can happen when subtyping is used. While
    // all normal lifetimes are erased, higher-ranked types with their
    // late-bound lifetimes are still around and can lead to type
    // differences. So we compare ignoring lifetimes.
    if equal_up_to_regions(tcx, param_env, src.ty, dest.ty) {
        // Make sure the layout is equal, too -- just to be safe. Miri really needs layout equality.
        assert_eq!(src.layout, dest.layout);
        true
    } else {
        false
    }
}
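
// For example (illustrative): MIR may assign a value of type `for<'a> fn(&'a u8)`
// to a place of type `fn(&'static u8)`. The two types differ only in their
// (late-bound) lifetimes, so `equal_up_to_regions` accepts the pair, and both
// sides are then required to have identical layout.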

/// Use the already known layout if given (but sanity check in debug mode),
/// or compute the layout.
#[cfg_attr(not(debug_assertions), inline(always))]
pub(super) fn from_known_layout<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    param_env: ParamEnv<'tcx>,
    known_layout: Option<TyAndLayout<'tcx>>,
    compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
    match known_layout {
        None => compute(),
        Some(known_layout) => {
            if cfg!(debug_assertions) {
                let check_layout = compute()?;
                if !mir_assign_valid_types(tcx.tcx, param_env, check_layout, known_layout) {
                    span_bug!(
                        tcx.span,
                        "expected type differs from actual type.\nexpected: {:?}\nactual: {:?}",
                        known_layout.ty,
                        check_layout.ty,
                    );
                }
            }
            Ok(known_layout)
        }
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub fn new(
        tcx: TyCtxt<'tcx>,
        root_span: Span,
        param_env: ty::ParamEnv<'tcx>,
        machine: M,
        memory_extra: M::MemoryExtra,
    ) -> Self {
        InterpCx {
            machine,
            tcx: tcx.at(root_span),
            param_env,
            memory: Memory::new(tcx, memory_extra),
            vtables: FxHashMap::default(),
        }
    }

    #[inline(always)]
    pub fn cur_span(&self) -> Span {
        self.stack()
            .last()
            .and_then(|f| f.current_source_info())
            .map(|si| si.span)
            .unwrap_or(self.tcx.span)
    }

    #[inline(always)]
    pub fn force_ptr(
        &self,
        scalar: Scalar<M::PointerTag>,
    ) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
        self.memory.force_ptr(scalar)
    }

    #[inline(always)]
    pub fn force_bits(
        &self,
        scalar: Scalar<M::PointerTag>,
        size: Size,
    ) -> InterpResult<'tcx, u128> {
        self.memory.force_bits(scalar, size)
    }
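
    // Worked example (illustrative): `force_bits(Scalar::from_u32(7), Size::from_bytes(4))`
    // yields `Ok(7)`, whereas calling it on a pointer value only succeeds if the
    // machine can turn pointers into raw bytes (the CTFE machine cannot).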

    /// Call this to turn untagged "global" pointers (obtained via `tcx`) into
    /// the *canonical* machine pointer to the allocation. Must never be used
    /// for any other pointers!
    ///
    /// This represents a *direct* access to that memory, as opposed to access
    /// through a pointer that was created by the program.
    #[inline(always)]
    pub fn tag_global_base_pointer(&self, ptr: Pointer) -> Pointer<M::PointerTag> {
        self.memory.tag_global_base_pointer(ptr)
    }

    #[inline(always)]
    pub(crate) fn stack(&self) -> &[Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>] {
        M::stack(self)
    }

    #[inline(always)]
    pub(crate) fn stack_mut(
        &mut self,
    ) -> &mut Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>> {
        M::stack_mut(self)
    }

    #[inline(always)]
    pub fn frame_idx(&self) -> usize {
        let stack = self.stack();
        assert!(!stack.is_empty());
        stack.len() - 1
    }

    #[inline(always)]
    pub fn frame(&self) -> &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
        self.stack().last().expect("no call frames exist")
    }

    #[inline(always)]
    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
        self.stack_mut().last_mut().expect("no call frames exist")
    }

    #[inline(always)]
    pub(super) fn body(&self) -> &'mir mir::Body<'tcx> {
        self.frame().body
    }

    #[inline(always)]
    pub fn sign_extend(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        assert!(ty.abi.is_signed());
        sign_extend(value, ty.size)
    }
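
    // Worked example (illustrative): for an `i8`, `sign_extend(0xFF, ...)` treats
    // `0xFF` as `-1` and widens it to 128 bits, yielding `u128::MAX` (all bits set).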

    #[inline(always)]
    pub fn truncate(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        truncate(value, ty.size)
    }
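
    // Worked example (illustrative): for a `u8`, `truncate(0x1FF, ...)` keeps only
    // the low 8 bits and yields `0xFF`; this is how arithmetic results are wrapped
    // back to the value's type size.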

    #[inline]
    pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
        ty.is_sized(self.tcx, self.param_env)
    }

    #[inline]
    pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
        ty.is_freeze(*self.tcx, self.param_env, self.tcx.span)
    }
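
    // For example (illustrative): `type_is_freeze` holds for `i32` but not for
    // `Cell<i32>`, which contains an `UnsafeCell` and can thus be mutated through
    // a shared reference.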

    pub fn load_mir(
        &self,
        instance: ty::InstanceDef<'tcx>,
        promoted: Option<mir::Promoted>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        // Do not continue if typeck errors occurred (can only occur in the local crate).
        let did = instance.def_id();
        if let Some(did) = did.as_local() {
            if self.tcx.has_typeck_tables(did) {
                if let Some(error_reported) = self.tcx.typeck_tables_of(did).tainted_by_errors {
                    throw_inval!(TypeckError(error_reported))
                }
            }
        }
        trace!("load mir(instance={:?}, promoted={:?})", instance, promoted);
        if let Some(promoted) = promoted {
            return Ok(&self.tcx.promoted_mir(did)[promoted]);
        }
        match instance {
            ty::InstanceDef::Item(def_id) => {
                if self.tcx.is_mir_available(did) {
                    Ok(self.tcx.optimized_mir(did))
                } else {
                    throw_unsup!(NoMirFor(def_id))
                }
            }
            _ => Ok(self.tcx.instance_mir(instance)),
        }
    }

    /// Call this on things you got out of the MIR (so it is as generic as the current
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_current_frame_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
        &self,
        value: T,
    ) -> T {
        self.subst_from_frame_and_normalize_erasing_regions(self.frame(), value)
    }

    /// Call this on things you got out of the MIR (so it is as generic as the provided
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn subst_from_frame_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
        &self,
        frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        value: T,
    ) -> T {
        if let Some(substs) = frame.instance.substs_for_mir_body() {
            self.tcx.subst_and_normalize_erasing_regions(substs, self.param_env, &value)
        } else {
            self.tcx.normalize_erasing_regions(self.param_env, value)
        }
    }
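
    // For example (illustrative): while interpreting a frame for `fn foo<T>(x: Option<T>)`
    // instantiated with `T = i32`, the MIR-level type `Option<T>` is substituted and
    // normalized to `Option<i32>` before the interpreter computes its layout.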

    /// The `substs` are assumed to already be in our interpreter "universe" (param_env).
    pub(super) fn resolve(
        &self,
        def_id: DefId,
        substs: SubstsRef<'tcx>,
    ) -> InterpResult<'tcx, ty::Instance<'tcx>> {
        trace!("resolve: {:?}, {:#?}", def_id, substs);
        trace!("param_env: {:#?}", self.param_env);
        trace!("substs: {:#?}", substs);
        match ty::Instance::resolve(*self.tcx, self.param_env, def_id, substs) {
            Ok(Some(instance)) => Ok(instance),
            Ok(None) => throw_inval!(TooGeneric),

            // FIXME(eddyb) this could be a bit more specific than `TypeckError`.
            Err(error_reported) => throw_inval!(TypeckError(error_reported)),
        }
    }

    pub fn layout_of_local(
        &self,
        frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        // `const_prop` runs into this with an invalid (empty) frame, so we
        // have to support that case (mostly by skipping all caching).
        match frame.locals.get(local).and_then(|state| state.layout.get()) {
            None => {
                let layout = from_known_layout(self.tcx, self.param_env, layout, || {
                    let local_ty = frame.body.local_decls[local].ty;
                    let local_ty =
                        self.subst_from_frame_and_normalize_erasing_regions(frame, local_ty);
                    self.layout_of(local_ty)
                })?;
                if let Some(state) = frame.locals.get(local) {
                    // Layouts of locals are requested a lot, so we cache them.
                    state.layout.set(Some(layout));
                }
                Ok(layout)
            }
            Some(layout) => Ok(layout),
        }
    }

    /// Returns the actual dynamic size and alignment of the place at the given type.
    /// Only the "meta" (metadata) part of the place matters.
    /// This can fail to provide an answer for extern types.
    pub(super) fn size_and_align_of(
        &self,
        metadata: MemPlaceMeta<M::PointerTag>,
        layout: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        if !layout.is_unsized() {
            return Ok(Some((layout.size, layout.align.abi)));
        }
        match layout.ty.kind {
            ty::Adt(..) | ty::Tuple(..) => {
                // First get the size of all statically known fields.
                // Don't use `type_of::sizing_type_of` because that expects the type to be
                // sized, and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!layout.ty.is_simd());
                assert!(layout.fields.count() > 0);
                trace!("DST layout: {:?}", layout);

                let sized_size = layout.fields.offset(layout.fields.count() - 1);
                let sized_align = layout.align.abi;
                trace!(
                    "DST {} statically sized prefix size: {:?} align: {:?}",
                    layout.ty,
                    sized_size,
                    sized_align
                );

                // Recurse to get the size of the dynamically sized field (must be
                // the last field). Can't have foreign types here, how would we
                // adjust alignment and size for them?
                let field = layout.field(self, layout.fields.count() - 1)?;
                let (unsized_size, unsized_align) = match self.size_and_align_of(metadata, field)? {
                    Some(size_and_align) => size_and_align,
                    None => {
                        // A field with extern type. If this field is at offset 0, we behave
                        // like the underlying extern type.
                        // FIXME: Once we have made decisions for how to handle size and alignment
                        // of `extern type`, this should be adapted. It is just a temporary hack
                        // to get some code to work that probably ought to work.
                        if sized_size == Size::ZERO {
                            return Ok(None);
                        } else {
                            bug!("Fields cannot be extern types, unless they are at offset 0")
                        }
                    }
                };

                // FIXME (#26403, #27023): We should be adding padding
                // to `sized_size` (to accommodate the `unsized_align`
                // required of the unsized field that follows) before
                // summing it with `unsized_size`. (Note that since #26403
                // is unfixed, we do not yet add the necessary padding
                // here. But this is where the add would go.)

                // Return the sum of sizes and max of aligns.
                let size = sized_size + unsized_size; // `Size` addition

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let align = sized_align.max(unsized_align);

                // Issue #27023: must add any necessary padding to `size`
                // (to make it a multiple of `align`) before returning it.
                let size = size.align_to(align);

                // Check if this brought us over the size limit.
                if size.bytes() >= self.tcx.data_layout.obj_size_bound() {
                    throw_ub!(InvalidMeta("total size is bigger than largest supported object"));
                }
                Ok(Some((size, align)))
            }
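
            // Worked example (illustrative): for `struct S { a: u32, b: [u16] }` with 3
            // slice elements, sized_size = 4 and sized_align = 4, while the `[u16]` tail
            // has unsized_size = 6 and unsized_align = 2. The total is
            // align_to(4 + 6, max(4, 2)) = align_to(10, 4) = 12 bytes, with align 4.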

            ty::Dynamic(..) => {
                let vtable = metadata.unwrap_meta();
                // Read size and align from vtable (already checks size).
                Ok(Some(self.read_size_and_align_from_vtable(vtable)?))
            }

            ty::Slice(_) | ty::Str => {
                let len = metadata.unwrap_meta().to_machine_usize(self)?;
                let elem = layout.field(self, 0)?;

                // Make sure the slice is not too big.
                let size = elem.size.checked_mul(len, self).ok_or_else(|| {
                    err_ub!(InvalidMeta("slice is bigger than largest supported object"))
                })?;
                Ok(Some((size, elem.align.abi)))
            }
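
            // Worked example (illustrative): for a `&[u32]` whose metadata says len = 5,
            // the computed size is 4 * 5 = 20 bytes with the element alignment of 4.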

            ty::Foreign(_) => Ok(None),

            _ => bug!("size_and_align_of::<{:?}> not supported", layout.ty),
        }
    }

    pub fn size_and_align_of_mplace(
        &self,
        mplace: MPlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        self.size_and_align_of(mplace.meta, mplace.layout)
    }

    pub fn push_stack_frame(
        &mut self,
        instance: ty::Instance<'tcx>,
        body: &'mir mir::Body<'tcx>,
        return_place: Option<PlaceTy<'tcx, M::PointerTag>>,
        return_to_block: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        if !self.stack().is_empty() {
            info!("PAUSING({}) {}", self.frame_idx(), self.frame().instance);
        }
        ::log_settings::settings().indentation += 1;

        // First push a stack frame so we have access to the local substs.
        let pre_frame = Frame {
            body,
            loc: Some(mir::Location::START),
            return_to_block,
            return_place,
            // Empty local array; we fill it in below, after we are inside the stack frame and
            // all methods actually know about the frame.
            locals: IndexVec::new(),
            instance,
            extra: (),
        };
        let frame = M::init_frame_extra(self, pre_frame)?;
        self.stack_mut().push(frame);

        // Locals are initially uninitialized.
        let dummy = LocalState { value: LocalValue::Uninitialized, layout: Cell::new(None) };
        let mut locals = IndexVec::from_elem(dummy, &body.local_decls);

        // Now mark those locals as dead that we do not want to initialize.
        match self.tcx.def_kind(instance.def_id()) {
            // Statics and constants don't have `Storage*` statements, no need to look for them.
            //
            // FIXME: The above is likely untrue. See
            // <https://github.com/rust-lang/rust/pull/70004#issuecomment-602022110>. Is it
            // okay to ignore `StorageDead`/`StorageLive` annotations during CTFE?
            DefKind::Static | DefKind::Const | DefKind::AssocConst => {}
            _ => {
                // Mark locals that use `Storage*` annotations as dead on function entry.
                let always_live = AlwaysLiveLocals::new(self.body());
                for local in locals.indices() {
                    if !always_live.contains(local) {
                        locals[local].value = LocalValue::Dead;
                    }
                }
            }
        }
        self.frame_mut().locals = locals;

        M::after_stack_push(self)?;
        info!("ENTERING({}) {}", self.frame_idx(), self.frame().instance);

        if !self.tcx.sess.recursion_limit().value_within_limit(self.stack().len()) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            Ok(())
        }
    }

    /// Jump to the given block.
    #[inline]
    pub fn go_to_block(&mut self, target: mir::BasicBlock) {
        self.frame_mut().loc = Some(mir::Location { block: target, statement_index: 0 });
    }

    /// *Return* to the given `target` basic block.
    /// Do *not* use for unwinding! Use `unwind_to_block` instead.
    ///
    /// If `target` is `None`, that indicates the function cannot return, so we raise UB.
    pub fn return_to_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.go_to_block(target);
            Ok(())
        } else {
            throw_ub!(Unreachable)
        }
    }

    /// *Unwind* to the given `target` basic block.
    /// Do *not* use for returning! Use `return_to_block` instead.
    ///
    /// If `target` is `None`, that indicates the function does not need cleanup during
    /// unwinding, and we will just keep propagating that upwards.
    pub fn unwind_to_block(&mut self, target: Option<mir::BasicBlock>) {
        self.frame_mut().loc = target.map(|block| mir::Location { block, statement_index: 0 });
    }

    /// Pops the current frame from the stack, deallocating the
    /// memory for allocated locals.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
    pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx> {
        info!(
            "LEAVING({}) {} (unwinding = {})",
            self.frame_idx(),
            self.frame().instance,
            unwinding
        );

        // Sanity check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                None => true,
                Some(loc) => self.body().basic_blocks()[loc.block].is_cleanup,
            }
        );

        ::log_settings::settings().indentation -= 1;
        let frame =
            self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");

        if !unwinding {
            // Copy the return value to the caller's stack frame.
            if let Some(return_place) = frame.return_place {
                let op = self.access_local(&frame, mir::RETURN_PLACE, None)?;
                self.copy_op_transmute(op, return_place)?;
                self.dump_place(*return_place);
            } else {
                throw_ub!(Unreachable);
            }
        }

        // Now where do we jump next?

        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
        // In that case, we return early. We also avoid validation in that case,
        // because this is CTFE and the final value will be thoroughly validated anyway.
        let (cleanup, next_block) = match frame.return_to_block {
            StackPopCleanup::Goto { ret, unwind } => {
                (true, Some(if unwinding { unwind } else { ret }))
            }
            StackPopCleanup::None { cleanup, .. } => (cleanup, None),
        };

        if !cleanup {
            assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
            assert!(next_block.is_none(), "tried to skip cleanup when we have a next block!");
            assert!(!unwinding, "tried to skip cleanup during unwinding");
            // Leak the locals, skip validation, skip machine hook.
            return Ok(());
        }

        // Cleanup: deallocate all locals that are backed by an allocation.
        for local in &frame.locals {
            self.deallocate_local(local.value)?;
        }

        if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump {
            // The hook already did everything.
            // We want to skip the `info!` below, hence early return.
            return Ok(());
        }
        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            let unwind = next_block.expect("Encountered StackPopCleanup::None when unwinding!");
            self.unwind_to_block(unwind);
        } else {
            // Follow the normal return edge.
            if let Some(ret) = next_block {
                self.return_to_block(ret)?;
            }
        }

        if !self.stack().is_empty() {
            info!(
                "CONTINUING({}) {} (unwinding = {})",
                self.frame_idx(),
                self.frame().instance,
                unwinding
            );
        }

        Ok(())
    }

    /// Mark a storage as live, killing the previous content and returning it.
    /// Remember to deallocate that!
    pub fn storage_live(
        &mut self,
        local: mir::Local,
    ) -> InterpResult<'tcx, LocalValue<M::PointerTag>> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
        trace!("{:?} is now live", local);

        let local_val = LocalValue::Uninitialized;
        // StorageLive *always* kills the value that's currently stored.
        // However, we do not error if the variable already is live;
        // see <https://github.com/rust-lang/rust/issues/42371>.
        Ok(mem::replace(&mut self.frame_mut().locals[local].value, local_val))
    }
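
    // Usage sketch (illustrative): interpreting a `StorageLive(_3)` statement calls
    // `storage_live(_3)` and then hands the returned old value to `deallocate_local`,
    // so a previously live `_3` does not leak its backing allocation.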

    /// Returns the old value of the local.
    /// Remember to deallocate that!
    pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue<M::PointerTag> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
        trace!("{:?} is now dead", local);

        mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead)
    }

    pub(super) fn deallocate_local(
        &mut self,
        local: LocalValue<M::PointerTag>,
    ) -> InterpResult<'tcx> {
        // FIXME: should we tell the user that there was a local which was never written to?
        if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
            trace!("deallocating local");
            // All locals have a backing allocation, even if the allocation is empty
            // due to the local having ZST type.
            let ptr = ptr.assert_ptr();
            if log_enabled!(::log::Level::Trace) {
                self.memory.dump_alloc(ptr.alloc_id);
            }
            self.memory.deallocate_local(ptr)?;
        };
        Ok(())
    }

    pub(super) fn const_eval(
        &self,
        gid: GlobalId<'tcx>,
        ty: Ty<'tcx>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        // For statics we pick `ParamEnv::reveal_all`, because statics don't have generics
        // and thus don't care about the parameter environment. While we could just use
        // `self.param_env`, that would mean we invoke the query to evaluate the static
        // with different parameter environments, thus causing the static to be evaluated
        // multiple times.
        let param_env = if self.tcx.is_static(gid.instance.def_id()) {
            ty::ParamEnv::reveal_all()
        } else {
            self.param_env
        };
        let val = self.tcx.const_eval_global_id(param_env, gid, Some(self.tcx.span))?;

        // Even though `ecx.const_eval` is called from `eval_const_to_op` we can never have a
        // recursion deeper than one level, because the `tcx.const_eval` above is guaranteed to not
        // return `ConstValue::Unevaluated`, which is the only way that `eval_const_to_op` will call
        // `ecx.const_eval`.
        let const_ = ty::Const { val: ty::ConstKind::Value(val), ty };
        self.eval_const_to_op(&const_, None)
    }

    pub fn const_eval_raw(
        &self,
        gid: GlobalId<'tcx>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
        // For statics we pick `ParamEnv::reveal_all`, because statics don't have generics
        // and thus don't care about the parameter environment. While we could just use
        // `self.param_env`, that would mean we invoke the query to evaluate the static
        // with different parameter environments, thus causing the static to be evaluated
        // multiple times.
        let param_env = if self.tcx.is_static(gid.instance.def_id()) {
            ty::ParamEnv::reveal_all()
        } else {
            self.param_env
        };
        // We use `const_eval_raw` here, and get an unvalidated result. That is okay:
        // our result will later be validated anyway, and there seems no good reason
        // to have to fail early here. This is also more consistent with
        // `Memory::get_static_alloc`, which has to use `const_eval_raw` to avoid cycles.
        // FIXME: We can hit delay_span_bug if this is an invalid const; interning finds
        // that problem, but we never run validation to show an error. Can we ensure
        // this does not happen?
        let val = self.tcx.const_eval_raw(param_env.and(gid))?;
        self.raw_const_to_mplace(val)
    }

    pub fn dump_place(&self, place: Place<M::PointerTag>) {
        // Debug output
        if !log_enabled!(::log::Level::Trace) {
            return;
        }
        match place {
            Place::Local { frame, local } => {
                let mut allocs = Vec::new();
                let mut msg = format!("{:?}", local);
                if frame != self.frame_idx() {
                    write!(msg, " ({} frames up)", self.frame_idx() - frame).unwrap();
                }
                write!(msg, ":").unwrap();

                match self.stack()[frame].locals[local].value {
                    LocalValue::Dead => write!(msg, " is dead").unwrap(),
                    LocalValue::Uninitialized => write!(msg, " is uninitialized").unwrap(),
                    LocalValue::Live(Operand::Indirect(mplace)) => match mplace.ptr {
                        Scalar::Ptr(ptr) => {
                            write!(
                                msg,
                                " by align({}){} ref:",
                                mplace.align.bytes(),
                                match mplace.meta {
                                    MemPlaceMeta::Meta(meta) => format!(" meta({:?})", meta),
                                    MemPlaceMeta::Poison | MemPlaceMeta::None => String::new(),
                                }
                            )
                            .unwrap();
                            allocs.push(ptr.alloc_id);
                        }
                        ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
                    },
                    LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
                        write!(msg, " {:?}", val).unwrap();
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val {
                            allocs.push(ptr.alloc_id);
                        }
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
                        write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val1 {
                            allocs.push(ptr.alloc_id);
                        }
                        if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val2 {
                            allocs.push(ptr.alloc_id);
                        }
                    }
                }

                trace!("{}", msg);
                self.memory.dump_allocs(allocs);
            }
            Place::Ptr(mplace) => match mplace.ptr {
                Scalar::Ptr(ptr) => {
                    trace!("by align({}) ref:", mplace.align.bytes());
                    self.memory.dump_alloc(ptr.alloc_id);
                }
                ptr => trace!(" integral by ref: {:?}", ptr),
            },
        }
    }

    pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> {
        let mut frames = Vec::new();
        for frame in self.stack().iter().rev() {
            let source_info = frame.current_source_info();
            let lint_root = source_info.and_then(|source_info| {
                match &frame.body.source_scopes[source_info.scope].local_data {
                    mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
                    mir::ClearCrossCrate::Clear => None,
                }
            });
            let span = source_info.map_or(DUMMY_SP, |source_info| source_info.span);

            frames.push(FrameInfo { span, instance: frame.instance, lint_root });
        }
        trace!("generate stacktrace: {:#?}", frames);
        frames
    }
}

impl<'ctx, 'mir, 'tcx, Tag, Extra> HashStable<StableHashingContext<'ctx>>
    for Frame<'mir, 'tcx, Tag, Extra>
where
    Extra: HashStable<StableHashingContext<'ctx>>,
    Tag: HashStable<StableHashingContext<'ctx>>,
{
    fn hash_stable(&self, hcx: &mut StableHashingContext<'ctx>, hasher: &mut StableHasher) {
        // Exhaustive match on fields to make sure we forget no field.
        let Frame { body, instance, return_to_block, return_place, locals, loc, extra } = self;
        body.hash_stable(hcx, hasher);
        instance.hash_stable(hcx, hasher);
        return_to_block.hash_stable(hcx, hasher);
        return_place.as_ref().map(|r| &**r).hash_stable(hcx, hasher);
        locals.hash_stable(hcx, hasher);
        loc.hash_stable(hcx, hasher);
        extra.hash_stable(hcx, hasher);
    }
}