PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar, Pointer,
RawConst, ConstValue, Machine,
InterpResult, InterpErrorInfo, GlobalId, InterpCx, StackPopCleanup,
- Allocation, AllocId, MemoryKind, Memory, StackPopInfo,
+ Allocation, AllocId, MemoryKind, Memory,
snapshot, RefTracking, intern_const_alloc_recursive,
};
/// Hook that runs when a stack frame is pushed. This machine needs no
/// per-frame setup, so this is a no-op that always succeeds.
fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
Ok(())
}
-
- /// Called immediately before a stack frame gets popped.
- #[inline(always)]
- fn stack_pop(
- _ecx: &mut InterpCx<'mir, 'tcx, Self>,
- _extra: (),
- ) -> InterpResult<'tcx, StackPopInfo> {
- // Const-eval mode does not support unwinding from panics
- Ok(StackPopInfo::Normal)
- }
}
/// Extracts a field of a (variant of a) const.
use super::{
Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
- Memory, Machine, PointerArithmetic, FnVal, StackPopInfo
+ Memory, Machine, StackPopInfo
};
pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// The block that is currently executed (or will be executed after the above call stacks
/// return).
/// If this is `None`, we are unwinding and this function doesn't need any clean-up.
- /// Just continue the same as with
+ /// Just continue the same as with `Resume`.
pub block: Option<mir::BasicBlock>,
/// The index of the currently evaluated statement.
/// `cleanup` block for the function, which is responsible for running
/// `Drop` impls for any locals that have been initialized at this point.
/// The cleanup block ends with a special `Resume` terminator, which will
- /// cause us to continue unwinding where we left off.
+ /// cause us to continue unwinding.
pub(super) fn pop_stack_frame(
&mut self,
unwinding: bool
trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
frames
}
-
- /// Resolve the function at the specified slot in the provided
- /// vtable. An index of '0' corresponds to the first method
- /// declared in the trait of the provided vtable
- pub fn get_vtable_slot(
- &self,
- vtable: Scalar<M::PointerTag>,
- idx: usize
- ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
- let ptr_size = self.pointer_size();
- // Skip over the 'drop_ptr', 'size', and 'align' fields
- let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
- let vtable_slot = self.memory.check_ptr_access(
- vtable_slot,
- ptr_size,
- self.tcx.data_layout.pointer_align.abi,
- )?.expect("cannot be a ZST");
- let fn_ptr = self.memory.get(vtable_slot.alloc_id)?
- .read_ptr_sized(self, vtable_slot)?.not_undef()?;
- Ok(self.memory.get_fn(fn_ptr)?)
- }
}
/// Called immediately after a stack frame gets popped.
/// The returned `StackPopInfo` tells the interpreter how to continue;
/// `StackPopInfo::Normal` means a plain (non-unwinding) pop.
fn stack_pop(
- ecx: &mut InterpCx<'mir, 'tcx, Self>,
- extra: Self::FrameExtra,
- ) -> InterpResult<'tcx, StackPopInfo>;
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _extra: Self::FrameExtra,
+ ) -> InterpResult<'tcx, StackPopInfo> {
+ // By default, we do not support unwinding from panics, so every
+ // pop is reported as a normal (non-unwinding) one.
+ Ok(StackPopInfo::Normal)
+ }
fn int_to_ptr(
_mem: &Memory<'mir, 'tcx, Self>,
Ok(vtable)
}
+ /// Resolve the function at the specified slot in the provided
+ /// vtable. An index of '0' corresponds to the first method
+ /// declared in the trait of the provided vtable.
+ ///
+ /// Returns the resolved `FnVal` on success; any failure from the
+ /// pointer/alignment checks or the function-pointer read is propagated.
+ pub fn get_vtable_slot(
+ &self,
+ vtable: Scalar<M::PointerTag>,
+ idx: usize
+ ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
+ let ptr_size = self.pointer_size();
+ // Skip over the 'drop_ptr', 'size', and 'align' fields: method
+ // entries start at slot 3, each slot being one pointer wide.
+ let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
+ // Make sure the slot is a valid, pointer-aligned, pointer-sized access.
+ // `ptr_size` is non-zero, so this is not a ZST access and
+ // `check_ptr_access` must return `Some`.
+ let vtable_slot = self.memory.check_ptr_access(
+ vtable_slot,
+ ptr_size,
+ self.tcx.data_layout.pointer_align.abi,
+ )?.expect("cannot be a ZST");
+ // Read the function pointer stored in the slot; it must be fully
+ // initialized (`not_undef`) before we can resolve it to a function.
+ let fn_ptr = self.memory.get(vtable_slot.alloc_id)?
+ .read_ptr_sized(self, vtable_slot)?.not_undef()?;
+ Ok(self.memory.get_fn(fn_ptr)?)
+ }
+
/// Returns the drop fn instance as well as the actual dynamic type
pub fn read_drop_type_from_vtable(
&self,