use super::{
Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
- Memory, Machine
+ Memory, Machine, StackPopInfo
};
pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// The span of the call site.
pub span: source_map::Span,
+ /// Extra data for the machine.
+ pub extra: Extra,
+
////////////////////////////////////////////////////////////////////////////////
// Return place and locals
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// The block that is currently executed (or will be executed after the above call stacks
/// return).
- pub block: mir::BasicBlock,
+ /// If this is `None`, we are unwinding and this function doesn't need any clean-up.
+ /// Just continue the same as with `Resume`.
+ pub block: Option<mir::BasicBlock>,
/// The index of the currently evaluated statement.
pub stmt: usize,
-
- /// Extra data for the machine.
- pub extra: Extra,
}
#[derive(Clone, Eq, PartialEq, Debug)] // Miri debug-prints these
/// Jump to the next block in the caller, or cause UB if None (that's a function
/// that may never return). Also store layout of return place so
/// we can validate it at that layout.
- Goto(Option<mir::BasicBlock>),
+ /// `ret` stores the block we jump to on a normal return, while `unwind`
+ /// stores the block used for cleanup during unwinding.
+ Goto { ret: Option<mir::BasicBlock>, unwind: Option<mir::BasicBlock> },
/// Just do nothing: Used by Main and for the box_alloc hook in miri.
/// `cleanup` says whether locals are deallocated. Static computation
/// wants them leaked to intern what they need (and just throw away
}
ty::Slice(_) | ty::Str => {
- let len = metadata.expect("slice fat ptr must have vtable").to_usize(self)?;
+ let len = metadata.expect("slice fat ptr must have length").to_machine_usize(self)?;
let elem = layout.field(self, 0)?;
// Make sure the slice is not too big.
let extra = M::stack_push(self)?;
self.stack.push(Frame {
body,
- block: mir::START_BLOCK,
+ block: Some(mir::START_BLOCK),
return_to_block,
return_place,
// empty local array, we fill it in below, after we are inside the stack frame and
}
}
- pub(super) fn pop_stack_frame(&mut self) -> InterpResult<'tcx> {
- info!("LEAVING({}) {}", self.cur_frame(), self.frame().instance);
+ /// Pops the current frame from the stack, deallocating the
+ /// memory for allocated locals.
+ ///
+ /// If `unwinding` is `false`, then we are performing a normal return
+ /// from a function. In this case, we jump back into the frame of the caller,
+ /// and continue execution as normal.
+ ///
+ /// If `unwinding` is `true`, then we are in the middle of a panic,
+ /// and need to unwind this frame. In this case, we jump to the
+ /// `cleanup` block for the function, which is responsible for running
+ /// `Drop` impls for any locals that have been initialized at this point.
+ /// The cleanup block ends with a special `Resume` terminator, which will
+ /// cause us to continue unwinding.
+ pub(super) fn pop_stack_frame(
+ &mut self,
+ unwinding: bool
+ ) -> InterpResult<'tcx> {
+ info!("LEAVING({}) {} (unwinding = {})",
+ self.cur_frame(), self.frame().instance, unwinding);
+
+ // Sanity check `unwinding`.
+ assert_eq!(
+ unwinding,
+ match self.frame().block {
+ None => true,
+ Some(block) => self.body().basic_blocks()[block].is_cleanup
+ }
+ );
+
::log_settings::settings().indentation -= 1;
let frame = self.stack.pop().expect(
"tried to pop a stack frame, but there were none",
);
- M::stack_pop(self, frame.extra)?;
- // Abort early if we do not want to clean up: We also avoid validation in that case,
+ let stack_pop_info = M::stack_pop(self, frame.extra)?;
+ match (unwinding, stack_pop_info) {
+ (true, StackPopInfo::StartUnwinding) =>
+ bug!("Attempted to start unwinding while already unwinding!"),
+ (false, StackPopInfo::StopUnwinding) =>
+ bug!("Attempted to stop unwinding while there is no unwinding!"),
+ _ => {}
+ }
+
+ // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
+ // In that case, we return early. We also avoid validation in that case,
// because this is CTFE and the final value will be thoroughly validated anyway.
- match frame.return_to_block {
- StackPopCleanup::Goto(_) => {},
- StackPopCleanup::None { cleanup } => {
- if !cleanup {
- assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked");
- // Leak the locals, skip validation.
- return Ok(());
- }
+ let cleanup = unwinding || match frame.return_to_block {
+ StackPopCleanup::Goto{ .. } => true,
+ StackPopCleanup::None { cleanup, .. } => {
+ cleanup
}
+ };
+ if !cleanup {
+ assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked");
+ // Leak the locals, skip validation.
+ return Ok(());
}
- // Deallocate all locals that are backed by an allocation.
+
+ // Cleanup: deallocate all locals that are backed by an allocation.
for local in frame.locals {
self.deallocate_local(local.value)?;
}
- // Validate the return value. Do this after deallocating so that we catch dangling
- // references.
- if let Some(return_place) = frame.return_place {
- if M::enforce_validity(self) {
- // Data got changed, better make sure it matches the type!
- // It is still possible that the return place held invalid data while
- // the function is running, but that's okay because nobody could have
- // accessed that same data from the "outside" to observe any broken
- // invariant -- that is, unless a function somehow has a ptr to
- // its return place... but the way MIR is currently generated, the
- // return place is always a local and then this cannot happen.
- self.validate_operand(
- self.place_to_op(return_place)?,
- vec![],
- None,
- )?;
+
+ // Now where do we jump next?
+
+ // Determine if we leave this function normally or via unwinding.
+ let cur_unwinding = unwinding && stack_pop_info != StackPopInfo::StopUnwinding;
+ trace!("StackPopCleanup: {:?} StackPopInfo: {:?} cur_unwinding = {:?}",
+ frame.return_to_block, stack_pop_info, cur_unwinding);
+ if cur_unwinding {
+ // Follow the unwind edge.
+ match frame.return_to_block {
+ StackPopCleanup::Goto { unwind, .. } => {
+ let next_frame = self.frame_mut();
+ // If `unwind` is `None`, we'll leave that function immediately again.
+ next_frame.block = unwind;
+ next_frame.stmt = 0;
+ },
+ StackPopCleanup::None { .. } =>
+ bug!("Encountered StackPopCleanup::None while unwinding"),
}
} else {
- // Uh, that shouldn't happen... the function did not intend to return
- throw_ub!(Unreachable)
- }
- // Jump to new block -- *after* validation so that the spans make more sense.
- match frame.return_to_block {
- StackPopCleanup::Goto(block) => {
- self.goto_block(block)?;
+ // Follow the normal return edge.
+ // Validate the return value. Do this after deallocating so that we catch dangling
+ // references.
+ if let Some(return_place) = frame.return_place {
+ if M::enforce_validity(self) {
+ // Data got changed, better make sure it matches the type!
+ // It is still possible that the return place held invalid data while
+ // the function is running, but that's okay because nobody could have
+ // accessed that same data from the "outside" to observe any broken
+ // invariant -- that is, unless a function somehow has a ptr to
+ // its return place... but the way MIR is currently generated, the
+ // return place is always a local and then this cannot happen.
+ self.validate_operand(
+ self.place_to_op(return_place)?,
+ vec![],
+ None,
+ )?;
+ }
+ } else {
+ // Uh, that shouldn't happen... the function did not intend to return
+ throw_ub!(Unreachable);
+ }
+
+ // Jump to new block -- *after* validation so that the spans make more sense.
+ match frame.return_to_block {
+ StackPopCleanup::Goto { ret, .. } => {
+ self.goto_block(ret)?;
+ }
+ StackPopCleanup::None { .. } => {}
}
- StackPopCleanup::None { .. } => {}
}
if self.stack.len() > 0 {
- info!("CONTINUING({}) {}", self.cur_frame(), self.frame().instance);
+ info!("CONTINUING({}) {} (unwinding = {})",
+ self.cur_frame(), self.frame().instance, cur_unwinding);
}
Ok(())
} else {
last_span = Some(span);
}
- let block = &body.basic_blocks()[block];
- let source_info = if stmt < block.statements.len() {
- block.statements[stmt].source_info
- } else {
- block.terminator().source_info
- };
- let lint_root = match body.source_scope_local_data {
- mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
- mir::ClearCrossCrate::Clear => None,
- };
+
+ let lint_root = block.and_then(|block| {
+ let block = &body.basic_blocks()[block];
+ let source_info = if stmt < block.statements.len() {
+ block.statements[stmt].source_info
+ } else {
+ block.terminator().source_info
+ };
+ match body.source_scope_local_data {
+ mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
+ mir::ClearCrossCrate::Clear => None,
+ }
+ });
+
frames.push(FrameInfo { call_site: span, instance, lint_root });
}
trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);