1 use std::collections::HashSet;
4 use rustc::hir::def_id::DefId;
5 use rustc::hir::map::definitions::DefPathData;
6 use rustc::middle::const_val::{ConstVal, ErrKind};
8 use rustc::traits::Reveal;
9 use rustc::ty::layout::{self, Size, Align, HasDataLayout, LayoutOf, TyLayout};
10 use rustc::ty::subst::{Subst, Substs};
11 use rustc::ty::{self, Ty, TyCtxt};
12 use rustc_data_structures::indexed_vec::Idx;
13 use rustc::middle::const_val::FrameInfo;
14 use syntax::codemap::{self, DUMMY_SP, Span};
15 use syntax::ast::Mutability;
16 use rustc::mir::interpret::{
17 GlobalId, Value, Pointer, PrimVal, PrimValKind,
18 EvalError, EvalResult, EvalErrorKind, MemoryPointer,
21 use super::{Place, PlaceExtra, Memory,
22 HasMemory, MemoryKind, operator,
25 pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
// NOTE(review): central interpreter state for const evaluation; `M` is the
// pluggable `Machine` implementation. Several field lines are elided in this
// excerpt, including the field belonging to the doc comment just below.
26 /// Stores the `Machine` instance.
29 /// The results of the type checker, from rustc.
30 pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
32 /// Bounds in scope for polymorphic evaluations.
33 pub param_env: ty::ParamEnv<'tcx>,
35 /// The virtual memory system.
36 pub memory: Memory<'a, 'mir, 'tcx, M>,
38 /// The virtual call stack.
39 pub(crate) stack: Vec<Frame<'mir, 'tcx>>,
41 /// The maximum number of stack frames allowed
// (enforced in `push_stack_frame` below via `StackFrameLimitReached`)
42 pub(crate) stack_limit: usize,
44 /// The maximum number of operations that may be executed.
45 /// This prevents infinite loops and huge computations from freezing up const eval.
46 /// Remove once halting problem is solved.
47 pub(crate) steps_remaining: usize,
51 pub struct Frame<'mir, 'tcx: 'mir> {
52 ////////////////////////////////////////////////////////////////////////////////
53 // Function and callsite information
54 ////////////////////////////////////////////////////////////////////////////////
55 /// The MIR for the function called on this frame.
56 pub mir: &'mir mir::Mir<'tcx>,
58 /// The def_id and substs of the current function
59 pub instance: ty::Instance<'tcx>,
61 /// The span of the call site.
62 pub span: codemap::Span,
64 ////////////////////////////////////////////////////////////////////////////////
65 // Return place and locals
66 ////////////////////////////////////////////////////////////////////////////////
67 /// The block to return to when returning from the current stack frame
68 pub return_to_block: StackPopCleanup,
70 /// The location where the result of the current stack frame should be written to.
71 pub return_place: Place,
73 /// The list of locals for this stack frame, stored in order as
74 /// `[arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
75 /// `None` represents a local that is currently dead, while a live local
76 /// can either directly contain `PrimVal` or refer to some part of an `Allocation`.
78 /// Before being initialized, arguments are `Value::ByVal(PrimVal::Undef)` and other locals are `None`.
79 pub locals: Vec<Option<Value>>,
81 ////////////////////////////////////////////////////////////////////////////////
82 // Current position within the function
83 ////////////////////////////////////////////////////////////////////////////////
84 /// The block that is currently executed (or will be executed after the above call stacks
// NOTE(review): the sentence above is cut off in this excerpt -- confirm the
// full wording against the complete file.
86 pub block: mir::BasicBlock,
88 /// The index of the currently evaluated statement.
// NOTE(review): the field this doc comment belongs to (presumably a `usize`
// statement index) is not visible in this excerpt.
92 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
93 pub enum StackPopCleanup {
94 /// The stackframe existed to compute the initial value of a static/constant, make sure it
95 /// isn't modifiable afterwards in case of constants.
96 /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
97 /// references or deallocated
98 MarkStatic(Mutability),
99 /// A regular stackframe added due to a function call will need to get forwarded to the next
// NOTE(review): continuation of this doc comment is elided in this excerpt;
// the payload is the caller's basic block to jump to (see `pop_stack_frame`).
101 Goto(mir::BasicBlock),
102 /// The main function and diverging functions have nowhere to return to
// NOTE(review): the variant for this doc comment (`None`, matched in
// `pop_stack_frame` below) is outside this excerpt.
106 #[derive(Copy, Clone, Debug)]
107 pub struct TyAndPacked<'tcx> {
// NOTE(review): field lines are elided in this excerpt.
112 #[derive(Copy, Clone, Debug)]
113 pub struct ValTy<'tcx> {
// NOTE(review): field lines elided here; the struct is constructed as
// `ValTy { value, ty }` throughout this file, i.e. it pairs a `Value` with
// its `Ty`.
118 impl<'tcx> ValTy<'tcx> {
// Convert an already-evaluated `ty::Const` into a `ValTy`; unevaluated
// constants yield `None` so the caller can trigger evaluation instead.
119 pub fn from(val: &ty::Const<'tcx>) -> Option<Self> {
121 ConstVal::Value(value) => Some(ValTy { value, ty: val.ty }),
122 ConstVal::Unevaluated { .. } => None,
127 impl<'tcx> ::std::ops::Deref for ValTy<'tcx> {
// Deref to the inner `Value` so a `ValTy` can be used wherever `&Value` is
// expected.
129 fn deref(&self) -> &Value {
134 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
// Forward the target data layout from the `TyCtxt` so layout queries work
// directly on `&EvalContext`.
136 fn data_layout(&self) -> &layout::TargetDataLayout {
137 &self.tcx.data_layout
141 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
142 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
// Same as above, for the doubly-indirect `&&mut` receiver used inside
// methods that only have `&mut self` available.
144 fn data_layout(&self) -> &layout::TargetDataLayout {
145 &self.tcx.data_layout
149 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
151 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
156 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
157 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
159 fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
164 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf<Ty<'tcx>> for &'a EvalContext<'a, 'mir, 'tcx, M> {
165 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
// Compute the layout of `ty` under the current `param_env`, converting rustc
// layout errors into `EvalErrorKind::Layout`.
167 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
168 self.tcx.layout_of(self.param_env.and(ty))
169 .map_err(|layout| EvalErrorKind::Layout(layout).into())
173 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf<Ty<'tcx>>
174 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
175 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
178 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
// Delegate to the shared-reference impl above.
179 (&**self).layout_of(ty)
183 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// NOTE(review): constructor signature/name lines are partially elided in this
// excerpt; the fragments below initialize the memory system and pull the
// stack/step limits from session options.
185 tcx: TyCtxt<'a, 'tcx, 'tcx>,
186 param_env: ty::ParamEnv<'tcx>,
188 memory_data: M::MemoryData,
194 memory: Memory::new(tcx, memory_data),
196 stack_limit: tcx.sess.const_eval_stack_frame_limit.get(),
197 steps_remaining: tcx.sess.const_eval_step_limit.get(),
// Allocate interpreter memory sized and aligned for `ty`.
// Panics (assert) if `ty` is unsized -- callers must handle DSTs separately.
201 pub fn alloc_ptr(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, MemoryPointer> {
202 let layout = self.layout_of(ty)?;
203 assert!(!layout.is_unsized(), "cannot alloc memory for unsized type");
205 let size = layout.size.bytes();
206 self.memory.allocate(size, layout.align, Some(MemoryKind::Stack))
// Accessors for the memory system and call stack.
209 pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
213 pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
217 pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
// Index of the top-most stack frame; asserts the stack is non-empty.
222 pub fn cur_frame(&self) -> usize {
223 assert!(self.stack.len() > 0);
// Allocate (or reuse a cached allocation for) the bytes of `s` and build the
// fat-pointer value `(ptr, len)` for a `&str`.
227 pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
228 let ptr = self.memory.allocate_cached(s.as_bytes());
231 PrimVal::from_u128(s.len() as u128),
// Turn a `ConstVal` into an interpreter `Value`, evaluating unevaluated
// constants through `read_global_as_value`.
235 pub(super) fn const_to_value(&self, const_val: &ConstVal<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
237 ConstVal::Unevaluated(def_id, substs) => {
238 let instance = self.resolve(def_id, substs)?;
239 self.read_global_as_value(GlobalId {
244 ConstVal::Value(val) => Ok(val),
// Resolve `(def_id, substs)` to a monomorphic `Instance` in the current
// frame's substitution environment.
248 pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
249 let substs = self.tcx.trans_apply_param_substs(self.substs(), &substs);
250 ty::Instance::resolve(
255 ).ok_or(EvalErrorKind::TypeckError.into()) // turn error prop into a panic to expose associated type in const issue
258 pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
259 ty.is_sized(self.tcx.at(DUMMY_SP), self.param_env)
// NOTE(review): the `fn` header for this MIR-loading method is elided in this
// excerpt; only its parameter/return lines and body fragments are visible.
264 instance: ty::InstanceDef<'tcx>,
265 ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
266 // do not continue if typeck errors occurred (can only occur in local crate)
267 let did = instance.def_id();
268 if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
269 return err!(TypeckError);
271 trace!("load mir {:?}", instance);
273 ty::InstanceDef::Item(def_id) => {
274 self.tcx.maybe_optimized_mir(def_id).ok_or_else(|| {
275 EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
278 _ => Ok(self.tcx.instance_mir(instance)),
// Substitute `substs` into `ty` and normalize, after erasing regions.
282 pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
283 // miri doesn't care about lifetimes, and will choke on some crazy ones
284 // let's simply get rid of them
285 let without_lifetimes = self.tcx.erase_regions(&ty);
286 let substituted = without_lifetimes.subst(self.tcx, substs);
287 let substituted = self.tcx.fully_normalize_monormophic_ty(&substituted);
291 /// Return the size and alignment of the value at the given type.
292 /// Note that the value does not matter if the type is sized. For unsized types,
293 /// the value has to be a fat pointer, and we only care about the "extra" data in it.
294 pub fn size_and_align_of_dst(
298 ) -> EvalResult<'tcx, (Size, Align)> {
299 let layout = self.layout_of(ty)?;
// Sized types: the static layout already has the answer.
300 if !layout.is_unsized() {
301 Ok(layout.size_and_align())
304 ty::TyAdt(..) | ty::TyTuple(..) => {
305 // First get the size of all statically known fields.
306 // Don't use type_of::sizing_type_of because that expects t to be sized,
307 // and it also rounds up to alignment, which we want to avoid,
308 // as the unsized field's alignment could be smaller.
309 assert!(!ty.is_simd());
310 debug!("DST {} layout: {:?}", ty, layout);
312 let sized_size = layout.fields.offset(layout.fields.count() - 1);
313 let sized_align = layout.align;
315 "DST {} statically sized prefix size: {:?} align: {:?}",
321 // Recurse to get the size of the dynamically sized field (must be
// the last field of the struct/tuple).
323 let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty;
324 let (unsized_size, unsized_align) =
325 self.size_and_align_of_dst(field_ty, value)?;
327 // FIXME (#26403, #27023): We should be adding padding
328 // to `sized_size` (to accommodate the `unsized_align`
329 // required of the unsized field that follows) before
330 // summing it with `sized_size`. (Note that since #26403
331 // is unfixed, we do not yet add the necessary padding
332 // here. But this is where the add would go.)
334 // Return the sum of sizes and max of aligns.
335 let size = sized_size + unsized_size;
337 // Choose max of two known alignments (combined value must
338 // be aligned according to more restrictive of the two).
339 let align = sized_align.max(unsized_align);
341 // Issue #27023: must add any necessary padding to `size`
342 // (to make it a multiple of `align`) before returning it.
344 // Namely, the returned size should be, in C notation:
346 // `size + ((size & (align-1)) ? align : 0)`
348 // emulated via the semi-standard fast bit trick:
350 // `(size + (align-1)) & -align`
352 Ok((size.abi_align(align), align))
354 ty::TyDynamic(..) => {
// Trait objects: size/align live in the vtable pointed to by the fat
// pointer's metadata.
355 let (_, vtable) = self.into_ptr_vtable_pair(value)?;
356 // the second entry in the vtable is the dynamic size of the object.
357 self.read_size_and_align_from_vtable(vtable)
360 ty::TySlice(_) | ty::TyStr => {
// Slices/str: size is element size times the length carried in the
// fat pointer.
361 let (elem_size, align) = layout.field(&self, 0)?.size_and_align();
362 let (_, len) = self.into_slice(value)?;
363 Ok((elem_size * len, align))
366 _ => bug!("size_of_val::<{:?}>", ty),
// Push a new call frame for `instance`, initialize its locals, and enforce
// the stack-depth limit.
371 pub fn push_stack_frame(
373 instance: ty::Instance<'tcx>,
375 mir: &'mir mir::Mir<'tcx>,
377 return_to_block: StackPopCleanup,
378 ) -> EvalResult<'tcx> {
379 ::log_settings::settings().indentation += 1;
381 /// Return the set of locals that have a storage annotation anywhere
382 fn collect_storage_annotations<'mir, 'tcx>(mir: &'mir mir::Mir<'tcx>) -> HashSet<mir::Local> {
383 use rustc::mir::StatementKind::*;
385 let mut set = HashSet::new();
386 for block in mir.basic_blocks() {
387 for stmt in block.statements.iter() {
390 StorageDead(local) => {
400 // Subtract 1 because `local_decls` includes the ReturnMemoryPointer, but we don't store a local
402 let num_locals = mir.local_decls.len() - 1;
// Locals with Storage{Live,Dead} annotations start dead (`None`); all other
// locals are live for the whole function, so mark them `Undef` up front.
405 let annotated_locals = collect_storage_annotations(mir);
406 let mut locals = vec![None; num_locals];
407 for i in 0..num_locals {
408 let local = mir::Local::new(i + 1);
409 if !annotated_locals.contains(&local) {
410 locals[i] = Some(Value::ByVal(PrimVal::Undef));
416 self.stack.push(Frame {
418 block: mir::START_BLOCK,
427 self.memory.cur_frame = self.cur_frame();
429 if self.stack.len() > self.stack_limit {
430 err!(StackFrameLimitReached)
// Pop the top frame, run its `StackPopCleanup` action, and free its locals.
436 pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
437 ::log_settings::settings().indentation -= 1;
438 M::end_region(self, None)?;
439 let frame = self.stack.pop().expect(
440 "tried to pop a stack frame, but there were none",
442 if !self.stack.is_empty() {
443 // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
444 self.memory.cur_frame = self.cur_frame();
446 match frame.return_to_block {
447 StackPopCleanup::MarkStatic(mutable) => {
448 if let Place::Ptr { ptr, .. } = frame.return_place {
449 // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
450 self.memory.mark_static_initialized(
451 ptr.to_ptr()?.alloc_id,
455 bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
458 StackPopCleanup::Goto(target) => self.goto_block(target),
459 StackPopCleanup::None => {}
461 // deallocate all locals that are backed by an allocation
462 for local in frame.locals {
463 self.deallocate_local(local)?;
// Free the allocation behind a `ByRef` local, if any; non-`ByRef` locals
// need no cleanup.
469 pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
470 if let Some(Value::ByRef(ptr, _align)) = local {
471 trace!("deallocating local");
472 let ptr = ptr.to_ptr()?;
473 self.memory.dump_alloc(ptr.alloc_id);
474 self.memory.deallocate_local(ptr)?;
479 /// Evaluate an assignment statement.
481 /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
482 /// type writes its results directly into the memory specified by the place.
483 pub(super) fn eval_rvalue_into_place(
485 rvalue: &mir::Rvalue<'tcx>,
486 place: &mir::Place<'tcx>,
487 ) -> EvalResult<'tcx> {
488 let dest = self.eval_place(place)?;
489 let dest_ty = self.place_ty(place);
491 use rustc::mir::Rvalue::*;
// `Use`: plain copy/move of an operand into the destination.
493 Use(ref operand) => {
494 let value = self.eval_operand(operand)?.value;
499 self.write_value(valty, dest)?;
502 BinaryOp(bin_op, ref left, ref right) => {
503 let left = self.eval_operand(left)?;
504 let right = self.eval_operand(right)?;
505 if self.intrinsic_overflowing(
513 // There was an overflow in an unchecked binop. Right now, we consider this an error and bail out.
514 // The rationale is that the reason rustc emits unchecked binops in release mode (vs. the checked binops
515 // it emits in debug mode) is performance, but it doesn't cost us any performance in miri.
516 // If, however, the compiler ever starts transforming unchecked intrinsics into unchecked binops,
517 // we have to go back to just ignoring the overflow here.
518 return err!(OverflowingMath);
522 CheckedBinaryOp(bin_op, ref left, ref right) => {
523 let left = self.eval_operand(left)?;
524 let right = self.eval_operand(right)?;
525 self.intrinsic_with_overflow(
534 UnaryOp(un_op, ref operand) => {
535 let val = self.eval_operand_to_primval(operand)?;
536 let kind = self.ty_to_primval_kind(dest_ty)?;
539 operator::unary_op(un_op, val, kind)?,
// `Aggregate`: build a struct/enum/tuple/array value field by field.
544 Aggregate(ref kind, ref operands) => {
545 self.inc_step_counter_and_check_limit(operands.len())?;
547 let (dest, active_field_index) = match **kind {
548 mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
549 self.write_discriminant_value(dest_ty, dest, variant_index)?;
550 if adt_def.is_enum() {
551 (self.place_downcast(dest, variant_index)?, active_field_index)
553 (dest, active_field_index)
559 let layout = self.layout_of(dest_ty)?;
560 for (i, operand) in operands.iter().enumerate() {
561 let value = self.eval_operand(operand)?;
562 // Ignore zero-sized fields.
563 if !self.layout_of(value.ty)?.is_zst() {
564 let field_index = active_field_index.unwrap_or(i);
565 let (field_dest, _) = self.place_field(dest, mir::Field::new(field_index), layout)?;
566 self.write_value(value, field_dest)?;
// `Repeat`: fill an array destination with copies of one element value.
571 Repeat(ref operand, _) => {
572 let (elem_ty, length) = match dest_ty.sty {
573 ty::TyArray(elem_ty, n) => (elem_ty, n.val.unwrap_u64()),
576 "tried to assign array-repeat to non-array type {:?}",
581 let elem_size = self.layout_of(elem_ty)?.size.bytes();
582 let value = self.eval_operand(operand)?.value;
584 let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
586 // FIXME: speed up repeat filling
588 let elem_dest = dest.offset(i * elem_size, &self)?;
589 self.write_value_to_ptr(value, elem_dest, dest_align, elem_ty)?;
594 // FIXME(CTFE): don't allow computing the length of arrays in const eval
595 let src = self.eval_place(place)?;
596 let ty = self.place_ty(place);
597 let (_, len) = src.elem_ty_and_len(ty);
600 PrimVal::from_u128(len as u128),
// `Ref`: take the address of a place, producing a thin or fat pointer
// depending on the place's extra data.
605 Ref(_, _, ref place) => {
606 let src = self.eval_place(place)?;
607 // We ignore the alignment of the place here -- special handling for packed structs ends
608 // at the `&` operator.
609 let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();
611 let val = match extra {
612 PlaceExtra::None => ptr.to_value(),
613 PlaceExtra::Length(len) => ptr.to_value_with_len(len),
614 PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
615 PlaceExtra::DowncastVariant(..) => {
616 bug!("attempted to take a reference to an enum downcast place")
623 self.write_value(valty, dest)?;
// `box` allocation is delegated to the Machine implementation.
626 NullaryOp(mir::NullOp::Box, ty) => {
627 let ty = self.monomorphize(ty, self.substs());
628 M::box_alloc(self, ty, dest)?;
631 NullaryOp(mir::NullOp::SizeOf, ty) => {
632 let ty = self.monomorphize(ty, self.substs());
633 let layout = self.layout_of(ty)?;
634 assert!(!layout.is_unsized(),
635 "SizeOf nullary MIR operator called for unsized type");
638 PrimVal::from_u128(layout.size.bytes() as u128),
643 Cast(kind, ref operand, cast_ty) => {
644 debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
645 use rustc::mir::CastKind::*;
648 let src = self.eval_operand(operand)?;
649 let src_layout = self.layout_of(src.ty)?;
650 let dst_layout = self.layout_of(dest_ty)?;
651 self.unsize_into(src.value, src_layout, dest, dst_layout)?;
655 let src = self.eval_operand(operand)?;
656 if self.type_is_fat_ptr(src.ty) {
657 match (src.value, self.type_is_fat_ptr(dest_ty)) {
658 (Value::ByRef { .. }, _) |
659 (Value::ByValPair(..), true) => {
664 self.write_value(valty, dest)?;
// Fat pointer cast to a thin pointer: drop the metadata half.
666 (Value::ByValPair(data, _), false) => {
668 value: Value::ByVal(data),
671 self.write_value(valty, dest)?;
673 (Value::ByVal(_), _) => bug!("expected fat ptr"),
676 let src_val = self.value_to_primval(src)?;
677 let dest_val = self.cast_primval(src_val, src.ty, dest_ty)?;
679 value: Value::ByVal(dest_val),
682 self.write_value(valty, dest)?;
687 match self.eval_operand(operand)?.ty.sty {
688 ty::TyFnDef(def_id, substs) => {
689 if self.tcx.has_attr(def_id, "rustc_args_required_const") {
690 bug!("reifying a fn ptr that requires \
693 let instance = self.resolve(def_id, substs)?;
694 let fn_ptr = self.memory.create_fn_alloc(instance);
696 value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
699 self.write_value(valty, dest)?;
701 ref other => bug!("reify fn pointer on {:?}", other),
708 let mut src = self.eval_operand(operand)?;
710 self.write_value(src, dest)?;
712 ref other => bug!("fn to unsafe fn cast on {:?}", other),
// Turn a closure with no captures into a plain function pointer.
716 ClosureFnPointer => {
717 match self.eval_operand(operand)?.ty.sty {
718 ty::TyClosure(def_id, substs) => {
719 let substs = self.tcx.trans_apply_param_substs(self.substs(), &substs);
720 let instance = ty::Instance::resolve_closure(
724 ty::ClosureKind::FnOnce,
726 let fn_ptr = self.memory.create_fn_alloc(instance);
728 value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
731 self.write_value(valty, dest)?;
733 ref other => bug!("closure fn pointer on {:?}", other),
// `Discriminant`: read an enum's discriminant and sanity-check it against
// the set of valid discriminants before writing it out.
739 Discriminant(ref place) => {
740 let ty = self.place_ty(place);
741 let place = self.eval_place(place)?;
742 let discr_val = self.read_discriminant_value(place, ty)?;
743 if let ty::TyAdt(adt_def, _) = ty.sty {
744 trace!("Read discriminant {}, valid discriminants {:?}", discr_val, adt_def.discriminants(self.tcx).collect::<Vec<_>>());
745 if adt_def.discriminants(self.tcx).all(|v| {
749 return err!(InvalidDiscriminant);
751 self.write_primval(dest, PrimVal::Bytes(discr_val), dest_ty)?;
753 bug!("rustc only generates Rvalue::Discriminant for enums");
758 if log_enabled!(::log::Level::Trace) {
759 self.dump_local(dest);
// A "fat pointer" here is a pointer to an unsized pointee (slice, str, trait
// object): raw pointers, references, and `Box` of such types.
765 pub(super) fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
767 ty::TyRawPtr(ref tam) |
768 ty::TyRef(_, ref tam) => !self.type_is_sized(tam.ty),
769 ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
// Evaluate an operand and reduce the result to a single `PrimVal`.
774 pub(super) fn eval_operand_to_primval(
776 op: &mir::Operand<'tcx>,
777 ) -> EvalResult<'tcx, PrimVal> {
778 let valty = self.eval_operand(op)?;
779 self.value_to_primval(valty)
// Evaluate a slice of call operands into `ValTy` arguments.
782 pub(crate) fn operands_to_args(
784 ops: &[mir::Operand<'tcx>],
785 ) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
787 .map(|op| self.eval_operand(op))
// Evaluate a MIR operand (Copy/Move of a place, or a Constant) to a value
// paired with its monomorphized type.
791 pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
792 use rustc::mir::Operand::*;
793 let ty = self.monomorphize(op.ty(self.mir(), self.tcx), self.substs());
795 // FIXME: do some more logic on `move` to invalidate the old location
799 value: self.eval_and_read_place(place)?,
804 Constant(ref constant) => {
805 use rustc::mir::Literal;
806 let mir::Constant { ref literal, .. } = **constant;
807 let value = match *literal {
808 Literal::Value { ref value } => self.const_to_value(&value.val, ty)?,
// Promoted constants are looked up as globals of the current instance.
810 Literal::Promoted { index } => {
811 self.read_global_as_value(GlobalId {
812 instance: self.frame().instance,
813 promoted: Some(index),
// Read the discriminant of the enum stored at `place`, decoding both tagged
// and niche-filling layouts into the variant's discriminant value.
826 pub fn read_discriminant_value(
830 ) -> EvalResult<'tcx, u128> {
831 let layout = self.layout_of(ty)?;
832 //trace!("read_discriminant_value {:#?}", layout);
834 match layout.variants {
// Single-variant layouts have no stored tag; the variant index is known
// statically.
835 layout::Variants::Single { index } => {
836 return Ok(index as u128);
838 layout::Variants::Tagged { .. } |
839 layout::Variants::NicheFilling { .. } => {},
// Multi-variant: the tag/niche lives in field 0 of the layout.
842 let (discr_place, discr) = self.place_field(place, mir::Field::new(0), layout)?;
843 let raw_discr = self.value_to_primval(ValTy {
844 value: self.read_place(discr_place)?,
847 let discr_val = match layout.variants {
848 layout::Variants::Single { .. } => bug!(),
849 layout::Variants::Tagged { .. } => raw_discr.to_bytes()?,
850 layout::Variants::NicheFilling {
856 let variants_start = niche_variants.start as u128;
857 let variants_end = niche_variants.end as u128;
860 assert!(niche_start == 0);
861 assert!(variants_start == variants_end);
862 dataful_variant as u128
// Niche encoding: raw value minus `niche_start` plus the first niched
// variant index; anything outside the niched range is the dataful variant.
864 PrimVal::Bytes(raw_discr) => {
865 let discr = raw_discr.wrapping_sub(niche_start)
866 .wrapping_add(variants_start);
867 if variants_start <= discr && discr <= variants_end {
870 dataful_variant as u128
873 PrimVal::Undef => return err!(ReadUndefBytes),
// Write the tag (or niche value) for `variant_index` into an enum at `dest`.
// Inverse of `read_discriminant_value`.
882 pub(crate) fn write_discriminant_value(
886 variant_index: usize,
887 ) -> EvalResult<'tcx> {
888 let layout = self.layout_of(dest_ty)?;
890 match layout.variants {
891 layout::Variants::Single { index } => {
892 if index != variant_index {
893 // If the layout of an enum is `Single`, all
894 // other variants are necessarily uninhabited.
895 assert_eq!(layout.for_variant(&self, variant_index).abi,
896 layout::Abi::Uninhabited);
899 layout::Variants::Tagged { .. } => {
900 let discr_val = dest_ty.ty_adt_def().unwrap()
901 .discriminant_for_variant(self.tcx, variant_index)
904 let (discr_dest, discr) = self.place_field(dest, mir::Field::new(0), layout)?;
905 self.write_primval(discr_dest, PrimVal::Bytes(discr_val), discr.ty)?;
907 layout::Variants::NicheFilling {
// The dataful variant is represented implicitly (no write needed);
// all other variants get their niche value stored into field 0.
913 if variant_index != dataful_variant {
914 let (niche_dest, niche) =
915 self.place_field(dest, mir::Field::new(0), layout)?;
916 let niche_value = ((variant_index - niche_variants.start) as u128)
917 .wrapping_add(niche_start);
918 self.write_primval(niche_dest, PrimVal::Bytes(niche_value), niche.ty)?;
// Read a static/const global as a `Value`, preferring an already-cached
// allocation for non-promoted globals over re-running const eval.
926 pub fn read_global_as_value(&self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
927 if gid.promoted.is_none() {
931 .get_cached(gid.instance.def_id());
932 if let Some(alloc_id) = cached {
933 let layout = self.layout_of(ty)?;
934 let ptr = MemoryPointer::new(alloc_id, 0);
935 return Ok(Value::ByRef(ptr.into(), layout.align))
938 let cv = self.const_eval(gid)?;
939 self.const_to_value(&cv.val, ty)
// Run rustc's `const_eval` query for `gid`, mapping its error kinds onto
// the interpreter's `EvalErrorKind`s.
942 pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
// Statics are evaluated in an empty (fully monomorphic) param env.
943 let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
945 ty::ParamEnv::empty(traits::Reveal::All)
949 self.tcx.const_eval(param_env.and(gid)).map_err(|err| match *err.kind {
950 ErrKind::Miri(ref err, _) => match err.kind {
951 EvalErrorKind::TypeckError |
952 EvalErrorKind::Layout(_) => EvalErrorKind::TypeckError.into(),
953 _ => EvalErrorKind::ReferencedConstant.into(),
955 ErrKind::TypeckError => EvalErrorKind::TypeckError.into(),
956 ref other => bug!("const eval returned {:?}", other),
// Ensure `place` is backed by real memory, spilling a by-value local into a
// fresh allocation if needed, and return the pointer-based place.
960 pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
961 let new_place = match place {
962 Place::Local { frame, local } => {
963 // -1 since we don't store the return value
964 match self.stack[frame].locals[local.index() - 1] {
965 None => return err!(DeadLocal),
966 Some(Value::ByRef(ptr, align)) => {
970 extra: PlaceExtra::None,
// By-value local: allocate, store the value there, and keep the local
// pointing at the new allocation.
974 let ty = self.stack[frame].mir.local_decls[local].ty;
975 let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
976 let layout = self.layout_of(ty)?;
977 let ptr = self.alloc_ptr(ty)?;
978 self.stack[frame].locals[local.index() - 1] =
979 Some(Value::ByRef(ptr.into(), layout.align)); // it stays live
980 let place = Place::from_ptr(ptr, layout.align);
981 self.write_value(ValTy { value: val, ty }, place)?;
986 Place::Ptr { .. } => place,
991 /// ensures this Value is not a ByRef
992 pub fn follow_by_ref_value(
996 ) -> EvalResult<'tcx, Value> {
998 Value::ByRef(ptr, align) => {
999 self.read_value(ptr, align, ty)
// Reduce a value to a single `PrimVal`, validating it on the way
// (see `ensure_valid_value`). Fat pointers (`ByValPair`) are a bug here.
1005 pub fn value_to_primval(
1007 ValTy { value, ty } : ValTy<'tcx>,
1008 ) -> EvalResult<'tcx, PrimVal> {
1009 match self.follow_by_ref_value(value, ty)? {
1010 Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),
1012 Value::ByVal(primval) => {
1013 // TODO: Do we really want insta-UB here?
1014 self.ensure_valid_value(primval, ty)?;
1018 Value::ByValPair(..) => bug!("value_to_primval can't work with fat pointers"),
// Convenience wrappers around `write_value` for pointers and primitives.
1022 pub fn write_ptr(&mut self, dest: Place, val: Pointer, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
1024 value: val.to_value(),
1027 self.write_value(valty, dest)
1030 pub fn write_primval(
1035 ) -> EvalResult<'tcx> {
1037 value: Value::ByVal(val),
1040 self.write_value(valty, dest)
// NOTE(review): the `fn write_value` header line is elided in this excerpt;
// the pattern-typed parameter and body below belong to it.
1045 ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
1047 ) -> EvalResult<'tcx> {
1048 //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
1049 // Note that it is really important that the type here is the right one, and matches the type things are read at.
1050 // In case `src_val` is a `ByValPair`, we don't do any magic here to handle padding properly, which is only
1051 // correct if we never look at this data with the wrong type.
1054 Place::Ptr { ptr, align, extra } => {
1055 assert_eq!(extra, PlaceExtra::None);
1056 self.write_value_to_ptr(src_val, ptr, align, dest_ty)
1059 Place::Local { frame, local } => {
1060 let dest = self.stack[frame].get_local(local)?;
1061 self.write_value_possibly_by_val(
1063 |this, val| this.stack[frame].set_local(local, val),
1071 // The cases here can be a bit subtle. Read carefully!
1072 fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
1076 old_dest_val: Value,
1078 ) -> EvalResult<'tcx> {
1079 if let Value::ByRef(dest_ptr, align) = old_dest_val {
1080 // If the value is already `ByRef` (that is, backed by an `Allocation`),
1081 // then we must write the new value into this allocation, because there may be
1082 // other pointers into the allocation. These other pointers are logically
1083 // pointers into the local variable, and must be able to observe the change.
1085 // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
1086 // knew for certain that there were no outstanding pointers to this allocation.
1087 self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
1088 } else if let Value::ByRef(src_ptr, align) = src_val {
1089 // If the value is not `ByRef`, then we know there are no pointers to it
1090 // and we can simply overwrite the `Value` in the locals array directly.
1092 // In this specific case, where the source value is `ByRef`, we must duplicate
1093 // the allocation, because this is a by-value operation. It would be incorrect
1094 // if they referred to the same allocation, since then a change to one would
1095 // implicitly change the other.
1097 // It is a valid optimization to attempt reading a primitive value out of the
1098 // source and write that into the destination without making an allocation, so
// we try that first and fall back to a full copy.
1100 if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
1101 write_dest(self, src_val)?;
1103 let dest_ptr = self.alloc_ptr(dest_ty)?.into();
1104 let layout = self.layout_of(dest_ty)?;
1105 self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size.bytes(), false)?;
1106 write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
1109 // Finally, we have the simple case where neither source nor destination are
1110 // `ByRef`. We may simply copy the source value over the destination.
1111 write_dest(self, src_val)?;
// Write a `Value` into raw memory at `dest`, dispatching on its
// representation (ByRef copy, scalar store, or scalar-pair store).
1116 pub fn write_value_to_ptr(
1122 ) -> EvalResult<'tcx> {
1123 trace!("write_value_to_ptr: {:#?}", value);
1124 let layout = self.layout_of(dest_ty)?;
1126 Value::ByRef(ptr, align) => {
1127 self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size.bytes(), false)
1129 Value::ByVal(primval) => {
// Only scalar-ABI types (or fully-undef values) may be stored ByVal.
1131 layout::Abi::Scalar(_) => {}
1132 _ if primval.is_undef() => {}
1133 _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout)
1135 // TODO: Do we need signedness?
1136 self.memory.write_primval(dest.to_ptr()?, dest_align, primval, layout.size.bytes(), false)
1138 Value::ByValPair(a_val, b_val) => {
1139 let ptr = dest.to_ptr()?;
1140 trace!("write_value_to_ptr valpair: {:#?}", layout);
1141 let (a, b) = match layout.abi {
1142 layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
1143 _ => bug!("write_value_to_ptr: invalid ByValPair layout: {:#?}", layout)
1145 let (a_size, b_size) = (a.size(&self), b.size(&self));
// Second scalar is placed after the first, aligned to its own ABI align.
1147 let b_offset = a_size.abi_align(b.align(&self));
1148 let b_ptr = ptr.offset(b_offset.bytes(), &self)?.into();
1149 // TODO: What about signedness?
1150 self.memory.write_primval(a_ptr, dest_align, a_val, a_size.bytes(), false)?;
1151 self.memory.write_primval(b_ptr, dest_align, b_val, b_size.bytes(), false)
/// Maps a Rust type to the `PrimValKind` the interpreter uses to
/// represent a single-scalar value of that type.
///
/// Handles the primitive types directly (bool, char, sized/pointer-sized
/// ints and uints, f32/f64, fn pointers, thin refs/raw pointers, `Box`),
/// and otherwise falls back to the type's computed layout: a
/// `layout::Abi::Scalar` layout is translated to the matching kind.
/// Anything else yields `EvalErrorKind::TypeNotPrimitive`.
1156 pub fn ty_to_primval_kind(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, PrimValKind> {
1157 use syntax::ast::FloatTy;
1159 let kind = match ty.sty {
1160 ty::TyBool => PrimValKind::Bool,
1161 ty::TyChar => PrimValKind::Char,
1163 ty::TyInt(int_ty) => {
1164 use syntax::ast::IntTy::*;
1165 let size = match int_ty {
// `isize` has the target's pointer size.
1171 Isize => self.memory.pointer_size(),
1173 PrimValKind::from_int_size(size)
1176 ty::TyUint(uint_ty) => {
1177 use syntax::ast::UintTy::*;
1178 let size = match uint_ty {
// `usize` has the target's pointer size.
1184 Usize => self.memory.pointer_size(),
1186 PrimValKind::from_uint_size(size)
1189 ty::TyFloat(FloatTy::F32) => PrimValKind::F32,
1190 ty::TyFloat(FloatTy::F64) => PrimValKind::F64,
1192 ty::TyFnPtr(_) => PrimValKind::FnPtr,
// Only *thin* (sized-pointee) references/raw pointers are a single
// scalar; fat pointers are two scalars and don't belong here.
1194 ty::TyRef(_, ref tam) |
1195 ty::TyRawPtr(ref tam) if self.type_is_sized(tam.ty) => PrimValKind::Ptr,
1197 ty::TyAdt(def, _) if def.is_box() => PrimValKind::Ptr,
// Fallback: consult the layout's ABI; a scalar layout tells us the
// primitive (int with signedness, float, or pointer).
1200 match self.layout_of(ty)?.abi {
1201 layout::Abi::Scalar(ref scalar) => {
1202 use rustc::ty::layout::Primitive::*;
1203 match scalar.value {
1204 Int(i, false) => PrimValKind::from_uint_size(i.size().bytes()),
1205 Int(i, true) => PrimValKind::from_int_size(i.size().bytes()),
1206 F32 => PrimValKind::F32,
1207 F64 => PrimValKind::F64,
1208 Pointer => PrimValKind::Ptr,
1212 _ => return err!(TypeNotPrimitive(ty)),
1216 _ => return err!(TypeNotPrimitive(ty)),
/// Sanity-checks that `val` is a valid bit pattern for primitive type
/// `ty`: a bool must be 0 or 1, and a char must be a valid `char`
/// scalar value. Other types are not checked here.
1222 fn ensure_valid_value(&self, val: PrimVal, ty: Ty<'tcx>) -> EvalResult<'tcx> {
1224 ty::TyBool if val.to_bytes()? > 1 => err!(InvalidBool),
1226 ty::TyChar if ::std::char::from_u32(val.to_bytes()? as u32).is_none() => {
1227 err!(InvalidChar(val.to_bytes()? as u32 as u128))
/// Reads a value of primitive type `ty` from `ptr`. Unlike
/// `try_read_value`, a type that cannot be read as a primitive is
/// treated as an interpreter bug (ICE) rather than returning `None`.
1234 pub fn read_value(&self, ptr: Pointer, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1235 if let Some(val) = self.try_read_value(ptr, align, ty)? {
1238 bug!("primitive read failed for type: {:?}", ty);
/// Reads a (possibly fat) pointer to `pointee_ty` from memory at `ptr`.
///
/// The data pointer is read first; if the pointee is unsized, the extra
/// word (vtable pointer for trait objects, length for slices/str) is
/// read from the next pointer-sized slot and combined into a fat-pointer
/// `Value`.
1242 pub(crate) fn read_ptr(
1246 pointee_ty: Ty<'tcx>,
1247 ) -> EvalResult<'tcx, Value> {
1248 let ptr_size = self.memory.pointer_size();
1249 let p: Pointer = self.memory.read_ptr_sized_unsigned(ptr, ptr_align)?.into();
1250 if self.type_is_sized(pointee_ty) {
1253 trace!("reading fat pointer extra of type {}", pointee_ty);
// The metadata lives directly after the data pointer.
1254 let extra = ptr.offset(ptr_size, self)?;
// What the metadata means depends on the unsized tail of the pointee.
1255 match self.tcx.struct_tail(pointee_ty).sty {
1256 ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
1257 self.memory.read_ptr_sized_unsigned(extra, ptr_align)?.to_ptr()?,
1259 ty::TySlice(..) | ty::TyStr => {
1262 .read_ptr_sized_unsigned(extra, ptr_align)?
1264 Ok(p.to_value_with_len(len as u64))
1266 _ => bug!("unsized primval ptr read from {:?}", pointee_ty),
/// Attempts to read a value of type `ty` from memory as a `Value::ByVal`
/// (or, via `read_ptr`, a fat-pointer pair).
///
/// Returns `Ok(None)` for types that are not representable as a single
/// primitive (the caller must then fall back to by-ref handling).
/// Performs validity checks while reading: invalid bools and chars are
/// reported as errors.
1271 pub fn try_read_value(&self, ptr: Pointer, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
1272 use syntax::ast::FloatTy;
1274 let ptr = ptr.to_ptr()?;
1275 let val = match ty.sty {
1277 let val = self.memory.read_primval(ptr, ptr_align, 1, false)?;
1278 let val = match val {
1279 PrimVal::Bytes(0) => false,
1280 PrimVal::Bytes(1) => true,
1281 // TODO: This seems a little overeager, should reading at bool type already be insta-UB?
1282 _ => return err!(InvalidBool),
1284 PrimVal::from_bool(val)
// chars are 4 bytes; reject bit patterns that aren't valid scalar values.
1287 let c = self.memory.read_primval(ptr, ptr_align, 4, false)?.to_bytes()? as u32;
1288 match ::std::char::from_u32(c) {
1289 Some(ch) => PrimVal::from_char(ch),
1290 None => return err!(InvalidChar(c as u128)),
1294 ty::TyInt(int_ty) => {
1295 use syntax::ast::IntTy::*;
1296 let size = match int_ty {
1302 Isize => self.memory.pointer_size(),
// signed read
1304 self.memory.read_primval(ptr, ptr_align, size, true)?
1307 ty::TyUint(uint_ty) => {
1308 use syntax::ast::UintTy::*;
1309 let size = match uint_ty {
1315 Usize => self.memory.pointer_size(),
// unsigned read
1317 self.memory.read_primval(ptr, ptr_align, size, false)?
1320 ty::TyFloat(FloatTy::F32) => {
1321 PrimVal::Bytes(self.memory.read_primval(ptr, ptr_align, 4, false)?.to_bytes()?)
1323 ty::TyFloat(FloatTy::F64) => {
1324 PrimVal::Bytes(self.memory.read_primval(ptr, ptr_align, 8, false)?.to_bytes()?)
1327 ty::TyFnPtr(_) => self.memory.read_ptr_sized_unsigned(ptr, ptr_align)?,
// References and raw pointers may be fat; delegate to read_ptr.
1328 ty::TyRef(_, ref tam) |
1329 ty::TyRawPtr(ref tam) => return self.read_ptr(ptr, ptr_align, tam.ty).map(Some),
1331 ty::TyAdt(def, _) => {
1333 return self.read_ptr(ptr, ptr_align, ty.boxed_ty()).map(Some);
// Other ADTs: readable only if their layout is a single scalar.
1336 if let layout::Abi::Scalar(ref scalar) = self.layout_of(ty)?.abi {
1337 let mut signed = false;
1338 if let layout::Int(_, s) = scalar.value {
1341 let size = scalar.value.size(self).bytes();
1342 self.memory.read_primval(ptr, ptr_align, size, signed)?
// Not a primitive-representable type; caller must use by-ref access.
1348 _ => return Ok(None),
1351 Ok(Some(Value::ByVal(val)))
/// Returns the innermost (currently executing) stack frame.
/// Panics if the call stack is empty.
1354 pub fn frame(&self) -> &Frame<'mir, 'tcx> {
1355 self.stack.last().expect("no call frames exist")
/// Returns the innermost (currently executing) stack frame, mutably.
/// Panics if the call stack is empty.
1358 pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
1359 self.stack.last_mut().expect("no call frames exist")
/// The MIR of the function executing in the current frame.
1362 pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
/// The substitutions (generic arguments) of the current frame's
/// instance, when a frame exists.
1366 pub fn substs(&self) -> &'tcx Substs<'tcx> {
1367 if let Some(frame) = self.stack.last() {
1368 frame.instance.substs
1382 ) -> EvalResult<'tcx> {
1383 // A<Struct> -> A<Trait> conversion
1384 let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);
// Dispatch on how the pointee is being unsized.
1386 match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
// &[T; N] -> &[T]: attach the statically known length as metadata.
1387 (&ty::TyArray(_, length), &ty::TySlice(_)) => {
1388 let ptr = self.into_ptr(src)?;
1389 // u64 cast is from usize to u64, which is always good
1391 value: ptr.to_value_with_len(length.val.unwrap_u64() ),
1394 self.write_value(valty, dest)
1396 (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
1397 // For now, upcasts are limited to changes in marker
1398 // traits, and hence never actually require an actual
1399 // change to the vtable.
1404 self.write_value(valty, dest)
// Sized -> trait object: build/look up the vtable and attach it.
1406 (_, &ty::TyDynamic(ref data, _)) => {
1407 let trait_ref = data.principal().unwrap().with_self_ty(
1411 let trait_ref = self.tcx.erase_regions(&trait_ref);
1412 let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
1413 let ptr = self.into_ptr(src)?;
1415 value: ptr.to_value_with_vtable(vtable),
1418 self.write_value(valty, dest)
1421 _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
1428 src_layout: TyLayout<'tcx>,
1430 dst_layout: TyLayout<'tcx>,
1431 ) -> EvalResult<'tcx> {
1432 match (&src_layout.ty.sty, &dst_layout.ty.sty) {
// Pointer-to-pointer unsizing (&T/*T in any combination): delegate
// to unsize_into_ptr with the pointee types.
1433 (&ty::TyRef(_, ref s), &ty::TyRef(_, ref d)) |
1434 (&ty::TyRef(_, ref s), &ty::TyRawPtr(ref d)) |
1435 (&ty::TyRawPtr(ref s), &ty::TyRawPtr(ref d)) => {
1436 self.unsize_into_ptr(src, src_layout.ty, dst, dst_layout.ty, s.ty, d.ty)
1438 (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
1439 assert_eq!(def_a, def_b);
// Box<T> -> Box<Trait> behaves like a pointer unsizing; both sides
// must be boxes.
1440 if def_a.is_box() || def_b.is_box() {
1441 if !def_a.is_box() || !def_b.is_box() {
1442 bug!("invalid unsizing between {:?} -> {:?}", src_layout, dst_layout);
1444 return self.unsize_into_ptr(
1449 src_layout.ty.boxed_ty(),
1450 dst_layout.ty.boxed_ty(),
1454 // unsizing of generic struct with pointer fields
1455 // Example: `Arc<T>` -> `Arc<Trait>`
1456 // here we need to increase the size of every &T thin ptr field to a fat ptr
1457 for i in 0..src_layout.fields.count() {
1458 let (dst_f_place, dst_field) =
1459 self.place_field(dst, mir::Field::new(i), dst_layout)?;
// Nothing to copy for zero-sized destination fields.
1460 if dst_field.is_zst() {
// Locate the matching source field value, either through memory
// (ByRef) or directly from the immediate value.
1463 let (src_f_value, src_field) = match src {
1464 Value::ByRef(ptr, align) => {
1465 let src_place = Place::from_primval_ptr(ptr, align);
1466 let (src_f_place, src_field) =
1467 self.place_field(src_place, mir::Field::new(i), src_layout)?;
1468 (self.read_place(src_f_place)?, src_field)
1470 Value::ByVal(_) | Value::ByValPair(..) => {
// An immediate can only hold a single non-ZST field, which must
// therefore sit at offset 0 and span the whole value.
1471 let src_field = src_layout.field(&self, i)?;
1472 assert_eq!(src_layout.fields.offset(i).bytes(), 0);
1473 assert_eq!(src_field.size, src_layout.size);
// Fields of equal type are copied as-is; the one differing field
// is recursively unsized.
1477 if src_field.ty == dst_field.ty {
1478 self.write_value(ValTy {
1483 self.unsize_into(src_f_value, src_field, dst_f_place, dst_field)?;
1490 "unsize_into: invalid conversion: {:?} -> {:?}",
/// Debugging helper: logs a human-readable description of `place`
/// (a frame-local or a raw pointer place) and dumps the contents of
/// any allocations it refers to.
1498 pub fn dump_local(&self, place: Place) {
1501 Place::Local { frame, local } => {
// Collect every alloc id mentioned by the local's value so the
// backing allocations can be dumped afterwards.
1502 let mut allocs = Vec::new();
1503 let mut msg = format!("{:?}", local);
1504 if frame != self.cur_frame() {
1505 write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
1507 write!(msg, ":").unwrap();
1509 match self.stack[frame].get_local(local) {
// A dead local is expected and reported as such; any other access
// failure is a bug in the interpreter.
1511 if let EvalErrorKind::DeadLocal = err.kind {
1512 write!(msg, " is dead").unwrap();
1514 panic!("Failed to access local: {:?}", err);
1517 Ok(Value::ByRef(ptr, align)) => {
1518 match ptr.into_inner_primval() {
1519 PrimVal::Ptr(ptr) => {
1520 write!(msg, " by align({}) ref:", align.abi()).unwrap();
1521 allocs.push(ptr.alloc_id);
1523 ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
1526 Ok(Value::ByVal(val)) => {
1527 write!(msg, " {:?}", val).unwrap();
1528 if let PrimVal::Ptr(ptr) = val {
1529 allocs.push(ptr.alloc_id);
1532 Ok(Value::ByValPair(val1, val2)) => {
1533 write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
1534 if let PrimVal::Ptr(ptr) = val1 {
1535 allocs.push(ptr.alloc_id);
1537 if let PrimVal::Ptr(ptr) = val2 {
1538 allocs.push(ptr.alloc_id);
1544 self.memory.dump_allocs(allocs);
1546 Place::Ptr { ptr, align, .. } => {
1547 match ptr.into_inner_primval() {
1548 PrimVal::Ptr(ptr) => {
1549 trace!("by align({}) ref:", align.abi());
1550 self.memory.dump_alloc(ptr.alloc_id);
1552 ptr => trace!(" integral by ref: {:?}", ptr),
1558 /// Convenience function to ensure correct usage of locals
///
/// Reads the current value of `local` in `frame`, passes it through `f`,
/// and stores the result back. Errors from reading, `f`, or writing are
/// propagated.
1559 pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
1561 F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
1563 let val = self.stack[frame].get_local(local)?;
1564 let new_val = f(self, val)?;
1565 self.stack[frame].set_local(local, new_val)?;
1566 // FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
1567 // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
1568 // self.memory.deallocate(ptr)?;
/// Builds a stack trace (innermost frame first) for diagnostics.
///
/// The outermost frame is skipped (it is just the constant's
/// environment), frames whose span duplicates `explicit_span` or the
/// previous frame's span are suppressed, and closures are labelled
/// "closure" instead of their instance path.
1573 pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> Vec<FrameInfo> {
1574 let mut last_span = None;
1575 let mut frames = Vec::new();
1576 // skip 1 because the last frame is just the environment of the constant
1577 for &Frame { instance, span, .. } in self.stack().iter().skip(1).rev() {
1578 // make sure we don't emit frames that are duplicates of the previous
1579 if explicit_span == Some(span) {
1580 last_span = Some(span);
1583 if let Some(last) = last_span {
1588 last_span = Some(span);
// Closure instances have unhelpful paths; print a fixed label instead.
1590 let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
1591 "closure".to_owned()
1593 instance.to_string()
1595 frames.push(FrameInfo { span, location });
/// Reports the evaluation error `e` to the user.
///
/// Layout and typeck errors are silently skipped (they are reported
/// elsewhere). When a miri-internal backtrace is attached to the error
/// it is rendered to the log first. The diagnostic is emitted either as
/// a hard error (`as_err`) or as a `CONST_ERR` lint on the nearest
/// frame that belongs to the local crate, annotated with the failing
/// span and the generated stack trace.
1600 pub fn report(&self, e: &mut EvalError, as_err: bool, explicit_span: Option<Span>) {
1602 EvalErrorKind::Layout(_) |
1603 EvalErrorKind::TypeckError => return,
// Render the captured (miri-internal) backtrace, if any, to the log.
1606 if let Some(ref mut backtrace) = e.backtrace {
1607 let mut trace_text = "\n\nAn error occurred in miri:\n".to_string();
1608 backtrace.resolve();
1609 write!(trace_text, "backtrace frames: {}\n", backtrace.frames().len()).unwrap();
1610 'frames: for (i, frame) in backtrace.frames().iter().enumerate() {
1611 if frame.symbols().is_empty() {
1612 write!(trace_text, "{}: no symbols\n", i).unwrap();
1614 for symbol in frame.symbols() {
1615 write!(trace_text, "{}: ", i).unwrap();
1616 if let Some(name) = symbol.name() {
1617 write!(trace_text, "{}\n", name).unwrap();
1619 write!(trace_text, "<unknown>\n").unwrap();
1621 write!(trace_text, "\tat ").unwrap();
1622 if let Some(file_path) = symbol.filename() {
1623 write!(trace_text, "{}", file_path.display()).unwrap();
1625 write!(trace_text, "<unknown_file>").unwrap();
1627 if let Some(line) = symbol.lineno() {
1628 write!(trace_text, ":{}\n", line).unwrap();
1630 write!(trace_text, "\n").unwrap();
1634 error!("{}", trace_text);
1636 if let Some(frame) = self.stack().last() {
// Pick the span to blame: the explicit one if given, otherwise the
// current statement (or the terminator once past the last statement).
1637 let block = &frame.mir.basic_blocks()[frame.block];
1638 let span = explicit_span.unwrap_or_else(|| if frame.stmt < block.statements.len() {
1639 block.statements[frame.stmt].source_info.span
1641 block.terminator().source_info.span
1643 trace!("reporting const eval failure at {:?}", span);
1644 let mut err = if as_err {
1645 ::rustc::middle::const_val::struct_error(self.tcx, span, "constant evaluation error")
// Lint path: attach the CONST_ERR lint to the innermost frame whose
// def is local to this crate.
1651 .filter_map(|frame| self.tcx.hir.as_local_node_id(frame.instance.def_id()))
1653 .expect("some part of a failing const eval must be local");
1654 self.tcx.struct_span_lint_node(
1655 ::rustc::lint::builtin::CONST_ERR,
1658 "constant evaluation error",
1661 err.span_label(span, e.to_string());
1662 for FrameInfo { span, location } in self.generate_stacktrace(explicit_span) {
1663 err.span_note(span, &format!("inside call to {}", location));
1667 self.tcx.sess.err(&e.to_string());
1672 impl<'mir, 'tcx> Frame<'mir, 'tcx> {
/// Reads the value of a local; `DeadLocal` if its storage is dead.
1673 pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
1674 // Subtract 1 because we don't store a value for the ReturnPointer, the local with index 0.
1675 self.locals[local.index() - 1].ok_or(EvalErrorKind::DeadLocal.into())
/// Overwrites the value of a live local; `DeadLocal` if its storage is dead.
1678 fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
1679 // Subtract 1 because we don't store a value for the ReturnPointer, the local with index 0.
1680 match self.locals[local.index() - 1] {
1681 None => err!(DeadLocal),
1682 Some(ref mut local) => {
/// Marks a local's storage live, resetting it to `Undef`.
/// Returns the previous value of the local.
1689 pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, Option<Value>> {
1690 trace!("{:?} is now live", local);
1692 let old = self.locals[local.index() - 1];
1693 self.locals[local.index() - 1] = Some(Value::ByVal(PrimVal::Undef)); // StorageLive *always* kills the value that's currently stored
1697 /// Returns the old value of the local
1698 pub fn storage_dead(&mut self, local: mir::Local) -> EvalResult<'tcx, Option<Value>> {
1699 trace!("{:?} is now dead", local);
1701 let old = self.locals[local.index() - 1];
1702 self.locals[local.index() - 1] = None;
1707 // TODO(solson): Upstream these methods into rustc::ty::layout.
/// Resolves the monomorphized `drop_in_place::<ty>` instance via the
/// `DropInPlaceFnLangItem`, using an empty `ParamEnv` with
/// `Reveal::All`. Panics if resolution fails (the type is expected to
/// be fully monomorphic at this point).
1709 pub fn resolve_drop_in_place<'a, 'tcx>(
1710 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1712 ) -> ty::Instance<'tcx> {
1713 let def_id = tcx.require_lang_item(::rustc::middle::lang_items::DropInPlaceFnLangItem);
1714 let substs = tcx.intern_substs(&[ty.into()]);
1715 ty::Instance::resolve(tcx, ty::ParamEnv::empty(Reveal::All), def_id, substs).unwrap()