3 use rustc::hir::def_id::DefId;
4 use rustc::hir::def::Def;
5 use rustc::hir::map::definitions::DefPathData;
6 use rustc::middle::const_val::{ConstVal, ErrKind};
8 use rustc::ty::layout::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout};
9 use rustc::ty::subst::{Subst, Substs};
10 use rustc::ty::{self, Ty, TyCtxt};
11 use rustc::ty::maps::TyCtxtAt;
12 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
13 use rustc::middle::const_val::FrameInfo;
14 use syntax::codemap::{self, Span};
15 use syntax::ast::Mutability;
16 use rustc::mir::interpret::{
17 GlobalId, Value, Pointer, PrimVal, PrimValKind,
18 EvalError, EvalResult, EvalErrorKind, MemoryPointer,
22 use super::{Place, PlaceExtra, Memory,
23 HasMemory, MemoryKind,
/// Central interpreter state for MIR const-evaluation: the type context,
/// the virtual memory system, and the virtual call stack, parameterized
/// over a `Machine` that supplies environment-specific hooks.
26 pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
27 /// Stores the `Machine` instance.
30 /// The results of the type checker, from rustc.
31 pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
33 /// Bounds in scope for polymorphic evaluations.
34 pub param_env: ty::ParamEnv<'tcx>,
36 /// The virtual memory system.
37 pub memory: Memory<'a, 'mir, 'tcx, M>,
39 /// The virtual call stack.
40 pub(crate) stack: Vec<Frame<'mir, 'tcx>>,
42 /// The maximum number of stack frames allowed
43 pub(crate) stack_limit: usize,
45 /// The maximum number of terminators that may be evaluated.
46 /// This prevents infinite loops and huge computations from freezing up const eval.
47 /// Remove once halting problem is solved.
48 pub(crate) terminators_remaining: usize,
/// A single frame on the interpreter's virtual call stack: the callee's MIR,
/// its locals, and bookkeeping for where to write the result and resume.
52 pub struct Frame<'mir, 'tcx: 'mir> {
53 ////////////////////////////////////////////////////////////////////////////////
54 // Function and callsite information
55 ////////////////////////////////////////////////////////////////////////////////
56 /// The MIR for the function called on this frame.
57 pub mir: &'mir mir::Mir<'tcx>,
59 /// The def_id and substs of the current function
60 pub instance: ty::Instance<'tcx>,
62 /// The span of the call site.
63 pub span: codemap::Span,
65 ////////////////////////////////////////////////////////////////////////////////
66 // Return place and locals
67 ////////////////////////////////////////////////////////////////////////////////
68 /// The block to return to when returning from the current stack frame
69 pub return_to_block: StackPopCleanup,
71 /// The location where the result of the current stack frame should be written to.
72 pub return_place: Place,
74 /// The list of locals for this stack frame, stored in order as
75 /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
76 /// `None` represents a local that is currently dead, while a live local
77 /// can either directly contain `PrimVal` or refer to some part of an `Allocation`.
79 /// Before being initialized, arguments are `Value::ByVal(PrimVal::Undef)` and other locals are `None`.
80 pub locals: IndexVec<mir::Local, Option<Value>>,
82 ////////////////////////////////////////////////////////////////////////////////
83 // Current position within the function
84 ////////////////////////////////////////////////////////////////////////////////
85 /// The block that is currently executed (or will be executed after the above call stacks
87 pub block: mir::BasicBlock,
89 /// The index of the currently evaluated statement.
/// What to do with the return place and control flow when a stack frame is popped.
93 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
94 pub enum StackPopCleanup {
95 /// The stackframe existed to compute the initial value of a static/constant, make sure it
96 /// isn't modifyable afterwards in case of constants.
97 /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
98 /// references or deallocated
99 MarkStatic(Mutability),
100 /// A regular stackframe added due to a function call will need to get forwarded to the next
102 Goto(mir::BasicBlock),
103 /// The main function and diverging functions have nowhere to return to
/// A type together with a "packed" flag (fields elided in this view —
/// presumably tracks whether the type sits inside a `#[repr(packed)]` struct).
107 #[derive(Copy, Clone, Debug)]
108 pub struct TyAndPacked<'tcx> {
/// A `Value` paired with the type it was read at.
113 #[derive(Copy, Clone, Debug)]
114 pub struct ValTy<'tcx> {
119 impl<'tcx> ValTy<'tcx> {
/// Wraps an already-evaluated `ty::Const` as a `ValTy`;
/// returns `None` if the constant is still `Unevaluated`.
120 pub fn from(val: &ty::Const<'tcx>) -> Option<Self> {
122 ConstVal::Value(value) => Some(ValTy { value, ty: val.ty }),
123 ConstVal::Unevaluated { .. } => None,
// Deref to the inner `Value` so `ValTy` can be used wherever a `Value` is expected.
128 impl<'tcx> ::std::ops::Deref for ValTy<'tcx> {
130 fn deref(&self) -> &Value {
// Forwarding impls so that `&EvalContext` and `&&mut EvalContext` satisfy the
// traits rustc's layout machinery expects (`HasDataLayout`, `HasTyCtxt`,
// `LayoutOf`); all of them delegate to the wrapped context's `tcx`.
135 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
137 fn data_layout(&self) -> &layout::TargetDataLayout {
138 &self.tcx.data_layout
142 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
143 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
145 fn data_layout(&self) -> &layout::TargetDataLayout {
146 &self.tcx.data_layout
150 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
152 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
157 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
158 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
160 fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
// `layout_of` maps rustc's `LayoutError` into an `EvalErrorKind::Layout`.
165 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for &'a EvalContext<'a, 'mir, 'tcx, M> {
167 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
169 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
170 self.tcx.layout_of(self.param_env.and(ty))
171 .map_err(|layout| EvalErrorKind::Layout(layout).into())
175 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf
176 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
178 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
181 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
// Delegate through the double reference to the shared-reference impl above.
182 (&**self).layout_of(ty)
105 186 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// Constructor (the `fn new` header line is elided in this view — presumably
// `pub fn new(...)`; TODO confirm against the full file). Initializes the
// memory system and hard-codes the terminator budget at one million.
188 tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
189 param_env: ty::ParamEnv<'tcx>,
191 memory_data: M::MemoryData,
197 memory: Memory::new(tcx, memory_data),
199 stack_limit: tcx.sess.const_eval_stack_frame_limit,
200 terminators_remaining: 1_000_000,
/// Allocates fresh interpreter memory sized and aligned for `ty`
/// (as stack-kind memory). Panics if `ty` is unsized.
204 pub fn alloc_ptr(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, MemoryPointer> {
205 let layout = self.layout_of(ty)?;
206 assert!(!layout.is_unsized(), "cannot alloc memory for unsized type");
208 let size = layout.size.bytes();
209 self.memory.allocate(size, layout.align, Some(MemoryKind::Stack))
// Simple accessors for the memory system and the call stack.
212 pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
216 pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
220 pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
/// Index of the current (innermost) stack frame; requires a non-empty stack.
225 pub fn cur_frame(&self) -> usize {
226 assert!(self.stack.len() > 0);
/// Turns a string into a fat-pointer `Value` (cached bytes + length).
230 pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
231 let ptr = self.memory.allocate_cached(s.as_bytes());
234 PrimVal::from_u128(s.len() as u128),
/// Converts a `ConstVal` to an interpreter `Value`, evaluating
/// `Unevaluated` constants by resolving and reading them as globals.
238 pub(super) fn const_to_value(&self, const_val: &ConstVal<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
240 ConstVal::Unevaluated(def_id, substs) => {
241 let instance = self.resolve(def_id, substs)?;
242 self.read_global_as_value(GlobalId {
247 ConstVal::Value(val) => Ok(val),
/// Resolves `def_id`+`substs` to a concrete `Instance`, substituting and
/// normalizing with the current frame's substs first. Resolution failure
/// is reported as `TypeckError`.
251 pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
252 trace!("resolve: {:?}, {:#?}", def_id, substs);
253 trace!("substs: {:#?}", self.substs());
254 trace!("param_env: {:#?}", self.param_env);
255 let substs = self.tcx.subst_and_normalize_erasing_regions(
260 ty::Instance::resolve(
265 ).ok_or_else(|| EvalErrorKind::TypeckError.into()) // turn error prop into a panic to expose associated type in const issue
/// Whether `ty` is `Sized` under the current param environment.
268 pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
269 ty.is_sized(self.tcx, self.param_env)
// `load_mir` (header line elided in this view): fetches the MIR body for an
// instance, refusing to run anything whose typeck results are tainted by errors.
274 instance: ty::InstanceDef<'tcx>,
275 ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
276 // do not continue if typeck errors occurred (can only occur in local crate)
277 let did = instance.def_id();
278 if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
279 return err!(TypeckError);
281 trace!("load mir {:?}", instance);
283 ty::InstanceDef::Item(def_id) => {
// Plain items may have no MIR available (e.g. cross-crate); report `NoMirFor`.
284 self.tcx.maybe_optimized_mir(def_id).ok_or_else(||
285 EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
// Shims (drop glue, vtable shims, ...) are synthesized by `instance_mir`.
288 _ => Ok(self.tcx.instance_mir(instance)),
/// Substitutes `substs` into `ty` and fully normalizes it, erasing regions.
292 pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
293 // miri doesn't care about lifetimes, and will choke on some crazy ones
294 // let's simply get rid of them
295 let substituted = ty.subst(*self.tcx, substs);
296 self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
299 /// Return the size and alignment of the value at the given type.
300 /// Note that the value does not matter if the type is sized. For unsized types,
301 /// the value has to be a fat pointer, and we only care about the "extra" data in it.
302 pub fn size_and_align_of_dst(
306 ) -> EvalResult<'tcx, (Size, Align)> {
307 let layout = self.layout_of(ty)?;
// Sized types: the statically known size/align is the answer.
308 if !layout.is_unsized() {
309 Ok(layout.size_and_align())
// Structs/tuples whose last field is unsized: statically-known prefix
// plus a recursive computation for the trailing unsized field.
312 ty::TyAdt(..) | ty::TyTuple(..) => {
313 // First get the size of all statically known fields.
314 // Don't use type_of::sizing_type_of because that expects t to be sized,
315 // and it also rounds up to alignment, which we want to avoid,
316 // as the unsized field's alignment could be smaller.
317 assert!(!ty.is_simd());
318 debug!("DST {} layout: {:?}", ty, layout);
320 let sized_size = layout.fields.offset(layout.fields.count() - 1);
321 let sized_align = layout.align;
323 "DST {} statically sized prefix size: {:?} align: {:?}",
329 // Recurse to get the size of the dynamically sized field (must be
331 let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty;
332 let (unsized_size, unsized_align) =
333 self.size_and_align_of_dst(field_ty, value)?;
335 // FIXME (#26403, #27023): We should be adding padding
336 // to `sized_size` (to accommodate the `unsized_align`
337 // required of the unsized field that follows) before
338 // summing it with `sized_size`. (Note that since #26403
339 // is unfixed, we do not yet add the necessary padding
340 // here. But this is where the add would go.)
342 // Return the sum of sizes and max of aligns.
343 let size = sized_size + unsized_size;
345 // Choose max of two known alignments (combined value must
346 // be aligned according to more restrictive of the two).
347 let align = sized_align.max(unsized_align);
349 // Issue #27023: must add any necessary padding to `size`
350 // (to make it a multiple of `align`) before returning it.
352 // Namely, the returned size should be, in C notation:
354 // `size + ((size & (align-1)) ? align : 0)`
356 // emulated via the semi-standard fast bit trick:
358 // `(size + (align-1)) & -align`
360 Ok((size.abi_align(align), align))
// Trait objects: size/align live in the vtable.
362 ty::TyDynamic(..) => {
363 let (_, vtable) = self.into_ptr_vtable_pair(value)?;
364 // the second entry in the vtable is the dynamic size of the object.
365 self.read_size_and_align_from_vtable(vtable)
// Slices and str: element size times the length carried in the fat pointer.
368 ty::TySlice(_) | ty::TyStr => {
369 let (elem_size, align) = layout.field(&self, 0)?.size_and_align();
370 let (_, len) = self.into_slice(value)?;
371 Ok((elem_size * len, align))
374 _ => bug!("size_of_val::<{:?}>", ty),
/// Pushes a new frame for `instance`/`mir` onto the virtual call stack.
/// Locals start as `Undef`; locals that have a `StorageLive` statement
/// somewhere in the body are considered dead until that statement runs.
/// Errors with `StackFrameLimitReached` past the configured stack limit.
379 pub fn push_stack_frame(
381 instance: ty::Instance<'tcx>,
383 mir: &'mir mir::Mir<'tcx>,
385 return_to_block: StackPopCleanup,
386 ) -> EvalResult<'tcx> {
387 ::log_settings::settings().indentation += 1;
389 let locals = if mir.local_decls.len() > 1 {
390 let mut locals = IndexVec::from_elem(Some(Value::ByVal(PrimVal::Undef)), &mir.local_decls);
391 match self.tcx.describe_def(instance.def_id()) {
392 // statics and constants don't have `Storage*` statements, no need to look for them
393 Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
395 trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
// Scan every statement for storage markers to find locals that start dead.
396 for block in mir.basic_blocks() {
397 for stmt in block.statements.iter() {
398 use rustc::mir::StatementKind::{StorageDead, StorageLive};
401 StorageDead(local) => locals[local] = None,
410 // don't allocate at all for trivial constants
414 self.stack.push(Frame {
416 block: mir::START_BLOCK,
425 self.memory.cur_frame = self.cur_frame();
427 if self.stack.len() > self.stack_limit {
428 err!(StackFrameLimitReached)
/// Pops the innermost frame: runs the machine's region-end hook, performs
/// the frame's `StackPopCleanup` action, and frees its by-ref locals.
434 pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
435 ::log_settings::settings().indentation -= 1;
436 M::end_region(self, None)?;
437 let frame = self.stack.pop().expect(
438 "tried to pop a stack frame, but there were none",
440 if !self.stack.is_empty() {
441 // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
442 self.memory.cur_frame = self.cur_frame();
444 match frame.return_to_block {
// Static/const initializer frame: freeze (or mark) the computed memory.
445 StackPopCleanup::MarkStatic(mutable) => {
446 if let Place::Ptr { ptr, .. } = frame.return_place {
447 // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
448 self.memory.mark_static_initialized(
449 ptr.to_ptr()?.alloc_id,
453 bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
456 StackPopCleanup::Goto(target) => self.goto_block(target),
457 StackPopCleanup::None => {}
459 // deallocate all locals that are backed by an allocation
460 for local in frame.locals {
461 self.deallocate_local(local)?;
/// Frees the allocation backing a local, if any; `ByVal` locals and
/// dead (`None`) locals need no cleanup.
467 pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
468 if let Some(Value::ByRef(ptr, _align)) = local {
469 trace!("deallocating local");
470 let ptr = ptr.to_ptr()?;
471 self.memory.dump_alloc(ptr.alloc_id);
472 self.memory.deallocate_local(ptr)?;
477 /// Evaluate an assignment statement.
479 /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
480 /// type writes its results directly into the memory specified by the place.
481 pub(super) fn eval_rvalue_into_place(
483 rvalue: &mir::Rvalue<'tcx>,
484 place: &mir::Place<'tcx>,
485 ) -> EvalResult<'tcx> {
486 let dest = self.eval_place(place)?;
487 let dest_ty = self.place_ty(place);
489 use rustc::mir::Rvalue::*;
// Plain copy/move of an operand into the destination.
491 Use(ref operand) => {
492 let value = self.eval_operand(operand)?.value;
497 self.write_value(valty, dest)?;
500 BinaryOp(bin_op, ref left, ref right) => {
501 let left = self.eval_operand(left)?;
502 let right = self.eval_operand(right)?;
503 if self.intrinsic_overflowing(
511 // There was an overflow in an unchecked binop. Right now, we consider this an error and bail out.
512 // The rationale is that the reason rustc emits unchecked binops in release mode (vs. the checked binops
513 // it emits in debug mode) is performance, but it doesn't cost us any performance in miri.
514 // If, however, the compiler ever starts transforming unchecked intrinsics into unchecked binops,
515 // we have to go back to just ignoring the overflow here.
516 return err!(OverflowingMath);
520 CheckedBinaryOp(bin_op, ref left, ref right) => {
521 let left = self.eval_operand(left)?;
522 let right = self.eval_operand(right)?;
523 self.intrinsic_with_overflow(
532 UnaryOp(un_op, ref operand) => {
533 let val = self.eval_operand_to_primval(operand)?;
534 let val = self.unary_op(un_op, val, dest_ty)?;
// Struct/enum/tuple/array literal: write the discriminant (for enums),
// then each field in turn, skipping zero-sized fields.
542 Aggregate(ref kind, ref operands) => {
543 self.inc_step_counter_and_check_limit(operands.len());
545 let (dest, active_field_index) = match **kind {
546 mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
547 self.write_discriminant_value(dest_ty, dest, variant_index)?;
548 if adt_def.is_enum() {
549 (self.place_downcast(dest, variant_index)?, active_field_index)
551 (dest, active_field_index)
557 let layout = self.layout_of(dest_ty)?;
558 for (i, operand) in operands.iter().enumerate() {
559 let value = self.eval_operand(operand)?;
560 // Ignore zero-sized fields.
561 if !self.layout_of(value.ty)?.is_zst() {
562 let field_index = active_field_index.unwrap_or(i);
563 let (field_dest, _) = self.place_field(dest, mir::Field::new(field_index), layout)?;
564 self.write_value(value, field_dest)?;
// `[elem; N]` repeat expression: copy the element to successive offsets.
569 Repeat(ref operand, _) => {
570 let (elem_ty, length) = match dest_ty.sty {
571 ty::TyArray(elem_ty, n) => (elem_ty, n.val.unwrap_u64()),
574 "tried to assign array-repeat to non-array type {:?}",
579 let elem_size = self.layout_of(elem_ty)?.size.bytes();
580 let value = self.eval_operand(operand)?.value;
582 let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
584 // FIXME: speed up repeat filling
586 let elem_dest = dest.offset(i * elem_size, &self)?;
587 self.write_value_to_ptr(value, elem_dest, dest_align, elem_ty)?;
592 // FIXME(CTFE): don't allow computing the length of arrays in const eval
593 let src = self.eval_place(place)?;
594 let ty = self.place_ty(place);
595 let (_, len) = src.elem_ty_and_len(ty);
598 PrimVal::from_u128(len as u128),
603 Ref(_, _, ref place) => {
604 let src = self.eval_place(place)?;
605 // We ignore the alignment of the place here -- special handling for packed structs ends
606 // at the `&` operator.
607 let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();
609 let val = match extra {
610 PlaceExtra::None => ptr.to_value(),
611 PlaceExtra::Length(len) => ptr.to_value_with_len(len),
612 PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
613 PlaceExtra::DowncastVariant(..) => {
614 bug!("attempted to take a reference to an enum downcast place")
621 self.write_value(valty, dest)?;
// `box` expression: delegate allocation to the Machine hook.
624 NullaryOp(mir::NullOp::Box, ty) => {
625 let ty = self.monomorphize(ty, self.substs());
626 M::box_alloc(self, ty, dest)?;
629 NullaryOp(mir::NullOp::SizeOf, ty) => {
630 let ty = self.monomorphize(ty, self.substs());
631 let layout = self.layout_of(ty)?;
632 assert!(!layout.is_unsized(),
633 "SizeOf nullary MIR operator called for unsized type");
636 PrimVal::from_u128(layout.size.bytes() as u128),
641 Cast(kind, ref operand, cast_ty) => {
642 debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
643 use rustc::mir::CastKind::*;
646 let src = self.eval_operand(operand)?;
647 let src_layout = self.layout_of(src.ty)?;
648 let dst_layout = self.layout_of(dest_ty)?;
649 self.unsize_into(src.value, src_layout, dest, dst_layout)?;
653 let src = self.eval_operand(operand)?;
654 if self.type_is_fat_ptr(src.ty) {
655 match (src.value, self.type_is_fat_ptr(dest_ty)) {
656 (Value::ByRef { .. }, _) |
657 (Value::ByValPair(..), true) => {
662 self.write_value(valty, dest)?;
// fat ptr -> thin ptr cast: drop the extra (length/vtable) component.
664 (Value::ByValPair(data, _), false) => {
666 value: Value::ByVal(data),
669 self.write_value(valty, dest)?;
671 (Value::ByVal(_), _) => bug!("expected fat ptr"),
674 let src_layout = self.layout_of(src.ty)?;
675 match src_layout.variants {
676 layout::Variants::Single { index } => {
677 if let Some(def) = src.ty.ty_adt_def() {
679 .discriminant_for_variant(*self.tcx, index)
681 return self.write_primval(
683 PrimVal::Bytes(discr_val),
687 layout::Variants::Tagged { .. } |
688 layout::Variants::NicheFilling { .. } => {},
691 let src_val = self.value_to_primval(src)?;
692 let dest_val = self.cast_primval(src_val, src.ty, dest_ty)?;
694 value: Value::ByVal(dest_val),
697 self.write_value(valty, dest)?;
// ReifyFnPointer: turn a zero-sized fn item into an actual fn pointer.
702 match self.eval_operand(operand)?.ty.sty {
703 ty::TyFnDef(def_id, substs) => {
704 if self.tcx.has_attr(def_id, "rustc_args_required_const") {
705 bug!("reifying a fn ptr that requires \
708 let instance: EvalResult<'tcx, _> = ty::Instance::resolve(
713 ).ok_or_else(|| EvalErrorKind::TypeckError.into());
714 let fn_ptr = self.memory.create_fn_alloc(instance?);
716 value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
719 self.write_value(valty, dest)?;
721 ref other => bug!("reify fn pointer on {:?}", other),
// fn -> unsafe fn: representation is unchanged, just copy the value.
728 let mut src = self.eval_operand(operand)?;
730 self.write_value(src, dest)?;
732 ref other => bug!("fn to unsafe fn cast on {:?}", other),
736 ClosureFnPointer => {
737 match self.eval_operand(operand)?.ty.sty {
738 ty::TyClosure(def_id, substs) => {
739 let substs = self.tcx.subst_and_normalize_erasing_regions(
741 ty::ParamEnv::reveal_all(),
744 let instance = ty::Instance::resolve_closure(
748 ty::ClosureKind::FnOnce,
750 let fn_ptr = self.memory.create_fn_alloc(instance);
752 value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
755 self.write_value(valty, dest)?;
757 ref other => bug!("closure fn pointer on {:?}", other),
// Read the enum discriminant of `place` and store it into `dest`.
763 Discriminant(ref place) => {
764 let ty = self.place_ty(place);
765 let place = self.eval_place(place)?;
766 let discr_val = self.read_discriminant_value(place, ty)?;
767 self.write_primval(dest, PrimVal::Bytes(discr_val), dest_ty)?;
771 self.dump_local(dest);
/// Whether `ty` is a pointer-like type carrying extra data
/// (ref/raw ptr/Box to an unsized pointee).
776 pub(super) fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
778 ty::TyRawPtr(ref tam) |
779 ty::TyRef(_, ref tam) => !self.type_is_sized(tam.ty),
780 ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
/// Evaluates an operand and reduces the result to a single `PrimVal`.
785 pub(super) fn eval_operand_to_primval(
787 op: &mir::Operand<'tcx>,
788 ) -> EvalResult<'tcx, PrimVal> {
789 let valty = self.eval_operand(op)?;
790 self.value_to_primval(valty)
/// Evaluates a list of call operands into `ValTy` arguments.
793 pub(crate) fn operands_to_args(
795 ops: &[mir::Operand<'tcx>],
796 ) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
798 .map(|op| self.eval_operand(op))
/// Evaluates a MIR operand (Copy/Move of a place, or a Constant) to a
/// `ValTy`, monomorphizing its type with the current frame's substs.
802 pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
803 use rustc::mir::Operand::*;
804 let ty = self.monomorphize(op.ty(self.mir(), *self.tcx), self.substs());
806 // FIXME: do some more logic on `move` to invalidate the old location
810 value: self.eval_and_read_place(place)?,
815 Constant(ref constant) => {
816 use rustc::mir::Literal;
817 let mir::Constant { ref literal, .. } = **constant;
818 let value = match *literal {
819 Literal::Value { ref value } => self.const_to_value(&value.val, ty)?,
// Promoted constants are globals keyed by the current instance + promoted index.
821 Literal::Promoted { index } => {
822 self.read_global_as_value(GlobalId {
823 instance: self.frame().instance,
824 promoted: Some(index),
837 /// reads a tag and produces the corresponding variant index
838 pub fn read_discriminant_as_variant_index(
842 ) -> EvalResult<'tcx, usize> {
843 let layout = self.layout_of(ty)?;
844 match layout.variants {
845 ty::layout::Variants::Single { index } => Ok(index),
// Tagged enums: the tag stores the *discriminant value*, which must be
// searched for among the ADT's declared discriminants to get the index.
846 ty::layout::Variants::Tagged { .. } => {
847 let discr_val = self.read_discriminant_value(place, ty)?;
850 .expect("tagged layout for non adt")
851 .discriminants(self.tcx.tcx)
852 .position(|var| var.val == discr_val)
853 .ok_or_else(|| EvalErrorKind::InvalidDiscriminant.into())
// Niche-filling enums: `read_discriminant_value` already yields the index.
855 ty::layout::Variants::NicheFilling { .. } => {
856 let discr_val = self.read_discriminant_value(place, ty)?;
857 assert_eq!(discr_val as usize as u128, discr_val);
858 Ok(discr_val as usize)
/// Reads the discriminant of the enum at `place`, decoding the in-memory
/// tag per the layout (`Single`, `Tagged`, or `NicheFilling`).
863 pub fn read_discriminant_value(
867 ) -> EvalResult<'tcx, u128> {
868 let layout = self.layout_of(ty)?;
869 trace!("read_discriminant_value {:#?}", layout);
870 if layout.abi == layout::Abi::Uninhabited {
874 match layout.variants {
// Univariant: the discriminant is statically known from the layout.
875 layout::Variants::Single { index } => {
876 let discr_val = ty.ty_adt_def().map_or(
878 |def| def.discriminant_for_variant(*self.tcx, index).val);
879 return Ok(discr_val);
881 layout::Variants::Tagged { .. } |
882 layout::Variants::NicheFilling { .. } => {},
// The tag lives in field 0 of the enum layout.
885 let (discr_place, discr) = self.place_field(place, mir::Field::new(0), layout)?;
886 trace!("discr place: {:?}, {:?}", discr_place, discr);
887 let raw_discr = self.value_to_primval(ValTy {
888 value: self.read_place(discr_place)?,
891 let discr_val = match layout.variants {
892 layout::Variants::Single { .. } => bug!(),
893 // FIXME: should we catch invalid discriminants here?
894 layout::Variants::Tagged { .. } => {
895 if discr.ty.is_signed() {
896 let i = raw_discr.to_bytes()? as i128;
897 // going from layout tag type to typeck discriminant type
898 // requires first sign extending with the layout discriminant
899 let amt = 128 - discr.size.bits();
900 let sexted = (i << amt) >> amt;
901 // and then zeroing with the typeck discriminant type
903 .ty_adt_def().expect("tagged layout corresponds to adt")
906 let discr_ty = layout::Integer::from_attr(self.tcx.tcx, discr_ty);
907 let amt = 128 - discr_ty.size().bits();
908 let truncatee = sexted as u128;
909 (truncatee << amt) >> amt
911 raw_discr.to_bytes()?
// Niche encoding: niche values map onto `niche_variants`; anything
// outside that range denotes the dataful variant.
914 layout::Variants::NicheFilling {
920 let variants_start = *niche_variants.start() as u128;
921 let variants_end = *niche_variants.end() as u128;
924 assert!(niche_start == 0);
925 assert!(variants_start == variants_end);
926 dataful_variant as u128
928 PrimVal::Bytes(raw_discr) => {
929 let discr = raw_discr.wrapping_sub(niche_start)
930 .wrapping_add(variants_start);
931 if variants_start <= discr && discr <= variants_end {
934 dataful_variant as u128
// Reading an uninitialized tag is an error, not UB we silently accept.
937 PrimVal::Undef => return err!(ReadUndefBytes),
/// Writes the tag encoding `variant_index` into the enum at `dest`,
/// using the encoding dictated by the layout.
946 pub fn write_discriminant_value(
950 variant_index: usize,
951 ) -> EvalResult<'tcx> {
952 let layout = self.layout_of(dest_ty)?;
954 match layout.variants {
// Univariant: nothing to store; any other index must be uninhabited.
955 layout::Variants::Single { index } => {
956 if index != variant_index {
957 // If the layout of an enum is `Single`, all
958 // other variants are necessarily uninhabited.
959 assert_eq!(layout.for_variant(&self, variant_index).abi,
960 layout::Abi::Uninhabited);
963 layout::Variants::Tagged { ref discr, .. } => {
964 let discr_val = dest_ty.ty_adt_def().unwrap()
965 .discriminant_for_variant(*self.tcx, variant_index)
968 // raw discriminants for enums are isize or bigger during
969 // their computation, but the in-memory tag is the smallest possible
971 let size = discr.value.size(self.tcx.tcx).bits();
// Truncate the discriminant to the tag's bit width before storing.
972 let amt = 128 - size;
973 let discr_val = (discr_val << amt) >> amt;
975 let (discr_dest, discr) = self.place_field(dest, mir::Field::new(0), layout)?;
976 self.write_primval(discr_dest, PrimVal::Bytes(discr_val), discr.ty)?;
// Niche encoding: only non-dataful variants need an explicit niche write.
978 layout::Variants::NicheFilling {
984 if variant_index != dataful_variant {
985 let (niche_dest, niche) =
986 self.place_field(dest, mir::Field::new(0), layout)?;
987 let niche_value = ((variant_index - niche_variants.start()) as u128)
988 .wrapping_add(niche_start);
989 self.write_primval(niche_dest, PrimVal::Bytes(niche_value), niche.ty)?;
/// Reads a global (static or constant). Statics are returned as a `ByRef`
/// into their cached allocation; constants are const-evaluated and converted.
997 pub fn read_global_as_value(&self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
998 if self.tcx.is_static(gid.instance.def_id()).is_some() {
1002 .cache_static(gid.instance.def_id());
1003 let layout = self.layout_of(ty)?;
1004 let ptr = MemoryPointer::new(alloc_id, 0);
1005 return Ok(Value::ByRef(ptr.into(), layout.align))
1007 let cv = self.const_eval(gid)?;
1008 self.const_to_value(&cv.val, ty)
/// Runs rustc's `const_eval` query for `gid`, translating its error kinds
/// back into `EvalErrorKind`s (typeck/layout failures vs. referenced-constant).
1011 pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
1012 let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
1013 ty::ParamEnv::reveal_all()
1017 self.tcx.const_eval(param_env.and(gid)).map_err(|err| match *err.kind {
1018 ErrKind::Miri(ref err, _) => match err.kind {
1019 EvalErrorKind::TypeckError |
1020 EvalErrorKind::Layout(_) => EvalErrorKind::TypeckError.into(),
1021 _ => EvalErrorKind::ReferencedConstant.into(),
1023 ErrKind::TypeckError => EvalErrorKind::TypeckError.into(),
1024 ref other => bug!("const eval returned {:?}", other),
/// Ensures `place` refers to actual memory: a `Local` currently held
/// `ByVal` gets spilled into a fresh allocation (and the local is updated
/// to point at it); `Ptr` places are returned unchanged.
1028 pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
1029 let new_place = match place {
1030 Place::Local { frame, local } => {
1031 match self.stack[frame].locals[local] {
1032 None => return err!(DeadLocal),
// Already memory-backed: just repackage the pointer as a place.
1033 Some(Value::ByRef(ptr, align)) => {
1037 extra: PlaceExtra::None,
1041 let ty = self.stack[frame].mir.local_decls[local].ty;
1042 let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
1043 let layout = self.layout_of(ty)?;
1044 let ptr = self.alloc_ptr(ty)?;
1045 self.stack[frame].locals[local] =
1046 Some(Value::ByRef(ptr.into(), layout.align)); // it stays live
1047 let place = Place::from_ptr(ptr, layout.align);
1048 self.write_value(ValTy { value: val, ty }, place)?;
1053 Place::Ptr { .. } => place,
1058 /// ensures this Value is not a ByRef
1059 pub fn follow_by_ref_value(
1063 ) -> EvalResult<'tcx, Value> {
// Dereference memory-backed values; others pass through (elided arm below).
1065 Value::ByRef(ptr, align) => {
1066 self.read_value(ptr, align, ty)
/// Reduces a `ValTy` to a single `PrimVal`, validating it on the way;
/// fat-pointer pairs are a bug here.
1072 pub fn value_to_primval(
1074 ValTy { value, ty } : ValTy<'tcx>,
1075 ) -> EvalResult<'tcx, PrimVal> {
1076 match self.follow_by_ref_value(value, ty)? {
1077 Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),
1079 Value::ByVal(primval) => {
1080 // TODO: Do we really want insta-UB here?
1081 self.ensure_valid_value(primval, ty)?;
1085 Value::ByValPair(..) => bug!("value_to_primval can't work with fat pointers"),
/// Convenience wrapper: writes a pointer value into `dest` at `dest_ty`.
1089 pub fn write_ptr(&mut self, dest: Place, val: Pointer, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
1091 value: val.to_value(),
1094 self.write_value(valty, dest)
/// Convenience wrapper: writes a single `PrimVal` into `dest`.
1097 pub fn write_primval(
1102 ) -> EvalResult<'tcx> {
1104 value: Value::ByVal(val),
1107 self.write_value(valty, dest)
// `write_value` (header line elided in this view): stores a value into a
// place, dispatching on whether the place is memory or a stack local.
1112 ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
1114 ) -> EvalResult<'tcx> {
1115 //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
1116 // Note that it is really important that the type here is the right one, and matches the type things are read at.
1117 // In case `src_val` is a `ByValPair`, we don't do any magic here to handle padding properly, which is only
1118 // correct if we never look at this data with the wrong type.
1121 Place::Ptr { ptr, align, extra } => {
1122 assert_eq!(extra, PlaceExtra::None);
1123 self.write_value_to_ptr(src_val, ptr, align, dest_ty)
1126 Place::Local { frame, local } => {
1127 let dest = self.stack[frame].get_local(local)?;
1128 self.write_value_possibly_by_val(
1130 |this, val| this.stack[frame].set_local(local, val),
1138 // The cases here can be a bit subtle. Read carefully!
// Writes `src_val` over a local whose previous value is `old_dest_val`,
// either in place (if the destination is memory-backed) or via the
// `write_dest` callback (if the local can stay `ByVal`).
1139 fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
1143 old_dest_val: Value,
1145 ) -> EvalResult<'tcx> {
1146 if let Value::ByRef(dest_ptr, align) = old_dest_val {
1147 // If the value is already `ByRef` (that is, backed by an `Allocation`),
1148 // then we must write the new value into this allocation, because there may be
1149 // other pointers into the allocation. These other pointers are logically
1150 // pointers into the local variable, and must be able to observe the change.
1152 // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
1153 // knew for certain that there were no outstanding pointers to this allocation.
1154 self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
1155 } else if let Value::ByRef(src_ptr, align) = src_val {
1156 // If the value is not `ByRef`, then we know there are no pointers to it
1157 // and we can simply overwrite the `Value` in the locals array directly.
1159 // In this specific case, where the source value is `ByRef`, we must duplicate
1160 // the allocation, because this is a by-value operation. It would be incorrect
1161 // if they referred to the same allocation, since then a change to one would
1162 // implicitly change the other.
1164 // It is a valid optimization to attempt reading a primitive value out of the
1165 // source and write that into the destination without making an allocation, so
1167 if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
1168 write_dest(self, src_val)?;
1170 let dest_ptr = self.alloc_ptr(dest_ty)?.into();
1171 let layout = self.layout_of(dest_ty)?;
1172 self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size.bytes(), false)?;
1173 write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
1176 // Finally, we have the simple case where neither source nor destination are
1177 // `ByRef`. We may simply copy the source value over the destination.
1178 write_dest(self, src_val)?;
/// Stores a `Value` into raw memory at `dest`, dispatching on the value's
/// representation (`ByRef` copy, scalar store, or scalar-pair store).
1183 pub fn write_value_to_ptr(
1189 ) -> EvalResult<'tcx> {
1190 let layout = self.layout_of(dest_ty)?;
1191 trace!("write_value_to_ptr: {:#?}, {}, {:#?}", value, dest_ty, layout);
// Memory-to-memory: byte copy of the whole layout.
1193 Value::ByRef(ptr, align) => {
1194 self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size.bytes(), false)
1196 Value::ByVal(primval) => {
// Signedness is taken from the scalar layout (for sign extension on store).
1197 let signed = match layout.abi {
1198 layout::Abi::Scalar(ref scal) => match scal.value {
1199 layout::Primitive::Int(_, signed) => signed,
1202 _ if primval.is_undef() => false,
1203 _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout)
1205 self.memory.write_primval(dest, dest_align, primval, layout.size.bytes(), signed)
1207 Value::ByValPair(a_val, b_val) => {
1208 trace!("write_value_to_ptr valpair: {:#?}", layout);
1209 let (a, b) = match layout.abi {
1210 layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
1211 _ => bug!("write_value_to_ptr: invalid ByValPair layout: {:#?}", layout)
1213 let (a_size, b_size) = (a.size(&self), b.size(&self));
// Second component is placed at the first ABI-aligned offset after `a`.
1215 let b_offset = a_size.abi_align(b.align(&self));
1216 let b_ptr = dest.offset(b_offset.bytes(), &self)?.into();
1217 // TODO: What about signedess?
1218 self.memory.write_primval(a_ptr, dest_align, a_val, a_size.bytes(), false)?;
1219 self.memory.write_primval(b_ptr, dest_align, b_val, b_size.bytes(), false)
/// Maps a primitive Rust type to the `PrimValKind` used by the
/// interpreter's scalar machinery. Non-primitive types (whose layout is
/// not a single scalar) produce `TypeNotPrimitive`.
1224 pub fn ty_to_primval_kind(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, PrimValKind> {
1225 use syntax::ast::FloatTy;
1227 let kind = match ty.sty {
1228 ty::TyBool => PrimValKind::Bool,
1229 ty::TyChar => PrimValKind::Char,
1231 ty::TyInt(int_ty) => {
1232 use syntax::ast::IntTy::*;
1233 let size = match int_ty {
// `isize` is pointer-sized on the target.
1239 Isize => self.memory.pointer_size(),
1241 PrimValKind::from_int_size(size)
1244 ty::TyUint(uint_ty) => {
1245 use syntax::ast::UintTy::*;
1246 let size = match uint_ty {
// `usize` is pointer-sized on the target.
1252 Usize => self.memory.pointer_size(),
1254 PrimValKind::from_uint_size(size)
1257 ty::TyFloat(FloatTy::F32) => PrimValKind::F32,
1258 ty::TyFloat(FloatTy::F64) => PrimValKind::F64,
1260 ty::TyFnPtr(_) => PrimValKind::FnPtr,
// Thin (sized-pointee) references/raw pointers and `Box` are plain
// pointers; fat pointers do not reach this arm because of the guard.
1262 ty::TyRef(_, ref tam) |
1263 ty::TyRawPtr(ref tam) if self.type_is_sized(tam.ty) => PrimValKind::Ptr,
1265 ty::TyAdt(def, _) if def.is_box() => PrimValKind::Ptr,
// Other ADTs: fall back to the computed layout — an ADT whose ABI is a
// single scalar (presumably e.g. fieldless enums or scalar newtypes;
// confirm against layout code) still has a primitive kind.
1268 match self.layout_of(ty)?.abi {
1269 layout::Abi::Scalar(ref scalar) => {
1270 use rustc::ty::layout::Primitive::*;
1271 match scalar.value {
1272 Int(i, false) => PrimValKind::from_uint_size(i.size().bytes()),
1273 Int(i, true) => PrimValKind::from_int_size(i.size().bytes()),
1274 F32 => PrimValKind::F32,
1275 F64 => PrimValKind::F64,
1276 Pointer => PrimValKind::Ptr,
1280 _ => return err!(TypeNotPrimitive(ty)),
1284 _ => return err!(TypeNotPrimitive(ty)),
/// Sanity-checks that `val` is a legal bit pattern for `ty`.
/// The visible arms reject out-of-range `bool` (anything but 0/1) and
/// invalid `char` scalar values.
1290 fn ensure_valid_value(&self, val: PrimVal, ty: Ty<'tcx>) -> EvalResult<'tcx> {
1292 ty::TyBool if val.to_bytes()? > 1 => err!(InvalidBool),
1294 ty::TyChar if ::std::char::from_u32(val.to_bytes()? as u32).is_none() => {
1295 err!(InvalidChar(val.to_bytes()? as u32 as u128))
/// Like `try_read_value`, but the caller asserts the type is readable as a
/// primitive: a `None` result from `try_read_value` is treated as a bug,
/// not a recoverable condition.
1302 pub fn read_value(&self, ptr: Pointer, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1303 if let Some(val) = self.try_read_value(ptr, align, ty)? {
1306 bug!("primitive read failed for type: {:?}", ty);
/// Reads a (possibly fat) pointer to `pointee_ty` from memory at `ptr`.
/// A sized pointee yields a thin pointer. For an unsized pointee the
/// "extra" word stored one pointer-size past the data pointer is read as
/// well: a vtable pointer for trait objects, a length for slices/`str`.
1310 pub(crate) fn read_ptr(
1314 pointee_ty: Ty<'tcx>,
1315 ) -> EvalResult<'tcx, Value> {
1316 let ptr_size = self.memory.pointer_size();
1317 let p: Pointer = self.memory.read_ptr_sized(ptr, ptr_align)?.into();
1318 if self.type_is_sized(pointee_ty) {
// Unsized pointee: fetch the fat-pointer metadata word at `ptr + ptr_size`.
1321 trace!("reading fat pointer extra of type {}", pointee_ty);
1322 let extra = ptr.offset(ptr_size, self)?;
// Dispatch on the *tail* of the pointee, since the unsized part of a
// struct is its last field.
1323 match self.tcx.struct_tail(pointee_ty).sty {
1324 ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
1325 self.memory.read_ptr_sized(extra, ptr_align)?.to_ptr()?,
1327 ty::TySlice(..) | ty::TyStr => {
1330 .read_ptr_sized(extra, ptr_align)?
1332 Ok(p.to_value_with_len(len as u64))
1334 _ => bug!("unsized primval ptr read from {:?}", pointee_ty),
/// Attempts to read a primitive `Value` of type `ty` from memory at `ptr`.
/// Returns `Ok(None)` when `ty` is not a type this fast path can represent
/// as a scalar (the caller must then keep the value `ByRef`).
1339 pub fn try_read_value(&self, ptr: Pointer, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
1340 use syntax::ast::FloatTy;
1342 let layout = self.layout_of(ty)?;
1343 // do the strongest layout check of the two
1344 let align = layout.align.max(ptr_align);
1345 self.memory.check_align(ptr, align)?;
// Zero-sized types carry no data; represent them as an undef scalar.
1347 if layout.size.bytes() == 0 {
1348 return Ok(Some(Value::ByVal(PrimVal::Undef)));
1351 let ptr = ptr.to_ptr()?;
1352 let val = match ty.sty {
// bool: validate the byte is exactly 0 or 1 while reading.
1354 let val = self.memory.read_primval(ptr, ptr_align, 1)?;
1355 let val = match val {
1356 PrimVal::Bytes(0) => false,
1357 PrimVal::Bytes(1) => true,
1358 // TODO: This seems a little overeager, should reading at bool type already be insta-UB?
1359 _ => return err!(InvalidBool),
1361 PrimVal::from_bool(val)
// char: 4 bytes, must be a valid Unicode scalar value.
1364 let c = self.memory.read_primval(ptr, ptr_align, 4)?.to_bytes()? as u32;
1365 match ::std::char::from_u32(c) {
1366 Some(ch) => PrimVal::from_char(ch),
1367 None => return err!(InvalidChar(c as u128)),
1371 ty::TyInt(int_ty) => {
1372 use syntax::ast::IntTy::*;
1373 let size = match int_ty {
1379 Isize => self.memory.pointer_size(),
1381 self.memory.read_primval(ptr, ptr_align, size)?
1384 ty::TyUint(uint_ty) => {
1385 use syntax::ast::UintTy::*;
1386 let size = match uint_ty {
1392 Usize => self.memory.pointer_size(),
1394 self.memory.read_primval(ptr, ptr_align, size)?
// Floats are read as raw bytes of the appropriate width.
1397 ty::TyFloat(FloatTy::F32) => {
1398 PrimVal::Bytes(self.memory.read_primval(ptr, ptr_align, 4)?.to_bytes()?)
1400 ty::TyFloat(FloatTy::F64) => {
1401 PrimVal::Bytes(self.memory.read_primval(ptr, ptr_align, 8)?.to_bytes()?)
1404 ty::TyFnPtr(_) => self.memory.read_ptr_sized(ptr, ptr_align)?,
// References, raw pointers, and Box defer to `read_ptr`, which also
// handles the fat-pointer (slice/trait-object) cases.
1405 ty::TyRef(_, ref tam) |
1406 ty::TyRawPtr(ref tam) => return self.read_ptr(ptr, ptr_align, tam.ty).map(Some),
1408 ty::TyAdt(def, _) => {
1410 return self.read_ptr(ptr, ptr_align, ty.boxed_ty()).map(Some);
// Other ADTs are readable here only when their ABI is a single scalar.
1413 if let layout::Abi::Scalar(ref scalar) = self.layout_of(ty)?.abi {
1414 let size = scalar.value.size(self).bytes();
1415 self.memory.read_primval(ptr, ptr_align, size)?
1421 _ => return Ok(None),
1424 Ok(Some(Value::ByVal(val)))
/// Returns the current (innermost) stack frame; panics if the stack is empty.
1427 pub fn frame(&self) -> &Frame<'mir, 'tcx> {
1428 self.stack.last().expect("no call frames exist")
/// Mutable counterpart of `frame`; panics if the stack is empty.
1431 pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
1432 self.stack.last_mut().expect("no call frames exist")
/// Returns the MIR of the function executing in the current frame.
1435 pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
/// Returns the substitutions of the current frame's instance when a frame
/// exists (the fallback for an empty stack is elided in this view).
1439 pub fn substs(&self) -> &'tcx Substs<'tcx> {
1440 if let Some(frame) = self.stack.last() {
1441 frame.instance.substs
// Tail of `unsize_into_ptr` (the signature's parameter lines are elided in
// this view): performs pointer unsizing from `sty` to `dty`, writing the
// resulting (possibly fat) pointer value to `dest`.
1455 ) -> EvalResult<'tcx> {
1456 // A<Struct> -> A<Trait> conversion
1457 let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);
1459 match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
// &[T; N] -> &[T]: attach the statically known length `N` to the pointer.
1460 (&ty::TyArray(_, length), &ty::TySlice(_)) => {
1461 let ptr = self.into_ptr(src)?;
1462 // u64 cast is from usize to u64, which is always good
1464 value: ptr.to_value_with_len(length.val.unwrap_u64() ),
1467 self.write_value(valty, dest)
// Trait object -> trait object: the pointer (and vtable) pass through.
1469 (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
1470 // For now, upcasts are limited to changes in marker
1471 // traits, and hence never require an actual
1472 // change to the vtable.
1477 self.write_value(valty, dest)
// Sized pointee -> trait object: look up/build the vtable for the concrete
// type and fatten the pointer with it.
1479 (_, &ty::TyDynamic(ref data, _)) => {
1480 let trait_ref = data.principal().unwrap().with_self_ty(
1484 let trait_ref = self.tcx.erase_regions(&trait_ref);
1485 let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
1486 let ptr = self.into_ptr(src)?;
1488 value: ptr.to_value_with_vtable(vtable),
1491 self.write_value(valty, dest)
1494 _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
// Body of `unsize_into` (the `pub fn unsize_into(` line and some parameters
// are elided in this view): performs an unsizing coercion from `src` into
// the destination place, recursing through wrapper structs like `Arc<T>`.
1501 src_layout: TyLayout<'tcx>,
1503 dst_layout: TyLayout<'tcx>,
1504 ) -> EvalResult<'tcx> {
1505 match (&src_layout.ty.sty, &dst_layout.ty.sty) {
// Direct pointer-to-pointer unsizing (& -> &, & -> *, * -> *).
1506 (&ty::TyRef(_, ref s), &ty::TyRef(_, ref d)) |
1507 (&ty::TyRef(_, ref s), &ty::TyRawPtr(ref d)) |
1508 (&ty::TyRawPtr(ref s), &ty::TyRawPtr(ref d)) => {
1509 self.unsize_into_ptr(src, src_layout.ty, dst, dst_layout.ty, s.ty, d.ty)
1511 (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
1512 assert_eq!(def_a, def_b);
// Box<T> -> Box<Trait> behaves like the pointer case applied to the boxed
// types; a Box on only one side is an ill-formed unsizing.
1513 if def_a.is_box() || def_b.is_box() {
1514 if !def_a.is_box() || !def_b.is_box() {
1515 bug!("invalid unsizing between {:?} -> {:?}", src_layout, dst_layout);
1517 return self.unsize_into_ptr(
1522 src_layout.ty.boxed_ty(),
1523 dst_layout.ty.boxed_ty(),
1527 // unsizing of generic struct with pointer fields
1528 // Example: `Arc<T>` -> `Arc<Trait>`
1529 // here we need to increase the size of every &T thin ptr field to a fat ptr
1530 for i in 0..src_layout.fields.count() {
1531 let (dst_f_place, dst_field) =
1532 self.place_field(dst, mir::Field::new(i), dst_layout)?;
// Zero-sized destination fields carry no data: nothing to copy.
1533 if dst_field.is_zst() {
1536 let (src_f_value, src_field) = match src {
1537 Value::ByRef(ptr, align) => {
1538 let src_place = Place::from_primval_ptr(ptr, align);
1539 let (src_f_place, src_field) =
1540 self.place_field(src_place, mir::Field::new(i), src_layout)?;
1541 (self.read_place(src_f_place)?, src_field)
// A by-value source must consist of exactly one non-ZST field at offset 0
// spanning the whole value — asserted below.
1543 Value::ByVal(_) | Value::ByValPair(..) => {
1544 let src_field = src_layout.field(&self, i)?;
1545 assert_eq!(src_layout.fields.offset(i).bytes(), 0);
1546 assert_eq!(src_field.size, src_layout.size);
// Fields of identical type are copied verbatim; a field whose type differs
// is the one being unsized, so recurse into it.
1550 if src_field.ty == dst_field.ty {
1551 self.write_value(ValTy {
1556 self.unsize_into(src_f_value, src_field, dst_f_place, dst_field)?;
1563 "unsize_into: invalid conversion: {:?} -> {:?}",
/// Trace-logs a `Place` for debugging: the local's value, how it is stored
/// (`ByRef`/`ByVal`/`ByValPair`), and a dump of every allocation it points
/// into. Does nothing unless trace logging is enabled.
1571 pub fn dump_local(&self, place: Place) {
1573 if !log_enabled!(::log::Level::Trace) {
1577 Place::Local { frame, local } => {
// Collect the alloc ids referenced by the value so they can all be dumped
// after the summary line.
1578 let mut allocs = Vec::new();
1579 let mut msg = format!("{:?}", local);
1580 if frame != self.cur_frame() {
1581 write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
1583 write!(msg, ":").unwrap();
1585 match self.stack[frame].get_local(local) {
// A dead local is reported, any other access failure is a bug.
1587 if let EvalErrorKind::DeadLocal = err.kind {
1588 write!(msg, " is dead").unwrap();
1590 panic!("Failed to access local: {:?}", err);
1593 Ok(Value::ByRef(ptr, align)) => {
1594 match ptr.into_inner_primval() {
1595 PrimVal::Ptr(ptr) => {
1596 write!(msg, " by align({}) ref:", align.abi()).unwrap();
1597 allocs.push(ptr.alloc_id);
1599 ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
1602 Ok(Value::ByVal(val)) => {
1603 write!(msg, " {:?}", val).unwrap();
1604 if let PrimVal::Ptr(ptr) = val {
1605 allocs.push(ptr.alloc_id);
1608 Ok(Value::ByValPair(val1, val2)) => {
1609 write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
1610 if let PrimVal::Ptr(ptr) = val1 {
1611 allocs.push(ptr.alloc_id);
1613 if let PrimVal::Ptr(ptr) = val2 {
1614 allocs.push(ptr.alloc_id);
1620 self.memory.dump_allocs(allocs);
// A raw pointer place: dump the single allocation it targets, if any.
1622 Place::Ptr { ptr, align, .. } => {
1623 match ptr.into_inner_primval() {
1624 PrimVal::Ptr(ptr) => {
1625 trace!("by align({}) ref:", align.abi());
1626 self.memory.dump_alloc(ptr.alloc_id);
1628 ptr => trace!(" integral by ref: {:?}", ptr),
1634 /// Convenience function to ensure correct usage of locals:
/// reads the local, applies `f`, and writes the result back. Errors from
/// either access (e.g. a dead local) or from `f` itself are propagated.
1635 pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
1637 F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
1639 let val = self.stack[frame].get_local(local)?;
1640 let new_val = f(self, val)?;
1641 self.stack[frame].set_local(local, new_val)?;
1642 // FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
1643 // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
1644 // self.memory.deallocate(ptr)?;
/// Collects a stack trace of `FrameInfo` for diagnostics, suppressing
/// frames that would duplicate `explicit_span` or the previously emitted
/// span. Returns the frames plus the span the error should point at.
1649 pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
1650 let mut last_span = None;
1651 let mut frames = Vec::new();
1652 // skip 1 because the last frame is just the environment of the constant
1653 for &Frame { instance, span, .. } in self.stack().iter().skip(1).rev() {
1654 // make sure we don't emit frames that are duplicates of the previous
1655 if explicit_span == Some(span) {
1656 last_span = Some(span);
1659 if let Some(last) = last_span {
1664 last_span = Some(span);
// Closures have no meaningful path; label them simply as "closure".
1666 let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
1667 "closure".to_owned()
1669 instance.to_string()
1671 frames.push(FrameInfo { span, location });
1673 trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
1674 (frames, self.tcx.span)
/// Reports `e` to the user, either as a hard error (`as_err == true`) or as
/// a `CONST_ERR` lint attached to the closest frame that belongs to the
/// local crate. Layout and typeck errors are suppressed here — presumably
/// because they are reported elsewhere; confirm against callers.
1677 pub fn report(&self, e: &mut EvalError, as_err: bool, explicit_span: Option<Span>) {
1679 EvalErrorKind::Layout(_) |
1680 EvalErrorKind::TypeckError => return,
// If the error captured a native backtrace of the interpreter itself,
// resolve the symbols and log the whole thing (for miri developers).
1683 if let Some(ref mut backtrace) = e.backtrace {
1684 let mut trace_text = "\n\nAn error occurred in miri:\n".to_string();
1685 backtrace.resolve();
1686 write!(trace_text, "backtrace frames: {}\n", backtrace.frames().len()).unwrap();
1687 'frames: for (i, frame) in backtrace.frames().iter().enumerate() {
1688 if frame.symbols().is_empty() {
1689 write!(trace_text, "{}: no symbols\n", i).unwrap();
1691 for symbol in frame.symbols() {
1692 write!(trace_text, "{}: ", i).unwrap();
1693 if let Some(name) = symbol.name() {
1694 write!(trace_text, "{}\n", name).unwrap();
1696 write!(trace_text, "<unknown>\n").unwrap();
1698 write!(trace_text, "\tat ").unwrap();
1699 if let Some(file_path) = symbol.filename() {
1700 write!(trace_text, "{}", file_path.display()).unwrap();
1702 write!(trace_text, "<unknown_file>").unwrap();
1704 if let Some(line) = symbol.lineno() {
1705 write!(trace_text, ":{}\n", line).unwrap();
1707 write!(trace_text, "\n").unwrap();
1711 error!("{}", trace_text);
1713 if let Some(frame) = self.stack().last() {
1714 let block = &frame.mir.basic_blocks()[frame.block];
// Point at the current statement if one remains in the block, otherwise at
// the block's terminator.
1715 let span = explicit_span.unwrap_or_else(|| if frame.stmt < block.statements.len() {
1716 block.statements[frame.stmt].source_info.span
1718 block.terminator().source_info.span
1720 trace!("reporting const eval failure at {:?}", span);
1721 let mut err = if as_err {
1722 ::rustc::middle::const_val::struct_error(*self.tcx, span, "constant evaluation error")
// Lint path: find a frame whose def-id maps to a local HIR node so the
// lint has a node to attach to.
1728 .filter_map(|frame| self.tcx.hir.as_local_node_id(frame.instance.def_id()))
1730 .expect("some part of a failing const eval must be local");
1731 self.tcx.struct_span_lint_node(
1732 ::rustc::lint::builtin::CONST_ERR,
1735 "constant evaluation error",
// Annotate the diagnostic with the full interpreter stack trace.
1738 let (frames, span) = self.generate_stacktrace(explicit_span);
1739 err.span_label(span, e.to_string());
1740 for FrameInfo { span, location } in frames {
1741 err.span_note(span, &format!("inside call to `{}`", location));
// Fallback when there is no frame at all: plain session error.
1745 self.tcx.sess.err(&e.to_string());
/// Sign-extends `value` from the bit-width of `ty` to the full `u128`
/// (delegates to the free function in the parent module).
1749 pub fn sign_extend(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
1750 super::sign_extend(self.tcx.tcx, value, ty)
/// Truncates `value` to the bit-width of `ty` (delegates to the free
/// function in the parent module).
1753 pub fn truncate(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
1754 super::truncate(self.tcx.tcx, value, ty)
1758 impl<'mir, 'tcx> Frame<'mir, 'tcx> {
/// Reads a local's current value; accessing a dead (storage-dead or
/// never-live) local yields `DeadLocal`.
1759 pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
1760 self.locals[local].ok_or_else(|| EvalErrorKind::DeadLocal.into())
/// Overwrites a local's value; writing to a dead local is a `DeadLocal`
/// error rather than an implicit revival.
1763 fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
1764 match self.locals[local] {
1765 None => err!(DeadLocal),
1766 Some(ref mut local) => {
/// Marks `local` as live, resetting its value to `Undef`.
/// Returns the value that was previously stored (if any).
1773 pub fn storage_live(&mut self, local: mir::Local) -> Option<Value> {
1774 trace!("{:?} is now live", local);
1776 // StorageLive *always* kills the value that's currently stored
1777 mem::replace(&mut self.locals[local], Some(Value::ByVal(PrimVal::Undef)))
1780 /// Marks `local` as dead and returns the old value of the local
/// (`None` if it was already dead).
1781 pub fn storage_dead(&mut self, local: mir::Local) -> Option<Value> {
1782 trace!("{:?} is now dead", local);
1784 self.locals[local].take()