1 use std::collections::HashSet;
4 use rustc::hir::def_id::DefId;
5 use rustc::hir::map::definitions::DefPathData;
6 use rustc::middle::const_val::{ConstVal, ErrKind};
8 use rustc::traits::Reveal;
9 use rustc::ty::layout::{self, Size, Align, HasDataLayout, LayoutOf, TyLayout};
10 use rustc::ty::subst::{Subst, Substs};
11 use rustc::ty::{self, Ty, TyCtxt};
12 use rustc_data_structures::indexed_vec::Idx;
13 use syntax::codemap::{self, DUMMY_SP, Span};
14 use syntax::ast::Mutability;
15 use rustc::mir::interpret::{
16 GlobalId, Value, Pointer, PrimVal, PrimValKind,
17 EvalError, EvalResult, EvalErrorKind, MemoryPointer,
20 use super::{Place, PlaceExtra, Memory,
21 HasMemory, MemoryKind, operator,
// Central state of the MIR interpreter used for compile-time evaluation:
// the type context, the parameter environment, the virtual memory system,
// and the virtual call stack, plus the resource limits that bound evaluation.
// NOTE(review): some fields (e.g. the `Machine` instance itself) and the
// closing brace are elided from this excerpt.
24 pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
25 /// Stores the `Machine` instance.
28 /// The results of the type checker, from rustc.
29 pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
31 /// Bounds in scope for polymorphic evaluations.
32 pub param_env: ty::ParamEnv<'tcx>,
34 /// The virtual memory system.
35 pub memory: Memory<'a, 'mir, 'tcx, M>,
37 /// The virtual call stack.
38 pub(crate) stack: Vec<Frame<'mir, 'tcx>>,
40 /// The maximum number of stack frames allowed
41 pub(crate) stack_limit: usize,
43 /// The maximum number of operations that may be executed.
44 /// This prevents infinite loops and huge computations from freezing up const eval.
45 /// Remove once halting problem is solved.
46 pub(crate) steps_remaining: usize,
// A single frame on the interpreter's virtual call stack: the callee's MIR,
// its monomorphized instance, where to write the return value, the frame's
// locals, and the current execution position within the function.
50 pub struct Frame<'mir, 'tcx: 'mir> {
51 ////////////////////////////////////////////////////////////////////////////////
52 // Function and callsite information
53 ////////////////////////////////////////////////////////////////////////////////
54 /// The MIR for the function called on this frame.
55 pub mir: &'mir mir::Mir<'tcx>,
57 /// The def_id and substs of the current function
58 pub instance: ty::Instance<'tcx>,
60 /// The span of the call site.
61 pub span: codemap::Span,
63 ////////////////////////////////////////////////////////////////////////////////
64 // Return place and locals
65 ////////////////////////////////////////////////////////////////////////////////
66 /// The block to return to when returning from the current stack frame
67 pub return_to_block: StackPopCleanup,
69 /// The location where the result of the current stack frame should be written to.
70 pub return_place: Place,
72 /// The list of locals for this stack frame, stored in order as
73 /// `[arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
74 /// `None` represents a local that is currently dead, while a live local
75 /// can either directly contain `PrimVal` or refer to some part of an `Allocation`.
77 /// Before being initialized, arguments are `Value::ByVal(PrimVal::Undef)` and other locals are `None`.
78 pub locals: Vec<Option<Value>>,
80 ////////////////////////////////////////////////////////////////////////////////
81 // Current position within the function
82 ////////////////////////////////////////////////////////////////////////////////
83 /// The block that is currently executed (or will be executed after the above call stacks
85 pub block: mir::BasicBlock,
87 /// The index of the currently evaluated statement.
// What to do when a stack frame is popped: statics get frozen/marked,
// ordinary calls jump to the caller's continuation block, and `main` /
// diverging functions have no continuation at all.
// NOTE(review): the `None` variant and closing brace are elided from this excerpt.
91 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
92 pub enum StackPopCleanup {
93 /// The stackframe existed to compute the initial value of a static/constant, make sure it
94 /// isn't modifiable afterwards in case of constants.
95 /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
96 /// references or deallocated
97 MarkStatic(Mutability),
98 /// A regular stackframe added due to a function call will need to get forwarded to the next
100 Goto(mir::BasicBlock),
101 /// The main function and diverging functions have nowhere to return to
// Pairs a type with packed-ness information. Fields are elided from this
// excerpt — presumably a `Ty` plus a "packed" flag; confirm against full source.
105 #[derive(Copy, Clone, Debug)]
106 pub struct TyAndPacked<'tcx> {
111 #[derive(Copy, Clone, Debug)]
// A `Value` together with its type (see the construction in `from` below).
112 pub struct ValTy<'tcx> {
117 impl<'tcx> ValTy<'tcx> {
// Convert an already-evaluated constant into a `ValTy`; returns `None`
// when the constant has not been evaluated yet.
118 pub fn from(val: &ty::Const<'tcx>) -> Option<Self> {
120 ConstVal::Value(value) => Some(ValTy { value, ty: val.ty }),
121 ConstVal::Unevaluated { .. } => None,
// Let a `ValTy` be used wherever a `&Value` is expected, by dereferencing
// to its inner value.
126 impl<'tcx> ::std::ops::Deref for ValTy<'tcx> {
128 fn deref(&self) -> &Value {
// The following impls let `&EvalContext` (and, for re-borrow convenience,
// `&&mut EvalContext`) act as a layout/type-context provider, so layout
// queries can be made directly on the interpreter.
133 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
135 fn data_layout(&self) -> &layout::TargetDataLayout {
136 &self.tcx.data_layout
140 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
141 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
143 fn data_layout(&self) -> &layout::TargetDataLayout {
144 &self.tcx.data_layout
148 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
150 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
155 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
156 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
158 fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
// Layout computation in the interpreter's parameter environment; layout
// failures are surfaced as interpreter errors (`EvalErrorKind::Layout`)
// rather than as rustc layout errors.
163 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf<Ty<'tcx>> for &'a EvalContext<'a, 'mir, 'tcx, M> {
164 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
166 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
167 self.tcx.layout_of(self.param_env.and(ty))
168 .map_err(|layout| EvalErrorKind::Layout(layout).into())
172 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf<Ty<'tcx>>
173 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
174 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
// Delegate to the shared-reference impl above.
177 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
178 (&**self).layout_of(ty)
// Inherent methods of the interpreter. The constructor's header is elided
// from this excerpt; the visible body wires up the memory system and pulls
// the stack-frame and step limits from the session options.
182 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
184 tcx: TyCtxt<'a, 'tcx, 'tcx>,
185 param_env: ty::ParamEnv<'tcx>,
187 memory_data: M::MemoryData,
193 memory: Memory::new(tcx, memory_data),
195 stack_limit: tcx.sess.const_eval_stack_frame_limit.get(),
196 steps_remaining: tcx.sess.const_eval_step_limit.get(),
// Allocate stack-kind memory big enough for a value of `ty`.
// Only valid for sized types — unsized types have no statically known size.
200 pub fn alloc_ptr(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, MemoryPointer> {
201 let layout = self.layout_of(ty)?;
202 assert!(!layout.is_unsized(), "cannot alloc memory for unsized type");
204 let size = layout.size.bytes();
205 self.memory.allocate(size, layout.align, Some(MemoryKind::Stack))
// Plain accessors for the memory system and the call stack (bodies elided
// in this excerpt).
208 pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
212 pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
216 pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
// Index of the currently executing frame; panics (via the assert) if the
// stack is empty.
221 pub fn cur_frame(&self) -> usize {
222 assert!(self.stack.len() > 0);
// Turn a Rust string literal into an interpreter value: a (cached) pointer
// to the bytes plus the length — i.e. the fat-pointer representation of `&str`.
226 pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
227 let ptr = self.memory.allocate_cached(s.as_bytes());
230 PrimVal::from_u128(s.len() as u128),
// Convert a `ConstVal` into an interpreter `Value`. Unevaluated constants
// are resolved to an instance and read as globals; already-evaluated
// constants are returned directly.
234 pub(super) fn const_to_value(&self, const_val: &ConstVal<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
236 ConstVal::Unevaluated(def_id, substs) => {
237 let instance = self.resolve(def_id, substs)?;
238 self.read_global_as_value(GlobalId {
243 ConstVal::Value(val) => Ok(val),
// Resolve a (def_id, substs) pair to a monomorphic instance, applying the
// current frame's substitutions first.
247 pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
248 let substs = self.tcx.trans_apply_param_substs(self.substs(), &substs);
249 ty::Instance::resolve(
254 ).ok_or(EvalErrorKind::TypeckError.into()) // turn error prop into a panic to expose associated type in const issue
// Whether `ty` is `Sized` in the current parameter environment.
257 pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
258 ty.is_sized(self.tcx.at(DUMMY_SP), self.param_env)
// NOTE(review): the fn header is elided from this excerpt; judging by the
// `trace!("load mir ...")` call this is the MIR loader for an instance —
// confirm against the full source. It refuses to proceed when the local
// crate's typeck tables are tainted by errors, and fails with `NoMirFor`
// when no optimized MIR is available for an `Item` instance.
263 instance: ty::InstanceDef<'tcx>,
264 ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
265 // do not continue if typeck errors occurred (can only occur in local crate)
266 let did = instance.def_id();
267 if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
268 return err!(TypeckError);
270 trace!("load mir {:?}", instance);
272 ty::InstanceDef::Item(def_id) => {
273 self.tcx.maybe_optimized_mir(def_id).ok_or_else(|| {
274 EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
// Shims, drop glue, etc. are synthesized by `instance_mir`.
277 _ => Ok(self.tcx.instance_mir(instance)),
// Fully monomorphize `ty` under `substs`: erase regions (the interpreter
// does not track lifetimes), substitute type parameters, then normalize
// away projections.
281 pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
282 // miri doesn't care about lifetimes, and will choke on some crazy ones
283 // let's simply get rid of them
284 let without_lifetimes = self.tcx.erase_regions(&ty);
285 let substituted = without_lifetimes.subst(self.tcx, substs);
286 let substituted = self.tcx.fully_normalize_monormophic_ty(&substituted);
290 /// Return the size and alignment of the value at the given type.
291 /// Note that the value does not matter if the type is sized. For unsized types,
292 /// the value has to be a fat pointer, and we only care about the "extra" data in it.
293 pub fn size_and_align_of_dst(
297 ) -> EvalResult<'tcx, (Size, Align)> {
298 let layout = self.layout_of(ty)?;
// Sized types: the answer comes straight from the layout.
299 if !layout.is_unsized() {
300 Ok(layout.size_and_align())
// Structs/tuples whose last field is unsized: combine the statically known
// prefix with the dynamically computed size of the trailing field.
303 ty::TyAdt(..) | ty::TyTuple(..) => {
304 // First get the size of all statically known fields.
305 // Don't use type_of::sizing_type_of because that expects t to be sized,
306 // and it also rounds up to alignment, which we want to avoid,
307 // as the unsized field's alignment could be smaller.
308 assert!(!ty.is_simd());
309 debug!("DST {} layout: {:?}", ty, layout);
311 let sized_size = layout.fields.offset(layout.fields.count() - 1);
312 let sized_align = layout.align;
314 "DST {} statically sized prefix size: {:?} align: {:?}",
320 // Recurse to get the size of the dynamically sized field (must be
322 let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty;
323 let (unsized_size, unsized_align) =
324 self.size_and_align_of_dst(field_ty, value)?;
326 // FIXME (#26403, #27023): We should be adding padding
327 // to `sized_size` (to accommodate the `unsized_align`
328 // required of the unsized field that follows) before
329 // summing it with `sized_size`. (Note that since #26403
330 // is unfixed, we do not yet add the necessary padding
331 // here. But this is where the add would go.)
333 // Return the sum of sizes and max of aligns.
334 let size = sized_size + unsized_size;
336 // Choose max of two known alignments (combined value must
337 // be aligned according to more restrictive of the two).
338 let align = sized_align.max(unsized_align);
340 // Issue #27023: must add any necessary padding to `size`
341 // (to make it a multiple of `align`) before returning it.
343 // Namely, the returned size should be, in C notation:
345 // `size + ((size & (align-1)) ? align : 0)`
347 // emulated via the semi-standard fast bit trick:
349 // `(size + (align-1)) & -align`
351 Ok((size.abi_align(align), align))
// Trait objects: the size/align live in the vtable the fat pointer carries.
353 ty::TyDynamic(..) => {
354 let (_, vtable) = self.into_ptr_vtable_pair(value)?;
355 // the second entry in the vtable is the dynamic size of the object.
356 self.read_size_and_align_from_vtable(vtable)
// Slices and str: element size times the length stored in the fat pointer.
359 ty::TySlice(_) | ty::TyStr => {
360 let (elem_size, align) = layout.field(&self, 0)?.size_and_align();
361 let (_, len) = self.into_slice(value)?;
362 Ok((elem_size * len, align))
365 _ => bug!("size_of_val::<{:?}>", ty),
// Push a new frame onto the virtual call stack for a call to `instance`
// with the given MIR, and initialize its locals. Fails with
// `StackFrameLimitReached` when the configured stack limit is exceeded.
370 pub fn push_stack_frame(
372 instance: ty::Instance<'tcx>,
374 mir: &'mir mir::Mir<'tcx>,
376 return_to_block: StackPopCleanup,
377 ) -> EvalResult<'tcx> {
378 ::log_settings::settings().indentation += 1;
380 /// Return the set of locals that have a storage annotation anywhere
381 fn collect_storage_annotations<'mir, 'tcx>(mir: &'mir mir::Mir<'tcx>) -> HashSet<mir::Local> {
382 use rustc::mir::StatementKind::*;
384 let mut set = HashSet::new();
385 for block in mir.basic_blocks() {
386 for stmt in block.statements.iter() {
389 StorageDead(local) => {
// Locals with explicit StorageLive/StorageDead markers start out dead
// (`None`); everything else starts live as `Undef`.
399 // Subtract 1 because `local_decls` includes the ReturnMemoryPointer, but we don't store a local
401 let num_locals = mir.local_decls.len() - 1;
404 let annotated_locals = collect_storage_annotations(mir);
405 let mut locals = vec![None; num_locals];
406 for i in 0..num_locals {
407 let local = mir::Local::new(i + 1);
408 if !annotated_locals.contains(&local) {
409 locals[i] = Some(Value::ByVal(PrimVal::Undef));
415 self.stack.push(Frame {
// Execution of the new frame always begins at the MIR entry block.
417 block: mir::START_BLOCK,
// Keep the memory system's notion of the current frame in sync.
426 self.memory.cur_frame = self.cur_frame();
428 if self.stack.len() > self.stack_limit {
429 err!(StackFrameLimitReached)
// Pop the top stack frame: run the machine's region-end hook, perform the
// frame's cleanup action (freeze a static, or jump to the caller's
// continuation block), and deallocate the frame's allocation-backed locals.
435 pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
436 ::log_settings::settings().indentation -= 1;
437 M::end_region(self, None)?;
438 let frame = self.stack.pop().expect(
439 "tried to pop a stack frame, but there were none",
441 if !self.stack.is_empty() {
442 // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
443 self.memory.cur_frame = self.cur_frame();
445 match frame.return_to_block {
// A static/const finished computing: mark its backing allocation as
// initialized (and immutable, unless it is a `static mut`).
446 StackPopCleanup::MarkStatic(mutable) => {
447 if let Place::Ptr { ptr, .. } = frame.return_place {
448 // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
449 self.memory.mark_static_initialized(
450 ptr.to_ptr()?.alloc_id,
454 bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
457 StackPopCleanup::Goto(target) => self.goto_block(target),
458 StackPopCleanup::None => {}
460 // deallocate all locals that are backed by an allocation
461 for local in frame.locals {
462 self.deallocate_local(local)?;
// Free the allocation behind a local, if it has one. Locals that are dead
// (`None`) or stored by-value need no cleanup.
468 pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
469 if let Some(Value::ByRef(ptr, _align)) = local {
470 trace!("deallocating local");
471 let ptr = ptr.to_ptr()?;
// Dump is for tracing/debugging only; the deallocation below is the real work.
472 self.memory.dump_alloc(ptr.alloc_id);
473 self.memory.deallocate_local(ptr)?;
478 /// Evaluate an assignment statement.
480 /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
481 /// type writes its results directly into the memory specified by the place.
482 pub(super) fn eval_rvalue_into_place(
484 rvalue: &mir::Rvalue<'tcx>,
485 place: &mir::Place<'tcx>,
486 ) -> EvalResult<'tcx> {
487 let dest = self.eval_place(place)?;
488 let dest_ty = self.place_ty(place);
490 use rustc::mir::Rvalue::*;
// Plain move/copy of an operand into the destination.
492 Use(ref operand) => {
493 let value = self.eval_operand(operand)?.value;
498 self.write_value(valty, dest)?;
// Unchecked binary operation; overflow here is treated as an error.
501 BinaryOp(bin_op, ref left, ref right) => {
502 let left = self.eval_operand(left)?;
503 let right = self.eval_operand(right)?;
504 if self.intrinsic_overflowing(
512 // There was an overflow in an unchecked binop. Right now, we consider this an error and bail out.
513 // The rationale is that the reason rustc emits unchecked binops in release mode (vs. the checked binops
514 // it emits in debug mode) is performance, but it doesn't cost us any performance in miri.
515 // If, however, the compiler ever starts transforming unchecked intrinsics into unchecked binops,
516 // we have to go back to just ignoring the overflow here.
517 return err!(OverflowingMath);
// Checked binary operation: writes a (result, overflowed) pair.
521 CheckedBinaryOp(bin_op, ref left, ref right) => {
522 let left = self.eval_operand(left)?;
523 let right = self.eval_operand(right)?;
524 self.intrinsic_with_overflow(
// Unary operation on a primitive value.
533 UnaryOp(un_op, ref operand) => {
534 let val = self.eval_operand_to_primval(operand)?;
535 let kind = self.ty_to_primval_kind(dest_ty)?;
538 operator::unary_op(un_op, val, kind)?,
// Aggregate construction (struct/enum/tuple/array): write the discriminant
// if needed, then each field in turn. Counts against the step limit so huge
// aggregates can't stall const eval.
543 Aggregate(ref kind, ref operands) => {
544 self.inc_step_counter_and_check_limit(operands.len())?;
546 let (dest, active_field_index) = match **kind {
547 mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
548 self.write_discriminant_value(dest_ty, dest, variant_index)?;
549 if adt_def.is_enum() {
550 (self.place_downcast(dest, variant_index)?, active_field_index)
552 (dest, active_field_index)
558 let layout = self.layout_of(dest_ty)?;
559 for (i, operand) in operands.iter().enumerate() {
560 let value = self.eval_operand(operand)?;
561 // Ignore zero-sized fields.
562 if !self.layout_of(value.ty)?.is_zst() {
// `active_field_index` is Some for union construction: the single
// initialized field lives at that index.
563 let field_index = active_field_index.unwrap_or(i);
564 let (field_dest, _) = self.place_field(dest, mir::Field::new(field_index), layout)?;
565 self.write_value(value, field_dest)?;
// `[x; N]` repetition: evaluate the element once and copy it N times.
570 Repeat(ref operand, _) => {
571 let (elem_ty, length) = match dest_ty.sty {
572 ty::TyArray(elem_ty, n) => (elem_ty, n.val.unwrap_u64()),
575 "tried to assign array-repeat to non-array type {:?}",
580 let elem_size = self.layout_of(elem_ty)?.size.bytes();
581 let value = self.eval_operand(operand)?.value;
583 let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
585 // FIXME: speed up repeat filling
587 let elem_dest = dest.offset(i * elem_size, &self)?;
588 self.write_value_to_ptr(value, elem_dest, dest_align, elem_ty)?;
// `Len`: read the length out of the place (array or slice).
593 // FIXME(CTFE): don't allow computing the length of arrays in const eval
594 let src = self.eval_place(place)?;
595 let ty = self.place_ty(place);
596 let (_, len) = src.elem_ty_and_len(ty);
599 PrimVal::from_u128(len as u128),
// `&`/`&mut`: materialize the place in memory and produce a (possibly fat)
// pointer to it.
604 Ref(_, _, ref place) => {
605 let src = self.eval_place(place)?;
606 // We ignore the alignment of the place here -- special handling for packed structs ends
607 // at the `&` operator.
608 let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();
610 let val = match extra {
611 PlaceExtra::None => ptr.to_value(),
612 PlaceExtra::Length(len) => ptr.to_value_with_len(len),
613 PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
614 PlaceExtra::DowncastVariant(..) => {
615 bug!("attempted to take a reference to an enum downcast place")
622 self.write_value(valty, dest)?;
// `box`-allocation is delegated to the machine (miri vs. const eval differ).
625 NullaryOp(mir::NullOp::Box, ty) => {
626 let ty = self.monomorphize(ty, self.substs());
627 M::box_alloc(self, ty, dest)?;
// `size_of`: a compile-time constant from the layout; only valid for sized types.
630 NullaryOp(mir::NullOp::SizeOf, ty) => {
631 let ty = self.monomorphize(ty, self.substs());
632 let layout = self.layout_of(ty)?;
633 assert!(!layout.is_unsized(),
634 "SizeOf nullary MIR operator called for unsized type");
637 PrimVal::from_u128(layout.size.bytes() as u128),
642 Cast(kind, ref operand, cast_ty) => {
643 debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
644 use rustc::mir::CastKind::*;
// Unsizing coercion, e.g. `&[T; N] -> &[T]` or `&T -> &dyn Trait`.
647 let src = self.eval_operand(operand)?;
648 let src_layout = self.layout_of(src.ty)?;
649 let dst_layout = self.layout_of(dest_ty)?;
650 self.unsize_into(src.value, src_layout, dest, dst_layout)?;
// Misc casts: fat-pointer conversions are handled structurally, everything
// else goes through primitive-value casting.
654 let src = self.eval_operand(operand)?;
655 if self.type_is_fat_ptr(src.ty) {
656 match (src.value, self.type_is_fat_ptr(dest_ty)) {
657 (Value::ByRef { .. }, _) |
// fat-ptr to fat-ptr: the representation is unchanged.
658 (Value::ByValPair(..), true) => {
663 self.write_value(valty, dest)?;
// fat-ptr to thin-ptr: drop the extra (length/vtable) component.
665 (Value::ByValPair(data, _), false) => {
667 value: Value::ByVal(data),
670 self.write_value(valty, dest)?;
672 (Value::ByVal(_), _) => bug!("expected fat ptr"),
675 let src_val = self.value_to_primval(src)?;
676 let dest_val = self.cast_primval(src_val, src.ty, dest_ty)?;
678 value: Value::ByVal(dest_val),
681 self.write_value(valty, dest)?;
// ReifyFnPointer: turn a zero-sized fn item into an actual function pointer.
686 match self.eval_operand(operand)?.ty.sty {
687 ty::TyFnDef(def_id, substs) => {
688 if self.tcx.has_attr(def_id, "rustc_args_required_const") {
689 bug!("reifying a fn ptr that requires \
692 let instance = self.resolve(def_id, substs)?;
693 let fn_ptr = self.memory.create_fn_alloc(instance);
695 value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
698 self.write_value(valty, dest)?;
700 ref other => bug!("reify fn pointer on {:?}", other),
// UnsafeFnPointer: identical representation, only the type changes.
707 let mut src = self.eval_operand(operand)?;
709 self.write_value(src, dest)?;
711 ref other => bug!("fn to unsafe fn cast on {:?}", other),
// ClosureFnPointer: a non-capturing closure coerced to a plain fn pointer.
715 ClosureFnPointer => {
716 match self.eval_operand(operand)?.ty.sty {
717 ty::TyClosure(def_id, substs) => {
718 let substs = self.tcx.trans_apply_param_substs(self.substs(), &substs);
719 let instance = ty::Instance::resolve_closure(
723 ty::ClosureKind::FnOnce,
725 let fn_ptr = self.memory.create_fn_alloc(instance);
727 value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
730 self.write_value(valty, dest)?;
732 ref other => bug!("closure fn pointer on {:?}", other),
// Read an enum's discriminant, validating it against the set of declared
// discriminants before writing it out.
738 Discriminant(ref place) => {
739 let ty = self.place_ty(place);
740 let place = self.eval_place(place)?;
741 let discr_val = self.read_discriminant_value(place, ty)?;
742 if let ty::TyAdt(adt_def, _) = ty.sty {
743 trace!("Read discriminant {}, valid discriminants {:?}", discr_val, adt_def.discriminants(self.tcx).collect::<Vec<_>>());
744 if adt_def.discriminants(self.tcx).all(|v| {
748 return err!(InvalidDiscriminant);
750 self.write_primval(dest, PrimVal::Bytes(discr_val), dest_ty)?;
752 bug!("rustc only generates Rvalue::Discriminant for enums");
757 if log_enabled!(::log::Level::Trace) {
758 self.dump_local(dest);
// A pointer type is "fat" when its pointee is unsized (slice, str, trait
// object): the runtime representation then carries extra data (len/vtable).
764 pub(super) fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
766 ty::TyRawPtr(ref tam) |
767 ty::TyRef(_, ref tam) => !self.type_is_sized(tam.ty),
768 ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
// Evaluate an operand all the way down to a single primitive value.
773 pub(super) fn eval_operand_to_primval(
775 op: &mir::Operand<'tcx>,
776 ) -> EvalResult<'tcx, PrimVal> {
777 let valty = self.eval_operand(op)?;
778 self.value_to_primval(valty)
// Evaluate a list of call operands into argument values, failing on the
// first operand that errors.
781 pub(crate) fn operands_to_args(
783 ops: &[mir::Operand<'tcx>],
784 ) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
786 .map(|op| self.eval_operand(op))
// Evaluate a MIR operand (Copy/Move of a place, or a constant) to a typed
// value, monomorphizing its type with the current frame's substitutions.
790 pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
791 use rustc::mir::Operand::*;
792 let ty = self.monomorphize(op.ty(self.mir(), self.tcx), self.substs());
794 // FIXME: do some more logic on `move` to invalidate the old location
798 value: self.eval_and_read_place(place)?,
// Constants: literal values convert directly; promoted temporaries are
// looked up as globals keyed by (current instance, promoted index).
803 Constant(ref constant) => {
804 use rustc::mir::Literal;
805 let mir::Constant { ref literal, .. } = **constant;
806 let value = match *literal {
807 Literal::Value { ref value } => self.const_to_value(&value.val, ty)?,
809 Literal::Promoted { index } => {
810 self.read_global_as_value(GlobalId {
811 instance: self.frame().instance,
812 promoted: Some(index),
// Read the discriminant of the enum stored at `place`, decoding whichever
// layout strategy rustc chose: Single (no stored tag), Tagged (explicit
// tag field), or NicheFilling (tag folded into a niche of the data).
825 pub fn read_discriminant_value(
829 ) -> EvalResult<'tcx, u128> {
830 let layout = self.layout_of(ty)?;
831 //trace!("read_discriminant_value {:#?}", layout);
833 match layout.variants {
// Only one variant is inhabited: the discriminant is statically known.
834 layout::Variants::Single { index } => {
835 return Ok(index as u128);
837 layout::Variants::Tagged { .. } |
838 layout::Variants::NicheFilling { .. } => {},
// Both remaining strategies store the tag in field 0 of the layout.
841 let (discr_place, discr) = self.place_field(place, mir::Field::new(0), layout)?;
842 let raw_discr = self.value_to_primval(ValTy {
843 value: self.read_place(discr_place)?,
846 let discr_val = match layout.variants {
847 layout::Variants::Single { .. } => bug!(),
848 layout::Variants::Tagged { .. } => raw_discr.to_bytes()?,
// Niche decoding: map the raw niche value back into the variant index
// range; values outside the niche range denote the dataful variant.
849 layout::Variants::NicheFilling {
855 let variants_start = niche_variants.start as u128;
856 let variants_end = niche_variants.end as u128;
// A pointer-valued niche can only encode the dataful variant, and only
// when the niche starts at 0 and covers a single variant.
859 assert!(niche_start == 0);
860 assert!(variants_start == variants_end);
861 dataful_variant as u128
863 PrimVal::Bytes(raw_discr) => {
864 let discr = raw_discr.wrapping_sub(niche_start)
865 .wrapping_add(variants_start);
866 if variants_start <= discr && discr <= variants_end {
869 dataful_variant as u128
// Reading an uninitialized discriminant is an error.
872 PrimVal::Undef => return err!(ReadUndefBytes),
// Store the discriminant for `variant_index` into the enum at `dest`,
// encoding it according to the layout strategy (inverse of
// `read_discriminant_value`).
881 pub(crate) fn write_discriminant_value(
885 variant_index: usize,
886 ) -> EvalResult<'tcx> {
887 let layout = self.layout_of(dest_ty)?;
889 match layout.variants {
// Single: nothing to store, but writing any other variant is only legal
// if that variant is uninhabited (can never actually exist).
890 layout::Variants::Single { index } => {
891 if index != variant_index {
892 // If the layout of an enum is `Single`, all
893 // other variants are necessarily uninhabited.
894 assert_eq!(layout.for_variant(&self, variant_index).abi,
895 layout::Abi::Uninhabited);
// Tagged: write the variant's declared discriminant into the tag field.
898 layout::Variants::Tagged { .. } => {
899 let discr_val = dest_ty.ty_adt_def().unwrap()
900 .discriminant_for_variant(self.tcx, variant_index)
903 let (discr_dest, discr) = self.place_field(dest, mir::Field::new(0), layout)?;
904 self.write_primval(discr_dest, PrimVal::Bytes(discr_val), discr.ty)?;
// NicheFilling: the dataful variant needs no tag write; other variants
// are encoded as an offset into the niche value range.
906 layout::Variants::NicheFilling {
912 if variant_index != dataful_variant {
913 let (niche_dest, niche) =
914 self.place_field(dest, mir::Field::new(0), layout)?;
915 let niche_value = ((variant_index - niche_variants.start) as u128)
916 .wrapping_add(niche_start);
917 self.write_primval(niche_dest, PrimVal::Bytes(niche_value), niche.ty)?;
// Read a global (static/const or promoted) as a value. Non-promoted
// globals may hit the interpreter's allocation cache; otherwise the
// constant is computed via the `const_eval` query and converted.
925 pub fn read_global_as_value(&self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
926 if gid.promoted.is_none() {
930 .get_cached(gid.instance.def_id());
931 if let Some(alloc_id) = cached {
932 let layout = self.layout_of(ty)?;
933 let ptr = MemoryPointer::new(alloc_id, 0);
// Cache hit: hand back a by-ref value pointing at the cached allocation.
934 return Ok(Value::ByRef(ptr.into(), layout.align))
937 let cv = match self.tcx.const_eval(self.param_env.and(gid)) {
// Miri-originated errors propagate as-is; typeck errors become our own
// TypeckError; anything else indicates a const-eval bug.
939 Err(err) => match err.kind {
940 ErrKind::Miri(miri) => return Err(miri),
941 ErrKind::TypeckError => return err!(TypeckError),
942 other => bug!("const eval returned {:?}", other),
945 self.const_to_value(&cv.val, ty)
// Ensure a place is backed by real memory. Locals held only as a `Value`
// get an allocation created for them (and the local is rewritten to
// `ByRef` so it stays live); pointer places are returned unchanged.
948 pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
949 let new_place = match place {
950 Place::Local { frame, local } => {
951 // -1 since we don't store the return value
952 match self.stack[frame].locals[local.index() - 1] {
953 None => return err!(DeadLocal),
// Already allocation-backed: just expose the existing pointer.
954 Some(Value::ByRef(ptr, align)) => {
958 extra: PlaceExtra::None,
// By-value local: allocate, spill the value into the allocation, and
// record the new ByRef representation in the frame.
962 let ty = self.stack[frame].mir.local_decls[local].ty;
963 let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
964 let layout = self.layout_of(ty)?;
965 let ptr = self.alloc_ptr(ty)?;
966 self.stack[frame].locals[local.index() - 1] =
967 Some(Value::ByRef(ptr.into(), layout.align)); // it stays live
968 let place = Place::from_ptr(ptr, layout.align);
969 self.write_value(ValTy { value: val, ty }, place)?;
974 Place::Ptr { .. } => place,
979 /// ensures this Value is not a ByRef
// If the value lives in memory (`ByRef`), load it; otherwise return it as-is.
980 pub fn follow_by_ref_value(
984 ) -> EvalResult<'tcx, Value> {
986 Value::ByRef(ptr, align) => {
987 self.read_value(ptr, align, ty)
// Reduce a typed value to a single primitive, validating it for its type.
// Fat pointers (`ByValPair`) cannot be represented as one primitive.
993 pub fn value_to_primval(
995 ValTy { value, ty } : ValTy<'tcx>,
996 ) -> EvalResult<'tcx, PrimVal> {
997 match self.follow_by_ref_value(value, ty)? {
998 Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),
1000 Value::ByVal(primval) => {
1001 // TODO: Do we really want insta-UB here?
1002 self.ensure_valid_value(primval, ty)?;
1006 Value::ByValPair(..) => bug!("value_to_primval can't work with fat pointers"),
// Convenience wrapper: write a pointer value into `dest`.
1010 pub fn write_ptr(&mut self, dest: Place, val: Pointer, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
1012 value: val.to_value(),
1015 self.write_value(valty, dest)
// Convenience wrapper: write a single primitive value into `dest`.
1018 pub fn write_primval(
1023 ) -> EvalResult<'tcx> {
1025 value: Value::ByVal(val),
1028 self.write_value(valty, dest)
// NOTE(review): the fn header is elided from this excerpt — presumably
// `pub fn write_value(&mut self, ..., dest: Place)`; confirm against full
// source. Writes a typed value into a place: pointer destinations go
// straight to memory, local destinations go through the by-val fast path.
1033 ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
1035 ) -> EvalResult<'tcx> {
1036 //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
1037 // Note that it is really important that the type here is the right one, and matches the type things are read at.
1038 // In case `src_val` is a `ByValPair`, we don't do any magic here to handle padding properly, which is only
1039 // correct if we never look at this data with the wrong type.
1042 Place::Ptr { ptr, align, extra } => {
1043 assert_eq!(extra, PlaceExtra::None);
1044 self.write_value_to_ptr(src_val, ptr, align, dest_ty)
1047 Place::Local { frame, local } => {
1048 let dest = self.stack[frame].get_local(local)?;
1049 self.write_value_possibly_by_val(
// The closure stores the (possibly new) value back into the local slot.
1051 |this, val| this.stack[frame].set_local(local, val),
1059 // The cases here can be a bit subtle. Read carefully!
// Write `src_val` over a destination whose previous value is `old_dest_val`,
// preserving aliasing: allocation-backed destinations must be written in
// place, and a `ByRef` source must be copied so source and destination
// never share an allocation.
1060 fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
1064 old_dest_val: Value,
1066 ) -> EvalResult<'tcx> {
1067 if let Value::ByRef(dest_ptr, align) = old_dest_val {
1068 // If the value is already `ByRef` (that is, backed by an `Allocation`),
1069 // then we must write the new value into this allocation, because there may be
1070 // other pointers into the allocation. These other pointers are logically
1071 // pointers into the local variable, and must be able to observe the change.
1073 // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
1074 // knew for certain that there were no outstanding pointers to this allocation.
1075 self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
1076 } else if let Value::ByRef(src_ptr, align) = src_val {
1077 // If the value is not `ByRef`, then we know there are no pointers to it
1078 // and we can simply overwrite the `Value` in the locals array directly.
1080 // In this specific case, where the source value is `ByRef`, we must duplicate
1081 // the allocation, because this is a by-value operation. It would be incorrect
1082 // if they referred to the same allocation, since then a change to one would
1083 // implicitly change the other.
1085 // It is a valid optimization to attempt reading a primitive value out of the
1086 // source and write that into the destination without making an allocation, so
1088 if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
1089 write_dest(self, src_val)?;
// Could not read a primitive: fall back to a fresh allocation and a
// byte-wise copy, then point the destination at the copy.
1091 let dest_ptr = self.alloc_ptr(dest_ty)?.into();
1092 let layout = self.layout_of(dest_ty)?;
1093 self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size.bytes(), false)?;
1094 write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
1097 // Finally, we have the simple case where neither source nor destination are
1098 // `ByRef`. We may simply copy the source value over the destination.
1099 write_dest(self, src_val)?;
// Write a value into raw memory at `dest`: ByRef sources are copied
// byte-wise, ByVal maps onto a scalar layout, and ByValPair maps onto a
// scalar-pair layout (second component at its ABI-aligned offset).
1104 pub fn write_value_to_ptr(
1110 ) -> EvalResult<'tcx> {
1111 trace!("write_value_to_ptr: {:#?}", value);
1112 let layout = self.layout_of(dest_ty)?;
1114 Value::ByRef(ptr, align) => {
1115 self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size.bytes(), false)
1117 Value::ByVal(primval) => {
// A defined ByVal must match a Scalar ABI; undef may be written anywhere.
1119 layout::Abi::Scalar(_) => {}
1120 _ if primval.is_undef() => {}
1121 _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout)
1123 // TODO: Do we need signedness?
1124 self.memory.write_primval(dest.to_ptr()?, dest_align, primval, layout.size.bytes(), false)
1126 Value::ByValPair(a_val, b_val) => {
1127 let ptr = dest.to_ptr()?;
1128 trace!("write_value_to_ptr valpair: {:#?}", layout);
1129 let (a, b) = match layout.abi {
1130 layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
1131 _ => bug!("write_value_to_ptr: invalid ByValPair layout: {:#?}", layout)
1133 let (a_size, b_size) = (a.size(&self), b.size(&self));
// The second scalar starts after the first, rounded up to its alignment.
1135 let b_offset = a_size.abi_align(b.align(&self));
1136 let b_ptr = ptr.offset(b_offset.bytes(), &self)?.into();
1137 // TODO: What about signedess?
1138 self.memory.write_primval(a_ptr, dest_align, a_val, a_size.bytes(), false)?;
1139 self.memory.write_primval(b_ptr, dest_align, b_val, b_size.bytes(), false)
// Classify a type as a primitive-value kind (bool, char, sized int/uint,
// float, pointer). Types with no direct mapping fall back to inspecting
// their scalar layout; non-primitive types are an error.
1144 pub fn ty_to_primval_kind(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, PrimValKind> {
1145 use syntax::ast::FloatTy;
1147 let kind = match ty.sty {
1148 ty::TyBool => PrimValKind::Bool,
1149 ty::TyChar => PrimValKind::Char,
1151 ty::TyInt(int_ty) => {
1152 use syntax::ast::IntTy::*;
1153 let size = match int_ty {
// isize is pointer-sized on the target.
1159 Isize => self.memory.pointer_size(),
1161 PrimValKind::from_int_size(size)
1164 ty::TyUint(uint_ty) => {
1165 use syntax::ast::UintTy::*;
1166 let size = match uint_ty {
1172 Usize => self.memory.pointer_size(),
1174 PrimValKind::from_uint_size(size)
1177 ty::TyFloat(FloatTy::F32) => PrimValKind::F32,
1178 ty::TyFloat(FloatTy::F64) => PrimValKind::F64,
1180 ty::TyFnPtr(_) => PrimValKind::FnPtr,
// Thin references/raw pointers and Box are plain pointers; fat pointers
// are deliberately excluded by the `type_is_sized` guard.
1182 ty::TyRef(_, ref tam) |
1183 ty::TyRawPtr(ref tam) if self.type_is_sized(tam.ty) => PrimValKind::Ptr,
1185 ty::TyAdt(def, _) if def.is_box() => PrimValKind::Ptr,
// Everything else: derive the kind from the type's scalar ABI, if any.
1188 match self.layout_of(ty)?.abi {
1189 layout::Abi::Scalar(ref scalar) => {
1190 use rustc::ty::layout::Primitive::*;
1191 match scalar.value {
1192 Int(i, false) => PrimValKind::from_uint_size(i.size().bytes()),
1193 Int(i, true) => PrimValKind::from_int_size(i.size().bytes()),
1194 F32 => PrimValKind::F32,
1195 F64 => PrimValKind::F64,
1196 Pointer => PrimValKind::Ptr,
1200 _ => return err!(TypeNotPrimitive(ty)),
1204 _ => return err!(TypeNotPrimitive(ty)),
// Reject primitive values that are invalid for their type: bools must be
// 0 or 1, chars must be valid Unicode scalar values.
1210 fn ensure_valid_value(&self, val: PrimVal, ty: Ty<'tcx>) -> EvalResult<'tcx> {
1212 ty::TyBool if val.to_bytes()? > 1 => err!(InvalidBool),
1214 ty::TyChar if ::std::char::from_u32(val.to_bytes()? as u32).is_none() => {
1215 err!(InvalidChar(val.to_bytes()? as u32 as u128))
/// Reads a primitive `Value` of type `ty` from `ptr`. Unlike
/// `try_read_value`, a non-primitive type here is treated as an
/// interpreter bug rather than yielding `None`.
1222 pub fn read_value(&self, ptr: Pointer, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1223 if let Some(val) = self.try_read_value(ptr, align, ty)? {
// NOTE(review): the success-path return is elided in this excerpt.
1226 bug!("primitive read failed for type: {:?}", ty);
/// Reads a pointer to `pointee_ty` from memory at the given location.
/// Sized pointees yield a thin pointer; unsized pointees additionally
/// read the fat-pointer metadata (vtable or length) from the next
/// pointer-sized slot.
/// NOTE(review): the leading parameters of the signature are elided in
/// this excerpt.
1230 pub(crate) fn read_ptr(
1234 pointee_ty: Ty<'tcx>,
1235 ) -> EvalResult<'tcx, Value> {
1236 let ptr_size = self.memory.pointer_size();
// The data pointer always occupies the first pointer-sized slot.
1237 let p: Pointer = self.memory.read_ptr_sized_unsigned(ptr, ptr_align)?.into();
1238 if self.type_is_sized(pointee_ty) {
// Unsized pointee: metadata lives one pointer-size past the data ptr.
1241 trace!("reading fat pointer extra of type {}", pointee_ty);
1242 let extra = ptr.offset(ptr_size, self)?;
1243 match self.tcx.struct_tail(pointee_ty).sty {
// Trait objects carry a vtable pointer as their metadata.
1244 ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
1245 self.memory.read_ptr_sized_unsigned(extra, ptr_align)?.to_ptr()?,
// Slices and `str` carry their length as metadata.
1247 ty::TySlice(..) | ty::TyStr => {
1250 .read_ptr_sized_unsigned(extra, ptr_align)?
1252 Ok(p.to_value_with_len(len as u64))
// Any other unsized tail cannot be read as a primitive pointer.
1254 _ => bug!("unsized primval ptr read from {:?}", pointee_ty),
/// Attempts to read a primitive `Value` of type `ty` from memory at
/// `ptr`. Returns `Ok(None)` when `ty` has no primitive representation;
/// validates `bool` and `char` bit patterns while reading.
1259 pub fn try_read_value(&self, ptr: Pointer, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
1260 use syntax::ast::FloatTy;
1262 let ptr = ptr.to_ptr()?;
1263 let val = match ty.sty {
// `bool`: one byte that must be exactly 0 or 1.
1265 let val = self.memory.read_primval(ptr, ptr_align, 1, false)?;
1266 let val = match val {
1267 PrimVal::Bytes(0) => false,
1268 PrimVal::Bytes(1) => true,
1269 // TODO: This seems a little overeager, should reading at bool type already be insta-UB?
1270 _ => return err!(InvalidBool),
1272 PrimVal::from_bool(val)
// `char`: four bytes that must form a valid Unicode scalar value.
1275 let c = self.memory.read_primval(ptr, ptr_align, 4, false)?.to_bytes()? as u32;
1276 match ::std::char::from_u32(c) {
1277 Some(ch) => PrimVal::from_char(ch),
1278 None => return err!(InvalidChar(c as u128)),
// Signed integers are read sign-extended at their byte size.
1282 ty::TyInt(int_ty) => {
1283 use syntax::ast::IntTy::*;
1284 let size = match int_ty {
1290 Isize => self.memory.pointer_size(),
1292 self.memory.read_primval(ptr, ptr_align, size, true)?
// Unsigned integers are read zero-extended at their byte size.
1295 ty::TyUint(uint_ty) => {
1296 use syntax::ast::UintTy::*;
1297 let size = match uint_ty {
1303 Usize => self.memory.pointer_size(),
1305 self.memory.read_primval(ptr, ptr_align, size, false)?
// Floats are read as raw bytes; no bit-pattern validation is done.
1308 ty::TyFloat(FloatTy::F32) => {
1309 PrimVal::Bytes(self.memory.read_primval(ptr, ptr_align, 4, false)?.to_bytes()?)
1311 ty::TyFloat(FloatTy::F64) => {
1312 PrimVal::Bytes(self.memory.read_primval(ptr, ptr_align, 8, false)?.to_bytes()?)
1315 ty::TyFnPtr(_) => self.memory.read_ptr_sized_unsigned(ptr, ptr_align)?,
// References and raw pointers defer to `read_ptr`, which also handles
// fat pointers.
1316 ty::TyRef(_, ref tam) |
1317 ty::TyRawPtr(ref tam) => return self.read_ptr(ptr, ptr_align, tam.ty).map(Some),
// `Box<T>` is read as a pointer to its boxed type.
1319 ty::TyAdt(def, _) => {
1321 return self.read_ptr(ptr, ptr_align, ty.boxed_ty()).map(Some);
// Other ADTs with a scalar ABI are read at the layout's size and
// signedness.
1324 if let layout::Abi::Scalar(ref scalar) = self.layout_of(ty)?.abi {
1325 let mut signed = false;
1326 if let layout::Int(_, s) = scalar.value {
1329 let size = scalar.value.size(self).bytes();
1330 self.memory.read_primval(ptr, ptr_align, size, signed)?
// Not a primitive type: signal with `None` instead of erroring.
1336 _ => return Ok(None),
1339 Ok(Some(Value::ByVal(val)))
/// Returns the current (innermost) stack frame; panics if the call
/// stack is empty.
1342 pub fn frame(&self) -> &Frame<'mir, 'tcx> {
1343 self.stack.last().expect("no call frames exist")
/// Mutable counterpart of `frame`; panics if the call stack is empty.
1346 pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
1347 self.stack.last_mut().expect("no call frames exist")
/// The MIR body of the function executing in the current frame.
/// NOTE(review): the body of this accessor is elided in this excerpt.
1350 pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
/// The substitutions (generic arguments) of the instance in the
/// current frame.
1354 pub fn substs(&self) -> &'tcx Substs<'tcx> {
1355 if let Some(frame) = self.stack.last() {
1356 frame.instance.substs
// NOTE(review): the empty-stack branch is elided in this excerpt.
// Tail of `unsize_into_ptr` (leading signature lines elided in this
// excerpt): writes the unsized (fat) pointer for a `sty` -> `dty`
// pointer coercion into `dest`.
1370 ) -> EvalResult<'tcx> {
1371 // A<Struct> -> A<Trait> conversion
1372 let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);
1374 match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
// `&[T; n]` -> `&[T]`: attach the statically known length.
1375 (&ty::TyArray(_, length), &ty::TySlice(_)) => {
1376 let ptr = self.into_ptr(src)?;
1377 // u64 cast is from usize to u64, which is always good
1379 value: ptr.to_value_with_len(length.val.unwrap_u64() ),
1382 self.write_value(valty, dest)
// Trait object -> trait object.
1384 (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
1385 // For now, upcasts are limited to changes in marker
1386 // traits, and hence never actually require an actual
1387 // change to the vtable.
1392 self.write_value(valty, dest)
// Concrete type -> trait object: resolve the vtable for the erased
// trait ref and build a fat pointer with it.
1394 (_, &ty::TyDynamic(ref data, _)) => {
1395 let trait_ref = data.principal().unwrap().with_self_ty(
1399 let trait_ref = self.tcx.erase_regions(&trait_ref);
1400 let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
1401 let ptr = self.into_ptr(src)?;
1403 value: ptr.to_value_with_vtable(vtable),
1406 self.write_value(valty, dest)
// Any other pointee combination is not a valid unsizing.
1409 _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
// Tail of `unsize_into`'s signature (leading parameters elided in this
// excerpt): performs an unsizing coercion from `src` into `dst`.
1416 src_layout: TyLayout<'tcx>,
1418 dst_layout: TyLayout<'tcx>,
1419 ) -> EvalResult<'tcx> {
1420 match (&src_layout.ty.sty, &dst_layout.ty.sty) {
// Pointer-to-pointer unsizing is delegated to `unsize_into_ptr`.
1421 (&ty::TyRef(_, ref s), &ty::TyRef(_, ref d)) |
1422 (&ty::TyRef(_, ref s), &ty::TyRawPtr(ref d)) |
1423 (&ty::TyRawPtr(ref s), &ty::TyRawPtr(ref d)) => {
1424 self.unsize_into_ptr(src, src_layout.ty, dst, dst_layout.ty, s.ty, d.ty)
1426 (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
// Unsizing between ADTs requires the same type definition.
1427 assert_eq!(def_a, def_b);
// `Box<T>` -> `Box<Trait>`: both sides must be boxes; unsize the
// inner pointer.
1428 if def_a.is_box() || def_b.is_box() {
1429 if !def_a.is_box() || !def_b.is_box() {
1430 bug!("invalid unsizing between {:?} -> {:?}", src_layout, dst_layout);
1432 return self.unsize_into_ptr(
1437 src_layout.ty.boxed_ty(),
1438 dst_layout.ty.boxed_ty(),
1442 // unsizing of generic struct with pointer fields
1443 // Example: `Arc<T>` -> `Arc<Trait>`
1444 // here we need to increase the size of every &T thin ptr field to a fat ptr
1445 for i in 0..src_layout.fields.count() {
1446 let (dst_f_place, dst_field) =
1447 self.place_field(dst, mir::Field::new(i), dst_layout)?;
// Zero-sized destination fields need no data written.
1448 if dst_field.is_zst() {
// Obtain the source field's value either through memory (ByRef) or
// by projecting out of an immediate value.
1451 let (src_f_value, src_field) = match src {
1452 Value::ByRef(ptr, align) => {
1453 let src_place = Place::from_primval_ptr(ptr, align);
1454 let (src_f_place, src_field) =
1455 self.place_field(src_place, mir::Field::new(i), src_layout)?;
1456 (self.read_place(src_f_place)?, src_field)
1458 Value::ByVal(_) | Value::ByValPair(..) => {
// An immediate source must be exactly one field at offset 0
// occupying the whole layout.
1459 let src_field = src_layout.field(&self, i)?;
1460 assert_eq!(src_layout.fields.offset(i).bytes(), 0);
1461 assert_eq!(src_field.size, src_layout.size);
// Fields of identical type are copied as-is; the differing field is
// unsized recursively.
1465 if src_field.ty == dst_field.ty {
1466 self.write_value(ValTy {
1471 self.unsize_into(src_f_value, src_field, dst_f_place, dst_field)?;
1478 "unsize_into: invalid conversion: {:?} -> {:?}",
/// Debugging aid: logs a human-readable description of `place` (a local
/// or a raw place) and dumps every allocation it refers to.
1486 pub fn dump_local(&self, place: Place) {
1489 Place::Local { frame, local } => {
// Allocation ids encountered while describing the value; dumped at
// the end in one batch.
1490 let mut allocs = Vec::new();
1491 let mut msg = format!("{:?}", local);
// Note when the local belongs to a frame other than the current one.
1492 if frame != self.cur_frame() {
1493 write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
1495 write!(msg, ":").unwrap();
1497 match self.stack[frame].get_local(local) {
// A dead local is reported as such; any other access error is a bug.
1499 if let EvalErrorKind::DeadLocal = *err.kind {
1500 write!(msg, " is dead").unwrap();
1502 panic!("Failed to access local: {:?}", err);
1505 Ok(Value::ByRef(ptr, align)) => {
1506 match ptr.into_inner_primval() {
1507 PrimVal::Ptr(ptr) => {
1508 write!(msg, " by align({}) ref:", align.abi()).unwrap();
1509 allocs.push(ptr.alloc_id);
1511 ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
1514 Ok(Value::ByVal(val)) => {
1515 write!(msg, " {:?}", val).unwrap();
// Collect any allocation an immediate pointer value refers to.
1516 if let PrimVal::Ptr(ptr) = val {
1517 allocs.push(ptr.alloc_id);
1520 Ok(Value::ByValPair(val1, val2)) => {
1521 write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
1522 if let PrimVal::Ptr(ptr) = val1 {
1523 allocs.push(ptr.alloc_id);
1525 if let PrimVal::Ptr(ptr) = val2 {
1526 allocs.push(ptr.alloc_id);
1532 self.memory.dump_allocs(allocs);
// A raw place: dump the single allocation it points at, if any.
1534 Place::Ptr { ptr, align, .. } => {
1535 match ptr.into_inner_primval() {
1536 PrimVal::Ptr(ptr) => {
1537 trace!("by align({}) ref:", align.abi());
1538 self.memory.dump_alloc(ptr.alloc_id);
1540 ptr => trace!(" integral by ref: {:?}", ptr),
1546 /// Convenience function to ensure correct usage of locals
1547 pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
1549 F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
// Read the current value, transform it with `f`, then write it back;
// errors from either the accessors or `f` are propagated.
1551 let val = self.stack[frame].get_local(local)?;
1552 let new_val = f(self, val)?;
1553 self.stack[frame].set_local(local, new_val)?;
1554 // FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
1555 // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
1556 // self.memory.deallocate(ptr)?;
/// Reports the const-eval failure `e`: renders the captured miri
/// backtrace (if any) to the log, picks a span (the explicit one, or
/// the failing statement/terminator of the top frame), then emits
/// either a hard error (`as_err`) or a `CONST_ERR` lint annotated with
/// the interpreter call stack.
1561 pub fn report(&self, e: &mut EvalError, as_err: bool, explicit_span: Option<Span>) {
// Type-check errors were already reported elsewhere; skip re-reporting.
1562 if let EvalErrorKind::TypeckError = *e.kind {
// Render the captured backtrace, frame by frame, symbol by symbol.
1565 if let Some(ref mut backtrace) = e.backtrace {
1566 let mut trace_text = "\n\nAn error occurred in miri:\n".to_string();
1567 backtrace.resolve();
1568 write!(trace_text, "backtrace frames: {}\n", backtrace.frames().len()).unwrap();
1569 'frames: for (i, frame) in backtrace.frames().iter().enumerate() {
1570 if frame.symbols().is_empty() {
1571 write!(trace_text, "{}: no symbols\n", i).unwrap();
1573 for symbol in frame.symbols() {
1574 write!(trace_text, "{}: ", i).unwrap();
1575 if let Some(name) = symbol.name() {
1576 write!(trace_text, "{}\n", name).unwrap();
1578 write!(trace_text, "<unknown>\n").unwrap();
1580 write!(trace_text, "\tat ").unwrap();
1581 if let Some(file_path) = symbol.filename() {
1582 write!(trace_text, "{}", file_path.display()).unwrap();
1584 write!(trace_text, "<unknown_file>").unwrap();
1586 if let Some(line) = symbol.lineno() {
1587 write!(trace_text, ":{}\n", line).unwrap();
1589 write!(trace_text, "\n").unwrap();
1593 error!("{}", trace_text);
1595 if let Some(frame) = self.stack().last() {
1596 let block = &frame.mir.basic_blocks()[frame.block];
// Prefer the caller-supplied span; otherwise use the span of the
// statement (or terminator) execution stopped at.
1597 let span = explicit_span.unwrap_or_else(|| if frame.stmt < block.statements.len() {
1598 block.statements[frame.stmt].source_info.span
1600 block.terminator().source_info.span
1602 trace!("reporting const eval failure at {:?}", span);
// Hard error vs. lint: the lint is anchored at the innermost frame
// whose def-id is local to this crate.
1603 let mut err = if as_err {
1604 ::rustc::middle::const_val::struct_error(self.tcx, span, "constant evaluation error")
1610 .filter_map(|frame| self.tcx.hir.as_local_node_id(frame.instance.def_id()))
1612 .expect("some part of a failing const eval must be local");
1613 self.tcx.struct_span_lint_node(
1614 ::rustc::lint::builtin::CONST_ERR,
1617 "constant evaluation error",
1620 err.span_label(span, e.to_string());
1621 let mut last_span = None;
1622 // skip 1 because the last frame is just the environment of the constant
1623 for &Frame { instance, span, .. } in self.stack().iter().skip(1).rev() {
1624 // make sure we don't emit frames that are duplicates of the previous
1625 if explicit_span == Some(span) {
1626 last_span = Some(span);
1629 if let Some(last) = last_span {
1634 last_span = Some(span);
// Closures get a fixed note; other instances are named explicitly.
1636 if self.tcx.def_key(instance.def_id()).disambiguated_data.data ==
1637 DefPathData::ClosureExpr
1639 err.span_note(span, "inside call to closure");
1642 err.span_note(span, &format!("inside call to {}", instance));
// No stack frame available: fall back to a plain session error.
1646 self.tcx.sess.err(&e.to_string());
// Local-slot accessors for a stack frame. Slot 0 (the ReturnPointer) is
// not stored in `locals`, hence the `- 1` index adjustments below; a
// `None` slot marks a dead local.
1651 impl<'mir, 'tcx> Frame<'mir, 'tcx> {
/// Reads the value of `local`, failing with `DeadLocal` if its storage
/// is dead.
1652 pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
1653 // Subtract 1 because we don't store a value for the ReturnPointer, the local with index 0.
1654 self.locals[local.index() - 1].ok_or(EvalErrorKind::DeadLocal.into())
/// Overwrites the value of `local`, failing with `DeadLocal` if its
/// storage is dead.
1657 fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
1658 // Subtract 1 because we don't store a value for the ReturnPointer, the local with index 0.
1659 match self.locals[local.index() - 1] {
1660 None => err!(DeadLocal),
1661 Some(ref mut local) => {
/// Marks `local` as live and resets its slot to `Undef`; the previous
/// slot contents are captured in `old` (return elided in this excerpt).
1668 pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, Option<Value>> {
1669 trace!("{:?} is now live", local);
1671 let old = self.locals[local.index() - 1];
1672 self.locals[local.index() - 1] = Some(Value::ByVal(PrimVal::Undef)); // StorageLive *always* kills the value that's currently stored
1676 /// Returns the old value of the local
1677 pub fn storage_dead(&mut self, local: mir::Local) -> EvalResult<'tcx, Option<Value>> {
1678 trace!("{:?} is now dead", local);
// Clearing the slot to `None` is what marks the local as dead.
1680 let old = self.locals[local.index() - 1];
1681 self.locals[local.index() - 1] = None;
1686 // TODO(solson): Upstream these methods into rustc::ty::layout.
1688 pub fn resolve_drop_in_place<'a, 'tcx>(
1689 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1691 ) -> ty::Instance<'tcx> {
1692 let def_id = tcx.require_lang_item(::rustc::middle::lang_items::DropInPlaceFnLangItem);
1693 let substs = tcx.intern_substs(&[ty.into()]);
1694 ty::Instance::resolve(tcx, ty::ParamEnv::empty(Reveal::All), def_id, substs).unwrap()