src/librustc_mir/interpret/eval_context.rs
1 use std::fmt::Write;
2
3 use rustc::hir::def_id::DefId;
4 use rustc::hir::def::Def;
5 use rustc::hir::map::definitions::DefPathData;
6 use rustc::middle::const_val::{ConstVal, ErrKind};
7 use rustc::mir;
8 use rustc::ty::layout::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout};
9 use rustc::ty::subst::{Subst, Substs};
10 use rustc::ty::{self, Ty, TyCtxt};
11 use rustc::ty::maps::TyCtxtAt;
12 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
13 use rustc::middle::const_val::FrameInfo;
14 use syntax::codemap::{self, Span};
15 use syntax::ast::Mutability;
16 use rustc::mir::interpret::{
17     GlobalId, Value, Pointer, PrimVal, PrimValKind,
18     EvalError, EvalResult, EvalErrorKind, MemoryPointer,
19 };
20 use std::mem;
21
22 use super::{Place, PlaceExtra, Memory,
23             HasMemory, MemoryKind,
24             Machine};
25
26 pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
27     /// Stores the `Machine` instance.
28     pub machine: M,
29
30     /// The results of the type checker, from rustc.
31     pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
32
33     /// Bounds in scope for polymorphic evaluations.
34     pub param_env: ty::ParamEnv<'tcx>,
35
36     /// The virtual memory system.
37     pub memory: Memory<'a, 'mir, 'tcx, M>,
38
39     /// The virtual call stack.
40     pub(crate) stack: Vec<Frame<'mir, 'tcx>>,
41
42     /// The maximum number of stack frames allowed
43     pub(crate) stack_limit: usize,
44
45     /// The maximum number of terminators that may be evaluated.
46     /// This prevents infinite loops and huge computations from freezing up const eval.
47     /// Remove once halting problem is solved.
48     pub(crate) terminators_remaining: usize,
49 }
50
51 /// A stack frame.
52 pub struct Frame<'mir, 'tcx: 'mir> {
53     ////////////////////////////////////////////////////////////////////////////////
54     // Function and callsite information
55     ////////////////////////////////////////////////////////////////////////////////
56     /// The MIR for the function called on this frame.
57     pub mir: &'mir mir::Mir<'tcx>,
58
59     /// The def_id and substs of the current function
60     pub instance: ty::Instance<'tcx>,
61
62     /// The span of the call site.
63     pub span: codemap::Span,
64
65     ////////////////////////////////////////////////////////////////////////////////
66     // Return place and locals
67     ////////////////////////////////////////////////////////////////////////////////
68     /// The block to return to when returning from the current stack frame
69     pub return_to_block: StackPopCleanup,
70
71     /// The location where the result of the current stack frame should be written to.
72     pub return_place: Place,
73
74     /// The list of locals for this stack frame, stored in order as
75     /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
76     /// `None` represents a local that is currently dead, while a live local
77     /// can either directly contain `PrimVal` or refer to some part of an `Allocation`.
78     ///
79     /// Before being initialized, arguments are `Value::ByVal(PrimVal::Undef)` and other locals are `None`.
80     pub locals: IndexVec<mir::Local, Option<Value>>,
81
82     ////////////////////////////////////////////////////////////////////////////////
83     // Current position within the function
84     ////////////////////////////////////////////////////////////////////////////////
85     /// The block that is currently being executed (or will be executed after the frames above
86     /// it return).
87     pub block: mir::BasicBlock,
88
89     /// The index of the currently evaluated statement.
90     pub stmt: usize,
91 }
92
93 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
94 pub enum StackPopCleanup {
95     /// The stack frame existed to compute the initial value of a static/constant; make sure it
96     /// isn't modifiable afterwards, in the case of constants.
97     /// In the case of `static mut`, mark the memory to ensure it's never marked as immutable through
98     /// references or deallocated.
99     MarkStatic(Mutability),
100     /// A regular stack frame added due to a function call; when the frame is popped, execution
101     /// continues at the given block.
102     Goto(mir::BasicBlock),
103     /// The main function and diverging functions have nowhere to return to
104     None,
105 }
106
107 #[derive(Copy, Clone, Debug)]
108 pub struct TyAndPacked<'tcx> {
109     pub ty: Ty<'tcx>,
110     pub packed: bool,
111 }
112
113 #[derive(Copy, Clone, Debug)]
114 pub struct ValTy<'tcx> {
115     pub value: Value,
116     pub ty: Ty<'tcx>,
117 }
118
119 impl<'tcx> ValTy<'tcx> {
120     pub fn from(val: &ty::Const<'tcx>) -> Option<Self> {
121         match val.val {
122             ConstVal::Value(value) => Some(ValTy { value, ty: val.ty }),
123             ConstVal::Unevaluated { .. } => None,
124         }
125     }
126 }
127
128 impl<'tcx> ::std::ops::Deref for ValTy<'tcx> {
129     type Target = Value;
130     fn deref(&self) -> &Value {
131         &self.value
132     }
133 }
134
135 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
136     #[inline]
137     fn data_layout(&self) -> &layout::TargetDataLayout {
138         &self.tcx.data_layout
139     }
140 }
141
142 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
143     for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
144     #[inline]
145     fn data_layout(&self) -> &layout::TargetDataLayout {
146         &self.tcx.data_layout
147     }
148 }
149
150 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
151     #[inline]
152     fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
153         *self.tcx
154     }
155 }
156
157 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
158     for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
159     #[inline]
160     fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
161         *self.tcx
162     }
163 }
164
165 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for &'a EvalContext<'a, 'mir, 'tcx, M> {
166     type Ty = Ty<'tcx>;
167     type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
168
169     fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
170         self.tcx.layout_of(self.param_env.and(ty))
171             .map_err(|layout| EvalErrorKind::Layout(layout).into())
172     }
173 }
174
175 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf
176     for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
177     type Ty = Ty<'tcx>;
178     type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
179
180     #[inline]
181     fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
182         (&**self).layout_of(ty)
183     }
184 }
185
186 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
187     pub fn new(
188         tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
189         param_env: ty::ParamEnv<'tcx>,
190         machine: M,
191         memory_data: M::MemoryData,
192     ) -> Self {
193         EvalContext {
194             machine,
195             tcx,
196             param_env,
197             memory: Memory::new(tcx, memory_data),
198             stack: Vec::new(),
199             stack_limit: tcx.sess.const_eval_stack_frame_limit,
200             terminators_remaining: 1_000_000,
201         }
202     }
203
204     pub fn alloc_ptr(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, MemoryPointer> {
205         let layout = self.layout_of(ty)?;
206         assert!(!layout.is_unsized(), "cannot alloc memory for unsized type");
207
208         let size = layout.size.bytes();
209         self.memory.allocate(size, layout.align, Some(MemoryKind::Stack))
210     }
211
212     pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
213         &self.memory
214     }
215
216     pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
217         &mut self.memory
218     }
219
220     pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
221         &self.stack
222     }
223
224     #[inline]
225     pub fn cur_frame(&self) -> usize {
226         assert!(self.stack.len() > 0);
227         self.stack.len() - 1
228     }
229
230     pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
231         let ptr = self.memory.allocate_cached(s.as_bytes());
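        // A `&str` value is a fat pointer: the pointer to the bytes paired with the length in bytes.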
232         Ok(Value::ByValPair(
233             PrimVal::Ptr(ptr),
234             PrimVal::from_u128(s.len() as u128),
235         ))
236     }
237
238     pub(super) fn const_to_value(&self, const_val: &ConstVal<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
239         match *const_val {
240             ConstVal::Unevaluated(def_id, substs) => {
241                 let instance = self.resolve(def_id, substs)?;
242                 self.read_global_as_value(GlobalId {
243                     instance,
244                     promoted: None,
245                 }, ty)
246             }
247             ConstVal::Value(val) => Ok(val),
248         }
249     }
250
251     pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
252         trace!("resolve: {:?}, {:#?}", def_id, substs);
253         trace!("substs: {:#?}", self.substs());
254         trace!("param_env: {:#?}", self.param_env);
255         let substs = self.tcx.subst_and_normalize_erasing_regions(
256             self.substs(),
257             self.param_env,
258             &substs,
259         );
260         ty::Instance::resolve(
261             *self.tcx,
262             self.param_env,
263             def_id,
264             substs,
265         ).ok_or_else(|| EvalErrorKind::TypeckError.into()) // turn error prop into a panic to expose associated type in const issue
266     }
267
268     pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
269         ty.is_sized(self.tcx, self.param_env)
270     }
271
272     pub fn load_mir(
273         &self,
274         instance: ty::InstanceDef<'tcx>,
275     ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
276         // do not continue if typeck errors occurred (can only occur in local crate)
277         let did = instance.def_id();
278         if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
279             return err!(TypeckError);
280         }
281         trace!("load mir {:?}", instance);
282         match instance {
283             ty::InstanceDef::Item(def_id) => {
284                 self.tcx.maybe_optimized_mir(def_id).ok_or_else(||
285                     EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
286                 )
287             }
288             _ => Ok(self.tcx.instance_mir(instance)),
289         }
290     }
291
292     pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
293         // miri doesn't care about lifetimes, and will choke on some crazy ones
294         // let's simply get rid of them
295         let substituted = ty.subst(*self.tcx, substs);
296         self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
297     }
298
299     /// Return the size and alignment of the value at the given type.
300     /// Note that the value does not matter if the type is sized. For unsized types,
301     /// the value has to be a fat pointer, and we only care about the "extra" data in it.
302     pub fn size_and_align_of_dst(
303         &mut self,
304         ty: Ty<'tcx>,
305         value: Value,
306     ) -> EvalResult<'tcx, (Size, Align)> {
307         let layout = self.layout_of(ty)?;
308         if !layout.is_unsized() {
309             Ok(layout.size_and_align())
310         } else {
311             match ty.sty {
312                 ty::TyAdt(..) | ty::TyTuple(..) => {
313                     // First get the size of all statically known fields.
314                     // Don't use type_of::sizing_type_of because that expects t to be sized,
315                     // and it also rounds up to alignment, which we want to avoid,
316                     // as the unsized field's alignment could be smaller.
317                     assert!(!ty.is_simd());
318                     debug!("DST {} layout: {:?}", ty, layout);
319
320                     let sized_size = layout.fields.offset(layout.fields.count() - 1);
321                     let sized_align = layout.align;
322                     debug!(
323                         "DST {} statically sized prefix size: {:?} align: {:?}",
324                         ty,
325                         sized_size,
326                         sized_align
327                     );
328
329                     // Recurse to get the size of the dynamically sized field (must be
330                     // the last field).
331                     let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty;
332                     let (unsized_size, unsized_align) =
333                         self.size_and_align_of_dst(field_ty, value)?;
334
335                     // FIXME (#26403, #27023): We should be adding padding
336                     // to `sized_size` (to accommodate the `unsized_align`
337                     // required of the unsized field that follows) before
338                     // summing it with `sized_size`. (Note that since #26403
339                     // is unfixed, we do not yet add the necessary padding
340                     // here. But this is where the add would go.)
341
342                     // Return the sum of sizes and max of aligns.
343                     let size = sized_size + unsized_size;
344
345                     // Choose max of two known alignments (combined value must
346                     // be aligned according to more restrictive of the two).
347                     let align = sized_align.max(unsized_align);
348
349                     // Issue #27023: must add any necessary padding to `size`
350                     // (to make it a multiple of `align`) before returning it.
351                     //
352                     // Namely, the returned size should be `size` rounded up to a
353                     // multiple of `align`; in C notation:
354                     //   `((size + align - 1) / align) * align`
355                     //
356                     // emulated via the semi-standard fast bit trick:
357                     //
358                     //   `(size + (align-1)) & -align`
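                    // Illustrative check of the bit trick (not from the original source):
                    // size = 6, align = 4  =>  (6 + 3) & -4 = 8, i.e. 6 rounded up to the
                    // next multiple of 4.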
359
360                     Ok((size.abi_align(align), align))
361                 }
362                 ty::TyDynamic(..) => {
363                     let (_, vtable) = self.into_ptr_vtable_pair(value)?;
364                     // the second entry in the vtable is the dynamic size of the object.
365                     self.read_size_and_align_from_vtable(vtable)
366                 }
367
368                 ty::TySlice(_) | ty::TyStr => {
369                     let (elem_size, align) = layout.field(&self, 0)?.size_and_align();
370                     let (_, len) = self.into_slice(value)?;
371                     Ok((elem_size * len, align))
372                 }
373
374                 _ => bug!("size_of_val::<{:?}>", ty),
375             }
376         }
377     }
378
379     pub fn push_stack_frame(
380         &mut self,
381         instance: ty::Instance<'tcx>,
382         span: codemap::Span,
383         mir: &'mir mir::Mir<'tcx>,
384         return_place: Place,
385         return_to_block: StackPopCleanup,
386     ) -> EvalResult<'tcx> {
387         ::log_settings::settings().indentation += 1;
388
389         let locals = if mir.local_decls.len() > 1 {
390             let mut locals = IndexVec::from_elem(Some(Value::ByVal(PrimVal::Undef)), &mir.local_decls);
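            // All locals start out live and uninitialized (`Undef`); any local that has explicit
            // `Storage*` statements below is marked dead here instead and is only made live again
            // when its `StorageLive` statement is executed.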
391             match self.tcx.describe_def(instance.def_id()) {
392                 // statics and constants don't have `Storage*` statements, no need to look for them
393                 Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
394                 _ => {
395                     trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
396                     for block in mir.basic_blocks() {
397                         for stmt in block.statements.iter() {
398                             use rustc::mir::StatementKind::{StorageDead, StorageLive};
399                             match stmt.kind {
400                                 StorageLive(local) |
401                                 StorageDead(local) => locals[local] = None,
402                                 _ => {}
403                             }
404                         }
405                     }
406                 },
407             }
408             locals
409         } else {
410             // don't allocate at all for trivial constants
411             IndexVec::new()
412         };
413
414         self.stack.push(Frame {
415             mir,
416             block: mir::START_BLOCK,
417             return_to_block,
418             return_place,
419             locals,
420             span,
421             instance,
422             stmt: 0,
423         });
424
425         self.memory.cur_frame = self.cur_frame();
426
427         if self.stack.len() > self.stack_limit {
428             err!(StackFrameLimitReached)
429         } else {
430             Ok(())
431         }
432     }
433
434     pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
435         ::log_settings::settings().indentation -= 1;
436         M::end_region(self, None)?;
437         let frame = self.stack.pop().expect(
438             "tried to pop a stack frame, but there were none",
439         );
440         if !self.stack.is_empty() {
441             // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
442             self.memory.cur_frame = self.cur_frame();
443         }
444         match frame.return_to_block {
445             StackPopCleanup::MarkStatic(mutable) => {
446                 if let Place::Ptr { ptr, .. } = frame.return_place {
447                     // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
448                     self.memory.mark_static_initialized(
449                         ptr.to_ptr()?.alloc_id,
450                         mutable,
451                     )?
452                 } else {
453                     bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
454                 }
455             }
456             StackPopCleanup::Goto(target) => self.goto_block(target),
457             StackPopCleanup::None => {}
458         }
459         // deallocate all locals that are backed by an allocation
460         for local in frame.locals {
461             self.deallocate_local(local)?;
462         }
463
464         Ok(())
465     }
466
467     pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
468         if let Some(Value::ByRef(ptr, _align)) = local {
469             trace!("deallocating local");
470             let ptr = ptr.to_ptr()?;
471             self.memory.dump_alloc(ptr.alloc_id);
472             self.memory.deallocate_local(ptr)?;
473         };
474         Ok(())
475     }
476
477     /// Evaluate an assignment statement.
478     ///
479     /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
480     /// type writes its results directly into the memory specified by the place.
481     pub(super) fn eval_rvalue_into_place(
482         &mut self,
483         rvalue: &mir::Rvalue<'tcx>,
484         place: &mir::Place<'tcx>,
485     ) -> EvalResult<'tcx> {
486         let dest = self.eval_place(place)?;
487         let dest_ty = self.place_ty(place);
488
489         use rustc::mir::Rvalue::*;
490         match *rvalue {
491             Use(ref operand) => {
492                 let value = self.eval_operand(operand)?.value;
493                 let valty = ValTy {
494                     value,
495                     ty: dest_ty,
496                 };
497                 self.write_value(valty, dest)?;
498             }
499
500             BinaryOp(bin_op, ref left, ref right) => {
501                 let left = self.eval_operand(left)?;
502                 let right = self.eval_operand(right)?;
503                 if self.intrinsic_overflowing(
504                     bin_op,
505                     left,
506                     right,
507                     dest,
508                     dest_ty,
509                 )?
510                 {
511                     // There was an overflow in an unchecked binop.  Right now, we consider this an error and bail out.
512                     // The rationale is that the reason rustc emits unchecked binops in release mode (vs. the checked binops
513                     // it emits in debug mode) is performance, but it doesn't cost us any performance in miri.
514                     // If, however, the compiler ever starts transforming unchecked intrinsics into unchecked binops,
515                     // we have to go back to just ignoring the overflow here.
516                     return err!(OverflowingMath);
517                 }
518             }
519
520             CheckedBinaryOp(bin_op, ref left, ref right) => {
521                 let left = self.eval_operand(left)?;
522                 let right = self.eval_operand(right)?;
523                 self.intrinsic_with_overflow(
524                     bin_op,
525                     left,
526                     right,
527                     dest,
528                     dest_ty,
529                 )?;
530             }
531
532             UnaryOp(un_op, ref operand) => {
533                 let val = self.eval_operand_to_primval(operand)?;
534                 let val = self.unary_op(un_op, val, dest_ty)?;
535                 self.write_primval(
536                     dest,
537                     val,
538                     dest_ty,
539                 )?;
540             }
541
542             Aggregate(ref kind, ref operands) => {
543                 self.inc_step_counter_and_check_limit(operands.len());
544
545                 let (dest, active_field_index) = match **kind {
546                     mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
547                         self.write_discriminant_value(dest_ty, dest, variant_index)?;
548                         if adt_def.is_enum() {
549                             (self.place_downcast(dest, variant_index)?, active_field_index)
550                         } else {
551                             (dest, active_field_index)
552                         }
553                     }
554                     _ => (dest, None)
555                 };
556
557                 let layout = self.layout_of(dest_ty)?;
558                 for (i, operand) in operands.iter().enumerate() {
559                     let value = self.eval_operand(operand)?;
560                     // Ignore zero-sized fields.
561                     if !self.layout_of(value.ty)?.is_zst() {
562                         let field_index = active_field_index.unwrap_or(i);
563                         let (field_dest, _) = self.place_field(dest, mir::Field::new(field_index), layout)?;
564                         self.write_value(value, field_dest)?;
565                     }
566                 }
567             }
568
569             Repeat(ref operand, _) => {
570                 let (elem_ty, length) = match dest_ty.sty {
571                     ty::TyArray(elem_ty, n) => (elem_ty, n.val.unwrap_u64()),
572                     _ => {
573                         bug!(
574                             "tried to assign array-repeat to non-array type {:?}",
575                             dest_ty
576                         )
577                     }
578                 };
579                 let elem_size = self.layout_of(elem_ty)?.size.bytes();
580                 let value = self.eval_operand(operand)?.value;
581
582                 let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
583
584                 // FIXME: speed up repeat filling
585                 for i in 0..length {
586                     let elem_dest = dest.offset(i * elem_size, &self)?;
587                     self.write_value_to_ptr(value, elem_dest, dest_align, elem_ty)?;
588                 }
589             }
590
591             Len(ref place) => {
592                 // FIXME(CTFE): don't allow computing the length of arrays in const eval
593                 let src = self.eval_place(place)?;
594                 let ty = self.place_ty(place);
595                 let (_, len) = src.elem_ty_and_len(ty);
596                 self.write_primval(
597                     dest,
598                     PrimVal::from_u128(len as u128),
599                     dest_ty,
600                 )?;
601             }
602
603             Ref(_, _, ref place) => {
604                 let src = self.eval_place(place)?;
605                 // We ignore the alignment of the place here -- special handling for packed structs ends
606                 // at the `&` operator.
607                 let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();
608
609                 let val = match extra {
610                     PlaceExtra::None => ptr.to_value(),
611                     PlaceExtra::Length(len) => ptr.to_value_with_len(len),
612                     PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
613                     PlaceExtra::DowncastVariant(..) => {
614                         bug!("attempted to take a reference to an enum downcast place")
615                     }
616                 };
617                 let valty = ValTy {
618                     value: val,
619                     ty: dest_ty,
620                 };
621                 self.write_value(valty, dest)?;
622             }
623
624             NullaryOp(mir::NullOp::Box, ty) => {
625                 let ty = self.monomorphize(ty, self.substs());
626                 M::box_alloc(self, ty, dest)?;
627             }
628
629             NullaryOp(mir::NullOp::SizeOf, ty) => {
630                 let ty = self.monomorphize(ty, self.substs());
631                 let layout = self.layout_of(ty)?;
632                 assert!(!layout.is_unsized(),
633                         "SizeOf nullary MIR operator called for unsized type");
634                 self.write_primval(
635                     dest,
636                     PrimVal::from_u128(layout.size.bytes() as u128),
637                     dest_ty,
638                 )?;
639             }
640
641             Cast(kind, ref operand, cast_ty) => {
642                 debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
643                 use rustc::mir::CastKind::*;
644                 match kind {
645                     Unsize => {
646                         let src = self.eval_operand(operand)?;
647                         let src_layout = self.layout_of(src.ty)?;
648                         let dst_layout = self.layout_of(dest_ty)?;
649                         self.unsize_into(src.value, src_layout, dest, dst_layout)?;
650                     }
651
652                     Misc => {
653                         let src = self.eval_operand(operand)?;
654                         if self.type_is_fat_ptr(src.ty) {
655                             match (src.value, self.type_is_fat_ptr(dest_ty)) {
656                                 (Value::ByRef { .. }, _) |
657                                 (Value::ByValPair(..), true) => {
658                                     let valty = ValTy {
659                                         value: src.value,
660                                         ty: dest_ty,
661                                     };
662                                     self.write_value(valty, dest)?;
663                                 }
664                                 (Value::ByValPair(data, _), false) => {
665                                     let valty = ValTy {
666                                         value: Value::ByVal(data),
667                                         ty: dest_ty,
668                                     };
669                                     self.write_value(valty, dest)?;
670                                 }
671                                 (Value::ByVal(_), _) => bug!("expected fat ptr"),
672                             }
673                         } else {
674                             let src_layout = self.layout_of(src.ty)?;
675                             match src_layout.variants {
676                                 layout::Variants::Single { index } => {
677                                     if let Some(def) = src.ty.ty_adt_def() {
678                                         let discr_val = def
679                                             .discriminant_for_variant(*self.tcx, index)
680                                             .val;
681                                         return self.write_primval(
682                                             dest,
683                                             PrimVal::Bytes(discr_val),
684                                             dest_ty);
685                                     }
686                                 }
687                                 layout::Variants::Tagged { .. } |
688                                 layout::Variants::NicheFilling { .. } => {},
689                             }
690
691                             let src_val = self.value_to_primval(src)?;
692                             let dest_val = self.cast_primval(src_val, src.ty, dest_ty)?;
693                             let valty = ValTy {
694                                 value: Value::ByVal(dest_val),
695                                 ty: dest_ty,
696                             };
697                             self.write_value(valty, dest)?;
698                         }
699                     }
700
701                     ReifyFnPointer => {
702                         match self.eval_operand(operand)?.ty.sty {
703                             ty::TyFnDef(def_id, substs) => {
704                                 if self.tcx.has_attr(def_id, "rustc_args_required_const") {
705                                     bug!("reifying a fn ptr that requires \
706                                           const arguments");
707                                 }
708                                 let instance: EvalResult<'tcx, _> = ty::Instance::resolve(
709                                     *self.tcx,
710                                     self.param_env,
711                                     def_id,
712                                     substs,
713                                 ).ok_or_else(|| EvalErrorKind::TypeckError.into());
714                                 let fn_ptr = self.memory.create_fn_alloc(instance?);
715                                 let valty = ValTy {
716                                     value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
717                                     ty: dest_ty,
718                                 };
719                                 self.write_value(valty, dest)?;
720                             }
721                             ref other => bug!("reify fn pointer on {:?}", other),
722                         }
723                     }
724
725                     UnsafeFnPointer => {
726                         match dest_ty.sty {
727                             ty::TyFnPtr(_) => {
728                                 let mut src = self.eval_operand(operand)?;
729                                 src.ty = dest_ty;
730                                 self.write_value(src, dest)?;
731                             }
732                             ref other => bug!("fn to unsafe fn cast on {:?}", other),
733                         }
734                     }
735
736                     ClosureFnPointer => {
737                         match self.eval_operand(operand)?.ty.sty {
738                             ty::TyClosure(def_id, substs) => {
739                                 let substs = self.tcx.subst_and_normalize_erasing_regions(
740                                     self.substs(),
741                                     ty::ParamEnv::reveal_all(),
742                                     &substs,
743                                 );
744                                 let instance = ty::Instance::resolve_closure(
745                                     *self.tcx,
746                                     def_id,
747                                     substs,
748                                     ty::ClosureKind::FnOnce,
749                                 );
750                                 let fn_ptr = self.memory.create_fn_alloc(instance);
751                                 let valty = ValTy {
752                                     value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
753                                     ty: dest_ty,
754                                 };
755                                 self.write_value(valty, dest)?;
756                             }
757                             ref other => bug!("closure fn pointer on {:?}", other),
758                         }
759                     }
760                 }
761             }
762
763             Discriminant(ref place) => {
764                 let ty = self.place_ty(place);
765                 let place = self.eval_place(place)?;
766                 let discr_val = self.read_discriminant_value(place, ty)?;
767                 self.write_primval(dest, PrimVal::Bytes(discr_val), dest_ty)?;
768             }
769         }
770
771         self.dump_local(dest);
772
773         Ok(())
774     }
775
776     pub(super) fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
777         match ty.sty {
778             ty::TyRawPtr(ref tam) |
779             ty::TyRef(_, ref tam) => !self.type_is_sized(tam.ty),
780             ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
781             _ => false,
782         }
783     }
784
785     pub(super) fn eval_operand_to_primval(
786         &mut self,
787         op: &mir::Operand<'tcx>,
788     ) -> EvalResult<'tcx, PrimVal> {
789         let valty = self.eval_operand(op)?;
790         self.value_to_primval(valty)
791     }
792
793     pub(crate) fn operands_to_args(
794         &mut self,
795         ops: &[mir::Operand<'tcx>],
796     ) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
797         ops.into_iter()
798             .map(|op| self.eval_operand(op))
799             .collect()
800     }
801
802     pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
803         use rustc::mir::Operand::*;
804         let ty = self.monomorphize(op.ty(self.mir(), *self.tcx), self.substs());
805         match *op {
806             // FIXME: do some more logic on `move` to invalidate the old location
807             Copy(ref place) |
808             Move(ref place) => {
809                 Ok(ValTy {
810                     value: self.eval_and_read_place(place)?,
811                     ty
812                 })
813             },
814
815             Constant(ref constant) => {
816                 use rustc::mir::Literal;
817                 let mir::Constant { ref literal, .. } = **constant;
818                 let value = match *literal {
819                     Literal::Value { ref value } => self.const_to_value(&value.val, ty)?,
820
821                     Literal::Promoted { index } => {
822                         self.read_global_as_value(GlobalId {
823                             instance: self.frame().instance,
824                             promoted: Some(index),
825                         }, ty)?
826                     }
827                 };
828
829                 Ok(ValTy {
830                     value,
831                     ty,
832                 })
833             }
834         }
835     }
836
837     /// Reads a tag and produces the corresponding variant index.
838     pub fn read_discriminant_as_variant_index(
839         &mut self,
840         place: Place,
841         ty: Ty<'tcx>,
842     ) -> EvalResult<'tcx, usize> {
843         let layout = self.layout_of(ty)?;
844         match layout.variants {
845             ty::layout::Variants::Single { index } => Ok(index),
846             ty::layout::Variants::Tagged { .. } => {
847                 let discr_val = self.read_discriminant_value(place, ty)?;
848                 ty
849                     .ty_adt_def()
850                     .expect("tagged layout for non adt")
851                     .discriminants(self.tcx.tcx)
852                     .position(|var| var.val == discr_val)
853                     .ok_or_else(|| EvalErrorKind::InvalidDiscriminant.into())
854             }
855             ty::layout::Variants::NicheFilling { .. } => {
856                 let discr_val = self.read_discriminant_value(place, ty)?;
857                 assert_eq!(discr_val as usize as u128, discr_val);
858                 Ok(discr_val as usize)
859             },
860         }
861     }
862
863     pub fn read_discriminant_value(
864         &mut self,
865         place: Place,
866         ty: Ty<'tcx>,
867     ) -> EvalResult<'tcx, u128> {
868         let layout = self.layout_of(ty)?;
869         trace!("read_discriminant_value {:#?}", layout);
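        // An uninhabited type has no values at all, so there is no discriminant to read;
        // report 0.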
870         if layout.abi == layout::Abi::Uninhabited {
871             return Ok(0);
872         }
873
874         match layout.variants {
875             layout::Variants::Single { index } => {
876                 let discr_val = ty.ty_adt_def().map_or(
877                     index as u128,
878                     |def| def.discriminant_for_variant(*self.tcx, index).val);
879                 return Ok(discr_val);
880             }
881             layout::Variants::Tagged { .. } |
882             layout::Variants::NicheFilling { .. } => {},
883         }
884
885         let (discr_place, discr) = self.place_field(place, mir::Field::new(0), layout)?;
886         trace!("discr place: {:?}, {:?}", discr_place, discr);
887         let raw_discr = self.value_to_primval(ValTy {
888             value: self.read_place(discr_place)?,
889             ty: discr.ty
890         })?;
891         let discr_val = match layout.variants {
892             layout::Variants::Single { .. } => bug!(),
893             // FIXME: should we catch invalid discriminants here?
894             layout::Variants::Tagged { .. } => {
895                 if discr.ty.is_signed() {
896                     let i = raw_discr.to_bytes()? as i128;
897                     // Going from the layout tag type to the typeck discriminant type
898                     // requires first sign extending at the layout tag's size ...
899                     let amt = 128 - discr.size.bits();
900                     let sexted = (i << amt) >> amt;
901                     // ... and then truncating to the size of the typeck discriminant type.
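                    // Illustrative example (assumed, not from the original source): an i8 tag
                    // holding 0xff sign-extends to -1; truncating that to a 16-bit discriminant
                    // type then yields 0xffff.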
902                     let discr_ty = ty
903                         .ty_adt_def().expect("tagged layout corresponds to adt")
904                         .repr
905                         .discr_type();
906                     let discr_ty = layout::Integer::from_attr(self.tcx.tcx, discr_ty);
907                     let amt = 128 - discr_ty.size().bits();
908                     let truncatee = sexted as u128;
909                     (truncatee << amt) >> amt
910                 } else {
911                     raw_discr.to_bytes()?
912                 }
913             },
914             layout::Variants::NicheFilling {
915                 dataful_variant,
916                 ref niche_variants,
917                 niche_start,
918                 ..
919             } => {
920                 let variants_start = *niche_variants.start() as u128;
921                 let variants_end = *niche_variants.end() as u128;
922                 match raw_discr {
923                     PrimVal::Ptr(_) => {
924                         assert!(niche_start == 0);
925                         assert!(variants_start == variants_end);
926                         dataful_variant as u128
927                     },
928                     PrimVal::Bytes(raw_discr) => {
929                         let discr = raw_discr.wrapping_sub(niche_start)
930                             .wrapping_add(variants_start);
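                        // A raw value inside the niche range encodes one of the niche variants;
                        // any other bit pattern belongs to the dataful variant. For example
                        // (illustrative), in `Option<&T>` the niche variant `None` is the null
                        // pointer value and every non-null value is `Some`.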
931                         if variants_start <= discr && discr <= variants_end {
932                             discr
933                         } else {
934                             dataful_variant as u128
935                         }
936                     },
937                     PrimVal::Undef => return err!(ReadUndefBytes),
938                 }
939             }
940         };
941
942         Ok(discr_val)
943     }
944
945
946     pub fn write_discriminant_value(
947         &mut self,
948         dest_ty: Ty<'tcx>,
949         dest: Place,
950         variant_index: usize,
951     ) -> EvalResult<'tcx> {
952         let layout = self.layout_of(dest_ty)?;
953
954         match layout.variants {
955             layout::Variants::Single { index } => {
956                 if index != variant_index {
957                     // If the layout of an enum is `Single`, all
958                     // other variants are necessarily uninhabited.
959                     assert_eq!(layout.for_variant(&self, variant_index).abi,
960                                layout::Abi::Uninhabited);
961                 }
962             }
963             layout::Variants::Tagged { ref discr, .. } => {
964                 let discr_val = dest_ty.ty_adt_def().unwrap()
965                     .discriminant_for_variant(*self.tcx, variant_index)
966                     .val;
967
968                 // raw discriminants for enums are isize or bigger during
969                 // their computation, but the in-memory tag is the smallest possible
970                 // representation
971                 let size = discr.value.size(self.tcx.tcx).bits();
972                 let amt = 128 - size;
973                 let discr_val = (discr_val << amt) >> amt;
974
975                 let (discr_dest, discr) = self.place_field(dest, mir::Field::new(0), layout)?;
976                 self.write_primval(discr_dest, PrimVal::Bytes(discr_val), discr.ty)?;
977             }
978             layout::Variants::NicheFilling {
979                 dataful_variant,
980                 ref niche_variants,
981                 niche_start,
982                 ..
983             } => {
984                 if variant_index != dataful_variant {
985                     let (niche_dest, niche) =
986                         self.place_field(dest, mir::Field::new(0), layout)?;
987                     let niche_value = ((variant_index - niche_variants.start()) as u128)
988                         .wrapping_add(niche_start);
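                    // This is the inverse of the mapping in `read_discriminant_value`: the
                    // variant's offset within the niche range is added back onto `niche_start`.
                    // The dataful variant itself needs no tag write.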
989                     self.write_primval(niche_dest, PrimVal::Bytes(niche_value), niche.ty)?;
990                 }
991             }
992         }
993
994         Ok(())
995     }
996
997     pub fn read_global_as_value(&self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
998         if self.tcx.is_static(gid.instance.def_id()).is_some() {
999             let alloc_id = self
1000                 .tcx
1001                 .interpret_interner
1002                 .cache_static(gid.instance.def_id());
1003             let layout = self.layout_of(ty)?;
1004             let ptr = MemoryPointer::new(alloc_id, 0);
1005             return Ok(Value::ByRef(ptr.into(), layout.align))
1006         }
1007         let cv = self.const_eval(gid)?;
1008         self.const_to_value(&cv.val, ty)
1009     }
1010
1011     pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
1012         let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
1013             ty::ParamEnv::reveal_all()
1014         } else {
1015             self.param_env
1016         };
1017         self.tcx.const_eval(param_env.and(gid)).map_err(|err| match *err.kind {
1018             ErrKind::Miri(ref err, _) => match err.kind {
1019                 EvalErrorKind::TypeckError |
1020                 EvalErrorKind::Layout(_) => EvalErrorKind::TypeckError.into(),
1021                 _ => EvalErrorKind::ReferencedConstant.into(),
1022             },
1023             ErrKind::TypeckError => EvalErrorKind::TypeckError.into(),
1024             ref other => bug!("const eval returned {:?}", other),
1025         })
1026     }
1027
1028     pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
1029         let new_place = match place {
1030             Place::Local { frame, local } => {
1031                 match self.stack[frame].locals[local] {
1032                     None => return err!(DeadLocal),
1033                     Some(Value::ByRef(ptr, align)) => {
1034                         Place::Ptr {
1035                             ptr,
1036                             align,
1037                             extra: PlaceExtra::None,
1038                         }
1039                     }
1040                     Some(val) => {
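                        // The local is live but currently stored as an immediate `Value`;
                        // spill it into a fresh allocation so that it has a memory address.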
1041                         let ty = self.stack[frame].mir.local_decls[local].ty;
1042                         let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
1043                         let layout = self.layout_of(ty)?;
1044                         let ptr = self.alloc_ptr(ty)?;
1045                         self.stack[frame].locals[local] =
1046                             Some(Value::ByRef(ptr.into(), layout.align)); // it stays live
1047                         let place = Place::from_ptr(ptr, layout.align);
1048                         self.write_value(ValTy { value: val, ty }, place)?;
1049                         place
1050                     }
1051                 }
1052             }
1053             Place::Ptr { .. } => place,
1054         };
1055         Ok(new_place)
1056     }
1057
1058     /// Ensures this `Value` is not a `ByRef`.
1059     pub fn follow_by_ref_value(
1060         &self,
1061         value: Value,
1062         ty: Ty<'tcx>,
1063     ) -> EvalResult<'tcx, Value> {
1064         match value {
1065             Value::ByRef(ptr, align) => {
1066                 self.read_value(ptr, align, ty)
1067             }
1068             other => Ok(other),
1069         }
1070     }
1071
1072     pub fn value_to_primval(
1073         &self,
1074         ValTy { value, ty } : ValTy<'tcx>,
1075     ) -> EvalResult<'tcx, PrimVal> {
1076         match self.follow_by_ref_value(value, ty)? {
1077             Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),
1078
1079             Value::ByVal(primval) => {
1080                 // TODO: Do we really want insta-UB here?
1081                 self.ensure_valid_value(primval, ty)?;
1082                 Ok(primval)
1083             }
1084
1085             Value::ByValPair(..) => bug!("value_to_primval can't work with fat pointers"),
1086         }
1087     }
1088
1089     pub fn write_ptr(&mut self, dest: Place, val: Pointer, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
1090         let valty = ValTy {
1091             value: val.to_value(),
1092             ty: dest_ty,
1093         };
1094         self.write_value(valty, dest)
1095     }
1096
1097     pub fn write_primval(
1098         &mut self,
1099         dest: Place,
1100         val: PrimVal,
1101         dest_ty: Ty<'tcx>,
1102     ) -> EvalResult<'tcx> {
1103         let valty = ValTy {
1104             value: Value::ByVal(val),
1105             ty: dest_ty,
1106         };
1107         self.write_value(valty, dest)
1108     }
1109
1110     pub fn write_value(
1111         &mut self,
1112         ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
1113         dest: Place,
1114     ) -> EvalResult<'tcx> {
1115         //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
1116         // Note that it is really important that the type here is the right one, and matches the type things are read at.
1117         // In case `src_val` is a `ByValPair`, we don't do any magic here to handle padding properly, which is only
1118         // correct if we never look at this data with the wrong type.
1119
1120         match dest {
1121             Place::Ptr { ptr, align, extra } => {
1122                 assert_eq!(extra, PlaceExtra::None);
1123                 self.write_value_to_ptr(src_val, ptr, align, dest_ty)
1124             }
1125
1126             Place::Local { frame, local } => {
1127                 let dest = self.stack[frame].get_local(local)?;
1128                 self.write_value_possibly_by_val(
1129                     src_val,
1130                     |this, val| this.stack[frame].set_local(local, val),
1131                     dest,
1132                     dest_ty,
1133                 )
1134             }
1135         }
1136     }
1137
1138     // The cases here can be a bit subtle. Read carefully!
1139     fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
1140         &mut self,
1141         src_val: Value,
1142         write_dest: F,
1143         old_dest_val: Value,
1144         dest_ty: Ty<'tcx>,
1145     ) -> EvalResult<'tcx> {
1146         if let Value::ByRef(dest_ptr, align) = old_dest_val {
1147             // If the value is already `ByRef` (that is, backed by an `Allocation`),
1148             // then we must write the new value into this allocation, because there may be
1149             // other pointers into the allocation. These other pointers are logically
1150             // pointers into the local variable, and must be able to observe the change.
1151             //
1152             // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
1153             // knew for certain that there were no outstanding pointers to this allocation.
1154             self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
1155         } else if let Value::ByRef(src_ptr, align) = src_val {
1156             // If the value is not `ByRef`, then we know there are no pointers to it
1157             // and we can simply overwrite the `Value` in the locals array directly.
1158             //
1159             // In this specific case, where the source value is `ByRef`, we must duplicate
1160             // the allocation, because this is a by-value operation. It would be incorrect
1161             // if they referred to the same allocation, since then a change to one would
1162             // implicitly change the other.
1163             //
1164             // It is a valid optimization to attempt reading a primitive value out of the
1165             // source and write that into the destination without making an allocation, so
1166             // we do so here.
1167             if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
1168                 write_dest(self, src_val)?;
1169             } else {
1170                 let dest_ptr = self.alloc_ptr(dest_ty)?.into();
1171                 let layout = self.layout_of(dest_ty)?;
1172                 self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size.bytes(), false)?;
1173                 write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
1174             }
1175         } else {
1176             // Finally, we have the simple case where neither source nor destination is
1177             // `ByRef`. We may simply copy the source value over the destination.
1178             write_dest(self, src_val)?;
1179         }
1180         Ok(())
1181     }
1182
1183     pub fn write_value_to_ptr(
1184         &mut self,
1185         value: Value,
1186         dest: Pointer,
1187         dest_align: Align,
1188         dest_ty: Ty<'tcx>,
1189     ) -> EvalResult<'tcx> {
1190         let layout = self.layout_of(dest_ty)?;
1191         trace!("write_value_to_ptr: {:#?}, {}, {:#?}", value, dest_ty, layout);
1192         match value {
1193             Value::ByRef(ptr, align) => {
1194                 self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size.bytes(), false)
1195             }
1196             Value::ByVal(primval) => {
1197                 let signed = match layout.abi {
1198                     layout::Abi::Scalar(ref scal) => match scal.value {
1199                         layout::Primitive::Int(_, signed) => signed,
1200                         _ => false,
1201                     },
1202                     _ if primval.is_undef() => false,
1203                     _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout)
1204                 };
1205                 self.memory.write_primval(dest, dest_align, primval, layout.size.bytes(), signed)
1206             }
1207             Value::ByValPair(a_val, b_val) => {
1208                 trace!("write_value_to_ptr valpair: {:#?}", layout);
1209                 let (a, b) = match layout.abi {
1210                     layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
1211                     _ => bug!("write_value_to_ptr: invalid ByValPair layout: {:#?}", layout)
1212                 };
1213                 let (a_size, b_size) = (a.size(&self), b.size(&self));
1214                 let a_ptr = dest;
1215                 let b_offset = a_size.abi_align(b.align(&self));
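                // Standard `ScalarPair` layout: the second component starts at the first
                // component's size rounded up to the second component's alignment.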
1216                 let b_ptr = dest.offset(b_offset.bytes(), &self)?.into();
1217                 // TODO: What about signedness?
1218                 self.memory.write_primval(a_ptr, dest_align, a_val, a_size.bytes(), false)?;
1219                 self.memory.write_primval(b_ptr, dest_align, b_val, b_size.bytes(), false)
1220             }
1221         }
1222     }
1223
1224     pub fn ty_to_primval_kind(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, PrimValKind> {
1225         use syntax::ast::FloatTy;
1226
1227         let kind = match ty.sty {
1228             ty::TyBool => PrimValKind::Bool,
1229             ty::TyChar => PrimValKind::Char,
1230
1231             ty::TyInt(int_ty) => {
1232                 use syntax::ast::IntTy::*;
1233                 let size = match int_ty {
1234                     I8 => 1,
1235                     I16 => 2,
1236                     I32 => 4,
1237                     I64 => 8,
1238                     I128 => 16,
1239                     Isize => self.memory.pointer_size(),
1240                 };
1241                 PrimValKind::from_int_size(size)
1242             }
1243
1244             ty::TyUint(uint_ty) => {
1245                 use syntax::ast::UintTy::*;
1246                 let size = match uint_ty {
1247                     U8 => 1,
1248                     U16 => 2,
1249                     U32 => 4,
1250                     U64 => 8,
1251                     U128 => 16,
1252                     Usize => self.memory.pointer_size(),
1253                 };
1254                 PrimValKind::from_uint_size(size)
1255             }
1256
1257             ty::TyFloat(FloatTy::F32) => PrimValKind::F32,
1258             ty::TyFloat(FloatTy::F64) => PrimValKind::F64,
1259
1260             ty::TyFnPtr(_) => PrimValKind::FnPtr,
1261
1262             ty::TyRef(_, ref tam) |
1263             ty::TyRawPtr(ref tam) if self.type_is_sized(tam.ty) => PrimValKind::Ptr,
1264
1265             ty::TyAdt(def, _) if def.is_box() => PrimValKind::Ptr,
1266
1267             ty::TyAdt(..) => {
1268                 match self.layout_of(ty)?.abi {
1269                     layout::Abi::Scalar(ref scalar) => {
1270                         use rustc::ty::layout::Primitive::*;
1271                         match scalar.value {
1272                             Int(i, false) => PrimValKind::from_uint_size(i.size().bytes()),
1273                             Int(i, true) => PrimValKind::from_int_size(i.size().bytes()),
1274                             F32 => PrimValKind::F32,
1275                             F64 => PrimValKind::F64,
1276                             Pointer => PrimValKind::Ptr,
1277                         }
1278                     }
1279
1280                     _ => return err!(TypeNotPrimitive(ty)),
1281                 }
1282             }
1283
1284             _ => return err!(TypeNotPrimitive(ty)),
1285         };
1286
1287         Ok(kind)
1288     }
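
    // A minimal sketch of the size-based mapping used by `ty_to_primval_kind`
    // above: the kind of an integer is determined purely by its byte width, so
    // any 4-byte signed integer maps to the same kind as `i32`. Hypothetical
    // helper, assuming `from_int_size` follows the convention shown above.
    #[allow(dead_code)]
    fn example_i32_kind() -> PrimValKind {
        // 4 bytes => the `PrimValKind` used for `i32`
        PrimValKind::from_int_size(4)
    }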
1289
1290     fn ensure_valid_value(&self, val: PrimVal, ty: Ty<'tcx>) -> EvalResult<'tcx> {
1291         match ty.sty {
1292             ty::TyBool if val.to_bytes()? > 1 => err!(InvalidBool),
1293
1294             ty::TyChar if ::std::char::from_u32(val.to_bytes()? as u32).is_none() => {
1295                 err!(InvalidChar(val.to_bytes()? as u32 as u128))
1296             }
1297
1298             _ => Ok(()),
1299         }
1300     }
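
    // A small illustration of the char check in `ensure_valid_value` above,
    // using only plain std (hypothetical helper, not used by the interpreter):
    // `char::from_u32` rejects surrogate code points and values above `char::MAX`.
    #[allow(dead_code)]
    fn example_char_is_valid() {
        debug_assert!(::std::char::from_u32(0x61).is_some());     // 'a'
        debug_assert!(::std::char::from_u32(0xD800).is_none());   // surrogate
        debug_assert!(::std::char::from_u32(0x110000).is_none()); // above char::MAX
    }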
1301
1302     pub fn read_value(&self, ptr: Pointer, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1303         if let Some(val) = self.try_read_value(ptr, align, ty)? {
1304             Ok(val)
1305         } else {
1306             bug!("primitive read failed for type: {:?}", ty);
1307         }
1308     }
1309
1310     pub(crate) fn read_ptr(
1311         &self,
1312         ptr: MemoryPointer,
1313         ptr_align: Align,
1314         pointee_ty: Ty<'tcx>,
1315     ) -> EvalResult<'tcx, Value> {
1316         let ptr_size = self.memory.pointer_size();
1317         let p: Pointer = self.memory.read_ptr_sized(ptr, ptr_align)?.into();
1318         if self.type_is_sized(pointee_ty) {
1319             Ok(p.to_value())
1320         } else {
1321             trace!("reading fat pointer extra of type {}", pointee_ty);
1322             let extra = ptr.offset(ptr_size, self)?;
1323             match self.tcx.struct_tail(pointee_ty).sty {
1324                 ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
1325                     self.memory.read_ptr_sized(extra, ptr_align)?.to_ptr()?,
1326                 )),
1327                 ty::TySlice(..) | ty::TyStr => {
1328                     let len = self
1329                         .memory
1330                         .read_ptr_sized(extra, ptr_align)?
1331                         .to_bytes()?;
1332                     Ok(p.to_value_with_len(len as u64))
1333                 },
1334                 _ => bug!("unsized primval ptr read from {:?}", pointee_ty),
1335             }
1336         }
1337     }
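
    // A plain-std illustration of the fat-pointer layout that `read_ptr` above
    // reads back from memory: an unsized reference is two pointer-sized words,
    // the data pointer followed by the length (slices/str) or the vtable pointer
    // (trait objects). Hypothetical helper, not used by the interpreter.
    #[allow(dead_code)]
    fn example_fat_ptr_size() {
        use std::mem::size_of;
        debug_assert_eq!(size_of::<&[u8]>(), 2 * size_of::<&u8>());
        debug_assert_eq!(size_of::<&str>(), 2 * size_of::<usize>());
    }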
1338
1339     pub fn try_read_value(&self, ptr: Pointer, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
1340         use syntax::ast::FloatTy;
1341
1342         let layout = self.layout_of(ty)?;
1343         // check the access with the stricter of the two alignment requirements
1344         let align = layout.align.max(ptr_align);
1345         self.memory.check_align(ptr, align)?;
1346
1347         if layout.size.bytes() == 0 {
1348             return Ok(Some(Value::ByVal(PrimVal::Undef)));
1349         }
1350
1351         let ptr = ptr.to_ptr()?;
1352         let val = match ty.sty {
1353             ty::TyBool => {
1354                 let val = self.memory.read_primval(ptr, ptr_align, 1)?;
1355                 let val = match val {
1356                     PrimVal::Bytes(0) => false,
1357                     PrimVal::Bytes(1) => true,
1358                     // TODO: This seems a little overeager; should merely reading at bool type already be instant UB?
1359                     _ => return err!(InvalidBool),
1360                 };
1361                 PrimVal::from_bool(val)
1362             }
1363             ty::TyChar => {
1364                 let c = self.memory.read_primval(ptr, ptr_align, 4)?.to_bytes()? as u32;
1365                 match ::std::char::from_u32(c) {
1366                     Some(ch) => PrimVal::from_char(ch),
1367                     None => return err!(InvalidChar(c as u128)),
1368                 }
1369             }
1370
1371             ty::TyInt(int_ty) => {
1372                 use syntax::ast::IntTy::*;
1373                 let size = match int_ty {
1374                     I8 => 1,
1375                     I16 => 2,
1376                     I32 => 4,
1377                     I64 => 8,
1378                     I128 => 16,
1379                     Isize => self.memory.pointer_size(),
1380                 };
1381                 self.memory.read_primval(ptr, ptr_align, size)?
1382             }
1383
1384             ty::TyUint(uint_ty) => {
1385                 use syntax::ast::UintTy::*;
1386                 let size = match uint_ty {
1387                     U8 => 1,
1388                     U16 => 2,
1389                     U32 => 4,
1390                     U64 => 8,
1391                     U128 => 16,
1392                     Usize => self.memory.pointer_size(),
1393                 };
1394                 self.memory.read_primval(ptr, ptr_align, size)?
1395             }
1396
1397             ty::TyFloat(FloatTy::F32) => {
1398                 PrimVal::Bytes(self.memory.read_primval(ptr, ptr_align, 4)?.to_bytes()?)
1399             }
1400             ty::TyFloat(FloatTy::F64) => {
1401                 PrimVal::Bytes(self.memory.read_primval(ptr, ptr_align, 8)?.to_bytes()?)
1402             }
1403
1404             ty::TyFnPtr(_) => self.memory.read_ptr_sized(ptr, ptr_align)?,
1405             ty::TyRef(_, ref tam) |
1406             ty::TyRawPtr(ref tam) => return self.read_ptr(ptr, ptr_align, tam.ty).map(Some),
1407
1408             ty::TyAdt(def, _) => {
1409                 if def.is_box() {
1410                     return self.read_ptr(ptr, ptr_align, ty.boxed_ty()).map(Some);
1411                 }
1412
1413                 if let layout::Abi::Scalar(ref scalar) = self.layout_of(ty)?.abi {
1414                     let size = scalar.value.size(self).bytes();
1415                     self.memory.read_primval(ptr, ptr_align, size)?
1416                 } else {
1417                     return Ok(None);
1418                 }
1419             }
1420
1421             _ => return Ok(None),
1422         };
1423
1424         Ok(Some(Value::ByVal(val)))
1425     }
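
    // A note on the zero-size fast path in `try_read_value` above: zero-sized
    // types occupy no memory, so there is nothing to read and the value is
    // reported as `Undef`. Plain-std illustration with a hypothetical helper.
    #[allow(dead_code)]
    fn example_zero_sized_types() {
        struct Empty;
        debug_assert_eq!(::std::mem::size_of::<()>(), 0);
        debug_assert_eq!(::std::mem::size_of::<Empty>(), 0);
    }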
1426
1427     pub fn frame(&self) -> &Frame<'mir, 'tcx> {
1428         self.stack.last().expect("no call frames exist")
1429     }
1430
1431     pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
1432         self.stack.last_mut().expect("no call frames exist")
1433     }
1434
1435     pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
1436         self.frame().mir
1437     }
1438
1439     pub fn substs(&self) -> &'tcx Substs<'tcx> {
1440         if let Some(frame) = self.stack.last() {
1441             frame.instance.substs
1442         } else {
1443             Substs::empty()
1444         }
1445     }
1446
1447     fn unsize_into_ptr(
1448         &mut self,
1449         src: Value,
1450         src_ty: Ty<'tcx>,
1451         dest: Place,
1452         dest_ty: Ty<'tcx>,
1453         sty: Ty<'tcx>,
1454         dty: Ty<'tcx>,
1455     ) -> EvalResult<'tcx> {
1456         // A<Struct> -> A<Trait> conversion
1457         let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);
1458
1459         match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
1460             (&ty::TyArray(_, length), &ty::TySlice(_)) => {
1461                 let ptr = self.into_ptr(src)?;
1462                 // u64 cast is from usize to u64, which is always good
1463                 let valty = ValTy {
1464                     value: ptr.to_value_with_len(length.val.unwrap_u64()),
1465                     ty: dest_ty,
1466                 };
1467                 self.write_value(valty, dest)
1468             }
1469             (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
1470                 // For now, upcasts are limited to changes in marker
1471                 // traits, and hence never require an actual change to
1472                 // the vtable.
1473                 let valty = ValTy {
1474                     value: src,
1475                     ty: dest_ty,
1476                 };
1477                 self.write_value(valty, dest)
1478             }
1479             (_, &ty::TyDynamic(ref data, _)) => {
1480                 let trait_ref = data.principal().unwrap().with_self_ty(
1481                     *self.tcx,
1482                     src_pointee_ty,
1483                 );
1484                 let trait_ref = self.tcx.erase_regions(&trait_ref);
1485                 let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
1486                 let ptr = self.into_ptr(src)?;
1487                 let valty = ValTy {
1488                     value: ptr.to_value_with_vtable(vtable),
1489                     ty: dest_ty,
1490                 };
1491                 self.write_value(valty, dest)
1492             }
1493
1494             _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
1495         }
1496     }
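
    // A user-level illustration of the two pointer unsizings handled above
    // (ordinary Rust, hypothetical helper, not used by the interpreter):
    // unsizing an array reference attaches a length, unsizing to a trait object
    // attaches a vtable pointer; the data pointer itself is unchanged.
    #[allow(dead_code)]
    fn example_unsize_coercions() {
        let array: [u8; 4] = [1, 2, 3, 4];
        let slice: &[u8] = &array;               // thin ptr -> (ptr, len = 4)
        debug_assert_eq!(slice.len(), 4);
        let value: u32 = 7;
        let object: &::std::fmt::Debug = &value; // thin ptr -> (ptr, vtable)
        let _ = object;
    }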
1497
1498     fn unsize_into(
1499         &mut self,
1500         src: Value,
1501         src_layout: TyLayout<'tcx>,
1502         dst: Place,
1503         dst_layout: TyLayout<'tcx>,
1504     ) -> EvalResult<'tcx> {
1505         match (&src_layout.ty.sty, &dst_layout.ty.sty) {
1506             (&ty::TyRef(_, ref s), &ty::TyRef(_, ref d)) |
1507             (&ty::TyRef(_, ref s), &ty::TyRawPtr(ref d)) |
1508             (&ty::TyRawPtr(ref s), &ty::TyRawPtr(ref d)) => {
1509                 self.unsize_into_ptr(src, src_layout.ty, dst, dst_layout.ty, s.ty, d.ty)
1510             }
1511             (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
1512                 assert_eq!(def_a, def_b);
1513                 if def_a.is_box() || def_b.is_box() {
1514                     if !def_a.is_box() || !def_b.is_box() {
1515                         bug!("invalid unsizing between {:?} -> {:?}", src_layout, dst_layout);
1516                     }
1517                     return self.unsize_into_ptr(
1518                         src,
1519                         src_layout.ty,
1520                         dst,
1521                         dst_layout.ty,
1522                         src_layout.ty.boxed_ty(),
1523                         dst_layout.ty.boxed_ty(),
1524                     );
1525                 }
1526
1527                 // unsizing of a generic struct with pointer fields
1528                 // Example: `Arc<T>` -> `Arc<Trait>`
1529                 // here we need to widen every thin `&T` pointer field into a fat pointer
1530                 for i in 0..src_layout.fields.count() {
1531                     let (dst_f_place, dst_field) =
1532                         self.place_field(dst, mir::Field::new(i), dst_layout)?;
1533                     if dst_field.is_zst() {
1534                         continue;
1535                     }
1536                     let (src_f_value, src_field) = match src {
1537                         Value::ByRef(ptr, align) => {
1538                             let src_place = Place::from_primval_ptr(ptr, align);
1539                             let (src_f_place, src_field) =
1540                                 self.place_field(src_place, mir::Field::new(i), src_layout)?;
1541                             (self.read_place(src_f_place)?, src_field)
1542                         }
1543                         Value::ByVal(_) | Value::ByValPair(..) => {
1544                             let src_field = src_layout.field(&self, i)?;
1545                             assert_eq!(src_layout.fields.offset(i).bytes(), 0);
1546                             assert_eq!(src_field.size, src_layout.size);
1547                             (src, src_field)
1548                         }
1549                     };
1550                     if src_field.ty == dst_field.ty {
1551                         self.write_value(ValTy {
1552                             value: src_f_value,
1553                             ty: src_field.ty,
1554                         }, dst_f_place)?;
1555                     } else {
1556                         self.unsize_into(src_f_value, src_field, dst_f_place, dst_field)?;
1557                     }
1558                 }
1559                 Ok(())
1560             }
1561             _ => {
1562                 bug!(
1563                     "unsize_into: invalid conversion: {:?} -> {:?}",
1564                     src_layout,
1565                     dst_layout
1566                 )
1567             }
1568         }
1569     }
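
    // A user-level illustration of the field-wise unsizing above (ordinary Rust,
    // hypothetical helper): coercing `Rc<[u8; 4]>` to `Rc<[u8]>` leaves every
    // field of the `Rc` untouched except its inner pointer, which is widened
    // from a thin pointer into a fat (ptr, len) pointer.
    #[allow(dead_code)]
    fn example_struct_unsize() {
        use std::rc::Rc;
        let rc: Rc<[u8]> = Rc::new([1u8, 2, 3, 4]);
        debug_assert_eq!(rc.len(), 4);
    }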
1570
1571     pub fn dump_local(&self, place: Place) {
1572         // Debug output
1573         if !log_enabled!(::log::Level::Trace) {
1574             return;
1575         }
1576         match place {
1577             Place::Local { frame, local } => {
1578                 let mut allocs = Vec::new();
1579                 let mut msg = format!("{:?}", local);
1580                 if frame != self.cur_frame() {
1581                     write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
1582                 }
1583                 write!(msg, ":").unwrap();
1584
1585                 match self.stack[frame].get_local(local) {
1586                     Err(err) => {
1587                         if let EvalErrorKind::DeadLocal = err.kind {
1588                             write!(msg, " is dead").unwrap();
1589                         } else {
1590                             panic!("Failed to access local: {:?}", err);
1591                         }
1592                     }
1593                     Ok(Value::ByRef(ptr, align)) => {
1594                         match ptr.into_inner_primval() {
1595                             PrimVal::Ptr(ptr) => {
1596                                 write!(msg, " by align({}) ref:", align.abi()).unwrap();
1597                                 allocs.push(ptr.alloc_id);
1598                             }
1599                             ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
1600                         }
1601                     }
1602                     Ok(Value::ByVal(val)) => {
1603                         write!(msg, " {:?}", val).unwrap();
1604                         if let PrimVal::Ptr(ptr) = val {
1605                             allocs.push(ptr.alloc_id);
1606                         }
1607                     }
1608                     Ok(Value::ByValPair(val1, val2)) => {
1609                         write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
1610                         if let PrimVal::Ptr(ptr) = val1 {
1611                             allocs.push(ptr.alloc_id);
1612                         }
1613                         if let PrimVal::Ptr(ptr) = val2 {
1614                             allocs.push(ptr.alloc_id);
1615                         }
1616                     }
1617                 }
1618
1619                 trace!("{}", msg);
1620                 self.memory.dump_allocs(allocs);
1621             }
1622             Place::Ptr { ptr, align, .. } => {
1623                 match ptr.into_inner_primval() {
1624                     PrimVal::Ptr(ptr) => {
1625                         trace!("by align({}) ref:", align.abi());
1626                         self.memory.dump_alloc(ptr.alloc_id);
1627                     }
1628                     ptr => trace!(" integral by ref: {:?}", ptr),
1629                 }
1630             }
1631         }
1632     }
1633
1634     /// Convenience function to ensure correct usage of locals: reads the local, applies `f`, and writes the result back
1635     pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
1636     where
1637         F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
1638     {
1639         let val = self.stack[frame].get_local(local)?;
1640         let new_val = f(self, val)?;
1641         self.stack[frame].set_local(local, new_val)?;
1642         // FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
1643         // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
1644         //     self.memory.deallocate(ptr)?;
1645         // }
1646         Ok(())
1647     }
1648
1649     pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
1650         let mut last_span = None;
1651         let mut frames = Vec::new();
1652         // skip the bottom-most frame, which is just the environment of the constant
1653         for &Frame { instance, span, .. } in self.stack().iter().skip(1).rev() {
1654             // make sure we don't emit frames that are duplicates of the previous one
1655             if explicit_span == Some(span) {
1656                 last_span = Some(span);
1657                 continue;
1658             }
1659             if let Some(last) = last_span {
1660                 if last == span {
1661                     continue;
1662                 }
1663             } else {
1664                 last_span = Some(span);
1665             }
1666             let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
1667                 "closure".to_owned()
1668             } else {
1669                 instance.to_string()
1670             };
1671             frames.push(FrameInfo { span, location });
1672         }
1673         trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
1674         (frames, self.tcx.span)
1675     }
1676
1677     pub fn report(&self, e: &mut EvalError, as_err: bool, explicit_span: Option<Span>) {
1678         match e.kind {
1679             EvalErrorKind::Layout(_) |
1680             EvalErrorKind::TypeckError => return,
1681             _ => {},
1682         }
1683         if let Some(ref mut backtrace) = e.backtrace {
1684             let mut trace_text = "\n\nAn error occurred in miri:\n".to_string();
1685             backtrace.resolve();
1686             write!(trace_text, "backtrace frames: {}\n", backtrace.frames().len()).unwrap();
1687             'frames: for (i, frame) in backtrace.frames().iter().enumerate() {
1688                 if frame.symbols().is_empty() {
1689                     write!(trace_text, "{}: no symbols\n", i).unwrap();
1690                 }
1691                 for symbol in frame.symbols() {
1692                     write!(trace_text, "{}: ", i).unwrap();
1693                     if let Some(name) = symbol.name() {
1694                         write!(trace_text, "{}\n", name).unwrap();
1695                     } else {
1696                         write!(trace_text, "<unknown>\n").unwrap();
1697                     }
1698                     write!(trace_text, "\tat ").unwrap();
1699                     if let Some(file_path) = symbol.filename() {
1700                         write!(trace_text, "{}", file_path.display()).unwrap();
1701                     } else {
1702                         write!(trace_text, "<unknown_file>").unwrap();
1703                     }
1704                     if let Some(line) = symbol.lineno() {
1705                         write!(trace_text, ":{}\n", line).unwrap();
1706                     } else {
1707                         write!(trace_text, "\n").unwrap();
1708                     }
1709                 }
1710             }
1711             error!("{}", trace_text);
1712         }
1713         if let Some(frame) = self.stack().last() {
1714             let block = &frame.mir.basic_blocks()[frame.block];
1715             let span = explicit_span.unwrap_or_else(|| if frame.stmt < block.statements.len() {
1716                 block.statements[frame.stmt].source_info.span
1717             } else {
1718                 block.terminator().source_info.span
1719             });
1720             trace!("reporting const eval failure at {:?}", span);
1721             let mut err = if as_err {
1722                 ::rustc::middle::const_val::struct_error(*self.tcx, span, "constant evaluation error")
1723             } else {
1724                 let node_id = self
1725                     .stack()
1726                     .iter()
1727                     .rev()
1728                     .filter_map(|frame| self.tcx.hir.as_local_node_id(frame.instance.def_id()))
1729                     .next()
1730                     .expect("some part of a failing const eval must be local");
1731                 self.tcx.struct_span_lint_node(
1732                     ::rustc::lint::builtin::CONST_ERR,
1733                     node_id,
1734                     span,
1735                     "constant evaluation error",
1736                 )
1737             };
1738             let (frames, span) = self.generate_stacktrace(explicit_span);
1739             err.span_label(span, e.to_string());
1740             for FrameInfo { span, location } in frames {
1741                 err.span_note(span, &format!("inside call to `{}`", location));
1742             }
1743             err.emit();
1744         } else {
1745             self.tcx.sess.err(&e.to_string());
1746         }
1747     }
1748
1749     pub fn sign_extend(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
1750         super::sign_extend(self.tcx.tcx, value, ty)
1751     }
1752
1753     pub fn truncate(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
1754         super::truncate(self.tcx.tcx, value, ty)
1755     }
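
    // A pure-arithmetic sketch of what the two wrappers above compute for an
    // 8-bit integer type (hypothetical helpers; the real implementations live
    // in `super` and derive the width from the layout of `ty`):
    #[allow(dead_code)]
    fn example_sign_extend_8_bits(value: u128) -> u128 {
        // e.g. 0xFF (-1 as i8) sign-extends to a u128 with all bits set
        let shift = 128 - 8;
        ((value << shift) as i128 >> shift) as u128
    }

    #[allow(dead_code)]
    fn example_truncate_to_8_bits(value: u128) -> u128 {
        // e.g. truncating the all-ones pattern back to 8 bits yields 0xFF again
        value & 0xFF
    }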
1756 }
1757
1758 impl<'mir, 'tcx> Frame<'mir, 'tcx> {
1759     pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
1760         self.locals[local].ok_or_else(|| EvalErrorKind::DeadLocal.into())
1761     }
1762
1763     fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
1764         match self.locals[local] {
1765             None => err!(DeadLocal),
1766             Some(ref mut local) => {
1767                 *local = value;
1768                 Ok(())
1769             }
1770         }
1771     }
1772
1773     pub fn storage_live(&mut self, local: mir::Local) -> Option<Value> {
1774         trace!("{:?} is now live", local);
1775
1776         // StorageLive *always* kills the value that's currently stored
1777         mem::replace(&mut self.locals[local], Some(Value::ByVal(PrimVal::Undef)))
1778     }
1779
1780     /// Returns the old value of the local
1781     pub fn storage_dead(&mut self, local: mir::Local) -> Option<Value> {
1782         trace!("{:?} is now dead", local);
1783
1784         self.locals[local].take()
1785     }
1786 }
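
// An illustration of the locals lifecycle driven by `storage_live` and
// `storage_dead` above, using a plain `Option` slot as a stand-in for a local
// (hypothetical helper, plain std): StorageLive overwrites whatever was stored
// with a fresh undefined value, StorageDead takes the value out and leaves the
// slot dead (`None`).
#[allow(dead_code)]
fn example_local_lifecycle() {
    let mut slot: Option<u32> = None;                  // dead local
    let old = ::std::mem::replace(&mut slot, Some(0)); // "storage live"
    debug_assert!(old.is_none());
    let dead = slot.take();                            // "storage dead"
    debug_assert_eq!(dead, Some(0));
    debug_assert!(slot.is_none());
}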