]> git.lizzy.rs Git - rust.git/blob - src/librustc_mir/interpret/eval_context.rs
Allow the linker to choose the LTO-plugin (which is useful when using LLD)
[rust.git] / src / librustc_mir / interpret / eval_context.rs
1 use std::fmt::Write;
2
3 use rustc::hir::def_id::DefId;
4 use rustc::hir::def::Def;
5 use rustc::hir::map::definitions::DefPathData;
6 use rustc::mir;
7 use rustc::ty::layout::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout};
8 use rustc::ty::subst::{Subst, Substs};
9 use rustc::ty::{self, Ty, TyCtxt, TypeAndMut};
10 use rustc::ty::query::TyCtxtAt;
11 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
12 use rustc::mir::interpret::FrameInfo;
13 use syntax::codemap::{self, Span};
14 use syntax::ast::Mutability;
15 use rustc::mir::interpret::{
16     GlobalId, Value, Scalar,
17     EvalResult, EvalErrorKind, Pointer, ConstValue,
18 };
19 use std::mem;
20
21 use super::{Place, PlaceExtra, Memory,
22             HasMemory, MemoryKind,
23             Machine};
24
/// The core interpreter state. One `EvalContext` drives the evaluation of MIR,
/// delegating machine-specific behavior (allocation hooks, foreign calls, ...)
/// to the `Machine` implementation `M`.
pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    pub machine: M,

    /// The results of the type checker, from rustc.
    pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'a, 'mir, 'tcx, M>,

    /// The virtual call stack. The innermost (currently executing) frame
    /// is the last element; see `cur_frame`.
    pub(crate) stack: Vec<Frame<'mir, 'tcx>>,

    /// The maximum number of stack frames allowed; exceeding it makes
    /// `push_stack_frame` fail with `StackFrameLimitReached`.
    pub(crate) stack_limit: usize,

    /// The maximum number of terminators that may be evaluated.
    /// This prevents infinite loops and huge computations from freezing up const eval.
    /// Remove once halting problem is solved.
    pub(crate) terminators_remaining: usize,
}
49
/// A stack frame: everything the interpreter needs to execute (and later
/// return from) one function call.
pub struct Frame<'mir, 'tcx: 'mir> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub mir: &'mir mir::Mir<'tcx>,

    /// The def_id and substs of the current function
    pub instance: ty::Instance<'tcx>,

    /// The span of the call site.
    pub span: codemap::Span,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// The block to return to when returning from the current stack frame
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to.
    pub return_place: Place,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
    /// `None` represents a local that is currently dead, while a live local
    /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
    ///
    /// Before being initialized, arguments are `Value::Scalar(Scalar::undef())` and other locals are `None`.
    pub locals: IndexVec<mir::Local, Option<Value>>,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// The block that is currently executed (or will be executed after the above call stacks
    /// return).
    pub block: mir::BasicBlock,

    /// The index of the currently evaluated statement.
    pub stmt: usize,
}
91
/// What to do when the current stack frame is popped.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum StackPopCleanup {
    /// The stackframe existed to compute the initial value of a static/constant, make sure it
    /// isn't modifiable afterwards in case of constants.
    /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
    /// references or deallocated.
    MarkStatic(Mutability),
    /// A regular stackframe added due to a function call will need to get forwarded to the next
    /// block
    Goto(mir::BasicBlock),
    /// The main function and diverging functions have nowhere to return to
    None,
}
105
/// A type paired with a flag recording whether it was encountered inside a
/// `#[repr(packed)]` struct, in which case its usual alignment must not be
/// assumed when accessing it.
#[derive(Copy, Clone, Debug)]
pub struct TyAndPacked<'tcx> {
    pub ty: Ty<'tcx>,
    pub packed: bool,
}
111
/// A `Value` together with the type it was evaluated at. Derefs to the
/// contained `Value` for convenience.
#[derive(Copy, Clone, Debug)]
pub struct ValTy<'tcx> {
    pub value: Value,
    pub ty: Ty<'tcx>,
}
117
118 impl<'tcx> ::std::ops::Deref for ValTy<'tcx> {
119     type Target = Value;
120     fn deref(&self) -> &Value {
121         &self.value
122     }
123 }
124
// `layout_of` & friends take `self` by value, so these traits are implemented
// on *references* to the context rather than on the context itself.
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &layout::TargetDataLayout {
        &self.tcx.data_layout
    }
}
131
// Same as above, but usable from places that only hold `&&mut EvalContext`.
impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &layout::TargetDataLayout {
        &self.tcx.data_layout
    }
}
139
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
        // `self.tcx` is a `TyCtxtAt`; deref it to get the plain `TyCtxt`.
        *self.tcx
    }
}
146
// Mirror of the `&EvalContext` impl for `&&mut EvalContext` receivers.
impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
        *self.tcx
    }
}
154
155 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for &'a EvalContext<'a, 'mir, 'tcx, M> {
156     type Ty = Ty<'tcx>;
157     type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
158
159     fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
160         self.tcx.layout_of(self.param_env.and(ty))
161             .map_err(|layout| EvalErrorKind::Layout(layout).into())
162     }
163 }
164
165 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf
166     for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
167     type Ty = Ty<'tcx>;
168     type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
169
170     #[inline]
171     fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
172         (&**self).layout_of(ty)
173     }
174 }
175
176 const MAX_TERMINATORS: usize = 1_000_000;
177
178 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
179     pub fn new(
180         tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
181         param_env: ty::ParamEnv<'tcx>,
182         machine: M,
183         memory_data: M::MemoryData,
184     ) -> Self {
185         EvalContext {
186             machine,
187             tcx,
188             param_env,
189             memory: Memory::new(tcx, memory_data),
190             stack: Vec::new(),
191             stack_limit: tcx.sess.const_eval_stack_frame_limit,
192             terminators_remaining: MAX_TERMINATORS,
193         }
194     }
195
196     pub(crate) fn with_fresh_body<F: FnOnce(&mut Self) -> R, R>(&mut self, f: F) -> R {
197         let stack = mem::replace(&mut self.stack, Vec::new());
198         let terminators_remaining = mem::replace(&mut self.terminators_remaining, MAX_TERMINATORS);
199         let r = f(self);
200         self.stack = stack;
201         self.terminators_remaining = terminators_remaining;
202         r
203     }
204
205     pub fn alloc_ptr(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, Pointer> {
206         let layout = self.layout_of(ty)?;
207         assert!(!layout.is_unsized(), "cannot alloc memory for unsized type");
208
209         self.memory.allocate(layout.size, layout.align, Some(MemoryKind::Stack))
210     }
211
    /// Shared access to the interpreter's memory.
    pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
        &self.memory
    }
215
    /// Mutable access to the interpreter's memory.
    pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
        &mut self.memory
    }
219
    /// The current call stack, innermost frame last.
    pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
        &self.stack
    }
223
224     #[inline]
225     pub fn cur_frame(&self) -> usize {
226         assert!(self.stack.len() > 0);
227         self.stack.len() - 1
228     }
229
230     pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
231         let ptr = self.memory.allocate_bytes(s.as_bytes());
232         Ok(Scalar::Ptr(ptr).to_value_with_len(s.len() as u64, self.tcx.tcx))
233     }
234
235     pub fn const_to_value(
236         &mut self,
237         val: ConstValue<'tcx>,
238     ) -> EvalResult<'tcx, Value> {
239         match val {
240             ConstValue::Unevaluated(def_id, substs) => {
241                 let instance = self.resolve(def_id, substs)?;
242                 self.read_global_as_value(GlobalId {
243                     instance,
244                     promoted: None,
245                 })
246             }
247             ConstValue::ByRef(alloc, offset) => {
248                 // FIXME: Allocate new AllocId for all constants inside
249                 let id = self.memory.allocate_value(alloc.clone(), Some(MemoryKind::Stack))?;
250                 Ok(Value::ByRef(Pointer::new(id, offset).into(), alloc.align))
251             },
252             ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a, b)),
253             ConstValue::Scalar(val) => Ok(Value::Scalar(val)),
254         }
255     }
256
257     pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
258         trace!("resolve: {:?}, {:#?}", def_id, substs);
259         trace!("substs: {:#?}", self.substs());
260         trace!("param_env: {:#?}", self.param_env);
261         let substs = self.tcx.subst_and_normalize_erasing_regions(
262             self.substs(),
263             self.param_env,
264             &substs,
265         );
266         ty::Instance::resolve(
267             *self.tcx,
268             self.param_env,
269             def_id,
270             substs,
271         ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
272     }
273
    /// Whether `ty` is `Sized` in the current parameter environment.
    pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
        ty.is_sized(self.tcx, self.param_env)
    }
277
278     pub fn load_mir(
279         &self,
280         instance: ty::InstanceDef<'tcx>,
281     ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
282         // do not continue if typeck errors occurred (can only occur in local crate)
283         let did = instance.def_id();
284         if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
285             return err!(TypeckError);
286         }
287         trace!("load mir {:?}", instance);
288         match instance {
289             ty::InstanceDef::Item(def_id) => {
290                 self.tcx.maybe_optimized_mir(def_id).ok_or_else(||
291                     EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
292                 )
293             }
294             _ => Ok(self.tcx.instance_mir(instance)),
295         }
296     }
297
298     pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
299         // miri doesn't care about lifetimes, and will choke on some crazy ones
300         // let's simply get rid of them
301         let substituted = ty.subst(*self.tcx, substs);
302         self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
303     }
304
    /// Return the size and alignment of the value at the given type.
    /// Note that the value does not matter if the type is sized. For unsized types,
    /// the value has to be a fat pointer, and we only care about the "extra" data in it.
    pub fn size_and_align_of_dst(
        &mut self,
        ty: Ty<'tcx>,
        value: Value,
    ) -> EvalResult<'tcx, (Size, Align)> {
        let layout = self.layout_of(ty)?;
        if !layout.is_unsized() {
            // Sized types: the layout already knows everything.
            Ok(layout.size_and_align())
        } else {
            match ty.sty {
                ty::TyAdt(..) | ty::TyTuple(..) => {
                    // First get the size of all statically known fields.
                    // Don't use type_of::sizing_type_of because that expects t to be sized,
                    // and it also rounds up to alignment, which we want to avoid,
                    // as the unsized field's alignment could be smaller.
                    assert!(!ty.is_simd());
                    debug!("DST {} layout: {:?}", ty, layout);

                    let sized_size = layout.fields.offset(layout.fields.count() - 1);
                    let sized_align = layout.align;
                    debug!(
                        "DST {} statically sized prefix size: {:?} align: {:?}",
                        ty,
                        sized_size,
                        sized_align
                    );

                    // Recurse to get the size of the dynamically sized field (must be
                    // the last field).
                    let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty;
                    let (unsized_size, unsized_align) =
                        self.size_and_align_of_dst(field_ty, value)?;

                    // FIXME (#26403, #27023): We should be adding padding
                    // to `sized_size` (to accommodate the `unsized_align`
                    // required of the unsized field that follows) before
                    // summing it with `sized_size`. (Note that since #26403
                    // is unfixed, we do not yet add the necessary padding
                    // here. But this is where the add would go.)

                    // Return the sum of sizes and max of aligns.
                    let size = sized_size + unsized_size;

                    // Choose max of two known alignments (combined value must
                    // be aligned according to more restrictive of the two).
                    let align = sized_align.max(unsized_align);

                    // Issue #27023: must add any necessary padding to `size`
                    // (to make it a multiple of `align`) before returning it.
                    //
                    // Namely, the returned size should be, in C notation:
                    //
                    //   `size + ((size & (align-1)) ? align : 0)`
                    //
                    // emulated via the semi-standard fast bit trick:
                    //
                    //   `(size + (align-1)) & -align`

                    Ok((size.abi_align(align), align))
                }
                ty::TyDynamic(..) => {
                    let (_, vtable) = self.into_ptr_vtable_pair(value)?;
                    // the second entry in the vtable is the dynamic size of the object.
                    self.read_size_and_align_from_vtable(vtable)
                }

                ty::TySlice(_) | ty::TyStr => {
                    // Size is element size times the length carried in the fat pointer.
                    let (elem_size, align) = layout.field(&self, 0)?.size_and_align();
                    let (_, len) = self.into_slice(value)?;
                    Ok((elem_size * len, align))
                }

                _ => bug!("size_of_val::<{:?}>", ty),
            }
        }
    }
384
385     pub fn push_stack_frame(
386         &mut self,
387         instance: ty::Instance<'tcx>,
388         span: codemap::Span,
389         mir: &'mir mir::Mir<'tcx>,
390         return_place: Place,
391         return_to_block: StackPopCleanup,
392     ) -> EvalResult<'tcx> {
393         ::log_settings::settings().indentation += 1;
394
395         let locals = if mir.local_decls.len() > 1 {
396             let mut locals = IndexVec::from_elem(Some(Value::Scalar(Scalar::undef())), &mir.local_decls);
397             match self.tcx.describe_def(instance.def_id()) {
398                 // statics and constants don't have `Storage*` statements, no need to look for them
399                 Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
400                 _ => {
401                     trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
402                     for block in mir.basic_blocks() {
403                         for stmt in block.statements.iter() {
404                             use rustc::mir::StatementKind::{StorageDead, StorageLive};
405                             match stmt.kind {
406                                 StorageLive(local) |
407                                 StorageDead(local) => locals[local] = None,
408                                 _ => {}
409                             }
410                         }
411                     }
412                 },
413             }
414             locals
415         } else {
416             // don't allocate at all for trivial constants
417             IndexVec::new()
418         };
419
420         self.stack.push(Frame {
421             mir,
422             block: mir::START_BLOCK,
423             return_to_block,
424             return_place,
425             locals,
426             span,
427             instance,
428             stmt: 0,
429         });
430
431         self.memory.cur_frame = self.cur_frame();
432
433         if self.stack.len() > self.stack_limit {
434             err!(StackFrameLimitReached)
435         } else {
436             Ok(())
437         }
438     }
439
    /// Pop the innermost frame off the call stack, run its
    /// `return_to_block` cleanup, and deallocate all of its locals.
    /// The order here matters: region cleanup before popping, then the
    /// current-frame pointer is repointed at the caller's frame.
    pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
        ::log_settings::settings().indentation -= 1;
        // Give the machine a chance to release per-frame resources first.
        M::end_region(self, None)?;
        let frame = self.stack.pop().expect(
            "tried to pop a stack frame, but there were none",
        );
        if !self.stack.is_empty() {
            // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
            self.memory.cur_frame = self.cur_frame();
        }
        match frame.return_to_block {
            StackPopCleanup::MarkStatic(mutable) => {
                if let Place::Ptr { ptr, .. } = frame.return_place {
                    // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
                    self.memory.mark_static_initialized(
                        ptr.to_ptr()?.alloc_id,
                        mutable,
                    )?
                } else {
                    bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
                }
            }
            StackPopCleanup::Goto(target) => self.goto_block(target),
            StackPopCleanup::None => {}
        }
        // deallocate all locals that are backed by an allocation
        for local in frame.locals {
            self.deallocate_local(local)?;
        }

        Ok(())
    }
472
473     pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
474         if let Some(Value::ByRef(ptr, _align)) = local {
475             trace!("deallocating local");
476             let ptr = ptr.to_ptr()?;
477             self.memory.dump_alloc(ptr.alloc_id);
478             self.memory.deallocate_local(ptr)?;
479         };
480         Ok(())
481     }
482
483     /// Evaluate an assignment statement.
484     ///
485     /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
486     /// type writes its results directly into the memory specified by the place.
487     pub(super) fn eval_rvalue_into_place(
488         &mut self,
489         rvalue: &mir::Rvalue<'tcx>,
490         place: &mir::Place<'tcx>,
491     ) -> EvalResult<'tcx> {
492         let dest = self.eval_place(place)?;
493         let dest_ty = self.place_ty(place);
494
495         use rustc::mir::Rvalue::*;
496         match *rvalue {
497             Use(ref operand) => {
498                 let value = self.eval_operand(operand)?.value;
499                 let valty = ValTy {
500                     value,
501                     ty: dest_ty,
502                 };
503                 self.write_value(valty, dest)?;
504             }
505
506             BinaryOp(bin_op, ref left, ref right) => {
507                 let left = self.eval_operand(left)?;
508                 let right = self.eval_operand(right)?;
509                 self.intrinsic_overflowing(
510                     bin_op,
511                     left,
512                     right,
513                     dest,
514                     dest_ty,
515                 )?;
516             }
517
518             CheckedBinaryOp(bin_op, ref left, ref right) => {
519                 let left = self.eval_operand(left)?;
520                 let right = self.eval_operand(right)?;
521                 self.intrinsic_with_overflow(
522                     bin_op,
523                     left,
524                     right,
525                     dest,
526                     dest_ty,
527                 )?;
528             }
529
530             UnaryOp(un_op, ref operand) => {
531                 let val = self.eval_operand_to_scalar(operand)?;
532                 let val = self.unary_op(un_op, val, dest_ty)?;
533                 self.write_scalar(
534                     dest,
535                     val,
536                     dest_ty,
537                 )?;
538             }
539
540             Aggregate(ref kind, ref operands) => {
541                 self.inc_step_counter_and_check_limit(operands.len());
542
543                 let (dest, active_field_index) = match **kind {
544                     mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
545                         self.write_discriminant_value(dest_ty, dest, variant_index)?;
546                         if adt_def.is_enum() {
547                             (self.place_downcast(dest, variant_index)?, active_field_index)
548                         } else {
549                             (dest, active_field_index)
550                         }
551                     }
552                     _ => (dest, None)
553                 };
554
555                 let layout = self.layout_of(dest_ty)?;
556                 for (i, operand) in operands.iter().enumerate() {
557                     let value = self.eval_operand(operand)?;
558                     // Ignore zero-sized fields.
559                     if !self.layout_of(value.ty)?.is_zst() {
560                         let field_index = active_field_index.unwrap_or(i);
561                         let (field_dest, _) = self.place_field(dest, mir::Field::new(field_index), layout)?;
562                         self.write_value(value, field_dest)?;
563                     }
564                 }
565             }
566
567             Repeat(ref operand, _) => {
568                 let (elem_ty, length) = match dest_ty.sty {
569                     ty::TyArray(elem_ty, n) => (elem_ty, n.unwrap_usize(self.tcx.tcx)),
570                     _ => {
571                         bug!(
572                             "tried to assign array-repeat to non-array type {:?}",
573                             dest_ty
574                         )
575                     }
576                 };
577                 let elem_size = self.layout_of(elem_ty)?.size;
578                 let value = self.eval_operand(operand)?.value;
579
580                 let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
581
582                 if length > 0 {
583                     //write the first value
584                     self.write_value_to_ptr(value, dest, dest_align, elem_ty)?;
585
586                     if length > 1 {
587                         let rest = dest.ptr_offset(elem_size * 1 as u64, &self)?;
588                         self.memory.copy_repeatedly(dest, dest_align, rest, dest_align, elem_size, length - 1, false)?;
589                     }
590                 }
591             }
592
593             Len(ref place) => {
594                 // FIXME(CTFE): don't allow computing the length of arrays in const eval
595                 let src = self.eval_place(place)?;
596                 let ty = self.place_ty(place);
597                 let (_, len) = src.elem_ty_and_len(ty, self.tcx.tcx);
598                 let defined = self.memory.pointer_size().bits() as u8;
599                 self.write_scalar(
600                     dest,
601                     Scalar::Bits {
602                         bits: len as u128,
603                         defined,
604                     },
605                     dest_ty,
606                 )?;
607             }
608
609             Ref(_, _, ref place) => {
610                 let src = self.eval_place(place)?;
611                 // We ignore the alignment of the place here -- special handling for packed structs ends
612                 // at the `&` operator.
613                 let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();
614
615                 let val = match extra {
616                     PlaceExtra::None => ptr.to_value(),
617                     PlaceExtra::Length(len) => ptr.to_value_with_len(len, self.tcx.tcx),
618                     PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
619                     PlaceExtra::DowncastVariant(..) => {
620                         bug!("attempted to take a reference to an enum downcast place")
621                     }
622                 };
623                 let valty = ValTy {
624                     value: val,
625                     ty: dest_ty,
626                 };
627                 self.write_value(valty, dest)?;
628             }
629
630             NullaryOp(mir::NullOp::Box, ty) => {
631                 let ty = self.monomorphize(ty, self.substs());
632                 M::box_alloc(self, ty, dest)?;
633             }
634
635             NullaryOp(mir::NullOp::SizeOf, ty) => {
636                 let ty = self.monomorphize(ty, self.substs());
637                 let layout = self.layout_of(ty)?;
638                 assert!(!layout.is_unsized(),
639                         "SizeOf nullary MIR operator called for unsized type");
640                 let defined = self.memory.pointer_size().bits() as u8;
641                 self.write_scalar(
642                     dest,
643                     Scalar::Bits {
644                         bits: layout.size.bytes() as u128,
645                         defined,
646                     },
647                     dest_ty,
648                 )?;
649             }
650
651             Cast(kind, ref operand, cast_ty) => {
652                 debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
653                 use rustc::mir::CastKind::*;
654                 match kind {
655                     Unsize => {
656                         let src = self.eval_operand(operand)?;
657                         let src_layout = self.layout_of(src.ty)?;
658                         let dst_layout = self.layout_of(dest_ty)?;
659                         self.unsize_into(src.value, src_layout, dest, dst_layout)?;
660                     }
661
662                     Misc => {
663                         let src = self.eval_operand(operand)?;
664                         if self.type_is_fat_ptr(src.ty) {
665                             match (src.value, self.type_is_fat_ptr(dest_ty)) {
666                                 (Value::ByRef { .. }, _) |
667                                 // pointers to extern types
668                                 (Value::Scalar(_),_) |
669                                 // slices and trait objects to other slices/trait objects
670                                 (Value::ScalarPair(..), true) => {
671                                     let valty = ValTy {
672                                         value: src.value,
673                                         ty: dest_ty,
674                                     };
675                                     self.write_value(valty, dest)?;
676                                 }
677                                 // slices and trait objects to thin pointers (dropping the metadata)
678                                 (Value::ScalarPair(data, _), false) => {
679                                     let valty = ValTy {
680                                         value: Value::Scalar(data),
681                                         ty: dest_ty,
682                                     };
683                                     self.write_value(valty, dest)?;
684                                 }
685                             }
686                         } else {
687                             let src_layout = self.layout_of(src.ty)?;
688                             match src_layout.variants {
689                                 layout::Variants::Single { index } => {
690                                     if let Some(def) = src.ty.ty_adt_def() {
691                                         let discr_val = def
692                                             .discriminant_for_variant(*self.tcx, index)
693                                             .val;
694                                         let defined = self
695                                             .layout_of(dest_ty)
696                                             .unwrap()
697                                             .size
698                                             .bits() as u8;
699                                         return self.write_scalar(
700                                             dest,
701                                             Scalar::Bits {
702                                                 bits: discr_val,
703                                                 defined,
704                                             },
705                                             dest_ty);
706                                     }
707                                 }
708                                 layout::Variants::Tagged { .. } |
709                                 layout::Variants::NicheFilling { .. } => {},
710                             }
711
712                             let src_val = self.value_to_scalar(src)?;
713                             let dest_val = self.cast_scalar(src_val, src.ty, dest_ty)?;
714                             let valty = ValTy {
715                                 value: Value::Scalar(dest_val),
716                                 ty: dest_ty,
717                             };
718                             self.write_value(valty, dest)?;
719                         }
720                     }
721
722                     ReifyFnPointer => {
723                         match self.eval_operand(operand)?.ty.sty {
724                             ty::TyFnDef(def_id, substs) => {
725                                 if self.tcx.has_attr(def_id, "rustc_args_required_const") {
726                                     bug!("reifying a fn ptr that requires \
727                                           const arguments");
728                                 }
729                                 let instance: EvalResult<'tcx, _> = ty::Instance::resolve(
730                                     *self.tcx,
731                                     self.param_env,
732                                     def_id,
733                                     substs,
734                                 ).ok_or_else(|| EvalErrorKind::TooGeneric.into());
735                                 let fn_ptr = self.memory.create_fn_alloc(instance?);
736                                 let valty = ValTy {
737                                     value: Value::Scalar(fn_ptr.into()),
738                                     ty: dest_ty,
739                                 };
740                                 self.write_value(valty, dest)?;
741                             }
742                             ref other => bug!("reify fn pointer on {:?}", other),
743                         }
744                     }
745
746                     UnsafeFnPointer => {
747                         match dest_ty.sty {
748                             ty::TyFnPtr(_) => {
749                                 let mut src = self.eval_operand(operand)?;
750                                 src.ty = dest_ty;
751                                 self.write_value(src, dest)?;
752                             }
753                             ref other => bug!("fn to unsafe fn cast on {:?}", other),
754                         }
755                     }
756
757                     ClosureFnPointer => {
758                         match self.eval_operand(operand)?.ty.sty {
759                             ty::TyClosure(def_id, substs) => {
760                                 let substs = self.tcx.subst_and_normalize_erasing_regions(
761                                     self.substs(),
762                                     ty::ParamEnv::reveal_all(),
763                                     &substs,
764                                 );
765                                 let instance = ty::Instance::resolve_closure(
766                                     *self.tcx,
767                                     def_id,
768                                     substs,
769                                     ty::ClosureKind::FnOnce,
770                                 );
771                                 let fn_ptr = self.memory.create_fn_alloc(instance);
772                                 let valty = ValTy {
773                                     value: Value::Scalar(fn_ptr.into()),
774                                     ty: dest_ty,
775                                 };
776                                 self.write_value(valty, dest)?;
777                             }
778                             ref other => bug!("closure fn pointer on {:?}", other),
779                         }
780                     }
781                 }
782             }
783
784             Discriminant(ref place) => {
785                 let ty = self.place_ty(place);
786                 let place = self.eval_place(place)?;
787                 let discr_val = self.read_discriminant_value(place, ty)?;
788                 let defined = self.layout_of(dest_ty).unwrap().size.bits() as u8;
789                 self.write_scalar(dest, Scalar::Bits {
790                     bits: discr_val,
791                     defined,
792                 }, dest_ty)?;
793             }
794         }
795
796         self.dump_local(dest);
797
798         Ok(())
799     }
800
801     pub(super) fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
802         match ty.sty {
803             ty::TyRawPtr(ty::TypeAndMut { ty, .. }) |
804             ty::TyRef(_, ty, _) => !self.type_is_sized(ty),
805             ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
806             _ => false,
807         }
808     }
809
810     pub(super) fn eval_operand_to_scalar(
811         &mut self,
812         op: &mir::Operand<'tcx>,
813     ) -> EvalResult<'tcx, Scalar> {
814         let valty = self.eval_operand(op)?;
815         self.value_to_scalar(valty)
816     }
817
818     pub(crate) fn operands_to_args(
819         &mut self,
820         ops: &[mir::Operand<'tcx>],
821     ) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
822         ops.into_iter()
823             .map(|op| self.eval_operand(op))
824             .collect()
825     }
826
    /// Evaluates a MIR operand (`Copy`/`Move` of a place, or a constant)
    /// into a `ValTy`, monomorphizing its type with the current frame's
    /// substitutions.
    pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
        use rustc::mir::Operand::*;
        let ty = self.monomorphize(op.ty(self.mir(), *self.tcx), self.substs());
        match *op {
            // FIXME: do some more logic on `move` to invalidate the old location
            Copy(ref place) |
            Move(ref place) => {
                Ok(ValTy {
                    value: self.eval_and_read_place(place)?,
                    ty
                })
            },

            Constant(ref constant) => {
                use rustc::mir::Literal;
                let mir::Constant { ref literal, .. } = **constant;
                let value = match *literal {
                    // Already-evaluated constant: just convert to a `Value`.
                    Literal::Value { ref value } => self.const_to_value(value.val)?,

                    // Promoted constant: evaluate through the global machinery,
                    // keyed by the current frame's instance plus the promotion index.
                    Literal::Promoted { index } => {
                        let instance = self.frame().instance;
                        self.read_global_as_value(GlobalId {
                            instance,
                            promoted: Some(index),
                        })?
                    }
                };

                Ok(ValTy {
                    value,
                    ty,
                })
            }
        }
    }
862
    /// Reads a tag and produces the corresponding variant index.
    ///
    /// Unlike `read_discriminant_value`, this always returns a *variant
    /// index*, mapping typeck-level discriminant values back through the
    /// ADT's declared discriminants when the layout is `Tagged`.
    pub fn read_discriminant_as_variant_index(
        &mut self,
        place: Place,
        ty: Ty<'tcx>,
    ) -> EvalResult<'tcx, usize> {
        let layout = self.layout_of(ty)?;
        match layout.variants {
            // Univariant layout: the variant is statically known.
            ty::layout::Variants::Single { index } => Ok(index),
            ty::layout::Variants::Tagged { .. } => {
                // The stored tag is a discriminant *value*; search the ADT's
                // discriminants for the matching variant position.
                let discr_val = self.read_discriminant_value(place, ty)?;
                ty
                    .ty_adt_def()
                    .expect("tagged layout for non adt")
                    .discriminants(self.tcx.tcx)
                    .position(|var| var.val == discr_val)
                    .ok_or_else(|| EvalErrorKind::InvalidDiscriminant.into())
            }
            ty::layout::Variants::NicheFilling { .. } => {
                // For niche-filled layouts `read_discriminant_value` already
                // yields the variant index; just assert it fits in `usize`.
                let discr_val = self.read_discriminant_value(place, ty)?;
                assert_eq!(discr_val as usize as u128, discr_val);
                Ok(discr_val as usize)
            },
        }
    }
888
    /// Reads the raw discriminant stored in `place` at type `ty`.
    ///
    /// For `Tagged` layouts the result is the typeck-level discriminant
    /// value; for `NicheFilling` layouts it is the variant index (see how
    /// `read_discriminant_as_variant_index` consumes it).
    pub fn read_discriminant_value(
        &mut self,
        place: Place,
        ty: Ty<'tcx>,
    ) -> EvalResult<'tcx, u128> {
        let layout = self.layout_of(ty)?;
        trace!("read_discriminant_value {:#?}", layout);
        if layout.abi == layout::Abi::Uninhabited {
            // No value of this type can exist, so any answer is acceptable.
            return Ok(0);
        }

        match layout.variants {
            layout::Variants::Single { index } => {
                // No tag in memory: the discriminant is statically determined.
                // Non-ADT types fall back to the variant index itself.
                let discr_val = ty.ty_adt_def().map_or(
                    index as u128,
                    |def| def.discriminant_for_variant(*self.tcx, index).val);
                return Ok(discr_val);
            }
            layout::Variants::Tagged { .. } |
            layout::Variants::NicheFilling { .. } => {},
        }

        // In both multi-variant layouts the tag lives in field 0.
        let (discr_place, discr) = self.place_field(place, mir::Field::new(0), layout)?;
        trace!("discr place: {:?}, {:?}", discr_place, discr);
        let raw_discr = self.value_to_scalar(ValTy {
            value: self.read_place(discr_place)?,
            ty: discr.ty
        })?;
        let discr_val = match layout.variants {
            layout::Variants::Single { .. } => bug!(),
            // FIXME: should we catch invalid discriminants here?
            layout::Variants::Tagged { .. } => {
                if discr.ty.is_signed() {
                    let i = raw_discr.to_bits(discr.size)? as i128;
                    // going from layout tag type to typeck discriminant type
                    // requires first sign extending with the layout discriminant
                    let shift = 128 - discr.size.bits();
                    let sexted = (i << shift) >> shift;
                    // and then zeroing with the typeck discriminant type
                    let discr_ty = ty
                        .ty_adt_def().expect("tagged layout corresponds to adt")
                        .repr
                        .discr_type();
                    let discr_ty = layout::Integer::from_attr(self.tcx.tcx, discr_ty);
                    let shift = 128 - discr_ty.size().bits();
                    let truncatee = sexted as u128;
                    (truncatee << shift) >> shift
                } else {
                    // Unsigned tags need no sign manipulation.
                    raw_discr.to_bits(discr.size)?
                }
            },
            layout::Variants::NicheFilling {
                dataful_variant,
                ref niche_variants,
                niche_start,
                ..
            } => {
                let variants_start = *niche_variants.start() as u128;
                let variants_end = *niche_variants.end() as u128;
                match raw_discr {
                    Scalar::Ptr(_) => {
                        // A pointer tag is only possible when the niche starts
                        // at 0 and encodes a single variant; any pointer value
                        // therefore denotes the dataful variant.
                        assert!(niche_start == 0);
                        assert!(variants_start == variants_end);
                        dataful_variant as u128
                    },
                    Scalar::Bits { bits: raw_discr, defined } => {
                        if defined < discr.size.bits() as u8 {
                            // A (partially) undefined tag is a hard error.
                            return err!(ReadUndefBytes);
                        }
                        // Map the niche encoding back onto the variant-index
                        // range; anything outside it is the dataful variant.
                        let discr = raw_discr.wrapping_sub(niche_start)
                            .wrapping_add(variants_start);
                        if variants_start <= discr && discr <= variants_end {
                            discr
                        } else {
                            dataful_variant as u128
                        }
                    },
                }
            }
        };

        Ok(discr_val)
    }
972
973
    /// Stores the tag that encodes variant `variant_index` into the enum
    /// value at `dest` (of type `dest_ty`). Counterpart to
    /// `read_discriminant_value`.
    pub fn write_discriminant_value(
        &mut self,
        dest_ty: Ty<'tcx>,
        dest: Place,
        variant_index: usize,
    ) -> EvalResult<'tcx> {
        let layout = self.layout_of(dest_ty)?;

        match layout.variants {
            layout::Variants::Single { index } => {
                // A `Single` layout stores no tag; nothing to write.
                if index != variant_index {
                    // If the layout of an enum is `Single`, all
                    // other variants are necessarily uninhabited.
                    assert_eq!(layout.for_variant(&self, variant_index).abi,
                               layout::Abi::Uninhabited);
                }
            }
            layout::Variants::Tagged { ref tag, .. } => {
                let discr_val = dest_ty.ty_adt_def().unwrap()
                    .discriminant_for_variant(*self.tcx, variant_index)
                    .val;

                // raw discriminants for enums are isize or bigger during
                // their computation, but the in-memory tag is the smallest possible
                // representation, so truncate the value to the tag's size.
                let size = tag.value.size(self.tcx.tcx).bits();
                let shift = 128 - size;
                let discr_val = (discr_val << shift) >> shift;

                // The tag lives in field 0 of the enum layout.
                let (discr_dest, tag) = self.place_field(dest, mir::Field::new(0), layout)?;
                self.write_scalar(discr_dest, Scalar::Bits {
                    bits: discr_val,
                    defined: size as u8,
                }, tag.ty)?;
            }
            layout::Variants::NicheFilling {
                dataful_variant,
                ref niche_variants,
                niche_start,
                ..
            } => {
                // The dataful variant is encoded implicitly by the niche field
                // holding a non-niche value, so only other variants write a tag.
                if variant_index != dataful_variant {
                    let (niche_dest, niche) =
                        self.place_field(dest, mir::Field::new(0), layout)?;
                    let niche_value = ((variant_index - niche_variants.start()) as u128)
                        .wrapping_add(niche_start);
                    self.write_scalar(niche_dest, Scalar::Bits {
                        bits: niche_value,
                        defined: niche.size.bits() as u8,
                    }, niche.ty)?;
                }
            }
        }

        Ok(())
    }
1030
1031     pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, Value> {
1032         let cv = self.const_eval(gid)?;
1033         self.const_to_value(cv.val)
1034     }
1035
1036     pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
1037         let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
1038             ty::ParamEnv::reveal_all()
1039         } else {
1040             self.param_env
1041         };
1042         self.tcx.const_eval(param_env.and(gid)).map_err(|err| EvalErrorKind::ReferencedConstant(err).into())
1043     }
1044
    /// Ensures `place` is backed by real memory, returning a `Place::Ptr`.
    /// A local still stored as an immediate value gets spilled into a fresh
    /// allocation, and the locals array is updated to point at it.
    pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
        let new_place = match place {
            Place::Local { frame, local } => {
                match self.stack[frame].locals[local] {
                    None => return err!(DeadLocal),
                    Some(Value::ByRef(ptr, align)) => {
                        // Already memory-backed: re-expose it as a pointer place.
                        Place::Ptr {
                            ptr,
                            align,
                            extra: PlaceExtra::None,
                        }
                    }
                    Some(val) => {
                        // Immediate local: allocate backing memory, record the
                        // allocation in the locals array *before* writing, then
                        // copy the old value into it.
                        let ty = self.stack[frame].mir.local_decls[local].ty;
                        let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
                        let layout = self.layout_of(ty)?;
                        let ptr = self.alloc_ptr(ty)?;
                        self.stack[frame].locals[local] =
                            Some(Value::ByRef(ptr.into(), layout.align)); // it stays live
                        let place = Place::from_ptr(ptr, layout.align);
                        self.write_value(ValTy { value: val, ty }, place)?;
                        place
                    }
                }
            }
            // Pointer places are already allocated.
            Place::Ptr { .. } => place,
        };
        Ok(new_place)
    }
1074
1075     /// ensures this Value is not a ByRef
1076     pub fn follow_by_ref_value(
1077         &self,
1078         value: Value,
1079         ty: Ty<'tcx>,
1080     ) -> EvalResult<'tcx, Value> {
1081         match value {
1082             Value::ByRef(ptr, align) => {
1083                 self.read_value(ptr, align, ty)
1084             }
1085             other => Ok(other),
1086         }
1087     }
1088
1089     pub fn value_to_scalar(
1090         &self,
1091         ValTy { value, ty } : ValTy<'tcx>,
1092     ) -> EvalResult<'tcx, Scalar> {
1093         match self.follow_by_ref_value(value, ty)? {
1094             Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),
1095
1096             Value::Scalar(scalar) => {
1097                 // TODO: Do we really want insta-UB here?
1098                 self.ensure_valid_value(scalar, ty)?;
1099                 Ok(scalar)
1100             }
1101
1102             Value::ScalarPair(..) => bug!("value_to_scalar can't work with fat pointers"),
1103         }
1104     }
1105
1106     pub fn write_ptr(&mut self, dest: Place, val: Scalar, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
1107         let valty = ValTy {
1108             value: val.to_value(),
1109             ty: dest_ty,
1110         };
1111         self.write_value(valty, dest)
1112     }
1113
1114     pub fn write_scalar(
1115         &mut self,
1116         dest: Place,
1117         val: Scalar,
1118         dest_ty: Ty<'tcx>,
1119     ) -> EvalResult<'tcx> {
1120         let valty = ValTy {
1121             value: Value::Scalar(val),
1122             ty: dest_ty,
1123         };
1124         self.write_value(valty, dest)
1125     }
1126
    /// Writes `src_val` into `dest` at type `dest_ty`, dispatching between
    /// memory-backed destinations and frame locals.
    pub fn write_value(
        &mut self,
        ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
        dest: Place,
    ) -> EvalResult<'tcx> {
        //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
        // Note that it is really important that the type here is the right one, and matches the type things are read at.
        // In case `src_val` is a `ScalarPair`, we don't do any magic here to handle padding properly, which is only
        // correct if we never look at this data with the wrong type.

        match dest {
            Place::Ptr { ptr, align, extra } => {
                // Writing through a pointer place never carries fat-pointer extras.
                assert_eq!(extra, PlaceExtra::None);
                self.write_value_to_ptr(src_val, ptr, align, dest_ty)
            }

            Place::Local { frame, local } => {
                // Locals may be stored by-value; delegate the subtle ByRef
                // handling to `write_value_possibly_by_val`, which stores the
                // replacement back into this frame's local via the closure.
                let dest = self.stack[frame].get_local(local)?;
                self.write_value_possibly_by_val(
                    src_val,
                    |this, val| this.stack[frame].set_local(local, val),
                    dest,
                    dest_ty,
                )
            }
        }
    }
1154
    // The cases here can be a bit subtle. Read carefully!
    /// Writes `src_val` over `old_dest_val`, either in place (when the
    /// destination is memory-backed) or by storing a replacement `Value`
    /// through `write_dest`.
    fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
        &mut self,
        src_val: Value,
        write_dest: F,
        old_dest_val: Value,
        dest_ty: Ty<'tcx>,
    ) -> EvalResult<'tcx> {
        if let Value::ByRef(dest_ptr, align) = old_dest_val {
            // If the value is already `ByRef` (that is, backed by an `Allocation`),
            // then we must write the new value into this allocation, because there may be
            // other pointers into the allocation. These other pointers are logically
            // pointers into the local variable, and must be able to observe the change.
            //
            // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
            // knew for certain that there were no outstanding pointers to this allocation.
            self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
        } else if let Value::ByRef(src_ptr, align) = src_val {
            // If the value is not `ByRef`, then we know there are no pointers to it
            // and we can simply overwrite the `Value` in the locals array directly.
            //
            // In this specific case, where the source value is `ByRef`, we must duplicate
            // the allocation, because this is a by-value operation. It would be incorrect
            // if they referred to the same allocation, since then a change to one would
            // implicitly change the other.
            //
            // It is a valid optimization to attempt reading a primitive value out of the
            // source and write that into the destination without making an allocation, so
            // we do so here.
            if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
                write_dest(self, src_val)?;
            } else {
                // Could not load a primitive: fall back to a fresh allocation
                // plus a memory-to-memory copy.
                let dest_ptr = self.alloc_ptr(dest_ty)?.into();
                let layout = self.layout_of(dest_ty)?;
                self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size, false)?;
                write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
            }
        } else {
            // Finally, we have the simple case where neither source nor destination are
            // `ByRef`. We may simply copy the source value over the destination.
            write_dest(self, src_val)?;
        }
        Ok(())
    }
1199
    /// Writes any `Value` into raw memory at `dest`/`dest_align`, using the
    /// layout of `dest_ty` to determine sizes, offsets, and signedness.
    pub fn write_value_to_ptr(
        &mut self,
        value: Value,
        dest: Scalar,
        dest_align: Align,
        dest_ty: Ty<'tcx>,
    ) -> EvalResult<'tcx> {
        let layout = self.layout_of(dest_ty)?;
        trace!("write_value_to_ptr: {:#?}, {}, {:#?}", value, dest_ty, layout);
        match value {
            Value::ByRef(ptr, align) => {
                // Memory-to-memory: a plain (non-overlapping) copy suffices.
                self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size, false)
            }
            Value::Scalar(scalar) => {
                // Derive signedness from the scalar ABI; a fully-undefined
                // scalar is tolerated for any layout (signedness irrelevant).
                let signed = match layout.abi {
                    layout::Abi::Scalar(ref scal) => match scal.value {
                        layout::Primitive::Int(_, signed) => signed,
                        _ => false,
                    },
                    _ => match scalar {
                        Scalar::Bits { defined: 0, .. } => false,
                        _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout),
                    }
                };
                self.memory.write_scalar(dest, dest_align, scalar, layout.size, signed)
            }
            Value::ScalarPair(a_val, b_val) => {
                trace!("write_value_to_ptr valpair: {:#?}", layout);
                let (a, b) = match layout.abi {
                    layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
                    _ => bug!("write_value_to_ptr: invalid ScalarPair layout: {:#?}", layout)
                };
                let (a_size, b_size) = (a.size(&self), b.size(&self));
                let a_ptr = dest;
                // The second component follows the first, aligned up to its
                // own alignment requirement.
                let b_offset = a_size.abi_align(b.align(&self));
                let b_ptr = dest.ptr_offset(b_offset, &self)?.into();
                // TODO: What about signedess?
                self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, false)?;
                self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, false)
            }
        }
    }
1242
1243     fn ensure_valid_value(&self, val: Scalar, ty: Ty<'tcx>) -> EvalResult<'tcx> {
1244         match ty.sty {
1245             ty::TyBool => val.to_bool().map(|_| ()),
1246
1247             ty::TyChar if ::std::char::from_u32(val.to_bits(Size::from_bytes(4))? as u32).is_none() => {
1248                 err!(InvalidChar(val.to_bits(Size::from_bytes(4))? as u32 as u128))
1249             }
1250
1251             _ => Ok(()),
1252         }
1253     }
1254
1255     pub fn read_value(&self, ptr: Scalar, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1256         if let Some(val) = self.try_read_value(ptr, align, ty)? {
1257             Ok(val)
1258         } else {
1259             bug!("primitive read failed for type: {:?}", ty);
1260         }
1261     }
1262
    /// Reads a (possibly fat) pointer to `pointee_ty` from memory at `ptr`.
    /// For unsized pointees the extra word (length or vtable pointer) is
    /// read from the following pointer-sized slot.
    pub(crate) fn read_ptr(
        &self,
        ptr: Pointer,
        ptr_align: Align,
        pointee_ty: Ty<'tcx>,
    ) -> EvalResult<'tcx, Value> {
        let ptr_size = self.memory.pointer_size();
        let p: Scalar = self.memory.read_ptr_sized(ptr, ptr_align)?.into();
        if self.type_is_sized(pointee_ty) {
            // Thin pointer: the data pointer alone is the whole value.
            Ok(p.to_value())
        } else {
            trace!("reading fat pointer extra of type {}", pointee_ty);
            let extra = ptr.offset(ptr_size, self)?;
            // The unsized tail of the pointee determines the extra word's meaning.
            match self.tcx.struct_tail(pointee_ty).sty {
                // Trait objects carry a vtable pointer.
                ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
                    self.memory.read_ptr_sized(extra, ptr_align)?.to_ptr()?,
                )),
                // Slices and strings carry a length.
                ty::TySlice(..) | ty::TyStr => {
                    let len = self
                        .memory
                        .read_ptr_sized(extra, ptr_align)?
                        .to_bits(ptr_size)?;
                    Ok(p.to_value_with_len(len as u64, self.tcx.tcx))
                },
                _ => bug!("unsized scalar ptr read from {:?}", pointee_ty),
            }
        }
    }
1291
    /// Validates the memory at `ptr` against invariants of type `ty`:
    /// `bool` range, `char` range, readability of (fn) pointers and
    /// scalar-ABI ADTs. Types without checked invariants pass trivially.
    pub fn validate_ptr_target(
        &self,
        ptr: Pointer,
        ptr_align: Align,
        ty: Ty<'tcx>
    ) -> EvalResult<'tcx> {
        match ty.sty {
            ty::TyBool => {
                // `to_bool` rejects anything but 0 and 1.
                self.memory.read_scalar(ptr, ptr_align, Size::from_bytes(1))?.to_bool()?;
            }
            ty::TyChar => {
                // A char must be a valid unicode scalar value.
                let c = self.memory.read_scalar(ptr, ptr_align, Size::from_bytes(4))?.to_bits(Size::from_bytes(4))? as u32;
                match ::std::char::from_u32(c) {
                    Some(..) => (),
                    None => return err!(InvalidChar(c as u128)),
                }
            }

            ty::TyFnPtr(_) => {
                // Just check that a pointer-sized value can be read here.
                self.memory.read_ptr_sized(ptr, ptr_align)?;
            },
            ty::TyRef(_, rty, _) |
            ty::TyRawPtr(ty::TypeAndMut { ty: rty, .. }) => {
                self.read_ptr(ptr, ptr_align, rty)?;
            }

            ty::TyAdt(def, _) => {
                if def.is_box() {
                    // Boxes validate like references to their pointee.
                    self.read_ptr(ptr, ptr_align, ty.boxed_ty())?;
                    return Ok(());
                }

                // Scalar-ABI ADTs: check that the scalar is readable.
                if let layout::Abi::Scalar(ref scalar) = self.layout_of(ty)?.abi {
                    let size = scalar.value.size(self);
                    self.memory.read_scalar(ptr, ptr_align, size)?;
                }
            }

            // No checked validity invariant for remaining types.
            _ => (),
        }
        Ok(())
    }
1334
1335     pub fn try_read_by_ref(&self, mut val: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1336         // Convert to ByVal or ScalarPair if possible
1337         if let Value::ByRef(ptr, align) = val {
1338             if let Some(read_val) = self.try_read_value(ptr, align, ty)? {
1339                 val = read_val;
1340             }
1341         }
1342         Ok(val)
1343     }
1344
    /// Attempts to read a primitive (`Scalar` or `ScalarPair` ABI) value of
    /// type `ty` from memory at `ptr`; returns `Ok(None)` for aggregate
    /// layouts that must remain `ByRef`.
    pub fn try_read_value(&self, ptr: Scalar, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
        let layout = self.layout_of(ty)?;
        self.memory.check_align(ptr, ptr_align)?;

        if layout.size.bytes() == 0 {
            // ZSTs carry no data; an undef scalar represents them.
            return Ok(Some(Value::Scalar(Scalar::undef())));
        }

        let ptr = ptr.to_ptr()?;

        // Not the right place to do this
        //self.validate_ptr_target(ptr, ptr_align, ty)?;

        match layout.abi {
            layout::Abi::Scalar(..) => {
                let scalar = self.memory.read_scalar(ptr, ptr_align, layout.size)?;
                Ok(Some(Value::Scalar(scalar)))
            }
            layout::Abi::ScalarPair(ref a, ref b) => {
                // The second component follows the first, aligned to its own
                // alignment (mirrors the layout used in `write_value_to_ptr`).
                let (a, b) = (&a.value, &b.value);
                let (a_size, b_size) = (a.size(self), b.size(self));
                let a_ptr = ptr;
                let b_offset = a_size.abi_align(b.align(self));
                let b_ptr = ptr.offset(b_offset, self)?.into();
                let a_val = self.memory.read_scalar(a_ptr, ptr_align, a_size)?;
                let b_val = self.memory.read_scalar(b_ptr, ptr_align, b_size)?;
                Ok(Some(Value::ScalarPair(a_val, b_val)))
            }
            // Aggregates cannot be represented as immediate values.
            _ => Ok(None),
        }
    }
1376
    /// Returns the innermost (currently executing) stack frame.
    /// Panics if the call stack is empty.
    pub fn frame(&self) -> &Frame<'mir, 'tcx> {
        self.stack.last().expect("no call frames exist")
    }
1380
    /// Mutable access to the innermost stack frame.
    /// Panics if the call stack is empty.
    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
        self.stack.last_mut().expect("no call frames exist")
    }
1384
    /// The MIR body being executed by the innermost frame.
    /// Panics (via `frame`) if the call stack is empty.
    pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
        self.frame().mir
    }
1388
1389     pub fn substs(&self) -> &'tcx Substs<'tcx> {
1390         if let Some(frame) = self.stack.last() {
1391             frame.instance.substs
1392         } else {
1393             Substs::empty()
1394         }
1395     }
1396
    /// Performs the pointer part of an unsizing coercion, where `sty`/`dty`
    /// are the source/destination pointee types: array-to-slice attaches a
    /// length, trait-object upcasts pass through unchanged, and
    /// concrete-to-trait-object attaches a vtable.
    fn unsize_into_ptr(
        &mut self,
        src: Value,
        src_ty: Ty<'tcx>,
        dest: Place,
        dest_ty: Ty<'tcx>,
        sty: Ty<'tcx>,
        dty: Ty<'tcx>,
    ) -> EvalResult<'tcx> {
        // A<Struct> -> A<Trait> conversion
        let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);

        match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
            (&ty::TyArray(_, length), &ty::TySlice(_)) => {
                // Turn the thin array pointer into a (ptr, len) fat pointer.
                let ptr = self.into_ptr(src)?;
                // u64 cast is from usize to u64, which is always good
                let valty = ValTy {
                    value: ptr.to_value_with_len(length.unwrap_usize(self.tcx.tcx), self.tcx.tcx),
                    ty: dest_ty,
                };
                self.write_value(valty, dest)
            }
            (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
                // For now, upcasts are limited to changes in marker
                // traits, and hence never actually require an actual
                // change to the vtable.
                let valty = ValTy {
                    value: src,
                    ty: dest_ty,
                };
                self.write_value(valty, dest)
            }
            (_, &ty::TyDynamic(ref data, _)) => {
                // Concrete type -> trait object: look up (or create) the
                // vtable for the source type and pair it with the data pointer.
                let trait_ref = data.principal().unwrap().with_self_ty(
                    *self.tcx,
                    src_pointee_ty,
                );
                let trait_ref = self.tcx.erase_regions(&trait_ref);
                let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
                let ptr = self.into_ptr(src)?;
                let valty = ValTy {
                    value: ptr.to_value_with_vtable(vtable),
                    ty: dest_ty,
                };
                self.write_value(valty, dest)
            }

            _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
        }
    }
1447
1448     fn unsize_into(
1449         &mut self,
1450         src: Value,
1451         src_layout: TyLayout<'tcx>,
1452         dst: Place,
1453         dst_layout: TyLayout<'tcx>,
1454     ) -> EvalResult<'tcx> {
1455         match (&src_layout.ty.sty, &dst_layout.ty.sty) {
1456             (&ty::TyRef(_, s, _), &ty::TyRef(_, d, _)) |
1457             (&ty::TyRef(_, s, _), &ty::TyRawPtr(TypeAndMut { ty: d, .. })) |
1458             (&ty::TyRawPtr(TypeAndMut { ty: s, .. }),
1459              &ty::TyRawPtr(TypeAndMut { ty: d, .. })) => {
1460                 self.unsize_into_ptr(src, src_layout.ty, dst, dst_layout.ty, s, d)
1461             }
1462             (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
1463                 assert_eq!(def_a, def_b);
1464                 if def_a.is_box() || def_b.is_box() {
1465                     if !def_a.is_box() || !def_b.is_box() {
1466                         bug!("invalid unsizing between {:?} -> {:?}", src_layout, dst_layout);
1467                     }
1468                     return self.unsize_into_ptr(
1469                         src,
1470                         src_layout.ty,
1471                         dst,
1472                         dst_layout.ty,
1473                         src_layout.ty.boxed_ty(),
1474                         dst_layout.ty.boxed_ty(),
1475                     );
1476                 }
1477
1478                 // unsizing of generic struct with pointer fields
1479                 // Example: `Arc<T>` -> `Arc<Trait>`
1480                 // here we need to increase the size of every &T thin ptr field to a fat ptr
1481                 for i in 0..src_layout.fields.count() {
1482                     let (dst_f_place, dst_field) =
1483                         self.place_field(dst, mir::Field::new(i), dst_layout)?;
1484                     if dst_field.is_zst() {
1485                         continue;
1486                     }
1487                     let (src_f_value, src_field) = match src {
1488                         Value::ByRef(ptr, align) => {
1489                             let src_place = Place::from_scalar_ptr(ptr, align);
1490                             let (src_f_place, src_field) =
1491                                 self.place_field(src_place, mir::Field::new(i), src_layout)?;
1492                             (self.read_place(src_f_place)?, src_field)
1493                         }
1494                         Value::Scalar(_) | Value::ScalarPair(..) => {
1495                             let src_field = src_layout.field(&self, i)?;
1496                             assert_eq!(src_layout.fields.offset(i).bytes(), 0);
1497                             assert_eq!(src_field.size, src_layout.size);
1498                             (src, src_field)
1499                         }
1500                     };
1501                     if src_field.ty == dst_field.ty {
1502                         self.write_value(ValTy {
1503                             value: src_f_value,
1504                             ty: src_field.ty,
1505                         }, dst_f_place)?;
1506                     } else {
1507                         self.unsize_into(src_f_value, src_field, dst_f_place, dst_field)?;
1508                     }
1509                 }
1510                 Ok(())
1511             }
1512             _ => {
1513                 bug!(
1514                     "unsize_into: invalid conversion: {:?} -> {:?}",
1515                     src_layout,
1516                     dst_layout
1517                 )
1518             }
1519         }
1520     }
1521
1522     pub fn dump_local(&self, place: Place) {
1523         // Debug output
1524         if !log_enabled!(::log::Level::Trace) {
1525             return;
1526         }
1527         match place {
1528             Place::Local { frame, local } => {
1529                 let mut allocs = Vec::new();
1530                 let mut msg = format!("{:?}", local);
1531                 if frame != self.cur_frame() {
1532                     write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
1533                 }
1534                 write!(msg, ":").unwrap();
1535
1536                 match self.stack[frame].get_local(local) {
1537                     Err(err) => {
1538                         if let EvalErrorKind::DeadLocal = err.kind {
1539                             write!(msg, " is dead").unwrap();
1540                         } else {
1541                             panic!("Failed to access local: {:?}", err);
1542                         }
1543                     }
1544                     Ok(Value::ByRef(ptr, align)) => {
1545                         match ptr {
1546                             Scalar::Ptr(ptr) => {
1547                                 write!(msg, " by align({}) ref:", align.abi()).unwrap();
1548                                 allocs.push(ptr.alloc_id);
1549                             }
1550                             ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
1551                         }
1552                     }
1553                     Ok(Value::Scalar(val)) => {
1554                         write!(msg, " {:?}", val).unwrap();
1555                         if let Scalar::Ptr(ptr) = val {
1556                             allocs.push(ptr.alloc_id);
1557                         }
1558                     }
1559                     Ok(Value::ScalarPair(val1, val2)) => {
1560                         write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
1561                         if let Scalar::Ptr(ptr) = val1 {
1562                             allocs.push(ptr.alloc_id);
1563                         }
1564                         if let Scalar::Ptr(ptr) = val2 {
1565                             allocs.push(ptr.alloc_id);
1566                         }
1567                     }
1568                 }
1569
1570                 trace!("{}", msg);
1571                 self.memory.dump_allocs(allocs);
1572             }
1573             Place::Ptr { ptr, align, .. } => {
1574                 match ptr {
1575                     Scalar::Ptr(ptr) => {
1576                         trace!("by align({}) ref:", align.abi());
1577                         self.memory.dump_alloc(ptr.alloc_id);
1578                     }
1579                     ptr => trace!(" integral by ref: {:?}", ptr),
1580                 }
1581             }
1582         }
1583     }
1584
1585     /// Convenience function to ensure correct usage of locals
1586     pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
1587     where
1588         F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
1589     {
1590         let val = self.stack[frame].get_local(local)?;
1591         let new_val = f(self, val)?;
1592         self.stack[frame].set_local(local, new_val)?;
1593         // FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
1594         // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
1595         //     self.memory.deallocate(ptr)?;
1596         // }
1597         Ok(())
1598     }
1599
1600     pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
1601         let mut last_span = None;
1602         let mut frames = Vec::new();
1603         // skip 1 because the last frame is just the environment of the constant
1604         for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().skip(1).rev() {
1605             // make sure we don't emit frames that are duplicates of the previous
1606             if explicit_span == Some(span) {
1607                 last_span = Some(span);
1608                 continue;
1609             }
1610             if let Some(last) = last_span {
1611                 if last == span {
1612                     continue;
1613                 }
1614             } else {
1615                 last_span = Some(span);
1616             }
1617             let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
1618                 "closure".to_owned()
1619             } else {
1620                 instance.to_string()
1621             };
1622             let block = &mir.basic_blocks()[block];
1623             let source_info = if stmt < block.statements.len() {
1624                 block.statements[stmt].source_info
1625             } else {
1626                 block.terminator().source_info
1627             };
1628             let lint_root = match mir.source_scope_local_data {
1629                 mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
1630                 mir::ClearCrossCrate::Clear => None,
1631             };
1632             frames.push(FrameInfo { span, location, lint_root });
1633         }
1634         trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
1635         (frames, self.tcx.span)
1636     }
1637
1638     pub fn sign_extend(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
1639         super::sign_extend(self.tcx.tcx, value, ty)
1640     }
1641
1642     pub fn truncate(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
1643         super::truncate(self.tcx.tcx, value, ty)
1644     }
1645 }
1646
1647 impl<'mir, 'tcx> Frame<'mir, 'tcx> {
1648     pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
1649         self.locals[local].ok_or_else(|| EvalErrorKind::DeadLocal.into())
1650     }
1651
1652     fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
1653         match self.locals[local] {
1654             None => err!(DeadLocal),
1655             Some(ref mut local) => {
1656                 *local = value;
1657                 Ok(())
1658             }
1659         }
1660     }
1661
1662     pub fn storage_live(&mut self, local: mir::Local) -> Option<Value> {
1663         trace!("{:?} is now live", local);
1664
1665         // StorageLive *always* kills the value that's currently stored
1666         mem::replace(&mut self.locals[local], Some(Value::Scalar(Scalar::undef())))
1667     }
1668
1669     /// Returns the old value of the local
1670     pub fn storage_dead(&mut self, local: mir::Local) -> Option<Value> {
1671         trace!("{:?} is now dead", local);
1672
1673         self.locals[local].take()
1674     }
1675 }