src/interpreter/mod.rs
1 use rustc::middle::const_val;
2 use rustc::hir::def_id::DefId;
3 use rustc::mir::mir_map::MirMap;
4 use rustc::mir::repr as mir;
5 use rustc::traits::{self, ProjectionMode};
6 use rustc::ty::fold::TypeFoldable;
7 use rustc::ty::layout::{self, Layout, Size};
8 use rustc::ty::subst::{self, Subst, Substs};
9 use rustc::ty::{self, Ty, TyCtxt, BareFnTy};
10 use rustc::util::nodemap::DefIdMap;
11 use rustc_data_structures::indexed_vec::Idx;
12 use std::cell::RefCell;
13 use std::ops::Deref;
14 use std::rc::Rc;
15 use std::{iter, mem};
16 use syntax::ast;
17 use syntax::attr;
18 use syntax::codemap::{self, DUMMY_SP, Span};
19
20 use error::{EvalError, EvalResult};
21 use memory::{Memory, Pointer, FunctionDefinition};
22 use primval::{self, PrimVal};
23
24 use std::collections::HashMap;
25
26 mod stepper;
27
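/// Advance the interpreter by a single step, delegating to the stepper.
/// A minimal driver sketch, assuming the returned `bool` means "more work to
/// do" and that a `tcx` and `mir_map` are in scope in a caller that itself
/// returns an `EvalResult`:
///
/// ```ignore
/// let mut ecx = EvalContext::new(tcx, &mir_map);
/// // ...push a stack frame for the entry function...
/// while step(&mut ecx)? {}
/// ```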
28 pub fn step<'ecx, 'a: 'ecx, 'tcx: 'a>(ecx: &'ecx mut EvalContext<'a, 'tcx>) -> EvalResult<'tcx, bool> {
29     stepper::Stepper::new(ecx).step()
30 }
31
32 pub struct EvalContext<'a, 'tcx: 'a> {
33     /// The results of the type checker, from rustc.
34     tcx: TyCtxt<'a, 'tcx, 'tcx>,
35
36     /// A mapping from NodeIds to Mir, from rustc. Only contains MIR for crate-local items.
37     mir_map: &'a MirMap<'tcx>,
38
39     /// A local cache from DefIds to Mir for non-crate-local items.
40     mir_cache: RefCell<DefIdMap<Rc<mir::Mir<'tcx>>>>,
41
42     /// The virtual memory system.
43     memory: Memory<'tcx>,
44
45     /// Precomputed statics, constants and promoteds
46     statics: HashMap<ConstantId<'tcx>, Pointer>,
47
48     /// The virtual call stack.
49     stack: Vec<Frame<'a, 'tcx>>,
50 }
51
52 /// A stack frame.
53 pub struct Frame<'a, 'tcx: 'a> {
54     /// The def_id of the current function
55     pub def_id: DefId,
56
57     /// The span of the call site
58     pub span: codemap::Span,
59
60     /// type substitutions for the current function invocation
61     pub substs: &'tcx Substs<'tcx>,
62
63     /// The MIR for the function called on this frame.
64     pub mir: CachedMir<'a, 'tcx>,
65
66     /// The block currently being executed (or the block that will be executed once the frames above this one return)
67     pub next_block: mir::BasicBlock,
68
69     /// A pointer for writing the return value of the current call if it's not a diverging call.
70     pub return_ptr: Option<Pointer>,
71
72     /// The list of locals for the current function, stored in order as
73     /// `[arguments..., variables..., temporaries...]`. The variables begin at `self.var_offset`
74     /// and the temporaries at `self.temp_offset`.
75     pub locals: Vec<Pointer>,
76
77     /// The offset of the first variable in `self.locals`.
78     pub var_offset: usize,
79
80     /// The offset of the first temporary in `self.locals`.
81     pub temp_offset: usize,
82
83     /// The index of the currently evaluated statment
84     pub stmt: usize,
85 }
86
87 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
88 struct Lvalue {
89     ptr: Pointer,
90     extra: LvalueExtra,
91 }
92
93 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
94 enum LvalueExtra {
95     None,
96     Length(u64),
97     // TODO(solson): Vtable(memory::AllocId),
98     DowncastVariant(usize),
99 }
100
101 #[derive(Clone)]
102 pub enum CachedMir<'mir, 'tcx: 'mir> {
103     Ref(&'mir mir::Mir<'tcx>),
104     Owned(Rc<mir::Mir<'tcx>>)
105 }
106
107 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
108 /// Uniquely identifies a specific constant or static
109 struct ConstantId<'tcx> {
110     /// The def id of the constant/static or, in the case of promoteds, the def id of the function they belong to
111     def_id: DefId,
112     /// In the case of statics and constants this is `Substs::empty()`, so only promoteds and associated
113     /// constants actually have something useful here. We could special case statics and constants,
114     /// but that would only require more branching when working with constants, and not bring any
115     /// real benefits.
116     substs: &'tcx Substs<'tcx>,
117     kind: ConstantKind,
118 }
119
120 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
121 enum ConstantKind {
122     Promoted(mir::Promoted),
123     /// Statics, constants and associated constants
124     Global,
125 }
126
127 impl<'a, 'tcx> EvalContext<'a, 'tcx> {
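    /// Create a fresh evaluation context with an empty stack. The memory's
    /// pointer size is derived from the target's `uint_type` bit width.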
128     pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir_map: &'a MirMap<'tcx>) -> Self {
129         EvalContext {
130             tcx: tcx,
131             mir_map: mir_map,
132             mir_cache: RefCell::new(DefIdMap()),
133             memory: Memory::new(tcx.sess
134                                    .target
135                                    .uint_type
136                                    .bit_width()
137                                    .expect("Session::target::uint_type was usize")/8),
138             statics: HashMap::new(),
139             stack: Vec::new(),
140         }
141     }
142
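    /// Allocate space for the return value of a call with the given output
    /// type, or return `None` if the function diverges.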
143     pub fn alloc_ret_ptr(&mut self, output_ty: ty::FnOutput<'tcx>, substs: &'tcx Substs<'tcx>) -> Option<Pointer> {
144         match output_ty {
145             ty::FnConverging(ty) => {
146                 let size = self.type_size_with_substs(ty, substs);
147                 Some(self.memory.allocate(size))
148             }
149             ty::FnDiverging => None,
150         }
151     }
152
153     pub fn memory(&self) -> &Memory {
154         &self.memory
155     }
156
157     pub fn memory_mut(&mut self) -> &mut Memory<'tcx> {
158         &mut self.memory
159     }
160
161     pub fn stack(&self) -> &[Frame] {
162         &self.stack
163     }
164
165     // TODO(solson): Try making const_to_primval instead.
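    /// Allocate memory for a constant value and write the value into it,
    /// returning a pointer to the fresh allocation.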
166     fn const_to_ptr(&mut self, const_val: &const_val::ConstVal) -> EvalResult<'tcx, Pointer> {
167         use rustc::middle::const_val::ConstVal::*;
168         match *const_val {
169             Float(_f) => unimplemented!(),
170             Integral(int) => {
171                 // TODO(solson): Check int constant type.
172                 let ptr = self.memory.allocate(8);
173                 self.memory.write_uint(ptr, int.to_u64_unchecked(), 8)?;
174                 Ok(ptr)
175             }
176             Str(ref s) => {
177                 let psize = self.memory.pointer_size;
178                 let static_ptr = self.memory.allocate(s.len());
179                 let ptr = self.memory.allocate(psize * 2);
180                 self.memory.write_bytes(static_ptr, s.as_bytes())?;
181                 self.memory.write_ptr(ptr, static_ptr)?;
182                 self.memory.write_usize(ptr.offset(psize as isize), s.len() as u64)?;
183                 Ok(ptr)
184             }
185             ByteStr(ref bs) => {
186                 let psize = self.memory.pointer_size;
187                 let static_ptr = self.memory.allocate(bs.len());
188                 let ptr = self.memory.allocate(psize);
189                 self.memory.write_bytes(static_ptr, bs)?;
190                 self.memory.write_ptr(ptr, static_ptr)?;
191                 Ok(ptr)
192             }
193             Bool(b) => {
194                 let ptr = self.memory.allocate(1);
195                 self.memory.write_bool(ptr, b)?;
196                 Ok(ptr)
197             }
198             Char(_c)          => unimplemented!(),
199             Struct(_node_id)  => unimplemented!(),
200             Tuple(_node_id)   => unimplemented!(),
201             Function(_def_id) => unimplemented!(),
202             Array(_, _)       => unimplemented!(),
203             Repeat(_, _)      => unimplemented!(),
204             Dummy             => unimplemented!(),
205         }
206     }
207
208     fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
209         self.tcx.type_needs_drop_given_env(ty, &self.tcx.empty_parameter_environment())
210     }
211
212     fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
213         ty.is_sized(self.tcx, &self.tcx.empty_parameter_environment(), DUMMY_SP)
214     }
215
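    /// Resolve a trait reference to the vtable that fulfills it, driving all
    /// nested obligations to completion in a fresh inference context.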
216     fn fulfill_obligation(&self, trait_ref: ty::PolyTraitRef<'tcx>) -> traits::Vtable<'tcx, ()> {
217         // Do the initial selection for the obligation. This yields the shallow result we are
218         // looking for -- that is, what specific impl.
219         self.tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
220             let mut selcx = traits::SelectionContext::new(&infcx);
221
222             let obligation = traits::Obligation::new(
223                 traits::ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID),
224                 trait_ref.to_poly_trait_predicate(),
225             );
226             let selection = selcx.select(&obligation).unwrap().unwrap();
227
228             // Currently, we use a fulfillment context to completely resolve all nested obligations.
229             // This is because they can inform the inference of the impl's type parameters.
230             let mut fulfill_cx = traits::FulfillmentContext::new();
231             let vtable = selection.map(|predicate| {
232                 fulfill_cx.register_predicate_obligation(&infcx, predicate);
233             });
234             infcx.drain_fulfillment_cx_or_panic(DUMMY_SP, &mut fulfill_cx, &vtable)
235         })
236     }
237
238     /// Resolve a trait method to the concrete impl method (and substitutions) that will actually be called.
239     pub fn trait_method(
240         &self,
241         def_id: DefId,
242         substs: &'tcx Substs<'tcx>
243     ) -> (DefId, &'tcx Substs<'tcx>) {
244         let method_item = self.tcx.impl_or_trait_item(def_id);
245         let trait_id = method_item.container().id();
246         let trait_ref = ty::Binder(substs.to_trait_ref(self.tcx, trait_id));
247         match self.fulfill_obligation(trait_ref) {
248             traits::VtableImpl(vtable_impl) => {
249                 let impl_did = vtable_impl.impl_def_id;
250                 let mname = self.tcx.item_name(def_id);
251                 // Create a concatenated set of substitutions which includes those from the impl
252                 // and those from the method:
253                 let impl_substs = vtable_impl.substs.with_method_from(substs);
254                 let substs = self.tcx.mk_substs(impl_substs);
255                 let mth = get_impl_method(self.tcx, impl_did, substs, mname);
256
257                 (mth.method.def_id, mth.substs)
258             }
259
260             traits::VtableClosure(vtable_closure) =>
261                 (vtable_closure.closure_def_id, vtable_closure.substs.func_substs),
262
263             traits::VtableFnPointer(_fn_ty) => {
264                 let _trait_closure_kind = self.tcx.lang_items.fn_trait_kind(trait_id).unwrap();
265                 unimplemented!()
266                 // let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, fn_ty);
267
268                 // let method_ty = def_ty(tcx, def_id, substs);
269                 // let fn_ptr_ty = match method_ty.sty {
270                 //     ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)),
271                 //     _ => unreachable!("expected fn item type, found {}",
272                 //                       method_ty)
273                 // };
274                 // Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty))
275             }
276
277             traits::VtableObject(ref _data) => {
278                 unimplemented!()
279                 // Callee {
280                 //     data: Virtual(traits::get_vtable_index_of_object_method(
281                 //                   tcx, data, def_id)),
282                 //                   ty: def_ty(tcx, def_id, substs)
283                 // }
284             }
285             vtable => unreachable!("resolved to bad vtable {:?} in trans", vtable),
286         }
287     }
288
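    /// Look up the MIR for a definition: crate-local items come straight from
    /// the `MirMap`, while items from other crates are loaded from the crate
    /// store and memoized in `mir_cache`.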
289     fn load_mir(&self, def_id: DefId) -> CachedMir<'a, 'tcx> {
290         use rustc_trans::back::symbol_names::def_id_to_string;
291         match self.tcx.map.as_local_node_id(def_id) {
292             Some(node_id) => CachedMir::Ref(self.mir_map.map.get(&node_id).unwrap()),
293             None => {
294                 let mut mir_cache = self.mir_cache.borrow_mut();
295                 if let Some(mir) = mir_cache.get(&def_id) {
296                     return CachedMir::Owned(mir.clone());
297                 }
298
299                 let cs = &self.tcx.sess.cstore;
300                 let mir = cs.maybe_get_item_mir(self.tcx, def_id).unwrap_or_else(|| {
301                     panic!("no mir for `{}`", def_id_to_string(self.tcx, def_id));
302                 });
303                 let cached = Rc::new(mir);
304                 mir_cache.insert(def_id, cached.clone());
305                 CachedMir::Owned(cached)
306             }
307         }
308     }
309
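    /// Apply the given substitutions to a type and normalize any associated
    /// types that become resolvable as a result.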
310     fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
311         let substituted = ty.subst(self.tcx, substs);
312         self.tcx.normalize_associated_type(&substituted)
313     }
314
315     fn type_size(&self, ty: Ty<'tcx>) -> usize {
316         self.type_size_with_substs(ty, self.substs())
317     }
318
319     fn type_size_with_substs(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> usize {
320         self.type_layout_with_substs(ty, substs).size(&self.tcx.data_layout).bytes() as usize
321     }
322
323     fn type_layout(&self, ty: Ty<'tcx>) -> &'tcx Layout {
324         self.type_layout_with_substs(ty, self.substs())
325     }
326
327     fn type_layout_with_substs(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> &'tcx Layout {
328         // TODO(solson): Is this inefficient? Needs investigation.
329         let ty = self.monomorphize(ty, substs);
330
331         self.tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
332             // TODO(solson): Report this error properly.
333             ty.layout(&infcx).unwrap()
334         })
335     }
336
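    /// Push a new stack frame for a call, allocating memory for every
    /// argument, variable and temporary declared in the callee's MIR.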
337     pub fn push_stack_frame(&mut self, def_id: DefId, span: codemap::Span, mir: CachedMir<'a, 'tcx>, substs: &'tcx Substs<'tcx>,
338         return_ptr: Option<Pointer>)
339     {
340         let arg_tys = mir.arg_decls.iter().map(|a| a.ty);
341         let var_tys = mir.var_decls.iter().map(|v| v.ty);
342         let temp_tys = mir.temp_decls.iter().map(|t| t.ty);
343
344         let num_args = mir.arg_decls.len();
345         let num_vars = mir.var_decls.len();
346
347         ::log_settings::settings().indentation += 1;
348
349         let locals: Vec<Pointer> = arg_tys.chain(var_tys).chain(temp_tys).map(|ty| {
350             let size = self.type_size_with_substs(ty, substs);
351             self.memory.allocate(size)
352         }).collect();
353
354         self.stack.push(Frame {
355             mir: mir.clone(),
356             next_block: mir::START_BLOCK,
357             return_ptr: return_ptr,
358             locals: locals,
359             var_offset: num_args,
360             temp_offset: num_args + num_vars,
361             span: span,
362             def_id: def_id,
363             substs: substs,
364             stmt: 0,
365         });
366     }
367
368     fn pop_stack_frame(&mut self) {
369         ::log_settings::settings().indentation -= 1;
370         let _frame = self.stack.pop().expect("tried to pop a stack frame, but there were none");
371         // TODO(solson): Deallocate local variables.
372     }
373
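    /// Evaluate a basic block terminator, deciding where control flow goes
    /// next (or popping/pushing stack frames for returns and calls).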
374     fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>)
375             -> EvalResult<'tcx, ()> {
376         use rustc::mir::repr::TerminatorKind::*;
377         match terminator.kind {
378             Return => self.pop_stack_frame(),
379
380             Goto { target } => {
381                 self.frame_mut().next_block = target;
382             },
383
384             If { ref cond, targets: (then_target, else_target) } => {
385                 let cond_ptr = self.eval_operand(cond)?;
386                 let cond_val = self.memory.read_bool(cond_ptr)?;
387                 self.frame_mut().next_block = if cond_val { then_target } else { else_target };
388             }
389
390             SwitchInt { ref discr, ref values, ref targets, .. } => {
391                 let discr_ptr = self.eval_lvalue(discr)?.to_ptr();
392                 let discr_size = self
393                     .type_layout(self.lvalue_ty(discr))
394                     .size(&self.tcx.data_layout)
395                     .bytes() as usize;
396                 let discr_val = self.memory.read_uint(discr_ptr, discr_size)?;
397
398                 // Branch to the `otherwise` case by default, if no match is found.
399                 let mut target_block = targets[targets.len() - 1];
400
401                 for (index, val_const) in values.iter().enumerate() {
402                     let ptr = self.const_to_ptr(val_const)?;
403                     let val = self.memory.read_uint(ptr, discr_size)?;
404                     if discr_val == val {
405                         target_block = targets[index];
406                         break;
407                     }
408                 }
409
410                 self.frame_mut().next_block = target_block;
411             }
412
413             Switch { ref discr, ref targets, adt_def } => {
414                 let adt_ptr = self.eval_lvalue(discr)?.to_ptr();
415                 let adt_ty = self.lvalue_ty(discr);
416                 let discr_val = self.read_discriminant_value(adt_ptr, adt_ty)?;
417                 let matching = adt_def.variants.iter()
418                     .position(|v| discr_val == v.disr_val.to_u64_unchecked());
419
420                 match matching {
421                     Some(i) => {
422                         self.frame_mut().next_block = targets[i];
423                     },
424                     None => return Err(EvalError::InvalidDiscriminant),
425                 }
426             }
427
428             Call { ref func, ref args, ref destination, .. } => {
429                 let mut return_ptr = None;
430                 if let Some((ref lv, target)) = *destination {
431                     self.frame_mut().next_block = target;
432                     return_ptr = Some(self.eval_lvalue(lv)?.to_ptr());
433                 }
434
435                 let func_ty = self.operand_ty(func);
436                 match func_ty.sty {
437                     ty::TyFnPtr(bare_fn_ty) => {
438                         let ptr = self.eval_operand(func)?;
439                         assert_eq!(ptr.offset, 0);
440                         let fn_ptr = self.memory.read_ptr(ptr)?;
441                         let FunctionDefinition { def_id, substs, fn_ty } = self.memory.get_fn(fn_ptr.alloc_id)?;
442                         if fn_ty != bare_fn_ty {
443                             return Err(EvalError::FunctionPointerTyMismatch(fn_ty, bare_fn_ty));
444                         }
445                         self.eval_fn_call(def_id, substs, bare_fn_ty, return_ptr, args,
446                                           terminator.source_info.span)?
447                     },
448                     ty::TyFnDef(def_id, substs, fn_ty) => {
449                         self.eval_fn_call(def_id, substs, fn_ty, return_ptr, args,
450                                           terminator.source_info.span)?
451                     }
452
453                     _ => return Err(EvalError::Unimplemented(format!("can't handle callee of type {:?}", func_ty))),
454                 }
455             }
456
457             Drop { ref location, target, .. } => {
458                 let ptr = self.eval_lvalue(location)?.to_ptr();
459                 let ty = self.lvalue_ty(location);
460                 self.drop(ptr, ty)?;
461                 self.frame_mut().next_block = target;
462             }
463
464             Assert { ref cond, expected, ref msg, target, cleanup } => {
465                 let actual_ptr = self.eval_operand(cond)?;
466                 let actual = self.memory.read_bool(actual_ptr)?;
467                 if actual == expected {
468                     self.frame_mut().next_block = target;
469                 } else {
470                     panic!("unimplemented: jump to {:?} and print {:?}", cleanup, msg);
471                 }
472             }
473
474             DropAndReplace { .. } => unimplemented!(),
475             Resume => unimplemented!(),
476             Unreachable => unimplemented!(),
477         }
478
479         Ok(())
480     }
481
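    /// Dispatch a function call according to its ABI: intrinsics and C shims
    /// are emulated in place, while Rust and rust-call functions get a fresh
    /// stack frame with their arguments moved into the new locals.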
482     pub fn eval_fn_call(
483         &mut self,
484         def_id: DefId,
485         substs: &'tcx Substs<'tcx>,
486         fn_ty: &'tcx BareFnTy,
487         return_ptr: Option<Pointer>,
488         args: &[mir::Operand<'tcx>],
489         span: Span,
490     ) -> EvalResult<'tcx, ()> {
491         use syntax::abi::Abi;
492         match fn_ty.abi {
493             Abi::RustIntrinsic => {
494                 let name = self.tcx.item_name(def_id).as_str();
495                 match fn_ty.sig.0.output {
496                     ty::FnConverging(ty) => {
497                         let size = self.type_size(ty);
498                         let ret = return_ptr.unwrap();
499                         self.call_intrinsic(&name, substs, args, ret, size)
500                     }
501                     ty::FnDiverging => unimplemented!(),
502                 }
503             }
504
505             Abi::C => {
506                 match fn_ty.sig.0.output {
507                     ty::FnConverging(ty) => {
508                         let size = self.type_size(ty);
509                         self.call_c_abi(def_id, args, return_ptr.unwrap(), size)
510                     }
511                     ty::FnDiverging => unimplemented!(),
512                 }
513             }
514
515             Abi::Rust | Abi::RustCall => {
516                 // TODO(solson): Adjust the first argument when calling a Fn or
517                 // FnMut closure via FnOnce::call_once.
518
519                 // Only trait methods can have a Self parameter.
520                 let (resolved_def_id, resolved_substs) = if substs.self_ty().is_some() {
521                     self.trait_method(def_id, substs)
522                 } else {
523                     (def_id, substs)
524                 };
525
526                 let mut arg_srcs = Vec::new();
527                 for arg in args {
528                     let src = self.eval_operand(arg)?;
529                     let src_ty = self.operand_ty(arg);
530                     arg_srcs.push((src, src_ty));
531                 }
532
533                 if fn_ty.abi == Abi::RustCall && !args.is_empty() {
534                     arg_srcs.pop();
535                     let last_arg = args.last().unwrap();
536                     let last = self.eval_operand(last_arg)?;
537                     let last_ty = self.operand_ty(last_arg);
538                     let last_layout = self.type_layout(last_ty);
539                     match (&last_ty.sty, last_layout) {
540                         (&ty::TyTuple(fields),
541                          &Layout::Univariant { ref variant, .. }) => {
542                             let offsets = iter::once(0)
543                                 .chain(variant.offset_after_field.iter()
544                                     .map(|s| s.bytes()));
545                             for (offset, ty) in offsets.zip(fields) {
546                                 let src = last.offset(offset as isize);
547                                 arg_srcs.push((src, ty));
548                             }
549                         }
550                         ty => panic!("expected tuple as last argument in function with 'rust-call' ABI, got {:?}", ty),
551                     }
552                 }
553
554                 let mir = self.load_mir(resolved_def_id);
555                 self.push_stack_frame(def_id, span, mir, resolved_substs, return_ptr);
556
557                 for (i, (src, src_ty)) in arg_srcs.into_iter().enumerate() {
558                     let dest = self.frame().locals[i];
559                     self.move_(src, dest, src_ty)?;
560                 }
561
562                 Ok(())
563             }
564
565             abi => Err(EvalError::Unimplemented(format!("can't handle function with {:?} ABI", abi))),
566         }
567     }
568
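    /// Drop the value behind `ptr`, recursing into box contents, deallocating
    /// the box's backing memory and drop-filling what was dropped.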
569     fn drop(&mut self, ptr: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx, ()> {
570         if !self.type_needs_drop(ty) {
571             debug!("no need to drop {:?}", ty);
572             return Ok(());
573         }
574         trace!("-need to drop {:?}", ty);
575
576         // TODO(solson): Call user-defined Drop::drop impls.
577
578         match ty.sty {
579             ty::TyBox(contents_ty) => {
580                 match self.memory.read_ptr(ptr) {
581                     Ok(contents_ptr) => {
582                         self.drop(contents_ptr, contents_ty)?;
583                         trace!("-deallocating box");
584                         self.memory.deallocate(contents_ptr)?;
585                     }
586                     Err(EvalError::ReadBytesAsPointer) => {
587                         let size = self.memory.pointer_size;
588                         let possible_drop_fill = self.memory.read_bytes(ptr, size)?;
589                         if possible_drop_fill.iter().all(|&b| b == mem::POST_DROP_U8) {
590                             return Ok(());
591                         } else {
592                             return Err(EvalError::ReadBytesAsPointer);
593                         }
594                     }
595                     Err(e) => return Err(e),
596                 }
597             }
598
599             // TODO(solson): Implement drop for other relevant types (e.g. aggregates).
600             _ => {}
601         }
602
603         // Filling drop.
604         // FIXME(solson): Trait objects (with no static size) probably get filled, too.
605         let size = self.type_size(ty);
606         self.memory.drop_fill(ptr, size)?;
607
608         Ok(())
609     }
610
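    /// Read the discriminant of an enum value, taking the layout
    /// optimizations (C-like enums, nullable pointers) into account.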
611     fn read_discriminant_value(&self, adt_ptr: Pointer, adt_ty: Ty<'tcx>) -> EvalResult<'tcx, u64> {
612         use rustc::ty::layout::Layout::*;
613         let adt_layout = self.type_layout(adt_ty);
614
615         let discr_val = match *adt_layout {
616             General { discr, .. } | CEnum { discr, .. } => {
617                 let discr_size = discr.size().bytes();
618                 self.memory.read_uint(adt_ptr, discr_size as usize)?
619             }
620
621             RawNullablePointer { nndiscr, .. } => {
622                 self.read_nonnull_discriminant_value(adt_ptr, nndiscr)?
623             }
624
625             StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
626                 let offset = self.nonnull_offset(adt_ty, nndiscr, discrfield)?;
627                 let nonnull = adt_ptr.offset(offset.bytes() as isize);
628                 self.read_nonnull_discriminant_value(nonnull, nndiscr)?
629             }
630
631             // The discriminant_value intrinsic returns 0 for non-sum types.
632             Array { .. } | FatPointer { .. } | Scalar { .. } | Univariant { .. } |
633             Vector { .. } => 0,
634         };
635
636         Ok(discr_val)
637     }
638
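    /// For nullable pointer layouts: a non-zero (or pointer-valued) word means
    /// the non-null variant `nndiscr`, a zero word means the other variant.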
639     fn read_nonnull_discriminant_value(&self, ptr: Pointer, nndiscr: u64) -> EvalResult<'tcx, u64> {
640         let not_null = match self.memory.read_usize(ptr) {
641             Ok(0) => false,
642             Ok(_) | Err(EvalError::ReadPointerAsBytes) => true,
643             Err(e) => return Err(e),
644         };
645         assert!(nndiscr == 0 || nndiscr == 1);
646         Ok(if not_null { nndiscr } else { 1 - nndiscr })
647     }
648
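    /// Emulate a Rust intrinsic by name, reading its arguments from memory and
    /// writing the result to `dest`.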
649     fn call_intrinsic(
650         &mut self,
651         name: &str,
652         substs: &'tcx Substs<'tcx>,
653         args: &[mir::Operand<'tcx>],
654         dest: Pointer,
655         dest_size: usize
656     ) -> EvalResult<'tcx, ()> {
657         let args_res: EvalResult<Vec<Pointer>> = args.iter()
658             .map(|arg| self.eval_operand(arg))
659             .collect();
660         let args = args_res?;
661
662         match name {
663             // FIXME(solson): Handle different integer types correctly.
664             "add_with_overflow" => {
665                 let ty = *substs.types.get(subst::FnSpace, 0);
666                 let size = self.type_size(ty);
667                 let left = self.memory.read_int(args[0], size)?;
668                 let right = self.memory.read_int(args[1], size)?;
669                 let (n, overflowed) = unsafe {
670                     ::std::intrinsics::add_with_overflow::<i64>(left, right)
671                 };
672                 self.memory.write_int(dest, n, size)?;
673                 self.memory.write_bool(dest.offset(size as isize), overflowed)?;
674             }
675
676             "assume" => {}
677
678             "copy_nonoverlapping" => {
679                 let elem_ty = *substs.types.get(subst::FnSpace, 0);
680                 let elem_size = self.type_size(elem_ty);
681                 let src = self.memory.read_ptr(args[0])?;
682                 let dest = self.memory.read_ptr(args[1])?;
683                 let count = self.memory.read_isize(args[2])?;
684                 self.memory.copy(src, dest, count as usize * elem_size)?;
685             }
686
687             "discriminant_value" => {
688                 let ty = *substs.types.get(subst::FnSpace, 0);
689                 let adt_ptr = self.memory.read_ptr(args[0])?;
690                 let discr_val = self.read_discriminant_value(adt_ptr, ty)?;
691                 self.memory.write_uint(dest, discr_val, dest_size)?;
692             }
693
694             "forget" => {
695                 let arg_ty = *substs.types.get(subst::FnSpace, 0);
696                 let arg_size = self.type_size(arg_ty);
697                 self.memory.drop_fill(args[0], arg_size)?;
698             }
699
700             "init" => self.memory.write_repeat(dest, 0, dest_size)?,
701
702             "min_align_of" => {
703                 self.memory.write_int(dest, 1, dest_size)?;
704             }
705
706             "move_val_init" => {
707                 let ty = *substs.types.get(subst::FnSpace, 0);
708                 let ptr = self.memory.read_ptr(args[0])?;
709                 self.move_(args[1], ptr, ty)?;
710             }
711
712             // FIXME(solson): Handle different integer types correctly.
713             "mul_with_overflow" => {
714                 let ty = *substs.types.get(subst::FnSpace, 0);
715                 let size = self.type_size(ty);
716                 let left = self.memory.read_int(args[0], size)?;
717                 let right = self.memory.read_int(args[1], size)?;
718                 let (n, overflowed) = unsafe {
719                     ::std::intrinsics::mul_with_overflow::<i64>(left, right)
720                 };
721                 self.memory.write_int(dest, n, size)?;
722                 self.memory.write_bool(dest.offset(size as isize), overflowed)?;
723             }
724
725             "offset" => {
726                 let pointee_ty = *substs.types.get(subst::FnSpace, 0);
727                 let pointee_size = self.type_size(pointee_ty) as isize;
728                 let ptr_arg = args[0];
729                 let offset = self.memory.read_isize(args[1])?;
730
731                 match self.memory.read_ptr(ptr_arg) {
732                     Ok(ptr) => {
733                         let result_ptr = ptr.offset(offset as isize * pointee_size);
734                         self.memory.write_ptr(dest, result_ptr)?;
735                     }
736                     Err(EvalError::ReadBytesAsPointer) => {
737                         let addr = self.memory.read_isize(ptr_arg)?;
738                         let result_addr = addr + offset * pointee_size as i64;
739                         self.memory.write_isize(dest, result_addr)?;
740                     }
741                     Err(e) => return Err(e),
742                 }
743             }
744
745             // FIXME(solson): Handle different integer types correctly. Use primvals?
746             "overflowing_sub" => {
747                 let ty = *substs.types.get(subst::FnSpace, 0);
748                 let size = self.type_size(ty);
749                 let left = self.memory.read_int(args[0], size)?;
750                 let right = self.memory.read_int(args[1], size)?;
751                 let n = left.wrapping_sub(right);
752                 self.memory.write_int(dest, n, size)?;
753             }
754
755             "size_of" => {
756                 let ty = *substs.types.get(subst::FnSpace, 0);
757                 let size = self.type_size(ty) as u64;
758                 self.memory.write_uint(dest, size, dest_size)?;
759             }
760
761             "size_of_val" => {
762                 let ty = *substs.types.get(subst::FnSpace, 0);
763                 if self.type_is_sized(ty) {
764                     let size = self.type_size(ty) as u64;
765                     self.memory.write_uint(dest, size, dest_size)?;
766                 } else {
767                     match ty.sty {
768                         ty::TySlice(_) | ty::TyStr => {
769                             let elem_ty = ty.sequence_element_type(self.tcx);
770                             let elem_size = self.type_size(elem_ty) as u64;
771                             let ptr_size = self.memory.pointer_size as isize;
772                             let n = self.memory.read_usize(args[0].offset(ptr_size))?;
773                             self.memory.write_uint(dest, n * elem_size, dest_size)?;
774                         }
775
776                         _ => return Err(EvalError::Unimplemented(format!("unimplemented: size_of_val::<{:?}>", ty))),
777                     }
778                 }
779             }
780
781             "transmute" => {
782                 let ty = *substs.types.get(subst::FnSpace, 0);
783                 self.move_(args[0], dest, ty)?;
784             }
785             "uninit" => self.memory.mark_definedness(dest, dest_size, false)?,
786
787             name => return Err(EvalError::Unimplemented(format!("unimplemented intrinsic: {}", name))),
788         }
789
790         // Since we pushed no stack frame, the main loop will act
791         // as if the call just completed and it's returning to the
792         // current frame.
793         Ok(())
794     }
795
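    /// Emulate a small set of C ABI functions (the allocator shims and
    /// `memcmp`), matched by their link name.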
796     fn call_c_abi(
797         &mut self,
798         def_id: DefId,
799         args: &[mir::Operand<'tcx>],
800         dest: Pointer,
801         dest_size: usize,
802     ) -> EvalResult<'tcx, ()> {
803         let name = self.tcx.item_name(def_id);
804         let attrs = self.tcx.get_attrs(def_id);
805         let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") {
806             Some(ln) => ln.clone(),
807             None => name.as_str(),
808         };
809
810         let args_res: EvalResult<Vec<Pointer>> = args.iter()
811             .map(|arg| self.eval_operand(arg))
812             .collect();
813         let args = args_res?;
814
815         match &link_name[..] {
816             "__rust_allocate" => {
817                 let size = self.memory.read_usize(args[0])?;
818                 let ptr = self.memory.allocate(size as usize);
819                 self.memory.write_ptr(dest, ptr)?;
820             }
821
822             "__rust_reallocate" => {
823                 let ptr = self.memory.read_ptr(args[0])?;
824                 let size = self.memory.read_usize(args[2])?;
825                 self.memory.reallocate(ptr, size as usize)?;
826                 self.memory.write_ptr(dest, ptr)?;
827             }
828
829             "memcmp" => {
830                 let left = self.memory.read_ptr(args[0])?;
831                 let right = self.memory.read_ptr(args[1])?;
832                 let n = self.memory.read_usize(args[2])? as usize;
833
834                 let result = {
835                     let left_bytes = self.memory.read_bytes(left, n)?;
836                     let right_bytes = self.memory.read_bytes(right, n)?;
837
838                     use std::cmp::Ordering::*;
839                     match left_bytes.cmp(right_bytes) {
840                         Less => -1,
841                         Equal => 0,
842                         Greater => 1,
843                     }
844                 };
845
846                 self.memory.write_int(dest, result, dest_size)?;
847             }
848
849             _ => {
850                 return Err(EvalError::Unimplemented(format!("can't call C ABI function: {}", link_name)));
851             }
852         }
853
854         // Since we pushed no stack frame, the main loop will act
855         // as if the call just completed and it's returning to the
856         // current frame.
857         Ok(())
858     }
859
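    /// Move each operand into `dest` at its corresponding field offset.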
860     fn assign_fields<I: IntoIterator<Item = u64>>(
861         &mut self,
862         dest: Pointer,
863         offsets: I,
864         operands: &[mir::Operand<'tcx>],
865     ) -> EvalResult<'tcx, ()> {
866         for (offset, operand) in offsets.into_iter().zip(operands) {
867             let src = self.eval_operand(operand)?;
868             let src_ty = self.operand_ty(operand);
869             let field_dest = dest.offset(offset as isize);
870             self.move_(src, field_dest, src_ty)?;
871         }
872         Ok(())
873     }
874
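    /// Evaluate an rvalue and write the result into the given lvalue.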
875     fn eval_assignment(&mut self, lvalue: &mir::Lvalue<'tcx>, rvalue: &mir::Rvalue<'tcx>)
876         -> EvalResult<'tcx, ()>
877     {
878         let dest = self.eval_lvalue(lvalue)?.to_ptr();
879         let dest_ty = self.lvalue_ty(lvalue);
880         let dest_layout = self.type_layout(dest_ty);
881
882         use rustc::mir::repr::Rvalue::*;
883         match *rvalue {
884             Use(ref operand) => {
885                 let src = self.eval_operand(operand)?;
886                 self.move_(src, dest, dest_ty)?;
887             }
888
889             BinaryOp(bin_op, ref left, ref right) => {
890                 let left_ptr = self.eval_operand(left)?;
891                 let left_ty = self.operand_ty(left);
892                 let left_val = self.read_primval(left_ptr, left_ty)?;
893
894                 let right_ptr = self.eval_operand(right)?;
895                 let right_ty = self.operand_ty(right);
896                 let right_val = self.read_primval(right_ptr, right_ty)?;
897
898                 let val = primval::binary_op(bin_op, left_val, right_val)?;
899                 self.memory.write_primval(dest, val)?;
900             }
901
902             // FIXME(solson): Factor this out with BinaryOp.
903             CheckedBinaryOp(bin_op, ref left, ref right) => {
904                 let left_ptr = self.eval_operand(left)?;
905                 let left_ty = self.operand_ty(left);
906                 let left_val = self.read_primval(left_ptr, left_ty)?;
907
908                 let right_ptr = self.eval_operand(right)?;
909                 let right_ty = self.operand_ty(right);
910                 let right_val = self.read_primval(right_ptr, right_ty)?;
911
912                 let val = primval::binary_op(bin_op, left_val, right_val)?;
913                 self.memory.write_primval(dest, val)?;
914
915                 // FIXME(solson): Find the result type size properly. Perhaps refactor out
916                 // Projection calculations so we can do the equivalent of `dest.1` here.
917                 let s = self.type_size(left_ty);
918                 self.memory.write_bool(dest.offset(s as isize), false)?;
919             }
920
921             UnaryOp(un_op, ref operand) => {
922                 let ptr = self.eval_operand(operand)?;
923                 let ty = self.operand_ty(operand);
924                 let val = self.read_primval(ptr, ty)?;
925                 self.memory.write_primval(dest, primval::unary_op(un_op, val)?)?;
926             }
927
928             Aggregate(ref kind, ref operands) => {
929                 use rustc::ty::layout::Layout::*;
930                 match *dest_layout {
931                     Univariant { ref variant, .. } => {
932                         let offsets = iter::once(0)
933                             .chain(variant.offset_after_field.iter().map(|s| s.bytes()));
934                         self.assign_fields(dest, offsets, operands)?;
935                     }
936
937                     Array { .. } => {
938                         let elem_size = match dest_ty.sty {
939                             ty::TyArray(elem_ty, _) => self.type_size(elem_ty) as u64,
940                             _ => panic!("tried to assign {:?} to non-array type {:?}",
941                                         kind, dest_ty),
942                         };
943                         let offsets = (0..).map(|i| i * elem_size);
944                         self.assign_fields(dest, offsets, operands)?;
945                     }
946
947                     General { discr, ref variants, .. } => {
948                         if let mir::AggregateKind::Adt(adt_def, variant, _) = *kind {
949                             let discr_val = adt_def.variants[variant].disr_val.to_u64_unchecked();
950                             let discr_size = discr.size().bytes() as usize;
951                             self.memory.write_uint(dest, discr_val, discr_size)?;
952
953                             let offsets = variants[variant].offset_after_field.iter()
954                                 .map(|s| s.bytes());
955                             self.assign_fields(dest, offsets, operands)?;
956                         } else {
957                             panic!("tried to assign {:?} to Layout::General", kind);
958                         }
959                     }
960
961                     RawNullablePointer { nndiscr, .. } => {
962                         if let mir::AggregateKind::Adt(_, variant, _) = *kind {
963                             if nndiscr == variant as u64 {
964                                 assert_eq!(operands.len(), 1);
965                                 let operand = &operands[0];
966                                 let src = self.eval_operand(operand)?;
967                                 let src_ty = self.operand_ty(operand);
968                                 self.move_(src, dest, src_ty)?;
969                             } else {
970                                 assert_eq!(operands.len(), 0);
971                                 self.memory.write_isize(dest, 0)?;
972                             }
973                         } else {
974                             panic!("tried to assign {:?} to Layout::RawNullablePointer", kind);
975                         }
976                     }
977
978                     StructWrappedNullablePointer { nndiscr, ref nonnull, ref discrfield } => {
979                         if let mir::AggregateKind::Adt(_, variant, _) = *kind {
980                             if nndiscr == variant as u64 {
981                                 let offsets = iter::once(0)
982                                     .chain(nonnull.offset_after_field.iter().map(|s| s.bytes()));
983                                 self.assign_fields(dest, offsets, operands)?;
984                             } else {
985                                 assert_eq!(operands.len(), 0);
986                                 let offset = self.nonnull_offset(dest_ty, nndiscr, discrfield)?;
987                                 let dest = dest.offset(offset.bytes() as isize);
988                                 self.memory.write_isize(dest, 0)?;
989                             }
990                         } else {
991                             panic!("tried to assign {:?} to Layout::StructWrappedNullablePointer", kind);
992                         }
993                     }
994
995                     CEnum { discr, signed, .. } => {
996                         assert_eq!(operands.len(), 0);
997                         if let mir::AggregateKind::Adt(adt_def, variant, _) = *kind {
998                             let val = adt_def.variants[variant].disr_val.to_u64_unchecked();
999                             let size = discr.size().bytes() as usize;
1000
1001                             if signed {
1002                                 self.memory.write_int(dest, val as i64, size)?;
1003                             } else {
1004                                 self.memory.write_uint(dest, val, size)?;
1005                             }
1006                         } else {
1007                             panic!("tried to assign {:?} to Layout::CEnum", kind);
1008                         }
1009                     }
1010
1011                     _ => return Err(EvalError::Unimplemented(format!("can't handle destination layout {:?} when assigning {:?}", dest_layout, kind))),
1012                 }
1013             }
1014
1015             Repeat(ref operand, _) => {
1016                 let (elem_size, length) = match dest_ty.sty {
1017                     ty::TyArray(elem_ty, n) => (self.type_size(elem_ty), n),
1018                     _ => panic!("tried to assign array-repeat to non-array type {:?}", dest_ty),
1019                 };
1020
1021                 let src = self.eval_operand(operand)?;
1022                 for i in 0..length {
1023                     let elem_dest = dest.offset((i * elem_size) as isize);
1024                     self.memory.copy(src, elem_dest, elem_size)?;
1025                 }
1026             }
1027
1028             Len(ref lvalue) => {
1029                 let src = self.eval_lvalue(lvalue)?;
1030                 let ty = self.lvalue_ty(lvalue);
1031                 let len = match ty.sty {
1032                     ty::TyArray(_, n) => n as u64,
1033                     ty::TySlice(_) => if let LvalueExtra::Length(n) = src.extra {
1034                         n
1035                     } else {
1036                         panic!("Rvalue::Len of a slice given non-slice pointer: {:?}", src);
1037                     },
1038                     _ => panic!("Rvalue::Len expected array or slice, got {:?}", ty),
1039                 };
1040                 self.memory.write_usize(dest, len)?;
1041             }
1042
1043             Ref(_, _, ref lvalue) => {
1044                 let lv = self.eval_lvalue(lvalue)?;
1045                 self.memory.write_ptr(dest, lv.ptr)?;
1046                 match lv.extra {
1047                     LvalueExtra::None => {},
1048                     LvalueExtra::Length(len) => {
1049                         let len_ptr = dest.offset(self.memory.pointer_size as isize);
1050                         self.memory.write_usize(len_ptr, len)?;
1051                     }
1052                     LvalueExtra::DowncastVariant(..) =>
1053                         panic!("attempted to take a reference to an enum downcast lvalue"),
1054                 }
1055             }
1056
1057             Box(ty) => {
1058                 let size = self.type_size(ty);
1059                 let ptr = self.memory.allocate(size);
1060                 self.memory.write_ptr(dest, ptr)?;
1061             }
1062
1063             Cast(kind, ref operand, dest_ty) => {
1064                 use rustc::mir::repr::CastKind::*;
1065                 match kind {
1066                     Unsize => {
1067                         let src = self.eval_operand(operand)?;
1068                         let src_ty = self.operand_ty(operand);
1069                         self.move_(src, dest, src_ty)?;
1070                         let src_pointee_ty = pointee_type(src_ty).unwrap();
1071                         let dest_pointee_ty = pointee_type(dest_ty).unwrap();
1072
1073                         match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
1074                             (&ty::TyArray(_, length), &ty::TySlice(_)) => {
1075                                 let len_ptr = dest.offset(self.memory.pointer_size as isize);
1076                                 self.memory.write_usize(len_ptr, length as u64)?;
1077                             }
1078
1079                             _ => return Err(EvalError::Unimplemented(format!("can't handle cast: {:?}", rvalue))),
1080                         }
1081                     }
1082
1083                     Misc => {
1084                         let src = self.eval_operand(operand)?;
1085                         let src_ty = self.operand_ty(operand);
1086                         // FIXME(solson): Wrong for almost everything.
1087                         warn!("misc cast from {:?} to {:?}", src_ty, dest_ty);
1088                         let dest_size = self.type_size(dest_ty);
1089                         let src_size = self.type_size(src_ty);
1090
1091                         // Hack to support fat pointer -> thin pointer casts to keep tests for
1092                         // other things passing for now.
1093                         let is_fat_ptr_cast = pointee_type(src_ty).map(|ty| {
1094                             !self.type_is_sized(ty)
1095                         }).unwrap_or(false);
1096
1097                         if dest_size == src_size || is_fat_ptr_cast {
1098                             self.memory.copy(src, dest, dest_size)?;
1099                         } else {
1100                             return Err(EvalError::Unimplemented(format!("can't handle cast: {:?}", rvalue)));
1101                         }
1102                     }
1103
1104                     ReifyFnPointer => match self.operand_ty(operand).sty {
1105                         ty::TyFnDef(def_id, substs, fn_ty) => {
1106                             let fn_ptr = self.memory.create_fn_ptr(def_id, substs, fn_ty);
1107                             self.memory.write_ptr(dest, fn_ptr)?;
1108                         },
1109                         ref other => panic!("reify fn pointer on {:?}", other),
1110                     },
1111
1112                     _ => return Err(EvalError::Unimplemented(format!("can't handle cast: {:?}", rvalue))),
1113                 }
1114             }
1115
1116             InlineAsm { .. } => unimplemented!(),
1117         }
1118
1119         Ok(())
1120     }
1121
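    /// Compute the offset of the discriminant field for a
    /// `StructWrappedNullablePointer` enum by following the field path
    /// recorded in the layout (which is phrased as an LLVM GEP path).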
1122     fn nonnull_offset(&self, ty: Ty<'tcx>, nndiscr: u64, discrfield: &[u32]) -> EvalResult<'tcx, Size> {
1123         // Skip the constant 0 at the start meant for LLVM GEP.
1124         let mut path = discrfield.iter().skip(1).map(|&i| i as usize);
1125
1126         // Handle the field index for the outer non-null variant.
1127         let inner_ty = match ty.sty {
1128             ty::TyEnum(adt_def, substs) => {
1129                 let variant = &adt_def.variants[nndiscr as usize];
1130                 let index = path.next().unwrap();
1131                 let field = &variant.fields[index];
1132                 field.ty(self.tcx, substs)
1133             }
1134             _ => panic!(
1135                 "non-enum for StructWrappedNullablePointer: {}",
1136                 ty,
1137             ),
1138         };
1139
1140         self.field_path_offset(inner_ty, path)
1141     }
1142
1143     fn field_path_offset<I: Iterator<Item = usize>>(&self, mut ty: Ty<'tcx>, path: I) -> EvalResult<'tcx, Size> {
1144         let mut offset = Size::from_bytes(0);
1145
1146         // The initial 0 intended for LLVM GEP has already been skipped by the caller.
1147         for field_index in path {
1148             let field_offset = self.get_field_offset(ty, field_index)?;
1149             ty = self.get_field_ty(ty, field_index)?;
1150             offset = offset.checked_add(field_offset, &self.tcx.data_layout).unwrap();
1151         }
1152
1153         Ok(offset)
1154     }
1155
1156     fn get_field_ty(&self, ty: Ty<'tcx>, field_index: usize) -> EvalResult<'tcx, Ty<'tcx>> {
1157         match ty.sty {
1158             ty::TyStruct(adt_def, substs) => {
1159                 Ok(adt_def.struct_variant().fields[field_index].ty(self.tcx, substs))
1160             }
1161
1162             ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
1163             ty::TyRawPtr(ty::TypeAndMut { ty, .. }) |
1164             ty::TyBox(ty) => {
1165                 assert_eq!(field_index, 0);
1166                 Ok(ty)
1167             }
1168             _ => Err(EvalError::Unimplemented(format!("can't handle type: {:?}", ty))),
1169         }
1170     }
1171
1172     fn get_field_offset(&self, ty: Ty<'tcx>, field_index: usize) -> EvalResult<'tcx, Size> {
1173         let layout = self.type_layout(ty);
1174
1175         use rustc::ty::layout::Layout::*;
1176         match *layout {
1177             Univariant { .. } => {
1178                 assert_eq!(field_index, 0);
1179                 Ok(Size::from_bytes(0))
1180             }
1181             FatPointer { .. } => {
1182                 let bytes = layout::FAT_PTR_ADDR * self.memory.pointer_size;
1183                 Ok(Size::from_bytes(bytes as u64))
1184             }
1185             _ => Err(EvalError::Unimplemented(format!("can't handle type: {:?}, with layout: {:?}", ty, layout))),
1186         }
1187     }
1188
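    /// Evaluate an operand to a pointer to its value, allocating memory for
    /// constants and looking up precomputed statics and promoteds.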
1189     fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, Pointer> {
1190         use rustc::mir::repr::Operand::*;
1191         match *op {
1192             Consume(ref lvalue) => Ok(self.eval_lvalue(lvalue)?.to_ptr()),
1193             Constant(mir::Constant { ref literal, ty, .. }) => {
1194                 use rustc::mir::repr::Literal::*;
1195                 match *literal {
1196                     Value { ref value } => Ok(self.const_to_ptr(value)?),
1197                     Item { def_id, substs } => {
1198                         if let ty::TyFnDef(..) = ty.sty {
1199                             // function items are zero sized
1200                             Ok(self.memory.allocate(0))
1201                         } else {
1202                             let cid = ConstantId {
1203                                 def_id: def_id,
1204                                 substs: substs,
1205                                 kind: ConstantKind::Global,
1206                             };
1207                             Ok(*self.statics.get(&cid).expect("static should have been cached (rvalue)"))
1208                         }
1209                     },
1210                     Promoted { index } => {
1211                         let cid = ConstantId {
1212                             def_id: self.frame().def_id,
1213                             substs: self.substs(),
1214                             kind: ConstantKind::Promoted(index),
1215                         };
1216                         Ok(*self.statics.get(&cid).expect("a promoted constant hasn't been precomputed"))
1217                     },
1218                 }
1219             }
1220         }
1221     }
1222
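    /// Evaluate an lvalue to a pointer plus any fat pointer extra data,
    /// resolving locals, statics and projections.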
1223     fn eval_lvalue(&mut self, lvalue: &mir::Lvalue<'tcx>) -> EvalResult<'tcx, Lvalue> {
1224         use rustc::mir::repr::Lvalue::*;
1225         let ptr = match *lvalue {
1226             ReturnPointer => self.frame().return_ptr
1227                 .expect("ReturnPointer used in a function with no return value"),
1228             Arg(i) => self.frame().locals[i.index()],
1229             Var(i) => self.frame().locals[self.frame().var_offset + i.index()],
1230             Temp(i) => self.frame().locals[self.frame().temp_offset + i.index()],
1231
1232             Static(def_id) => {
1233                 let substs = self.tcx.mk_substs(subst::Substs::empty());
1234                 let cid = ConstantId {
1235                     def_id: def_id,
1236                     substs: substs,
1237                     kind: ConstantKind::Global,
1238                 };
1239                 *self.statics.get(&cid).expect("static should have been cached (lvalue)")
1240             },
1241
1242             Projection(ref proj) => {
1243                 let base = self.eval_lvalue(&proj.base)?;
1244                 let base_ty = self.lvalue_ty(&proj.base);
1245                 let base_layout = self.type_layout(base_ty);
1246
1247                 use rustc::mir::repr::ProjectionElem::*;
1248                 match proj.elem {
1249                     Field(field, _) => {
1250                         use rustc::ty::layout::Layout::*;
1251                         let variant = match *base_layout {
1252                             Univariant { ref variant, .. } => variant,
1253                             General { ref variants, .. } => {
1254                                 if let LvalueExtra::DowncastVariant(variant_idx) = base.extra {
1255                                     &variants[variant_idx]
1256                                 } else {
1257                                     panic!("field access on enum had no variant index");
1258                                 }
1259                             }
1260                             RawNullablePointer { .. } => {
1261                                 assert_eq!(field.index(), 0);
1262                                 return Ok(base);
1263                             }
1264                             StructWrappedNullablePointer { ref nonnull, .. } => nonnull,
1265                             _ => panic!("field access on non-product type: {:?}", base_layout),
1266                         };
1267
1268                         let offset = variant.field_offset(field.index()).bytes();
1269                         base.ptr.offset(offset as isize)
1270                     },
1271
1272                     Downcast(_, variant) => {
1273                         use rustc::ty::layout::Layout::*;
1274                         match *base_layout {
1275                             General { discr, .. } => {
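                                     // Skip past the discriminant and remember the chosen variant,
                                     // so that later field accesses use that variant's layout.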
1276                                 return Ok(Lvalue {
1277                                     ptr: base.ptr.offset(discr.size().bytes() as isize),
1278                                     extra: LvalueExtra::DowncastVariant(variant),
1279                                 });
1280                             }
1281                             RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => {
1282                                 return Ok(base);
1283                             }
1284                             _ => panic!("variant downcast on non-aggregate: {:?}", base_layout),
1285                         }
1286                     },
1287
1288                     Deref => {
1289                         let pointee_ty = pointee_type(base_ty).expect("Deref of non-pointer");
1290                         let ptr = self.memory.read_ptr(base.ptr)?;
1291                         let extra = match pointee_ty.sty {
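                                 // Slices and strings are fat pointers: the length is stored
                                 // directly after the data pointer.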
1292                             ty::TySlice(_) | ty::TyStr => {
1293                                 let len_ptr = base.ptr.offset(self.memory.pointer_size as isize);
1294                                 let len = self.memory.read_usize(len_ptr)?;
1295                                 LvalueExtra::Length(len)
1296                             }
1297                             ty::TyTrait(_) => unimplemented!(),
1298                             _ => LvalueExtra::None,
1299                         };
1300                         return Ok(Lvalue { ptr: ptr, extra: extra });
1301                     }
1302
1303                     Index(ref operand) => {
1304                         let elem_size = match base_ty.sty {
1305                             ty::TyArray(elem_ty, _) |
1306                             ty::TySlice(elem_ty) => self.type_size(elem_ty),
1307                             _ => panic!("indexing expected an array or slice, got {:?}", base_ty),
1308                         };
1309                         let n_ptr = self.eval_operand(operand)?;
1310                         let n = self.memory.read_usize(n_ptr)?;
1311                         base.ptr.offset(n as isize * elem_size as isize)
1312                     }
1313
1314                     ConstantIndex { .. } => unimplemented!(),
1315                     Subslice { .. } => unimplemented!(),
1316                 }
1317             }
1318         };
1319
1320         Ok(Lvalue { ptr: ptr, extra: LvalueExtra::None })
1321     }
1322
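         /// Returns the monomorphized type of an lvalue in the current stack frame.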
1323     fn lvalue_ty(&self, lvalue: &mir::Lvalue<'tcx>) -> Ty<'tcx> {
1324         self.monomorphize(self.mir().lvalue_ty(self.tcx, lvalue).to_ty(self.tcx), self.substs())
1325     }
1326
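         /// Returns the monomorphized type of an operand in the current stack frame.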
1327     fn operand_ty(&self, operand: &mir::Operand<'tcx>) -> Ty<'tcx> {
1328         self.monomorphize(self.mir().operand_ty(self.tcx, operand), self.substs())
1329     }
1330
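         /// Copies a value of type `ty` from `src` to `dest`; if the type needs drop,
         /// the source bytes are marked with the drop-fill pattern afterwards.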
1331     fn move_(&mut self, src: Pointer, dest: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx, ()> {
1332         let size = self.type_size(ty);
1333         self.memory.copy(src, dest, size)?;
1334         if self.type_needs_drop(ty) {
1335             self.memory.drop_fill(src, size)?;
1336         }
1337         Ok(())
1338     }
1339
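         /// Reads a primitive value (bool, integer or pointer) of type `ty` from memory at `ptr`.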
1340     pub fn read_primval(&mut self, ptr: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx, PrimVal> {
1341         use syntax::ast::{IntTy, UintTy};
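             // `isize`/`usize` have a target-dependent width, so their arms below also
             // match on `self.memory.pointer_size` (in bytes).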
1342         let val = match (self.memory.pointer_size, &ty.sty) {
1343             (_, &ty::TyBool)              => PrimVal::Bool(self.memory.read_bool(ptr)?),
1344             (_, &ty::TyInt(IntTy::I8))    => PrimVal::I8(self.memory.read_int(ptr, 1)? as i8),
1345             (2, &ty::TyInt(IntTy::Is)) |
1346             (_, &ty::TyInt(IntTy::I16))   => PrimVal::I16(self.memory.read_int(ptr, 2)? as i16),
1347             (4, &ty::TyInt(IntTy::Is)) |
1348             (_, &ty::TyInt(IntTy::I32))   => PrimVal::I32(self.memory.read_int(ptr, 4)? as i32),
1349             (8, &ty::TyInt(IntTy::Is)) |
1350             (_, &ty::TyInt(IntTy::I64))   => PrimVal::I64(self.memory.read_int(ptr, 8)? as i64),
1351             (_, &ty::TyUint(UintTy::U8))  => PrimVal::U8(self.memory.read_uint(ptr, 1)? as u8),
1352             (2, &ty::TyUint(UintTy::Us)) |
1353             (_, &ty::TyUint(UintTy::U16)) => PrimVal::U16(self.memory.read_uint(ptr, 2)? as u16),
1354             (4, &ty::TyUint(UintTy::Us)) |
1355             (_, &ty::TyUint(UintTy::U32)) => PrimVal::U32(self.memory.read_uint(ptr, 4)? as u32),
1356             (8, &ty::TyUint(UintTy::Us)) |
1357             (_, &ty::TyUint(UintTy::U64)) => PrimVal::U64(self.memory.read_uint(ptr, 8)? as u64),
1358
1359             (_, &ty::TyRef(_, ty::TypeAndMut { ty, .. })) |
1360             (_, &ty::TyRawPtr(ty::TypeAndMut { ty, .. })) => {
1361                 if self.type_is_sized(ty) {
1362                     match self.memory.read_ptr(ptr) {
1363                         Ok(p) => PrimVal::AbstractPtr(p),
1364                         Err(EvalError::ReadBytesAsPointer) => {
1365                             PrimVal::IntegerPtr(self.memory.read_usize(ptr)?)
1366                         }
1367                         Err(e) => return Err(e),
1368                     }
1369                 } else {
1370                     return Err(EvalError::Unimplemented(format!("unimplemented: primitive read of fat pointer type: {:?}", ty)));
1371                 }
1372             }
1373
1374             _ => panic!("primitive read of non-primitive type: {:?}", ty),
1375         };
1376         Ok(val)
1377     }
1378
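         /// Returns the current (topmost) stack frame.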
1379     fn frame(&self) -> &Frame<'a, 'tcx> {
1380         self.stack.last().expect("no call frames exist")
1381     }
1382
1383     pub fn frame_mut(&mut self) -> &mut Frame<'a, 'tcx> {
1384         self.stack.last_mut().expect("no call frames exist")
1385     }
1386
1387     fn mir(&self) -> CachedMir<'a, 'tcx> {
1388         self.frame().mir.clone()
1389     }
1390
1391     fn substs(&self) -> &'tcx Substs<'tcx> {
1392         self.frame().substs
1393     }
1394 }
1395
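     /// Returns the pointee type of a reference, raw pointer or `Box`, e.g. `Some(T)`
     /// for `&T`, `*mut T` and `Box<T>`; returns `None` for non-pointer types.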
1396 fn pointee_type(ptr_ty: ty::Ty) -> Option<ty::Ty> {
1397     match ptr_ty.sty {
1398         ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
1399         ty::TyRawPtr(ty::TypeAndMut { ty, .. }) |
1400         ty::TyBox(ty) => {
1401             Some(ty)
1402         }
1403         _ => None,
1404     }
1405 }
1406
1407 impl Lvalue {
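         /// Returns the lvalue's pointer, asserting that it carries no extra data
         /// (no slice length and no downcast variant).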
1408     fn to_ptr(self) -> Pointer {
1409         assert_eq!(self.extra, LvalueExtra::None);
1410         self.ptr
1411     }
1412 }
1413
1414 impl<'mir, 'tcx: 'mir> Deref for CachedMir<'mir, 'tcx> {
1415     type Target = mir::Mir<'tcx>;
1416     fn deref(&self) -> &mir::Mir<'tcx> {
1417         match *self {
1418             CachedMir::Ref(r) => r,
1419             CachedMir::Owned(ref rc) => rc,
1420         }
1421     }
1422 }
1423
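     /// A method resolved from an impl, the substitutions to call it with, and whether
     /// it is the trait's provided default rather than an item defined in the impl.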
1424 #[derive(Debug)]
1425 pub struct ImplMethod<'tcx> {
1426     pub method: Rc<ty::Method<'tcx>>,
1427     pub substs: &'tcx Substs<'tcx>,
1428     pub is_provided: bool,
1429 }
1430
1431 /// Locates the applicable definition of a method, given its name.
1432 pub fn get_impl_method<'a, 'tcx>(
1433     tcx: TyCtxt<'a, 'tcx, 'tcx>,
1434     impl_def_id: DefId,
1435     substs: &'tcx Substs<'tcx>,
1436     name: ast::Name,
1437 ) -> ImplMethod<'tcx> {
1438     assert!(!substs.types.needs_infer());
1439
1440     let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap();
1441     let trait_def = tcx.lookup_trait_def(trait_def_id);
1442
1443     match trait_def.ancestors(impl_def_id).fn_defs(tcx, name).next() {
1444         Some(node_item) => {
1445             let substs = tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
1446                 let substs = traits::translate_substs(&infcx, impl_def_id,
1447                                                       substs, node_item.node);
1448                 tcx.lift(&substs).unwrap_or_else(|| {
1449                     bug!("get_impl_method: translate_substs \
1450                           returned {:?} which contains inference types/regions",
1451                          substs);
1452                 })
1453             });
1454             ImplMethod {
1455                 method: node_item.item,
1456                 substs: substs,
1457                 is_provided: node_item.node.is_from_trait(),
1458             }
1459         }
1460         None => {
1461             bug!("method {:?} not found in {:?}", name, impl_def_id)
1462         }
1463     }
1464 }
1465
1466 // TODO(solson): Upstream these methods into rustc::ty::layout.
1467
1468 trait IntegerExt {
1469     fn size(self) -> Size;
1470 }
1471
1472 impl IntegerExt for layout::Integer {
1473     fn size(self) -> Size {
1474         use rustc::ty::layout::Integer::*;
1475         match self {
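                 // A 1-bit integer still occupies a whole byte in the interpreter's memory.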
1476             I1 | I8 => Size::from_bits(8),
1477             I16 => Size::from_bits(16),
1478             I32 => Size::from_bits(32),
1479             I64 => Size::from_bits(64),
1480         }
1481     }
1482 }
1483
1484 trait StructExt {
1485     fn field_offset(&self, index: usize) -> Size;
1486 }
1487
1488 impl StructExt for layout::Struct {
1489     fn field_offset(&self, index: usize) -> Size {
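             // `offset_after_field[i]` is the offset just past field `i`, so each field
             // starts where the previous one ends; the first field starts at offset 0.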
1490         if index == 0 {
1491             Size::from_bytes(0)
1492         } else {
1493             self.offset_after_field[index - 1]
1494         }
1495     }
1496 }