src/interpreter.rs (rust.git, commit 8e8565b50c55c302a43119438cd214be0124e321)
1 use rustc::middle::const_val;
2 use rustc::hir::def_id::DefId;
3 use rustc::mir::mir_map::MirMap;
4 use rustc::mir::repr as mir;
5 use rustc::traits::{self, ProjectionMode};
6 use rustc::ty::fold::TypeFoldable;
7 use rustc::ty::layout::{self, Layout, Size};
8 use rustc::ty::subst::{self, Subst, Substs};
9 use rustc::ty::{self, Ty, TyCtxt};
10 use rustc::util::nodemap::DefIdMap;
11 use std::cell::RefCell;
12 use std::ops::{Deref, DerefMut};
13 use std::rc::Rc;
14 use std::{iter, mem};
15 use syntax::ast;
16 use syntax::attr;
17 use syntax::codemap::{self, DUMMY_SP};
18
19 use error::{EvalError, EvalResult};
20 use memory::{Memory, Pointer};
21 use primval::{self, PrimVal};
22
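/// When true, `log` prints every basic block, statement, and terminator as it is evaluated,
/// indented by the current call-stack depth.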
23 const TRACE_EXECUTION: bool = true;
24
25 struct GlobalEvalContext<'a, 'tcx: 'a> {
26     /// The results of the type checker, from rustc.
27     tcx: TyCtxt<'a, 'tcx, 'tcx>,
28
29     /// A mapping from NodeIds to Mir, from rustc. Only contains MIR for crate-local items.
30     mir_map: &'a MirMap<'tcx>,
31
32     /// A local cache from DefIds to Mir for non-crate-local items.
33     mir_cache: RefCell<DefIdMap<Rc<mir::Mir<'tcx>>>>,
34
35     /// The virtual memory system.
36     memory: Memory,
37
38     /// A stack of the type substitutions for the current function invocation. It exists
39     /// separately from the call stack (`FnEvalContext::stack`) because it must contain the
40     /// `Substs` for a function while *creating* the `Frame` for that same function.
41     substs_stack: Vec<&'tcx Substs<'tcx>>,
42
43     // TODO(solson): Merge with `substs_stack`. Also try restructuring `Frame` to accommodate.
44     /// A stack of the things necessary to print good stack traces:
45     ///   * Function DefIds and Substs to print proper substituted function names.
46     ///   * Spans pointing to specific function calls in the source.
47     name_stack: Vec<(DefId, &'tcx Substs<'tcx>, codemap::Span)>,
48 }
49
50 struct FnEvalContext<'a, 'b: 'a + 'mir, 'mir, 'tcx: 'b> {
51     gecx: &'a mut GlobalEvalContext<'b, 'tcx>,
52
53     /// The virtual call stack.
54     stack: Vec<Frame<'mir, 'tcx>>,
55 }
56
57 impl<'a, 'b, 'mir, 'tcx> Deref for FnEvalContext<'a, 'b, 'mir, 'tcx> {
58     type Target = GlobalEvalContext<'b, 'tcx>;
59     fn deref(&self) -> &Self::Target {
60         self.gecx
61     }
62 }
63
64 impl<'a, 'b, 'mir, 'tcx> DerefMut for FnEvalContext<'a, 'b, 'mir, 'tcx> {
65     fn deref_mut(&mut self) -> &mut Self::Target {
66         self.gecx
67     }
68 }
69
70 /// A stack frame.
71 struct Frame<'a, 'tcx: 'a> {
72     /// The MIR for the function called on this frame.
73     mir: CachedMir<'a, 'tcx>,
74
75     /// The block this frame will execute when a function call returns back to this frame.
76     next_block: mir::BasicBlock,
77
78     /// A pointer for writing the return value of the current call if it's not a diverging call.
79     return_ptr: Option<Pointer>,
80
81     /// The list of locals for the current function, stored in order as
82     /// `[arguments..., variables..., temporaries...]`. The variables begin at `self.var_offset`
83     /// and the temporaries at `self.temp_offset`.
84     locals: Vec<Pointer>,
85
86     /// The offset of the first variable in `self.locals`.
87     var_offset: usize,
88
89     /// The offset of the first temporary in `self.locals`.
90     temp_offset: usize,
91 }
92
93 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
94 struct Lvalue {
95     ptr: Pointer,
96     extra: LvalueExtra,
97 }
98
99 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
100 enum LvalueExtra {
101     None,
102     Length(u64),
103     // TODO(solson): Vtable(memory::AllocId),
104     DowncastVariant(usize),
105 }
106
107 #[derive(Clone)]
108 enum CachedMir<'mir, 'tcx: 'mir> {
109     Ref(&'mir mir::Mir<'tcx>),
110     Owned(Rc<mir::Mir<'tcx>>)
111 }
112
113 /// Represents the action to be taken in the main loop as a result of executing a terminator.
114 enum TerminatorTarget {
115     /// Make a local jump to the given block.
116     Block(mir::BasicBlock),
117
118     /// Start executing from the new current frame. (For function calls.)
119     Call,
120
121     /// Stop executing the current frame and resume the previous frame.
122     Return,
123 }
124
125 impl<'a, 'tcx> GlobalEvalContext<'a, 'tcx> {
126     fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir_map: &'a MirMap<'tcx>) -> Self {
127         GlobalEvalContext {
128             tcx: tcx,
129             mir_map: mir_map,
130             mir_cache: RefCell::new(DefIdMap()),
131             memory: Memory::new(),
132             substs_stack: Vec::new(),
133             name_stack: Vec::new(),
134         }
135     }
136 }
137
138 impl<'a, 'b, 'mir, 'tcx> FnEvalContext<'a, 'b, 'mir, 'tcx> {
139     fn new(gecx: &'a mut GlobalEvalContext<'b, 'tcx>) -> Self {
140         FnEvalContext {
141             gecx: gecx,
142             stack: Vec::new(),
143         }
144     }
145
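    /// If `r` is an error, emits a rustc diagnostic at `span`, adding one note per enclosing
    /// call recorded in `name_stack`, and then passes `r` through unchanged.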
146     fn maybe_report<T>(&self, span: codemap::Span, r: EvalResult<T>) -> EvalResult<T> {
147         if let Err(ref e) = r {
148             let mut err = self.tcx.sess.struct_span_err(span, &e.to_string());
149             for &(def_id, substs, span) in self.name_stack.iter().rev() {
150                 // FIXME(solson): Find a way to do this without this Display impl hack.
151                 use rustc::util::ppaux;
152                 use std::fmt;
153                 struct Instance<'tcx>(DefId, &'tcx Substs<'tcx>);
154                 impl<'tcx> fmt::Display for Instance<'tcx> {
155                     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
156                         ppaux::parameterized(f, self.1, self.0, ppaux::Ns::Value, &[],
157                             |tcx| tcx.lookup_item_type(self.0).generics)
158                     }
159                 }
160                 err.span_note(span, &format!("inside call to {}", Instance(def_id, substs)));
161             }
162             err.emit();
163         }
164         r
165     }
166
167     fn log<F>(&self, extra_indent: usize, f: F) where F: FnOnce() {
168         if !TRACE_EXECUTION { return; }
169         let indent = self.stack.len() + extra_indent;
170         for _ in 0..indent { print!("    "); }
171         f();
172         println!("");
173     }
174
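    /// The interpreter's main loop: executes the statements and terminator of the current basic
    /// block, following local jumps directly and restarting the loop whenever a call pushes a
    /// new frame or a return pops the current one.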
175     fn run(&mut self) -> EvalResult<()> {
176         'outer: while !self.stack.is_empty() {
177             let mut current_block = self.frame().next_block;
178
179             loop {
180                 self.log(0, || print!("// {:?}", current_block));
181                 let current_mir = self.mir().clone(); // Cloning a reference.
182                 let block_data = current_mir.basic_block_data(current_block);
183
184                 for stmt in &block_data.statements {
185                     self.log(0, || print!("{:?}", stmt));
186                     let mir::StatementKind::Assign(ref lvalue, ref rvalue) = stmt.kind;
187                     let result = self.eval_assignment(lvalue, rvalue);
188                     self.maybe_report(stmt.span, result)?;
189                 }
190
191                 let terminator = block_data.terminator();
192                 self.log(0, || print!("{:?}", terminator.kind));
193
194                 let result = self.eval_terminator(terminator);
195                 match self.maybe_report(terminator.span, result)? {
196                     TerminatorTarget::Block(block) => current_block = block,
197                     TerminatorTarget::Return => {
198                         self.pop_stack_frame();
199                         self.name_stack.pop();
200                         continue 'outer;
201                     }
202                     TerminatorTarget::Call => continue 'outer,
203                 }
204             }
205         }
206
207         Ok(())
208     }
209
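    /// Evaluates a separate piece of MIR (used for statics and promoted constants) on a fresh
    /// `FnEvalContext` that shares this one's globals, returning a pointer to the newly
    /// allocated return value if the MIR converges.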
210     fn call_nested(&mut self, mir: &mir::Mir<'tcx>) -> EvalResult<Option<Pointer>> {
211         let mut nested_fecx = FnEvalContext::new(self.gecx);
212
213         let return_ptr = match mir.return_ty {
214             ty::FnConverging(ty) => {
215                 let size = nested_fecx.type_size(ty);
216                 Some(nested_fecx.memory.allocate(size))
217             }
218             ty::FnDiverging => None,
219         };
220
221         let substs = nested_fecx.substs();
222         nested_fecx.push_stack_frame(CachedMir::Ref(mir), substs, return_ptr);
223         nested_fecx.run()?;
224         Ok(return_ptr)
225     }
226
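    /// Pushes `substs` onto the substitution stack, allocates memory for every local (arguments,
    /// then variables, then temporaries, matching the `Frame::locals` layout), and pushes a new
    /// `Frame` that starts execution at `START_BLOCK`.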
227     fn push_stack_frame(&mut self, mir: CachedMir<'mir, 'tcx>, substs: &'tcx Substs<'tcx>,
228         return_ptr: Option<Pointer>)
229     {
230         self.substs_stack.push(substs);
231
232         let arg_tys = mir.arg_decls.iter().map(|a| a.ty);
233         let var_tys = mir.var_decls.iter().map(|v| v.ty);
234         let temp_tys = mir.temp_decls.iter().map(|t| t.ty);
235
236         let locals: Vec<Pointer> = arg_tys.chain(var_tys).chain(temp_tys).map(|ty| {
237             let size = self.type_size(ty);
238             self.memory.allocate(size)
239         }).collect();
240
241         let num_args = mir.arg_decls.len();
242         let num_vars = mir.var_decls.len();
243
244         self.stack.push(Frame {
245             mir: mir.clone(),
246             next_block: mir::START_BLOCK,
247             return_ptr: return_ptr,
248             locals: locals,
249             var_offset: num_args,
250             temp_offset: num_args + num_vars,
251         });
252     }
253
254     fn pop_stack_frame(&mut self) {
255         let _frame = self.stack.pop().expect("tried to pop a stack frame, but there were none");
256         // TODO(solson): Deallocate local variables.
257         self.substs_stack.pop();
258     }
259
260     fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>)
261             -> EvalResult<TerminatorTarget> {
262         use rustc::mir::repr::TerminatorKind::*;
263         let target = match terminator.kind {
264             Return => TerminatorTarget::Return,
265
266             Goto { target } => TerminatorTarget::Block(target),
267
268             If { ref cond, targets: (then_target, else_target) } => {
269                 let cond_ptr = self.eval_operand(cond)?;
270                 let cond_val = self.memory.read_bool(cond_ptr)?;
271                 TerminatorTarget::Block(if cond_val { then_target } else { else_target })
272             }
273
274             SwitchInt { ref discr, ref values, ref targets, .. } => {
275                 let discr_ptr = self.eval_lvalue(discr)?.to_ptr();
276                 let discr_size = self
277                     .type_layout(self.lvalue_ty(discr))
278                     .size(&self.tcx.data_layout)
279                     .bytes() as usize;
280                 let discr_val = self.memory.read_uint(discr_ptr, discr_size)?;
281
282                 // Branch to the `otherwise` case by default, if no match is found.
283                 let mut target_block = targets[targets.len() - 1];
284
285                 for (index, val_const) in values.iter().enumerate() {
286                     let ptr = self.const_to_ptr(val_const)?;
287                     let val = self.memory.read_uint(ptr, discr_size)?;
288                     if discr_val == val {
289                         target_block = targets[index];
290                         break;
291                     }
292                 }
293
294                 TerminatorTarget::Block(target_block)
295             }
296
297             Switch { ref discr, ref targets, adt_def } => {
298                 let adt_ptr = self.eval_lvalue(discr)?.to_ptr();
299                 let adt_ty = self.lvalue_ty(discr);
300                 let discr_val = self.read_discriminant_value(adt_ptr, adt_ty)?;
301                 let matching = adt_def.variants.iter()
302                     .position(|v| discr_val == v.disr_val.to_u64_unchecked());
303
304                 match matching {
305                     Some(i) => TerminatorTarget::Block(targets[i]),
306                     None => return Err(EvalError::InvalidDiscriminant),
307                 }
308             }
309
310             Call { ref func, ref args, ref destination, .. } => {
311                 let mut return_ptr = None;
312                 if let Some((ref lv, target)) = *destination {
313                     self.frame_mut().next_block = target;
314                     return_ptr = Some(self.eval_lvalue(lv)?.to_ptr());
315                 }
316
317                 let func_ty = self.operand_ty(func);
318                 match func_ty.sty {
319                     ty::TyFnDef(def_id, substs, fn_ty) => {
320                         use syntax::abi::Abi;
321                         match fn_ty.abi {
322                             Abi::RustIntrinsic => {
323                                 let name = self.tcx.item_name(def_id).as_str();
324                                 match fn_ty.sig.0.output {
325                                     ty::FnConverging(ty) => {
326                                         let size = self.type_size(ty);
327                                         let ret = return_ptr.unwrap();
328                                         self.call_intrinsic(&name, substs, args, ret, size)?
329                                     }
330                                     ty::FnDiverging => unimplemented!(),
331                                 }
332                             }
333
334                             Abi::C => {
335                                 match fn_ty.sig.0.output {
336                                     ty::FnConverging(ty) => {
337                                         let size = self.type_size(ty);
338                                         self.call_c_abi(def_id, args, return_ptr.unwrap(), size)?
339                                     }
340                                     ty::FnDiverging => unimplemented!(),
341                                 }
342                             }
343
344                             Abi::Rust | Abi::RustCall => {
345                                 // TODO(solson): Adjust the first argument when calling a Fn or
346                                 // FnMut closure via FnOnce::call_once.
347
348                                 // Only trait methods can have a Self parameter.
349                                 let (resolved_def_id, resolved_substs) = if substs.self_ty().is_some() {
350                                     self.trait_method(def_id, substs)
351                                 } else {
352                                     (def_id, substs)
353                                 };
354
355                                 let mut arg_srcs = Vec::new();
356                                 for arg in args {
357                                     let src = self.eval_operand(arg)?;
358                                     let src_ty = self.operand_ty(arg);
359                                     arg_srcs.push((src, src_ty));
360                                 }
361
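                                // For the "rust-call" ABI the last argument is a tuple holding
                                // the real arguments; replace it with one argument source per
                                // tuple field, using the tuple layout's field offsets.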
362                                 if fn_ty.abi == Abi::RustCall && !args.is_empty() {
363                                     arg_srcs.pop();
364                                     let last_arg = args.last().unwrap();
365                                     let last = self.eval_operand(last_arg)?;
366                                     let last_ty = self.operand_ty(last_arg);
367                                     let last_layout = self.type_layout(last_ty);
368                                     match (&last_ty.sty, last_layout) {
369                                         (&ty::TyTuple(fields),
370                                          &Layout::Univariant { ref variant, .. }) => {
371                                             let offsets = iter::once(0)
372                                                 .chain(variant.offset_after_field.iter()
373                                                     .map(|s| s.bytes()));
374                                             for (offset, ty) in offsets.zip(fields) {
375                                                 let src = last.offset(offset as isize);
376                                                 arg_srcs.push((src, ty));
377                                             }
378                                         }
379                                         ty => panic!("expected tuple as last argument in function with 'rust-call' ABI, got {:?}", ty),
380                                     }
381                                 }
382
383                                 let mir = self.load_mir(resolved_def_id);
384                                 self.name_stack.push((def_id, substs, terminator.span));
385                                 self.push_stack_frame(mir, resolved_substs, return_ptr);
386
387                                 for (i, (src, src_ty)) in arg_srcs.into_iter().enumerate() {
388                                     let dest = self.frame().locals[i];
389                                     self.move_(src, dest, src_ty)?;
390                                 }
391
392                                 TerminatorTarget::Call
393                             }
394
395                             abi => panic!("can't handle function with {:?} ABI", abi),
396                         }
397                     }
398
399                     _ => panic!("can't handle callee of type {:?}", func_ty),
400                 }
401             }
402
403             Drop { ref value, target, .. } => {
404                 let ptr = self.eval_lvalue(value)?.to_ptr();
405                 let ty = self.lvalue_ty(value);
406                 self.drop(ptr, ty)?;
407                 TerminatorTarget::Block(target)
408             }
409
410             Resume => unimplemented!(),
411         };
412
413         Ok(target)
414     }
415
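    /// Drops the value of type `ty` stored at `ptr`. This currently only frees `Box`
    /// allocations (recursively) and then writes the drop-fill pattern over the value;
    /// user-defined `Drop` impls are not called yet (see the TODO below).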
416     fn drop(&mut self, ptr: Pointer, ty: Ty<'tcx>) -> EvalResult<()> {
417         if !self.type_needs_drop(ty) {
418             self.log(1, || print!("no need to drop {:?}", ty));
419             return Ok(());
420         }
421         self.log(1, || print!("need to drop {:?}", ty));
422
423         // TODO(solson): Call user-defined Drop::drop impls.
424
425         match ty.sty {
426             ty::TyBox(contents_ty) => {
427                 match self.memory.read_ptr(ptr) {
428                     Ok(contents_ptr) => {
429                         self.drop(contents_ptr, contents_ty)?;
430                         self.log(1, || print!("deallocating box"));
431                         self.memory.deallocate(contents_ptr)?;
432                     }
433                     Err(EvalError::ReadBytesAsPointer) => {
434                         let size = self.memory.pointer_size;
435                         let possible_drop_fill = self.memory.read_bytes(ptr, size)?;
436                         if possible_drop_fill.iter().all(|&b| b == mem::POST_DROP_U8) {
437                             return Ok(());
438                         } else {
439                             return Err(EvalError::ReadBytesAsPointer);
440                         }
441                     }
442                     Err(e) => return Err(e),
443                 }
444             }
445
446             // TODO(solson): Implement drop for other relevant types (e.g. aggregates).
447             _ => {}
448         }
449
450         // Filling drop.
451         // FIXME(solson): Trait objects (with no static size) probably get filled, too.
452         let size = self.type_size(ty);
453         self.memory.drop_fill(ptr, size)?;
454
455         Ok(())
456     }
457
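    /// Reads the discriminant of the enum value at `adt_ptr`, handling the layouts rustc may
    /// choose: tagged enums, C-like enums, and both nullable-pointer optimizations. Non-enum
    /// layouts report a discriminant of 0.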
458     fn read_discriminant_value(&self, adt_ptr: Pointer, adt_ty: Ty<'tcx>) -> EvalResult<u64> {
459         use rustc::ty::layout::Layout::*;
460         let adt_layout = self.type_layout(adt_ty);
461
462         let discr_val = match *adt_layout {
463             General { discr, .. } | CEnum { discr, .. } => {
464                 let discr_size = discr.size().bytes();
465                 self.memory.read_uint(adt_ptr, discr_size as usize)?
466             }
467
468             RawNullablePointer { nndiscr, .. } => {
469                 self.read_nonnull_discriminant_value(adt_ptr, nndiscr)?
470             }
471
472             StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
473                 let offset = self.nonnull_offset(adt_ty, nndiscr, discrfield);
474                 let nonnull = adt_ptr.offset(offset.bytes() as isize);
475                 self.read_nonnull_discriminant_value(nonnull, nndiscr)?
476             }
477
478             // The discriminant_value intrinsic returns 0 for non-sum types.
479             Array { .. } | FatPointer { .. } | Scalar { .. } | Univariant { .. } |
480             Vector { .. } => 0,
481         };
482
483         Ok(discr_val)
484     }
485
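    /// For nullable-pointer enum layouts: the discriminant is encoded by whether the
    /// pointer-sized value at `ptr` is null. `nndiscr` is the discriminant of the variant
    /// represented by a non-null (or non-zero) value.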
486     fn read_nonnull_discriminant_value(&self, ptr: Pointer, nndiscr: u64) -> EvalResult<u64> {
487         let not_null = match self.memory.read_usize(ptr) {
488             Ok(0) => false,
489             Ok(_) | Err(EvalError::ReadPointerAsBytes) => true,
490             Err(e) => return Err(e),
491         };
492         assert!(nndiscr == 0 || nndiscr == 1);
493         Ok(if not_null { nndiscr } else { 1 - nndiscr })
494     }
495
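    /// Emulates a compiler intrinsic by operating directly on interpreter memory. `dest` and
    /// `dest_size` describe where, and how large, the (converging) return value is.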
496     fn call_intrinsic(
497         &mut self,
498         name: &str,
499         substs: &'tcx Substs<'tcx>,
500         args: &[mir::Operand<'tcx>],
501         dest: Pointer,
502         dest_size: usize
503     ) -> EvalResult<TerminatorTarget> {
504         let args_res: EvalResult<Vec<Pointer>> = args.iter()
505             .map(|arg| self.eval_operand(arg))
506             .collect();
507         let args = args_res?;
508
509         match name {
510             // FIXME(solson): Handle different integer types correctly.
511             "add_with_overflow" => {
512                 let ty = *substs.types.get(subst::FnSpace, 0);
513                 let size = self.type_size(ty);
514                 let left = self.memory.read_int(args[0], size)?;
515                 let right = self.memory.read_int(args[1], size)?;
516                 let (n, overflowed) = unsafe {
517                     ::std::intrinsics::add_with_overflow::<i64>(left, right)
518                 };
519                 self.memory.write_int(dest, n, size)?;
520                 self.memory.write_bool(dest.offset(size as isize), overflowed)?;
521             }
522
523             "assume" => {}
524
525             "copy_nonoverlapping" => {
526                 let elem_ty = *substs.types.get(subst::FnSpace, 0);
527                 let elem_size = self.type_size(elem_ty);
528                 let src = self.memory.read_ptr(args[0])?;
529                 let dest = self.memory.read_ptr(args[1])?;
530                 let count = self.memory.read_isize(args[2])?;
531                 self.memory.copy(src, dest, count as usize * elem_size)?;
532             }
533
534             "discriminant_value" => {
535                 let ty = *substs.types.get(subst::FnSpace, 0);
536                 let adt_ptr = self.memory.read_ptr(args[0])?;
537                 let discr_val = self.read_discriminant_value(adt_ptr, ty)?;
538                 self.memory.write_uint(dest, discr_val, dest_size)?;
539             }
540
541             "forget" => {
542                 let arg_ty = *substs.types.get(subst::FnSpace, 0);
543                 let arg_size = self.type_size(arg_ty);
544                 self.memory.drop_fill(args[0], arg_size)?;
545             }
546
547             "init" => self.memory.write_repeat(dest, 0, dest_size)?,
548
549             "min_align_of" => {
550                 self.memory.write_int(dest, 1, dest_size)?;
551             }
552
553             "move_val_init" => {
554                 let ty = *substs.types.get(subst::FnSpace, 0);
555                 let ptr = self.memory.read_ptr(args[0])?;
556                 self.move_(args[1], ptr, ty)?;
557             }
558
559             // FIXME(solson): Handle different integer types correctly.
560             "mul_with_overflow" => {
561                 let ty = *substs.types.get(subst::FnSpace, 0);
562                 let size = self.type_size(ty);
563                 let left = self.memory.read_int(args[0], size)?;
564                 let right = self.memory.read_int(args[1], size)?;
565                 let (n, overflowed) = unsafe {
566                     ::std::intrinsics::mul_with_overflow::<i64>(left, right)
567                 };
568                 self.memory.write_int(dest, n, size)?;
569                 self.memory.write_bool(dest.offset(size as isize), overflowed)?;
570             }
571
572             "offset" => {
573                 let pointee_ty = *substs.types.get(subst::FnSpace, 0);
574                 let pointee_size = self.type_size(pointee_ty) as isize;
575                 let ptr_arg = args[0];
576                 let offset = self.memory.read_isize(args[1])?;
577
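                // The pointer argument may be an abstract pointer into an allocation or a plain
                // integer address; offset abstract pointers within their allocation and fall
                // back to integer arithmetic otherwise.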
578                 match self.memory.read_ptr(ptr_arg) {
579                     Ok(ptr) => {
580                         let result_ptr = ptr.offset(offset as isize * pointee_size);
581                         self.memory.write_ptr(dest, result_ptr)?;
582                     }
583                     Err(EvalError::ReadBytesAsPointer) => {
584                         let addr = self.memory.read_isize(ptr_arg)?;
585                         let result_addr = addr + offset * pointee_size as i64;
586                         self.memory.write_isize(dest, result_addr)?;
587                     }
588                     Err(e) => return Err(e),
589                 }
590             }
591
592             // FIXME(solson): Handle different integer types correctly. Use primvals?
593             "overflowing_sub" => {
594                 let ty = *substs.types.get(subst::FnSpace, 0);
595                 let size = self.type_size(ty);
596                 let left = self.memory.read_int(args[0], size)?;
597                 let right = self.memory.read_int(args[1], size)?;
598                 let n = left.wrapping_sub(right);
599                 self.memory.write_int(dest, n, size)?;
600             }
601
602             "size_of" => {
603                 let ty = *substs.types.get(subst::FnSpace, 0);
604                 let size = self.type_size(ty) as u64;
605                 self.memory.write_uint(dest, size, dest_size)?;
606             }
607
608             "size_of_val" => {
609                 let ty = *substs.types.get(subst::FnSpace, 0);
610                 if self.type_is_sized(ty) {
611                     let size = self.type_size(ty) as u64;
612                     self.memory.write_uint(dest, size, dest_size)?;
613                 } else {
614                     match ty.sty {
615                         ty::TySlice(_) | ty::TyStr => {
616                             let elem_ty = ty.sequence_element_type(self.tcx);
617                             let elem_size = self.type_size(elem_ty) as u64;
618                             let ptr_size = self.memory.pointer_size as isize;
619                             let n = self.memory.read_usize(args[0].offset(ptr_size))?;
620                             self.memory.write_uint(dest, n * elem_size, dest_size)?;
621                         }
622
623                         _ => panic!("unimplemented: size_of_val::<{:?}>", ty),
624                     }
625                 }
626             }
627
628             "transmute" => {
629                 let ty = *substs.types.get(subst::FnSpace, 0);
630                 self.move_(args[0], dest, ty)?;
631             }
632             "uninit" => self.memory.mark_definedness(dest, dest_size, false)?,
633
634             name => panic!("can't handle intrinsic: {}", name),
635         }
636
637         // Since we pushed no stack frame, the main loop will act
638         // as if the call just completed and it's returning to the
639         // current frame.
640         Ok(TerminatorTarget::Call)
641     }
642
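    /// Emulates the few C ABI functions the interpreter knows about (the Rust allocator entry
    /// points and `memcmp`), dispatching on the `#[link_name]` attribute when one is present.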
643     fn call_c_abi(
644         &mut self,
645         def_id: DefId,
646         args: &[mir::Operand<'tcx>],
647         dest: Pointer,
648         dest_size: usize,
649     ) -> EvalResult<TerminatorTarget> {
650         let name = self.tcx.item_name(def_id);
651         let attrs = self.tcx.get_attrs(def_id);
652         let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") {
653             Some(ln) => ln.clone(),
654             None => name.as_str(),
655         };
656
657         let args_res: EvalResult<Vec<Pointer>> = args.iter()
658             .map(|arg| self.eval_operand(arg))
659             .collect();
660         let args = args_res?;
661
662         match &link_name[..] {
663             "__rust_allocate" => {
664                 let size = self.memory.read_usize(args[0])?;
665                 let ptr = self.memory.allocate(size as usize);
666                 self.memory.write_ptr(dest, ptr)?;
667             }
668
669             "__rust_reallocate" => {
670                 let ptr = self.memory.read_ptr(args[0])?;
671                 let size = self.memory.read_usize(args[2])?;
672                 self.memory.reallocate(ptr, size as usize)?;
673                 self.memory.write_ptr(dest, ptr)?;
674             }
675
676             "memcmp" => {
677                 let left = self.memory.read_ptr(args[0])?;
678                 let right = self.memory.read_ptr(args[1])?;
679                 let n = self.memory.read_usize(args[2])? as usize;
680
681                 let result = {
682                     let left_bytes = self.memory.read_bytes(left, n)?;
683                     let right_bytes = self.memory.read_bytes(right, n)?;
684
685                     use std::cmp::Ordering::*;
686                     match left_bytes.cmp(right_bytes) {
687                         Less => -1,
688                         Equal => 0,
689                         Greater => 1,
690                     }
691                 };
692
693                 self.memory.write_int(dest, result, dest_size)?;
694             }
695
696             _ => panic!("can't call C ABI function: {}", link_name),
697         }
698
699         // Since we pushed no stack frame, the main loop will act
700         // as if the call just completed and it's returning to the
701         // current frame.
702         Ok(TerminatorTarget::Call)
703     }
704
705     fn assign_fields<I: IntoIterator<Item = u64>>(
706         &mut self,
707         dest: Pointer,
708         offsets: I,
709         operands: &[mir::Operand<'tcx>],
710     ) -> EvalResult<()> {
711         for (offset, operand) in offsets.into_iter().zip(operands) {
712             let src = self.eval_operand(operand)?;
713             let src_ty = self.operand_ty(operand);
714             let field_dest = dest.offset(offset as isize);
715             self.move_(src, field_dest, src_ty)?;
716         }
717         Ok(())
718     }
719
720     fn eval_assignment(&mut self, lvalue: &mir::Lvalue<'tcx>, rvalue: &mir::Rvalue<'tcx>)
721         -> EvalResult<()>
722     {
723         let dest = self.eval_lvalue(lvalue)?.to_ptr();
724         let dest_ty = self.lvalue_ty(lvalue);
725         let dest_layout = self.type_layout(dest_ty);
726
727         use rustc::mir::repr::Rvalue::*;
728         match *rvalue {
729             Use(ref operand) => {
730                 let src = self.eval_operand(operand)?;
731                 self.move_(src, dest, dest_ty)?;
732             }
733
734             BinaryOp(bin_op, ref left, ref right) => {
735                 let left_ptr = self.eval_operand(left)?;
736                 let left_ty = self.operand_ty(left);
737                 let left_val = self.read_primval(left_ptr, left_ty)?;
738
739                 let right_ptr = self.eval_operand(right)?;
740                 let right_ty = self.operand_ty(right);
741                 let right_val = self.read_primval(right_ptr, right_ty)?;
742
743                 let val = primval::binary_op(bin_op, left_val, right_val)?;
744                 self.memory.write_primval(dest, val)?;
745             }
746
747             UnaryOp(un_op, ref operand) => {
748                 let ptr = self.eval_operand(operand)?;
749                 let ty = self.operand_ty(operand);
750                 let val = self.read_primval(ptr, ty)?;
751                 self.memory.write_primval(dest, primval::unary_op(un_op, val))?;
752             }
753
754             Aggregate(ref kind, ref operands) => {
755                 use rustc::ty::layout::Layout::*;
756                 match *dest_layout {
757                     Univariant { ref variant, .. } => {
758                         let offsets = iter::once(0)
759                             .chain(variant.offset_after_field.iter().map(|s| s.bytes()));
760                         self.assign_fields(dest, offsets, operands)?;
761                     }
762
763                     Array { .. } => {
764                         let elem_size = match dest_ty.sty {
765                             ty::TyArray(elem_ty, _) => self.type_size(elem_ty) as u64,
766                             _ => panic!("tried to assign {:?} to non-array type {:?}",
767                                         kind, dest_ty),
768                         };
769                         let offsets = (0..).map(|i| i * elem_size);
770                         self.assign_fields(dest, offsets, operands)?;
771                     }
772
773                     General { discr, ref variants, .. } => {
774                         if let mir::AggregateKind::Adt(adt_def, variant, _) = *kind {
775                             let discr_val = adt_def.variants[variant].disr_val.to_u64_unchecked();
776                             let discr_size = discr.size().bytes() as usize;
777                             self.memory.write_uint(dest, discr_val, discr_size)?;
778
779                             let offsets = variants[variant].offset_after_field.iter()
780                                 .map(|s| s.bytes());
781                             self.assign_fields(dest, offsets, operands)?;
782                         } else {
783                             panic!("tried to assign {:?} to Layout::General", kind);
784                         }
785                     }
786
787                     RawNullablePointer { nndiscr, .. } => {
788                         if let mir::AggregateKind::Adt(_, variant, _) = *kind {
789                             if nndiscr == variant as u64 {
790                                 assert_eq!(operands.len(), 1);
791                                 let operand = &operands[0];
792                                 let src = self.eval_operand(operand)?;
793                                 let src_ty = self.operand_ty(operand);
794                                 self.move_(src, dest, src_ty)?;
795                             } else {
796                                 assert_eq!(operands.len(), 0);
797                                 self.memory.write_isize(dest, 0)?;
798                             }
799                         } else {
800                             panic!("tried to assign {:?} to Layout::RawNullablePointer", kind);
801                         }
802                     }
803
804                     StructWrappedNullablePointer { nndiscr, ref nonnull, ref discrfield } => {
805                         if let mir::AggregateKind::Adt(_, variant, _) = *kind {
806                             if nndiscr == variant as u64 {
807                                 let offsets = iter::once(0)
808                                     .chain(nonnull.offset_after_field.iter().map(|s| s.bytes()));
809                                 self.assign_fields(dest, offsets, operands)?;
810                             } else {
811                                 assert_eq!(operands.len(), 0);
812                                 let offset = self.nonnull_offset(dest_ty, nndiscr, discrfield);
813                                 let dest = dest.offset(offset.bytes() as isize);
814                                 self.memory.write_isize(dest, 0)?;
815                             }
816                         } else {
817                             panic!("tried to assign {:?} to Layout::StructWrappedNullablePointer", kind);
818                         }
819                     }
820
821                     CEnum { discr, signed, .. } => {
822                         assert_eq!(operands.len(), 0);
823                         if let mir::AggregateKind::Adt(adt_def, variant, _) = *kind {
824                             let val = adt_def.variants[variant].disr_val.to_u64_unchecked();
825                             let size = discr.size().bytes() as usize;
826
827                             if signed {
828                                 self.memory.write_int(dest, val as i64, size)?;
829                             } else {
830                                 self.memory.write_uint(dest, val, size)?;
831                             }
832                         } else {
833                             panic!("tried to assign {:?} to Layout::CEnum", kind);
834                         }
835                     }
836
837                     _ => panic!("can't handle destination layout {:?} when assigning {:?}",
838                                 dest_layout, kind),
839                 }
840             }
841
842             Repeat(ref operand, _) => {
843                 let (elem_size, length) = match dest_ty.sty {
844                     ty::TyArray(elem_ty, n) => (self.type_size(elem_ty), n),
845                     _ => panic!("tried to assign array-repeat to non-array type {:?}", dest_ty),
846                 };
847
848                 let src = self.eval_operand(operand)?;
849                 for i in 0..length {
850                     let elem_dest = dest.offset((i * elem_size) as isize);
851                     self.memory.copy(src, elem_dest, elem_size)?;
852                 }
853             }
854
855             Len(ref lvalue) => {
856                 let src = self.eval_lvalue(lvalue)?;
857                 let ty = self.lvalue_ty(lvalue);
858                 let len = match ty.sty {
859                     ty::TyArray(_, n) => n as u64,
860                     ty::TySlice(_) => if let LvalueExtra::Length(n) = src.extra {
861                         n
862                     } else {
863                         panic!("Rvalue::Len of a slice given non-slice pointer: {:?}", src);
864                     },
865                     _ => panic!("Rvalue::Len expected array or slice, got {:?}", ty),
866                 };
867                 self.memory.write_usize(dest, len)?;
868             }
869
870             Ref(_, _, ref lvalue) => {
871                 let lv = self.eval_lvalue(lvalue)?;
872                 self.memory.write_ptr(dest, lv.ptr)?;
873                 match lv.extra {
874                     LvalueExtra::None => {},
875                     LvalueExtra::Length(len) => {
876                         let len_ptr = dest.offset(self.memory.pointer_size as isize);
877                         self.memory.write_usize(len_ptr, len)?;
878                     }
879                     LvalueExtra::DowncastVariant(..) =>
880                         panic!("attempted to take a reference to an enum downcast lvalue"),
881                 }
882             }
883
884             Box(ty) => {
885                 let size = self.type_size(ty);
886                 let ptr = self.memory.allocate(size);
887                 self.memory.write_ptr(dest, ptr)?;
888             }
889
890             Cast(kind, ref operand, dest_ty) => {
891                 let src = self.eval_operand(operand)?;
892                 let src_ty = self.operand_ty(operand);
893
894                 use rustc::mir::repr::CastKind::*;
895                 match kind {
896                     Unsize => {
897                         self.move_(src, dest, src_ty)?;
898                         let src_pointee_ty = pointee_type(src_ty).unwrap();
899                         let dest_pointee_ty = pointee_type(dest_ty).unwrap();
900
901                         match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
902                             (&ty::TyArray(_, length), &ty::TySlice(_)) => {
903                                 let len_ptr = dest.offset(self.memory.pointer_size as isize);
904                                 self.memory.write_usize(len_ptr, length as u64)?;
905                             }
906
907                             _ => panic!("can't handle cast: {:?}", rvalue),
908                         }
909                     }
910
911                     Misc => {
912                         // FIXME(solson): Wrong for almost everything.
913                         let size = dest_layout.size(&self.tcx.data_layout).bytes() as usize;
914                         self.memory.copy(src, dest, size)?;
915                     }
916
917                     _ => panic!("can't handle cast: {:?}", rvalue),
918                 }
919             }
920
921             Slice { .. } => unimplemented!(),
922             InlineAsm { .. } => unimplemented!(),
923         }
924
925         Ok(())
926     }
927
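    /// Computes the byte offset of the field that encodes the discriminant of a
    /// `StructWrappedNullablePointer` enum, by following the layout's `discrfield` path into the
    /// non-null variant.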
928     fn nonnull_offset(&self, ty: Ty<'tcx>, nndiscr: u64, discrfield: &[u32]) -> Size {
929         // Skip the constant 0 at the start meant for LLVM GEP.
930         let mut path = discrfield.iter().skip(1).map(|&i| i as usize);
931
932         // Handle the field index for the outer non-null variant.
933         let inner_ty = match ty.sty {
934             ty::TyEnum(adt_def, substs) => {
935                 let variant = &adt_def.variants[nndiscr as usize];
936                 let index = path.next().unwrap();
937                 let field = &variant.fields[index];
938                 field.ty(self.tcx, substs)
939             }
940             _ => panic!(
941                 "non-enum for StructWrappedNullablePointer: {}",
942                 ty,
943             ),
944         };
945
946         self.field_path_offset(inner_ty, path)
947     }
948
949     fn field_path_offset<I: Iterator<Item = usize>>(&self, mut ty: Ty<'tcx>, path: I) -> Size {
950         let mut offset = Size::from_bytes(0);
951
952         // The initial 0 intended for LLVM GEP has already been skipped by the caller.
953         for field_index in path {
954             let field_offset = self.get_field_offset(ty, field_index);
955             ty = self.get_field_ty(ty, field_index);
956             offset = offset.checked_add(field_offset, &self.tcx.data_layout).unwrap();
957         }
958
959         offset
960     }
961
962     fn get_field_ty(&self, ty: Ty<'tcx>, field_index: usize) -> Ty<'tcx> {
963         match ty.sty {
964             ty::TyStruct(adt_def, substs) => {
965                 adt_def.struct_variant().fields[field_index].ty(self.tcx, substs)
966             }
967
968             ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
969             ty::TyRawPtr(ty::TypeAndMut { ty, .. }) |
970             ty::TyBox(ty) => {
971                 assert_eq!(field_index, 0);
972                 ty
973             }
974             _ => panic!("can't handle type: {:?}", ty),
975         }
976     }
977
978     fn get_field_offset(&self, ty: Ty<'tcx>, field_index: usize) -> Size {
979         let layout = self.type_layout(ty);
980
981         use rustc::ty::layout::Layout::*;
982         match *layout {
983             Univariant { .. } => {
984                 assert_eq!(field_index, 0);
985                 Size::from_bytes(0)
986             }
987             FatPointer { .. } => {
988                 let bytes = layout::FAT_PTR_ADDR * self.memory.pointer_size;
989                 Size::from_bytes(bytes as u64)
990             }
991             _ => panic!("can't handle type: {:?}, with layout: {:?}", ty, layout),
992         }
993     }
994
995     fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<Pointer> {
996         use rustc::mir::repr::Operand::*;
997         match *op {
998             Consume(ref lvalue) => Ok(self.eval_lvalue(lvalue)?.to_ptr()),
999             Constant(mir::Constant { ref literal, .. }) => {
1000                 use rustc::mir::repr::Literal::*;
1001                 match *literal {
1002                     Value { ref value } => Ok(self.const_to_ptr(value)?),
1003                     Item { .. } => unimplemented!(),
1004                     Promoted { index } => {
1005                         // TODO(solson): Mark constants and statics as read-only and cache their
1006                         // values.
1007                         let current_mir = self.mir();
1008                         let mir = &current_mir.promoted[index];
1009                         self.call_nested(mir).map(Option::unwrap)
1010                     }
1011                 }
1012             }
1013         }
1014     }
1015
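    /// Computes the pointer (and any fat-pointer extra, such as a slice length) for an lvalue:
    /// locals come from the current frame, statics are evaluated as nested MIR, and projections
    /// (field access, downcast, deref, indexing) adjust the base address.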
1016     fn eval_lvalue(&mut self, lvalue: &mir::Lvalue<'tcx>) -> EvalResult<Lvalue> {
1017         use rustc::mir::repr::Lvalue::*;
1018         let ptr = match *lvalue {
1019             ReturnPointer => self.frame().return_ptr
1020                 .expect("ReturnPointer used in a function with no return value"),
1021             Arg(i) => self.frame().locals[i as usize],
1022             Var(i) => self.frame().locals[self.frame().var_offset + i as usize],
1023             Temp(i) => self.frame().locals[self.frame().temp_offset + i as usize],
1024
1025             Static(def_id) => {
1026                 // TODO(solson): Mark constants and statics as read-only and cache their values.
1027                 let mir = self.load_mir(def_id);
1028                 self.call_nested(&mir)?.unwrap()
1029             }
1030
1031             Projection(ref proj) => {
1032                 let base = self.eval_lvalue(&proj.base)?;
1033                 let base_ty = self.lvalue_ty(&proj.base);
1034                 let base_layout = self.type_layout(base_ty);
1035
1036                 use rustc::mir::repr::ProjectionElem::*;
1037                 match proj.elem {
1038                     Field(field, _) => {
1039                         use rustc::ty::layout::Layout::*;
1040                         let variant = match *base_layout {
1041                             Univariant { ref variant, .. } => variant,
1042                             General { ref variants, .. } => {
1043                                 if let LvalueExtra::DowncastVariant(variant_idx) = base.extra {
1044                                     &variants[variant_idx]
1045                                 } else {
1046                                     panic!("field access on enum had no variant index");
1047                                 }
1048                             }
1049                             RawNullablePointer { .. } => {
1050                                 assert_eq!(field.index(), 0);
1051                                 return Ok(base);
1052                             }
1053                             StructWrappedNullablePointer { ref nonnull, .. } => nonnull,
1054                             _ => panic!("field access on non-product type: {:?}", base_layout),
1055                         };
1056
1057                         let offset = variant.field_offset(field.index()).bytes();
1058                         base.ptr.offset(offset as isize)
1059                     },
1060
1061                     Downcast(_, variant) => {
1062                         use rustc::ty::layout::Layout::*;
1063                         match *base_layout {
1064                             General { discr, .. } => {
1065                                 return Ok(Lvalue {
1066                                     ptr: base.ptr.offset(discr.size().bytes() as isize),
1067                                     extra: LvalueExtra::DowncastVariant(variant),
1068                                 });
1069                             }
1070                             RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => {
1071                                 return Ok(base);
1072                             }
1073                             _ => panic!("variant downcast on non-aggregate: {:?}", base_layout),
1074                         }
1075                     },
1076
1077                     Deref => {
1078                         let pointee_ty = pointee_type(base_ty).expect("Deref of non-pointer");
1079                         let ptr = self.memory.read_ptr(base.ptr)?;
1080                         let extra = match pointee_ty.sty {
1081                             ty::TySlice(_) | ty::TyStr => {
1082                                 let len_ptr = base.ptr.offset(self.memory.pointer_size as isize);
1083                                 let len = self.memory.read_usize(len_ptr)?;
1084                                 LvalueExtra::Length(len)
1085                             }
1086                             ty::TyTrait(_) => unimplemented!(),
1087                             _ => LvalueExtra::None,
1088                         };
1089                         return Ok(Lvalue { ptr: ptr, extra: extra });
1090                     }
1091
1092                     Index(ref operand) => {
1093                         let elem_size = match base_ty.sty {
1094                             ty::TyArray(elem_ty, _) |
1095                             ty::TySlice(elem_ty) => self.type_size(elem_ty),
1096                             _ => panic!("indexing expected an array or slice, got {:?}", base_ty),
1097                         };
1098                         let n_ptr = self.eval_operand(operand)?;
1099                         let n = self.memory.read_usize(n_ptr)?;
1100                         base.ptr.offset(n as isize * elem_size as isize)
1101                     }
1102
1103                     ConstantIndex { .. } => unimplemented!(),
1104                 }
1105             }
1106         };
1107
1108         Ok(Lvalue { ptr: ptr, extra: LvalueExtra::None })
1109     }
1110
1111     // TODO(solson): Try making const_to_primval instead.
1112     fn const_to_ptr(&mut self, const_val: &const_val::ConstVal) -> EvalResult<Pointer> {
1113         use rustc::middle::const_val::ConstVal::*;
1114         match *const_val {
1115             Float(_f) => unimplemented!(),
1116             Integral(int) => {
1117                 // TODO(solson): Check int constant type.
1118                 let ptr = self.memory.allocate(8);
1119                 self.memory.write_uint(ptr, int.to_u64_unchecked(), 8)?;
1120                 Ok(ptr)
1121             }
1122             Str(ref s) => {
1123                 let psize = self.memory.pointer_size;
1124                 let static_ptr = self.memory.allocate(s.len());
1125                 let ptr = self.memory.allocate(psize * 2);
1126                 self.memory.write_bytes(static_ptr, s.as_bytes())?;
1127                 self.memory.write_ptr(ptr, static_ptr)?;
1128                 self.memory.write_usize(ptr.offset(psize as isize), s.len() as u64)?;
1129                 Ok(ptr)
1130             }
1131             ByteStr(ref bs) => {
1132                 let psize = self.memory.pointer_size;
1133                 let static_ptr = self.memory.allocate(bs.len());
1134                 let ptr = self.memory.allocate(psize);
1135                 self.memory.write_bytes(static_ptr, bs)?;
1136                 self.memory.write_ptr(ptr, static_ptr)?;
1137                 Ok(ptr)
1138             }
1139             Bool(b) => {
1140                 let ptr = self.memory.allocate(1);
1141                 self.memory.write_bool(ptr, b)?;
1142                 Ok(ptr)
1143             }
1144             Char(_c)          => unimplemented!(),
1145             Struct(_node_id)  => unimplemented!(),
1146             Tuple(_node_id)   => unimplemented!(),
1147             Function(_def_id) => unimplemented!(),
1148             Array(_, _)       => unimplemented!(),
1149             Repeat(_, _)      => unimplemented!(),
1150             Dummy             => unimplemented!(),
1151         }
1152     }
1153
1154     fn lvalue_ty(&self, lvalue: &mir::Lvalue<'tcx>) -> Ty<'tcx> {
1155         self.monomorphize(self.mir().lvalue_ty(self.tcx, lvalue).to_ty(self.tcx))
1156     }
1157
1158     fn operand_ty(&self, operand: &mir::Operand<'tcx>) -> Ty<'tcx> {
1159         self.monomorphize(self.mir().operand_ty(self.tcx, operand))
1160     }
1161
1162     fn monomorphize(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
1163         let substituted = ty.subst(self.tcx, self.substs());
1164         self.tcx.normalize_associated_type(&substituted)
1165     }
1166
1167     fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
1168         self.tcx.type_needs_drop_given_env(ty, &self.tcx.empty_parameter_environment())
1169     }
1170
1171     fn move_(&mut self, src: Pointer, dest: Pointer, ty: Ty<'tcx>) -> EvalResult<()> {
1172         let size = self.type_size(ty);
1173         self.memory.copy(src, dest, size)?;
1174         if self.type_needs_drop(ty) {
1175             self.memory.drop_fill(src, size)?;
1176         }
1177         Ok(())
1178     }
1179
1180     fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
1181         ty.is_sized(self.tcx, &self.tcx.empty_parameter_environment(), DUMMY_SP)
1182     }
1183
1184     fn type_size(&self, ty: Ty<'tcx>) -> usize {
1185         self.type_layout(ty).size(&self.tcx.data_layout).bytes() as usize
1186     }
1187
1188     fn type_layout(&self, ty: Ty<'tcx>) -> &'tcx Layout {
1189         // TODO(solson): Is this inefficient? Needs investigation.
1190         let ty = self.monomorphize(ty);
1191
1192         self.tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
1193             // TODO(solson): Report this error properly.
1194             ty.layout(&infcx).unwrap()
1195         })
1196     }
1197
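    /// Reads a primitive value of type `ty` out of memory. Thin pointers come back as either
    /// abstract pointers into an allocation or raw integer addresses, depending on what was
    /// written; fat pointers are not handled here yet.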
1198     pub fn read_primval(&mut self, ptr: Pointer, ty: Ty<'tcx>) -> EvalResult<PrimVal> {
1199         use syntax::ast::{IntTy, UintTy};
1200         let val = match ty.sty {
1201             ty::TyBool              => PrimVal::Bool(self.memory.read_bool(ptr)?),
1202             ty::TyInt(IntTy::I8)    => PrimVal::I8(self.memory.read_int(ptr, 1)? as i8),
1203             ty::TyInt(IntTy::I16)   => PrimVal::I16(self.memory.read_int(ptr, 2)? as i16),
1204             ty::TyInt(IntTy::I32)   => PrimVal::I32(self.memory.read_int(ptr, 4)? as i32),
1205             ty::TyInt(IntTy::I64)   => PrimVal::I64(self.memory.read_int(ptr, 8)? as i64),
1206             ty::TyUint(UintTy::U8)  => PrimVal::U8(self.memory.read_uint(ptr, 1)? as u8),
1207             ty::TyUint(UintTy::U16) => PrimVal::U16(self.memory.read_uint(ptr, 2)? as u16),
1208             ty::TyUint(UintTy::U32) => PrimVal::U32(self.memory.read_uint(ptr, 4)? as u32),
1209             ty::TyUint(UintTy::U64) => PrimVal::U64(self.memory.read_uint(ptr, 8)? as u64),
1210
1211             // TODO(solson): Pick the PrimVal dynamically.
1212             ty::TyInt(IntTy::Is)   => PrimVal::I64(self.memory.read_isize(ptr)?),
1213             ty::TyUint(UintTy::Us) => PrimVal::U64(self.memory.read_usize(ptr)?),
1214
1215             ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
1216             ty::TyRawPtr(ty::TypeAndMut { ty, .. }) => {
1217                 if self.type_is_sized(ty) {
1218                     match self.memory.read_ptr(ptr) {
1219                         Ok(p) => PrimVal::AbstractPtr(p),
1220                         Err(EvalError::ReadBytesAsPointer) => {
1221                             PrimVal::IntegerPtr(self.memory.read_usize(ptr)?)
1222                         }
1223                         Err(e) => return Err(e),
1224                     }
1225                 } else {
1226                     panic!("unimplemented: primitive read of fat pointer type: {:?}", ty);
1227                 }
1228             }
1229
1230             _ => panic!("primitive read of non-primitive type: {:?}", ty),
1231         };
1232         Ok(val)
1233     }
1234
1235     fn frame(&self) -> &Frame<'mir, 'tcx> {
1236         self.stack.last().expect("no call frames exist")
1237     }
1238
1239     fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
1240         self.stack.last_mut().expect("no call frames exist")
1241     }
1242
1243     fn mir(&self) -> CachedMir<'mir, 'tcx> {
1244         self.frame().mir.clone()
1245     }
1246
1247     fn substs(&self) -> &'tcx Substs<'tcx> {
1248         self.substs_stack.last().cloned().unwrap_or_else(|| self.tcx.mk_substs(Substs::empty()))
1249     }
1250
1251     fn load_mir(&self, def_id: DefId) -> CachedMir<'mir, 'tcx> {
1252         match self.tcx.map.as_local_node_id(def_id) {
1253             Some(node_id) => CachedMir::Ref(self.mir_map.map.get(&node_id).unwrap()),
1254             None => {
1255                 let mut mir_cache = self.mir_cache.borrow_mut();
1256                 if let Some(mir) = mir_cache.get(&def_id) {
1257                     return CachedMir::Owned(mir.clone());
1258                 }
1259
1260                 let cs = &self.tcx.sess.cstore;
1261                 let mir = cs.maybe_get_item_mir(self.tcx, def_id).unwrap_or_else(|| {
1262                     panic!("no mir for {:?}", def_id);
1263                 });
1264                 let cached = Rc::new(mir);
1265                 mir_cache.insert(def_id, cached.clone());
1266                 CachedMir::Owned(cached)
1267             }
1268         }
1269     }
1270
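    /// Resolves the impl (vtable) that satisfies the given trait reference, fully resolving all
    /// nested obligations so that the returned vtable's substitutions are concrete.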
1271     fn fulfill_obligation(&self, trait_ref: ty::PolyTraitRef<'tcx>) -> traits::Vtable<'tcx, ()> {
1272         // Do the initial selection for the obligation. This yields the shallow result we are
1273         // looking for -- that is, which specific impl the trait reference resolves to.
1274         self.tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
1275             let mut selcx = traits::SelectionContext::new(&infcx);
1276
1277             let obligation = traits::Obligation::new(
1278                 traits::ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID),
1279                 trait_ref.to_poly_trait_predicate(),
1280             );
1281             let selection = selcx.select(&obligation).unwrap().unwrap();
1282
1283             // Currently, we use a fulfillment context to completely resolve all nested obligations.
1284             // This is because they can inform the inference of the impl's type parameters.
1285             let mut fulfill_cx = traits::FulfillmentContext::new();
1286             let vtable = selection.map(|predicate| {
1287                 fulfill_cx.register_predicate_obligation(&infcx, predicate);
1288             });
1289             infcx.drain_fulfillment_cx_or_panic(DUMMY_SP, &mut fulfill_cx, &vtable)
1290         })
1291     }
1292
1293     /// Resolves a trait method to the concrete impl method (and its substitutions) that
1293     /// should actually be called.
1294     pub fn trait_method(
1295         &self,
1296         def_id: DefId,
1297         substs: &'tcx Substs<'tcx>
1298     ) -> (DefId, &'tcx Substs<'tcx>) {
1299         let method_item = self.tcx.impl_or_trait_item(def_id);
1300         let trait_id = method_item.container().id();
1301         let trait_ref = ty::Binder(substs.to_trait_ref(self.tcx, trait_id));
1302         match self.fulfill_obligation(trait_ref) {
1303             traits::VtableImpl(vtable_impl) => {
1304                 let impl_did = vtable_impl.impl_def_id;
1305                 let mname = self.tcx.item_name(def_id);
1306                 // Create a concatenated set of substitutions which includes those from the impl
1307                 // and those from the method:
1308                 let impl_substs = vtable_impl.substs.with_method_from(substs);
1309                 let substs = self.tcx.mk_substs(impl_substs);
1310                 let mth = get_impl_method(self.tcx, impl_did, substs, mname);
1311
1312                 (mth.method.def_id, mth.substs)
1313             }
1314
1315             traits::VtableClosure(vtable_closure) =>
1316                 (vtable_closure.closure_def_id, vtable_closure.substs.func_substs),
1317
1318             traits::VtableFnPointer(_fn_ty) => {
1319                 let _trait_closure_kind = self.tcx.lang_items.fn_trait_kind(trait_id).unwrap();
1320                 unimplemented!()
1321                 // let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, fn_ty);
1322
1323                 // let method_ty = def_ty(tcx, def_id, substs);
1324                 // let fn_ptr_ty = match method_ty.sty {
1325                 //     ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)),
1326                 //     _ => unreachable!("expected fn item type, found {}",
1327                 //                       method_ty)
1328                 // };
1329                 // Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty))
1330             }
1331
1332             traits::VtableObject(ref _data) => {
1333                 unimplemented!()
1334                 // Callee {
1335                 //     data: Virtual(traits::get_vtable_index_of_object_method(
1336                 //                   tcx, data, def_id)),
1337                 //                   ty: def_ty(tcx, def_id, substs)
1338                 // }
1339             }
1340             vtable => unreachable!("trait method resolution produced an unexpected vtable: {:?}", vtable),
1341         }
1342     }
1343 }
1344
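/// Returns the pointee type if `ptr_ty` is a reference, raw pointer, or `Box`, and `None`
/// otherwise.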
1345 fn pointee_type(ptr_ty: ty::Ty) -> Option<ty::Ty> {
1346     match ptr_ty.sty {
1347         ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
1348         ty::TyRawPtr(ty::TypeAndMut { ty, .. }) |
1349         ty::TyBox(ty) => {
1350             Some(ty)
1351         }
1352         _ => None,
1353     }
1354 }
1355
1356 impl Lvalue {
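    /// Extracts the plain pointer of an lvalue, asserting that it carries no extra
    /// (fat-pointer) data.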
1357     fn to_ptr(self) -> Pointer {
1358         assert_eq!(self.extra, LvalueExtra::None);
1359         self.ptr
1360     }
1361 }
1362
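// Both variants of `CachedMir` deref to the underlying MIR, whether it is borrowed from the
// `MirMap` or owned behind an `Rc`.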
1363 impl<'mir, 'tcx: 'mir> Deref for CachedMir<'mir, 'tcx> {
1364     type Target = mir::Mir<'tcx>;
1365     fn deref(&self) -> &mir::Mir<'tcx> {
1366         match *self {
1367             CachedMir::Ref(r) => r,
1368             CachedMir::Owned(ref rc) => rc,
1369         }
1370     }
1371 }
1372
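/// The result of resolving a method within a specific impl: the method itself, the
/// substitutions to apply to it, and whether it is merely a provided (default) method
/// inherited from the trait.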
1373 #[derive(Debug)]
1374 pub struct ImplMethod<'tcx> {
1375     pub method: Rc<ty::Method<'tcx>>,
1376     pub substs: &'tcx Substs<'tcx>,
1377     pub is_provided: bool,
1378 }
1379
1380 /// Locates the applicable definition of a method, given its name.
1381 pub fn get_impl_method<'a, 'tcx>(
1382     tcx: TyCtxt<'a, 'tcx, 'tcx>,
1383     impl_def_id: DefId,
1384     substs: &'tcx Substs<'tcx>,
1385     name: ast::Name,
1386 ) -> ImplMethod<'tcx> {
1387     assert!(!substs.types.needs_infer());
1388
1389     let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap();
1390     let trait_def = tcx.lookup_trait_def(trait_def_id);
1391
1392     match trait_def.ancestors(impl_def_id).fn_defs(tcx, name).next() {
1393         Some(node_item) => {
1394             let substs = tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| {
1395                 let substs = traits::translate_substs(&infcx, impl_def_id,
1396                                                       substs, node_item.node);
1397                 tcx.lift(&substs).unwrap_or_else(|| {
1398                     bug!("trans::meth::get_impl_method: translate_substs \
1399                           returned {:?} which contains inference types/regions",
1400                          substs);
1401                 })
1402             });
1403             ImplMethod {
1404                 method: node_item.item,
1405                 substs: substs,
1406                 is_provided: node_item.node.is_from_trait(),
1407             }
1408         }
1409         None => {
1410             bug!("method {:?} not found in {:?}", name, impl_def_id)
1411         }
1412     }
1413 }
1414
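/// Driver entry point: walks all crate-local MIR and interprets every function carrying the
/// `#[miri_run]` attribute, dumping the returned allocation when tracing is enabled.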
1415 pub fn interpret_start_points<'a, 'tcx>(
1416     tcx: TyCtxt<'a, 'tcx, 'tcx>,
1417     mir_map: &MirMap<'tcx>,
1418 ) {
1419     for (&id, mir) in &mir_map.map {
1420         for attr in tcx.map.attrs(id) {
1421             use syntax::attr::AttrMetaMethods;
1422             if attr.check_name("miri_run") {
1423                 let item = tcx.map.expect_item(id);
1424
1425                 if TRACE_EXECUTION {
1426                     println!("Interpreting: {}", item.name);
1427                 }
1428
1429                 let mut gecx = GlobalEvalContext::new(tcx, mir_map);
1430                 let mut fecx = FnEvalContext::new(&mut gecx);
1431                 match fecx.call_nested(mir) {
1432                     Ok(Some(return_ptr)) => if TRACE_EXECUTION {
1433                         fecx.memory.dump(return_ptr.alloc_id);
1434                     },
1435                     Ok(None) => println!("(diverging function returned)"),
1436                     Err(_e) => {
1437                         // TODO(solson): Detect whether the error was already reported or not.
1438                         // tcx.sess.err(&e.to_string());
1439                     }
1440                 }
1441
1442                 if TRACE_EXECUTION {
1443                     println!("");
1444                 }
1445             }
1446         }
1447     }
1448 }
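// For illustration only (an assumption based on the `miri_run` attribute check above, not code
// that belongs in this module): a function to be interpreted by `interpret_start_points` would
// look roughly like this in the target crate; the exact feature gates required depend on the
// compiler version in use.
//
//     #![feature(custom_attribute)]
//
//     #[miri_run]
//     fn example() -> i32 {
//         1 + 2
//     }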
1449
1450 // TODO(solson): Upstream these methods into rustc::ty::layout.
1451
1452 trait IntegerExt {
1453     fn size(self) -> Size;
1454 }
1455
1456 impl IntegerExt for layout::Integer {
1457     fn size(self) -> Size {
1458         use rustc::ty::layout::Integer::*;
1459         match self {
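            // An I1 (e.g. a one-bit enum discriminant) still occupies a full byte of storage.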
1460             I1 | I8 => Size::from_bits(8),
1461             I16 => Size::from_bits(16),
1462             I32 => Size::from_bits(32),
1463             I64 => Size::from_bits(64),
1464         }
1465     }
1466 }
1467
1468 trait StructExt {
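    /// Byte offset at which the field at `index` starts. Offsets come from
    /// `offset_after_field`, which records the running offset just past each field; e.g. if
    /// `offset_after_field` were `[4, 8, 16]` bytes, the three fields would start at offsets
    /// 0, 4, and 8 respectively.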
1469     fn field_offset(&self, index: usize) -> Size;
1470 }
1471
1472 impl StructExt for layout::Struct {
1473     fn field_offset(&self, index: usize) -> Size {
1474         if index == 0 {
1475             Size::from_bytes(0)
1476         } else {
1477             self.offset_after_field[index - 1]
1478         }
1479     }
1480 }