1 use rustc::infer;
2 use rustc::middle::const_val;
3 use rustc::hir::def_id::DefId;
4 use rustc::mir::mir_map::MirMap;
5 use rustc::mir::repr as mir;
6 use rustc::traits::{self, ProjectionMode};
7 use rustc::ty::fold::TypeFoldable;
8 use rustc::ty::layout::{self, Layout, Size};
9 use rustc::ty::subst::{self, Subst, Substs};
10 use rustc::ty::{self, TyCtxt};
11 use rustc::util::nodemap::DefIdMap;
12 use std::cell::RefCell;
13 use std::ops::{Deref, DerefMut};
14 use std::rc::Rc;
15 use std::{iter, mem};
16 use syntax::ast;
17 use syntax::attr;
18 use syntax::codemap::{self, DUMMY_SP};
19
20 use error::{EvalError, EvalResult};
21 use memory::{Memory, Pointer};
22 use primval::{self, PrimVal};
23
24 const TRACE_EXECUTION: bool = true;
25
26 struct GlobalEvalContext<'a, 'tcx: 'a> {
27     /// The results of the type checker, from rustc.
28     tcx: &'a TyCtxt<'tcx>,
29
30     /// A mapping from NodeIds to Mir, from rustc. Only contains MIR for crate-local items.
31     mir_map: &'a MirMap<'tcx>,
32
33     /// A local cache from DefIds to Mir for non-crate-local items.
34     mir_cache: RefCell<DefIdMap<Rc<mir::Mir<'tcx>>>>,
35
36     /// The virtual memory system.
37     memory: Memory,
38
39     /// A stack of the type substitutions for the currently active function invocations. It is
40     /// kept separate from the call stack in `FnEvalContext` because the `Substs` for a function
41     /// must already be available while the `Frame` for that same function is being *created*.
42     substs_stack: Vec<&'tcx Substs<'tcx>>,
43
44     // TODO(solson): Merge with `substs_stack`. Also try restructuring `Frame` to accommodate.
45     /// A stack of the information needed to print good stack traces:
46     ///   * Function DefIds and Substs to print proper substituted function names.
47     ///   * Spans pointing to specific function calls in the source.
48     name_stack: Vec<(DefId, &'tcx Substs<'tcx>, codemap::Span)>,
49 }
50
51 struct FnEvalContext<'a, 'b: 'a + 'mir, 'mir, 'tcx: 'b> {
52     gecx: &'a mut GlobalEvalContext<'b, 'tcx>,
53
54     /// The virtual call stack.
55     stack: Vec<Frame<'mir, 'tcx>>,
56 }
57
58 impl<'a, 'b, 'mir, 'tcx> Deref for FnEvalContext<'a, 'b, 'mir, 'tcx> {
59     type Target = GlobalEvalContext<'b, 'tcx>;
60     fn deref(&self) -> &Self::Target {
61         self.gecx
62     }
63 }
64
65 impl<'a, 'b, 'mir, 'tcx> DerefMut for FnEvalContext<'a, 'b, 'mir, 'tcx> {
66     fn deref_mut(&mut self) -> &mut Self::Target {
67         self.gecx
68     }
69 }
70
71 /// A stack frame.
72 struct Frame<'a, 'tcx: 'a> {
73     /// The MIR for the function called on this frame.
74     mir: CachedMir<'a, 'tcx>,
75
76     /// The block this frame will execute when a function call returns to this frame.
77     next_block: mir::BasicBlock,
78
79     /// A pointer for writing the return value of the current call if it's not a diverging call.
80     return_ptr: Option<Pointer>,
81
82     /// The list of locals for the current function, stored in order as
83     /// `[arguments..., variables..., temporaries...]`. The variables begin at `self.var_offset`
84     /// and the temporaries at `self.temp_offset`.
85     locals: Vec<Pointer>,
86
87     /// The offset of the first variable in `self.locals`.
88     var_offset: usize,
89
90     /// The offset of the first temporary in `self.locals`.
91     temp_offset: usize,
92 }
93
94 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
95 struct Lvalue {
96     ptr: Pointer,
97     extra: LvalueExtra,
98 }
99
100 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
101 enum LvalueExtra {
102     None,
103     Length(u64),
104     // TODO(solson): Vtable(memory::AllocId),
105     DowncastVariant(usize),
106 }
107
108 #[derive(Clone)]
109 enum CachedMir<'mir, 'tcx: 'mir> {
110     Ref(&'mir mir::Mir<'tcx>),
111     Owned(Rc<mir::Mir<'tcx>>)
112 }
113
114 /// Represents the action to be taken in the main loop as a result of executing a terminator.
115 enum TerminatorTarget {
116     /// Make a local jump to the given block.
117     Block(mir::BasicBlock),
118
119     /// Start executing from the new current frame. (For function calls.)
120     Call,
121
122     /// Stop executing the current frame and resume the previous frame.
123     Return,
124 }
125
126 impl<'a, 'tcx> GlobalEvalContext<'a, 'tcx> {
127     fn new(tcx: &'a TyCtxt<'tcx>, mir_map: &'a MirMap<'tcx>) -> Self {
128         GlobalEvalContext {
129             tcx: tcx,
130             mir_map: mir_map,
131             mir_cache: RefCell::new(DefIdMap()),
132             memory: Memory::new(),
133             substs_stack: Vec::new(),
134             name_stack: Vec::new(),
135         }
136     }
137 }
138
139 impl<'a, 'b, 'mir, 'tcx> FnEvalContext<'a, 'b, 'mir, 'tcx> {
140     fn new(gecx: &'a mut GlobalEvalContext<'b, 'tcx>) -> Self {
141         FnEvalContext {
142             gecx: gecx,
143             stack: Vec::new(),
144         }
145     }
146
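    /// If `r` is an error, reports it as a compiler diagnostic at `span`, attaching one note per
    /// entry on `name_stack` as a stack trace. The result is passed through unchanged either way.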
147     fn maybe_report<T>(&self, span: codemap::Span, r: EvalResult<T>) -> EvalResult<T> {
148         if let Err(ref e) = r {
149             let mut err = self.tcx.sess.struct_span_err(span, &e.to_string());
150             for &(def_id, substs, span) in self.name_stack.iter().rev() {
151                 // FIXME(solson): Find a way to do this without this Display impl hack.
152                 use rustc::util::ppaux;
153                 use std::fmt;
154                 struct Instance<'tcx>(DefId, &'tcx Substs<'tcx>);
155                 impl<'tcx> fmt::Display for Instance<'tcx> {
156                     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
157                         ppaux::parameterized(f, self.1, self.0, ppaux::Ns::Value, &[],
158                             |tcx| tcx.lookup_item_type(self.0).generics)
159                     }
160                 }
161                 err.span_note(span, &format!("inside call to {}", Instance(def_id, substs)));
162             }
163             err.emit();
164         }
165         r
166     }
167
168     fn log<F>(&self, extra_indent: usize, f: F) where F: FnOnce() {
169         if !TRACE_EXECUTION { return; }
170         let indent = self.stack.len() + extra_indent;
171         for _ in 0..indent { print!("    "); }
172         f();
173         println!("");
174     }
175
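    /// Drives the interpreter: executes the statements and terminator of the current block, then
    /// follows the resulting jump, call, or return until the call stack is empty.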
176     fn run(&mut self) -> EvalResult<()> {
177         'outer: while !self.stack.is_empty() {
178             let mut current_block = self.frame().next_block;
179
180             loop {
181                 self.log(0, || print!("// {:?}", current_block));
182                 let current_mir = self.mir().clone(); // Cloning a reference.
183                 let block_data = current_mir.basic_block_data(current_block);
184
185                 for stmt in &block_data.statements {
186                     self.log(0, || print!("{:?}", stmt));
187                     let mir::StatementKind::Assign(ref lvalue, ref rvalue) = stmt.kind;
188                     let result = self.eval_assignment(lvalue, rvalue);
189                     self.maybe_report(stmt.span, result)?;
190                 }
191
192                 let terminator = block_data.terminator();
193                 self.log(0, || print!("{:?}", terminator.kind));
194
195                 let result = self.eval_terminator(terminator);
196                 match self.maybe_report(terminator.span, result)? {
197                     TerminatorTarget::Block(block) => current_block = block,
198                     TerminatorTarget::Return => {
199                         self.pop_stack_frame();
200                         self.name_stack.pop();
201                         continue 'outer;
202                     }
203                     TerminatorTarget::Call => continue 'outer,
204                 }
205             }
206         }
207
208         Ok(())
209     }
210
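    /// Evaluates a nested MIR body (statics, promoted rvalues, and `#[miri_run]` entry points) in
    /// a fresh `FnEvalContext`, returning a pointer to its return value, or `None` if it diverges.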
211     fn call_nested(&mut self, mir: &mir::Mir<'tcx>) -> EvalResult<Option<Pointer>> {
212         let mut nested_fecx = FnEvalContext::new(self.gecx);
213
214         let return_ptr = match mir.return_ty {
215             ty::FnConverging(ty) => {
216                 let size = nested_fecx.type_size(ty);
217                 Some(nested_fecx.memory.allocate(size))
218             }
219             ty::FnDiverging => None,
220         };
221
222         let substs = nested_fecx.substs();
223         nested_fecx.push_stack_frame(CachedMir::Ref(mir), substs, return_ptr);
224         nested_fecx.run()?;
225         Ok(return_ptr)
226     }
227
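    /// Pushes the substitutions and a new `Frame` for a call, allocating memory for every
    /// argument, variable, and temporary of the callee.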
228     fn push_stack_frame(&mut self, mir: CachedMir<'mir, 'tcx>, substs: &'tcx Substs<'tcx>,
229         return_ptr: Option<Pointer>)
230     {
231         self.substs_stack.push(substs);
232
233         let arg_tys = mir.arg_decls.iter().map(|a| a.ty);
234         let var_tys = mir.var_decls.iter().map(|v| v.ty);
235         let temp_tys = mir.temp_decls.iter().map(|t| t.ty);
236
237         let locals: Vec<Pointer> = arg_tys.chain(var_tys).chain(temp_tys).map(|ty| {
238             let size = self.type_size(ty);
239             self.memory.allocate(size)
240         }).collect();
241
242         let num_args = mir.arg_decls.len();
243         let num_vars = mir.var_decls.len();
244
245         self.stack.push(Frame {
246             mir: mir.clone(),
247             next_block: mir::START_BLOCK,
248             return_ptr: return_ptr,
249             locals: locals,
250             var_offset: num_args,
251             temp_offset: num_args + num_vars,
252         });
253     }
254
255     fn pop_stack_frame(&mut self) {
256         let _frame = self.stack.pop().expect("tried to pop a stack frame, but there were none");
257         // TODO(solson): Deallocate local variables.
258         self.substs_stack.pop();
259     }
260
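    /// Executes a single terminator and reports where control flow continues: a target block, a
    /// freshly pushed call frame, or a return to the caller.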
261     fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>)
262             -> EvalResult<TerminatorTarget> {
263         use rustc::mir::repr::TerminatorKind::*;
264         let target = match terminator.kind {
265             Return => TerminatorTarget::Return,
266
267             Goto { target } => TerminatorTarget::Block(target),
268
269             If { ref cond, targets: (then_target, else_target) } => {
270                 let cond_ptr = self.eval_operand(cond)?;
271                 let cond_val = self.memory.read_bool(cond_ptr)?;
272                 TerminatorTarget::Block(if cond_val { then_target } else { else_target })
273             }
274
275             SwitchInt { ref discr, ref values, ref targets, .. } => {
276                 let discr_ptr = self.eval_lvalue(discr)?.to_ptr();
277                 let discr_size = self
278                     .type_layout(self.lvalue_ty(discr))
279                     .size(&self.tcx.data_layout)
280                     .bytes() as usize;
281                 let discr_val = self.memory.read_uint(discr_ptr, discr_size)?;
282
283                 // Branch to the `otherwise` case by default, if no match is found.
284                 let mut target_block = targets[targets.len() - 1];
285
286                 for (index, val_const) in values.iter().enumerate() {
287                     let ptr = self.const_to_ptr(val_const)?;
288                     let val = self.memory.read_uint(ptr, discr_size)?;
289                     if discr_val == val {
290                         target_block = targets[index];
291                         break;
292                     }
293                 }
294
295                 TerminatorTarget::Block(target_block)
296             }
297
298             Switch { ref discr, ref targets, adt_def } => {
299                 let adt_ptr = self.eval_lvalue(discr)?.to_ptr();
300                 let adt_layout = self.type_layout(self.lvalue_ty(discr));
301
302                 match *adt_layout {
303                     Layout::General { discr, .. } | Layout::CEnum { discr, .. } => {
304                         let discr_size = discr.size().bytes();
305                         let discr_val = self.memory.read_uint(adt_ptr, discr_size as usize)?;
306
307                         let matching = adt_def.variants.iter()
308                             .position(|v| discr_val == v.disr_val.to_u64_unchecked());
309
310                         match matching {
311                             Some(i) => TerminatorTarget::Block(targets[i]),
312                             None => return Err(EvalError::InvalidDiscriminant),
313                         }
314                     }
315
316                     Layout::RawNullablePointer { nndiscr, .. } => {
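                        // With the null-pointer optimization the discriminant is encoded in the
                        // pointer field itself: a null value selects variant `1 - nndiscr`, while
                        // anything else (including a stored pointer, which cannot be read back as
                        // an integer) selects variant `nndiscr`.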
317                         let is_null = match self.memory.read_usize(adt_ptr) {
318                             Ok(0) => true,
319                             Ok(_) | Err(EvalError::ReadPointerAsBytes) => false,
320                             Err(e) => return Err(e),
321                         };
322
323                         assert!(nndiscr == 0 || nndiscr == 1);
324                         let target = if is_null { 1 - nndiscr } else { nndiscr };
325                         TerminatorTarget::Block(targets[target as usize])
326                     }
327
328                     _ => panic!("attempted to switch on non-aggregate type"),
329                 }
330             }
331
332             Call { ref func, ref args, ref destination, .. } => {
333                 let mut return_ptr = None;
334                 if let Some((ref lv, target)) = *destination {
335                     self.frame_mut().next_block = target;
336                     return_ptr = Some(self.eval_lvalue(lv)?.to_ptr());
337                 }
338
339                 let func_ty = self.operand_ty(func);
340                 match func_ty.sty {
341                     ty::TyFnDef(def_id, substs, fn_ty) => {
342                         use syntax::abi::Abi;
343                         match fn_ty.abi {
344                             Abi::RustIntrinsic => {
345                                 let name = self.tcx.item_name(def_id).as_str();
346                                 match fn_ty.sig.0.output {
347                                     ty::FnConverging(ty) => {
348                                         let size = self.type_size(ty);
349                                         self.call_intrinsic(&name, substs, args,
350                                             return_ptr.unwrap(), size)?
351                                     }
352                                     ty::FnDiverging => unimplemented!(),
353                                 }
354                             }
355
356                             Abi::C => self.call_c_abi(def_id, args, return_ptr.unwrap())?,
357
358                             Abi::Rust | Abi::RustCall => {
359                                 // TODO(solson): Adjust the first argument when calling a Fn or
360                                 // FnMut closure via FnOnce::call_once.
361
362                                 // Only trait methods can have a Self parameter.
363                                 let (resolved_def_id, resolved_substs) = if substs.self_ty().is_some() {
364                                     self.trait_method(def_id, substs)
365                                 } else {
366                                     (def_id, substs)
367                                 };
368
369                                 let mut arg_srcs = Vec::new();
370                                 for arg in args {
371                                     let src = self.eval_operand(arg)?;
372                                     let src_ty = self.operand_ty(arg);
373                                     arg_srcs.push((src, src_ty));
374                                 }
375
376                                 if fn_ty.abi == Abi::RustCall && !args.is_empty() {
377                                     arg_srcs.pop();
378                                     let last_arg = args.last().unwrap();
379                                     let last = self.eval_operand(last_arg)?;
380                                     let last_ty = self.operand_ty(last_arg);
381                                     let last_layout = self.type_layout(last_ty);
382                                     match (&last_ty.sty, last_layout) {
383                                         (&ty::TyTuple(ref fields),
384                                          &Layout::Univariant { ref variant, .. }) => {
385                                             let offsets = iter::once(0)
386                                                 .chain(variant.offset_after_field.iter()
387                                                     .map(|s| s.bytes()));
388                                             for (offset, ty) in offsets.zip(fields) {
389                                                 let src = last.offset(offset as isize);
390                                                 arg_srcs.push((src, ty));
391                                             }
392                                         }
393                                         ty => panic!("expected tuple as last argument in function with 'rust-call' ABI, got {:?}", ty),
394                                     }
395                                 }
396
397                                 let mir = self.load_mir(resolved_def_id);
398                                 self.name_stack.push((def_id, substs, terminator.span));
399                                 self.push_stack_frame(mir, resolved_substs, return_ptr);
400
401                                 for (i, (src, src_ty)) in arg_srcs.into_iter().enumerate() {
402                                     let dest = self.frame().locals[i];
403                                     self.move_(src, dest, src_ty)?;
404                                 }
405
406                                 TerminatorTarget::Call
407                             }
408
409                             abi => panic!("can't handle function with {:?} ABI", abi),
410                         }
411                     }
412
413                     _ => panic!("can't handle callee of type {:?}", func_ty),
414                 }
415             }
416
417             Drop { ref value, target, .. } => {
418                 let ptr = self.eval_lvalue(value)?.to_ptr();
419                 let ty = self.lvalue_ty(value);
420                 self.drop(ptr, ty)?;
421                 TerminatorTarget::Block(target)
422             }
423
424             Resume => unimplemented!(),
425         };
426
427         Ok(target)
428     }
429
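    /// Recursively drops the value at `ptr`. For a `Box` the heap contents are dropped and
    /// deallocated; afterwards the value is overwritten with the drop-fill pattern so it will not
    /// be dropped twice.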
430     fn drop(&mut self, ptr: Pointer, ty: ty::Ty<'tcx>) -> EvalResult<()> {
431         if !self.type_needs_drop(ty) {
432             self.log(1, || print!("no need to drop {:?}", ty));
433             return Ok(());
434         }
435         self.log(1, || print!("need to drop {:?}", ty));
436
437         // TODO(solson): Call user-defined Drop::drop impls.
438
439         match ty.sty {
440             ty::TyBox(contents_ty) => {
441                 match self.memory.read_ptr(ptr) {
442                     Ok(contents_ptr) => {
443                         self.drop(contents_ptr, contents_ty)?;
444                         self.log(1, || print!("deallocating box"));
445                         self.memory.deallocate(contents_ptr)?;
446                     }
447                     Err(EvalError::ReadBytesAsPointer) => {
448                         let size = self.memory.pointer_size;
449                         let possible_drop_fill = self.memory.read_bytes(ptr, size)?;
450                         if possible_drop_fill.iter().all(|&b| b == mem::POST_DROP_U8) {
451                             return Ok(());
452                         } else {
453                             return Err(EvalError::ReadBytesAsPointer);
454                         }
455                     }
456                     Err(e) => return Err(e),
457                 }
458             }
459
460             // TODO(solson): Implement drop for other relevant types (e.g. aggregates).
461             _ => {}
462         }
463
464         // Filling drop.
465         // FIXME(solson): Trait objects (with no static size) probably get filled, too.
466         let size = self.type_size(ty);
467         self.memory.drop_fill(ptr, size)?;
468
469         Ok(())
470     }
471
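    /// Emulates a Rust intrinsic directly on interpreter memory, writing the result to `dest`.
    /// No stack frame is pushed, so the caller resumes as if the call returned immediately.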
472     fn call_intrinsic(
473         &mut self,
474         name: &str,
475         substs: &'tcx Substs<'tcx>,
476         args: &[mir::Operand<'tcx>],
477         dest: Pointer,
478         dest_size: usize
479     ) -> EvalResult<TerminatorTarget> {
480         let args_res: EvalResult<Vec<Pointer>> = args.iter()
481             .map(|arg| self.eval_operand(arg))
482             .collect();
483         let args = args_res?;
484
485         match name {
486             "assume" => {}
487
488             "copy_nonoverlapping" => {
489                 let elem_ty = *substs.types.get(subst::FnSpace, 0);
490                 let elem_size = self.type_size(elem_ty);
491                 let src = self.memory.read_ptr(args[0])?;
492                 let dest = self.memory.read_ptr(args[1])?;
493                 let count = self.memory.read_isize(args[2])?;
494                 self.memory.copy(src, dest, count as usize * elem_size)?;
495             }
496
497             "forget" => {
498                 let arg_ty = *substs.types.get(subst::FnSpace, 0);
499                 let arg_size = self.type_size(arg_ty);
500                 self.memory.drop_fill(args[0], arg_size)?;
501             }
502
503             "init" => self.memory.write_repeat(dest, 0, dest_size)?,
504
505             "min_align_of" => {
506                 self.memory.write_int(dest, 1, dest_size)?;
507             }
508
509             "move_val_init" => {
510                 let ty = *substs.types.get(subst::FnSpace, 0);
511                 let ptr = self.memory.read_ptr(args[0])?;
512                 self.move_(args[1], ptr, ty)?;
513             }
514
515             // FIXME(solson): Handle different integer types correctly.
516             "add_with_overflow" => {
517                 let ty = *substs.types.get(subst::FnSpace, 0);
518                 let size = self.type_size(ty);
519                 let left = self.memory.read_int(args[0], size)?;
520                 let right = self.memory.read_int(args[1], size)?;
521                 let (n, overflowed) = unsafe {
522                     ::std::intrinsics::add_with_overflow::<i64>(left, right)
523                 };
524                 self.memory.write_int(dest, n, size)?;
525                 self.memory.write_bool(dest.offset(size as isize), overflowed)?;
526             }
527
528             // FIXME(solson): Handle different integer types correctly.
529             "mul_with_overflow" => {
530                 let ty = *substs.types.get(subst::FnSpace, 0);
531                 let size = self.type_size(ty);
532                 let left = self.memory.read_int(args[0], size)?;
533                 let right = self.memory.read_int(args[1], size)?;
534                 let (n, overflowed) = unsafe {
535                     ::std::intrinsics::mul_with_overflow::<i64>(left, right)
536                 };
537                 self.memory.write_int(dest, n, size)?;
538                 self.memory.write_bool(dest.offset(size as isize), overflowed)?;
539             }
540
541             "offset" => {
542                 let pointee_ty = *substs.types.get(subst::FnSpace, 0);
543                 let pointee_size = self.type_size(pointee_ty) as isize;
544                 let ptr_arg = args[0];
545                 let offset = self.memory.read_isize(args[1])?;
546
547                 match self.memory.read_ptr(ptr_arg) {
548                     Ok(ptr) => {
549                         let result_ptr = ptr.offset(offset as isize * pointee_size);
550                         self.memory.write_ptr(dest, result_ptr)?;
551                     }
552                     Err(EvalError::ReadBytesAsPointer) => {
553                         let addr = self.memory.read_isize(ptr_arg)?;
554                         let result_addr = addr + offset * pointee_size as i64;
555                         self.memory.write_isize(dest, result_addr)?;
556                     }
557                     Err(e) => return Err(e),
558                 }
559             }
560
561             // FIXME(solson): Handle different integer types correctly. Use primvals?
562             "overflowing_sub" => {
563                 let ty = *substs.types.get(subst::FnSpace, 0);
564                 let size = self.type_size(ty);
565                 let left = self.memory.read_int(args[0], size)?;
566                 let right = self.memory.read_int(args[1], size)?;
567                 let n = left.wrapping_sub(right);
568                 self.memory.write_int(dest, n, size)?;
569             }
570
571             "size_of" => {
572                 let ty = *substs.types.get(subst::FnSpace, 0);
573                 let size = self.type_size(ty) as u64;
574                 self.memory.write_uint(dest, size, dest_size)?;
575             }
576
577             "size_of_val" => {
578                 let ty = *substs.types.get(subst::FnSpace, 0);
579                 if self.type_is_sized(ty) {
580                     let size = self.type_size(ty) as u64;
581                     self.memory.write_uint(dest, size, dest_size)?;
582                 } else {
583                     match ty.sty {
584                         ty::TySlice(_) | ty::TyStr => {
585                             let elem_ty = ty.sequence_element_type(self.tcx);
586                             let elem_size = self.type_size(elem_ty) as u64;
587                             let ptr_size = self.memory.pointer_size as isize;
588                             let n = self.memory.read_usize(args[0].offset(ptr_size))?;
589                             self.memory.write_uint(dest, n * elem_size, dest_size)?;
590                         }
591
592                         _ => panic!("unimplemented: size_of_val::<{:?}>", ty),
593                     }
594                 }
595             }
596
597             "transmute" => {
598                 let ty = *substs.types.get(subst::FnSpace, 0);
599                 self.move_(args[0], dest, ty)?;
600             }
601             "uninit" => self.memory.mark_definedness(dest, dest_size, false)?,
602
603             name => panic!("can't handle intrinsic: {}", name),
604         }
605
606         // Since we pushed no stack frame, the main loop will act
607         // as if the call just completed and it's returning to the
608         // current frame.
609         Ok(TerminatorTarget::Call)
610     }
611
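    /// Emulates a foreign (`extern "C"`) function, identified by its `#[link_name]` or item name.
    /// Only the Rust allocator shims are currently supported.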
612     fn call_c_abi(
613         &mut self,
614         def_id: DefId,
615         args: &[mir::Operand<'tcx>],
616         dest: Pointer
617     ) -> EvalResult<TerminatorTarget> {
618         let name = self.tcx.item_name(def_id);
619         let attrs = self.tcx.get_attrs(def_id);
620         let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") {
621             Some(ln) => ln.clone(),
622             None => name.as_str(),
623         };
624
625         let args_res: EvalResult<Vec<Pointer>> = args.iter()
626             .map(|arg| self.eval_operand(arg))
627             .collect();
628         let args = args_res?;
629
630         match &link_name[..] {
631             "__rust_allocate" => {
632                 let size = self.memory.read_usize(args[0])?;
633                 let ptr = self.memory.allocate(size as usize);
634                 self.memory.write_ptr(dest, ptr)?;
635             }
636
637             "__rust_reallocate" => {
638                 let ptr = self.memory.read_ptr(args[0])?;
639                 let size = self.memory.read_usize(args[2])?;
640                 self.memory.reallocate(ptr, size as usize)?;
641                 self.memory.write_ptr(dest, ptr)?;
642             }
643
644             _ => panic!("can't call C ABI function: {}", link_name),
645         }
646
647         // Since we pushed no stack frame, the main loop will act
648         // as if the call just completed and it's returning to the
649         // current frame.
650         Ok(TerminatorTarget::Call)
651     }
652
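    /// Moves each operand into `dest` at the corresponding field offset; used to build up
    /// aggregate values (tuples, structs, enum variants, and arrays).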
653     fn assign_fields<I: IntoIterator<Item = u64>>(
654         &mut self,
655         dest: Pointer,
656         offsets: I,
657         operands: &[mir::Operand<'tcx>],
658     ) -> EvalResult<()> {
659         for (offset, operand) in offsets.into_iter().zip(operands) {
660             let src = self.eval_operand(operand)?;
661             let src_ty = self.operand_ty(operand);
662             let field_dest = dest.offset(offset as isize);
663             self.move_(src, field_dest, src_ty)?;
664         }
665         Ok(())
666     }
667
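    /// Evaluates an rvalue and writes its value into the destination lvalue.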
668     fn eval_assignment(&mut self, lvalue: &mir::Lvalue<'tcx>, rvalue: &mir::Rvalue<'tcx>)
669         -> EvalResult<()>
670     {
671         let dest = self.eval_lvalue(lvalue)?.to_ptr();
672         let dest_ty = self.lvalue_ty(lvalue);
673         let dest_layout = self.type_layout(dest_ty);
674
675         use rustc::mir::repr::Rvalue::*;
676         match *rvalue {
677             Use(ref operand) => {
678                 let src = self.eval_operand(operand)?;
679                 self.move_(src, dest, dest_ty)?;
680             }
681
682             BinaryOp(bin_op, ref left, ref right) => {
683                 let left_ptr = self.eval_operand(left)?;
684                 let left_ty = self.operand_ty(left);
685                 let left_val = self.read_primval(left_ptr, left_ty)?;
686
687                 let right_ptr = self.eval_operand(right)?;
688                 let right_ty = self.operand_ty(right);
689                 let right_val = self.read_primval(right_ptr, right_ty)?;
690
691                 let val = primval::binary_op(bin_op, left_val, right_val)?;
692                 self.memory.write_primval(dest, val)?;
693             }
694
695             UnaryOp(un_op, ref operand) => {
696                 let ptr = self.eval_operand(operand)?;
697                 let ty = self.operand_ty(operand);
698                 let val = self.read_primval(ptr, ty)?;
699                 self.memory.write_primval(dest, primval::unary_op(un_op, val))?;
700             }
701
702             Aggregate(ref kind, ref operands) => {
703                 use rustc::ty::layout::Layout::*;
704                 match *dest_layout {
705                     Univariant { ref variant, .. } => {
706                         let offsets = iter::once(0)
707                             .chain(variant.offset_after_field.iter().map(|s| s.bytes()));
708                         self.assign_fields(dest, offsets, operands)?;
709                     }
710
711                     Array { .. } => {
712                         let elem_size = match dest_ty.sty {
713                             ty::TyArray(elem_ty, _) => self.type_size(elem_ty) as u64,
714                             _ => panic!("tried to assign {:?} to non-array type {:?}",
715                                         kind, dest_ty),
716                         };
717                         let offsets = (0..).map(|i| i * elem_size);
718                         self.assign_fields(dest, offsets, operands)?;
719                     }
720
721                     General { discr, ref variants, .. } => {
722                         if let mir::AggregateKind::Adt(adt_def, variant, _) = *kind {
723                             let discr_val = adt_def.variants[variant].disr_val.to_u64_unchecked();
724                             let discr_size = discr.size().bytes() as usize;
725                             self.memory.write_uint(dest, discr_val, discr_size)?;
726
727                             let offsets = variants[variant].offset_after_field.iter()
728                                 .map(|s| s.bytes());
729                             self.assign_fields(dest, offsets, operands)?;
730                         } else {
731                             panic!("tried to assign {:?} to Layout::General", kind);
732                         }
733                     }
734
735                     RawNullablePointer { nndiscr, .. } => {
736                         if let mir::AggregateKind::Adt(_, variant, _) = *kind {
737                             if nndiscr == variant as u64 {
738                                 assert_eq!(operands.len(), 1);
739                                 let operand = &operands[0];
740                                 let src = self.eval_operand(operand)?;
741                                 let src_ty = self.operand_ty(operand);
742                                 self.move_(src, dest, src_ty)?;
743                             } else {
744                                 assert_eq!(operands.len(), 0);
745                                 self.memory.write_isize(dest, 0)?;
746                             }
747                         } else {
748                             panic!("tried to assign {:?} to Layout::RawNullablePointer", kind);
749                         }
750                     }
751
752                     CEnum { discr, signed, .. } => {
753                         assert_eq!(operands.len(), 0);
754                         if let mir::AggregateKind::Adt(adt_def, variant, _) = *kind {
755                             let val = adt_def.variants[variant].disr_val.to_u64_unchecked();
756                             let size = discr.size().bytes() as usize;
757
758                             if signed {
759                                 self.memory.write_int(dest, val as i64, size)?;
760                             } else {
761                                 self.memory.write_uint(dest, val, size)?;
762                             }
763                         } else {
764                             panic!("tried to assign {:?} to Layout::CEnum", kind);
765                         }
766                     }
767
768                     _ => panic!("can't handle destination layout {:?} when assigning {:?}",
769                                 dest_layout, kind),
770                 }
771             }
772
773             Repeat(ref operand, _) => {
774                 let (elem_size, length) = match dest_ty.sty {
775                     ty::TyArray(elem_ty, n) => (self.type_size(elem_ty), n),
776                     _ => panic!("tried to assign array-repeat to non-array type {:?}", dest_ty),
777                 };
778
779                 let src = self.eval_operand(operand)?;
780                 for i in 0..length {
781                     let elem_dest = dest.offset((i * elem_size) as isize);
782                     self.memory.copy(src, elem_dest, elem_size)?;
783                 }
784             }
785
786             Len(ref lvalue) => {
787                 let src = self.eval_lvalue(lvalue)?;
788                 let ty = self.lvalue_ty(lvalue);
789                 let len = match ty.sty {
790                     ty::TyArray(_, n) => n as u64,
791                     ty::TySlice(_) => if let LvalueExtra::Length(n) = src.extra {
792                         n
793                     } else {
794                         panic!("Rvalue::Len of a slice given non-slice pointer: {:?}", src);
795                     },
796                     _ => panic!("Rvalue::Len expected array or slice, got {:?}", ty),
797                 };
798                 self.memory.write_usize(dest, len)?;
799             }
800
801             Ref(_, _, ref lvalue) => {
802                 let lv = self.eval_lvalue(lvalue)?;
803                 self.memory.write_ptr(dest, lv.ptr)?;
804                 match lv.extra {
805                     LvalueExtra::None => {},
806                     LvalueExtra::Length(len) => {
807                         let len_ptr = dest.offset(self.memory.pointer_size as isize);
808                         self.memory.write_usize(len_ptr, len)?;
809                     }
810                     LvalueExtra::DowncastVariant(..) =>
811                         panic!("attempted to take a reference to an enum downcast lvalue"),
812                 }
813             }
814
815             Box(ty) => {
816                 let size = self.type_size(ty);
817                 let ptr = self.memory.allocate(size);
818                 self.memory.write_ptr(dest, ptr)?;
819             }
820
821             Cast(kind, ref operand, dest_ty) => {
822                 let src = self.eval_operand(operand)?;
823                 let src_ty = self.operand_ty(operand);
824
825                 use rustc::mir::repr::CastKind::*;
826                 match kind {
827                     Unsize => {
828                         self.move_(src, dest, src_ty)?;
829                         let src_pointee_ty = pointee_type(src_ty).unwrap();
830                         let dest_pointee_ty = pointee_type(dest_ty).unwrap();
831
832                         match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
833                             (&ty::TyArray(_, length), &ty::TySlice(_)) => {
834                                 let len_ptr = dest.offset(self.memory.pointer_size as isize);
835                                 self.memory.write_usize(len_ptr, length as u64)?;
836                             }
837
838                             _ => panic!("can't handle cast: {:?}", rvalue),
839                         }
840                     }
841
842                     Misc => {
843                         // FIXME(solson): Wrong for almost everything.
844                         let size = dest_layout.size(&self.tcx.data_layout).bytes() as usize;
845                         self.memory.copy(src, dest, size)?;
846                     }
847
848                     _ => panic!("can't handle cast: {:?}", rvalue),
849                 }
850             }
851
852             Slice { .. } => unimplemented!(),
853             InlineAsm { .. } => unimplemented!(),
854         }
855
856         Ok(())
857     }
858
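    /// Returns a pointer to the operand's value, evaluating constants and promoted rvalues into
    /// interpreter memory as needed.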
859     fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<Pointer> {
860         use rustc::mir::repr::Operand::*;
861         match *op {
862             Consume(ref lvalue) => Ok(self.eval_lvalue(lvalue)?.to_ptr()),
863             Constant(mir::Constant { ref literal, .. }) => {
864                 use rustc::mir::repr::Literal::*;
865                 match *literal {
866                     Value { ref value } => Ok(self.const_to_ptr(value)?),
867                     Item { .. } => unimplemented!(),
868                     Promoted { index } => {
869                         // TODO(solson): Mark constants and statics as read-only and cache their
870                         // values.
871                         let current_mir = self.mir();
872                         let mir = &current_mir.promoted[index];
873                         self.call_nested(mir).map(Option::unwrap)
874                     }
875                 }
876             }
877         }
878     }
879
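    /// Computes the address (and any fat-pointer extra data) of an lvalue, handling projections
    /// such as field accesses, derefs, indexing, and enum downcasts.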
880     fn eval_lvalue(&mut self, lvalue: &mir::Lvalue<'tcx>) -> EvalResult<Lvalue> {
881         use rustc::mir::repr::Lvalue::*;
882         let ptr = match *lvalue {
883             ReturnPointer => self.frame().return_ptr
884                 .expect("ReturnPointer used in a function with no return value"),
885             Arg(i) => self.frame().locals[i as usize],
886             Var(i) => self.frame().locals[self.frame().var_offset + i as usize],
887             Temp(i) => self.frame().locals[self.frame().temp_offset + i as usize],
888
889             Static(def_id) => {
890                 // TODO(solson): Mark constants and statics as read-only and cache their values.
891                 let mir = self.load_mir(def_id);
892                 self.call_nested(&mir)?.unwrap()
893             }
894
895             Projection(ref proj) => {
896                 let base = self.eval_lvalue(&proj.base)?;
897                 let base_ty = self.lvalue_ty(&proj.base);
898                 let base_layout = self.type_layout(base_ty);
899
900                 use rustc::mir::repr::ProjectionElem::*;
901                 match proj.elem {
902                     Field(field, _) => {
903                         let variant = match *base_layout {
904                             Layout::Univariant { ref variant, .. } => variant,
905                             Layout::General { ref variants, .. } => {
906                                 if let LvalueExtra::DowncastVariant(variant_idx) = base.extra {
907                                     &variants[variant_idx]
908                                 } else {
909                                     panic!("field access on enum had no variant index");
910                                 }
911                             }
912                             Layout::RawNullablePointer { .. } => {
913                                 assert_eq!(field.index(), 0);
914                                 return Ok(base);
915                             }
916                             _ => panic!("field access on non-product type: {:?}", base_layout),
917                         };
918
919                         let offset = variant.field_offset(field.index()).bytes();
920                         base.ptr.offset(offset as isize)
921                     },
922
923                     Downcast(_, variant) => match *base_layout {
924                         Layout::General { discr, .. } => {
925                             return Ok(Lvalue {
926                                 ptr: base.ptr.offset(discr.size().bytes() as isize),
927                                 extra: LvalueExtra::DowncastVariant(variant),
928                             });
929                         }
930                         Layout::RawNullablePointer { .. } => return Ok(base),
931                         _ => panic!("variant downcast on non-aggregate type: {:?}", base_layout),
932                     },
933
934                     Deref => {
935                         let pointee_ty = pointee_type(base_ty).expect("Deref of non-pointer");
936                         let ptr = self.memory.read_ptr(base.ptr)?;
937                         let extra = match pointee_ty.sty {
938                             ty::TySlice(_) | ty::TyStr => {
939                                 let len_ptr = base.ptr.offset(self.memory.pointer_size as isize);
940                                 let len = self.memory.read_usize(len_ptr)?;
941                                 LvalueExtra::Length(len)
942                             }
943                             ty::TyTrait(_) => unimplemented!(),
944                             _ => LvalueExtra::None,
945                         };
946                         return Ok(Lvalue { ptr: ptr, extra: extra });
947                     }
948
949                     Index(ref operand) => {
950                         let elem_size = match base_ty.sty {
951                             ty::TyArray(elem_ty, _) |
952                             ty::TySlice(elem_ty) => self.type_size(elem_ty),
953                             _ => panic!("indexing expected an array or slice, got {:?}", base_ty),
954                         };
955                         let n_ptr = self.eval_operand(operand)?;
956                         let n = self.memory.read_usize(n_ptr)?;
957                         base.ptr.offset(n as isize * elem_size as isize)
958                     }
959
960                     ConstantIndex { .. } => unimplemented!(),
961                 }
962             }
963         };
964
965         Ok(Lvalue { ptr: ptr, extra: LvalueExtra::None })
966     }
967
968     // TODO(solson): Try making const_to_primval instead.
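    /// Allocates interpreter memory for a constant, writes its value, and returns the pointer.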
969     fn const_to_ptr(&mut self, const_val: &const_val::ConstVal) -> EvalResult<Pointer> {
970         use rustc::middle::const_val::ConstVal::*;
971         match *const_val {
972             Float(_f) => unimplemented!(),
973             Integral(int) => {
974                 // TODO(solson): Check int constant type.
975                 let ptr = self.memory.allocate(8);
976                 self.memory.write_uint(ptr, int.to_u64_unchecked(), 8)?;
977                 Ok(ptr)
978             }
979             Str(ref s) => {
980                 let psize = self.memory.pointer_size;
981                 let static_ptr = self.memory.allocate(s.len());
982                 let ptr = self.memory.allocate(psize * 2);
983                 self.memory.write_bytes(static_ptr, s.as_bytes())?;
984                 self.memory.write_ptr(ptr, static_ptr)?;
985                 self.memory.write_usize(ptr.offset(psize as isize), s.len() as u64)?;
986                 Ok(ptr)
987             }
988             ByteStr(ref bs) => {
989                 let psize = self.memory.pointer_size;
990                 let static_ptr = self.memory.allocate(bs.len());
991                 let ptr = self.memory.allocate(psize);
992                 self.memory.write_bytes(static_ptr, bs)?;
993                 self.memory.write_ptr(ptr, static_ptr)?;
994                 Ok(ptr)
995             }
996             Bool(b) => {
997                 let ptr = self.memory.allocate(1);
998                 self.memory.write_bool(ptr, b)?;
999                 Ok(ptr)
1000             }
1001             Char(_c)          => unimplemented!(),
1002             Struct(_node_id)  => unimplemented!(),
1003             Tuple(_node_id)   => unimplemented!(),
1004             Function(_def_id) => unimplemented!(),
1005             Array(_, _)       => unimplemented!(),
1006             Repeat(_, _)      => unimplemented!(),
1007             Dummy             => unimplemented!(),
1008         }
1009     }
1010
1011     fn lvalue_ty(&self, lvalue: &mir::Lvalue<'tcx>) -> ty::Ty<'tcx> {
1012         self.monomorphize(self.mir().lvalue_ty(self.tcx, lvalue).to_ty(self.tcx))
1013     }
1014
1015     fn operand_ty(&self, operand: &mir::Operand<'tcx>) -> ty::Ty<'tcx> {
1016         self.monomorphize(self.mir().operand_ty(self.tcx, operand))
1017     }
1018
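    /// Applies the current frame's `Substs` to a type and normalizes associated types, so the
    /// interpreter only ever deals with fully concrete types.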
1019     fn monomorphize(&self, ty: ty::Ty<'tcx>) -> ty::Ty<'tcx> {
1020         let substituted = ty.subst(self.tcx, self.substs());
1021         infer::normalize_associated_type(self.tcx, &substituted)
1022     }
1023
1024     fn type_needs_drop(&self, ty: ty::Ty<'tcx>) -> bool {
1025         self.tcx.type_needs_drop_given_env(ty, &self.tcx.empty_parameter_environment())
1026     }
1027
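    /// Copies a value of type `ty` from `src` to `dest`; if the type needs dropping, the source
    /// is marked with the drop-fill pattern so it will not be dropped again.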
1028     fn move_(&mut self, src: Pointer, dest: Pointer, ty: ty::Ty<'tcx>) -> EvalResult<()> {
1029         let size = self.type_size(ty);
1030         self.memory.copy(src, dest, size)?;
1031         if self.type_needs_drop(ty) {
1032             self.memory.drop_fill(src, size)?;
1033         }
1034         Ok(())
1035     }
1036
1037     fn type_is_sized(&self, ty: ty::Ty<'tcx>) -> bool {
1038         ty.is_sized(&self.tcx.empty_parameter_environment(), DUMMY_SP)
1039     }
1040
1041     fn type_size(&self, ty: ty::Ty<'tcx>) -> usize {
1042         self.type_layout(ty).size(&self.tcx.data_layout).bytes() as usize
1043     }
1044
1045     fn type_layout(&self, ty: ty::Ty<'tcx>) -> &'tcx Layout {
1046         // TODO(solson): Is this inefficient? Needs investigation.
1047         let ty = self.monomorphize(ty);
1048
1049         let infcx = infer::normalizing_infer_ctxt(self.tcx, &self.tcx.tables, ProjectionMode::Any);
1050
1051         // TODO(solson): Report this error properly.
1052         ty.layout(&infcx).unwrap()
1053     }
1054
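    /// Reads a primitive value (bool, integer, or pointer to a sized pointee) from memory.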
1055     pub fn read_primval(&mut self, ptr: Pointer, ty: ty::Ty<'tcx>) -> EvalResult<PrimVal> {
1056         use syntax::ast::{IntTy, UintTy};
1057         let val = match ty.sty {
1058             ty::TyBool              => PrimVal::Bool(self.memory.read_bool(ptr)?),
1059             ty::TyInt(IntTy::I8)    => PrimVal::I8(self.memory.read_int(ptr, 1)? as i8),
1060             ty::TyInt(IntTy::I16)   => PrimVal::I16(self.memory.read_int(ptr, 2)? as i16),
1061             ty::TyInt(IntTy::I32)   => PrimVal::I32(self.memory.read_int(ptr, 4)? as i32),
1062             ty::TyInt(IntTy::I64)   => PrimVal::I64(self.memory.read_int(ptr, 8)? as i64),
1063             ty::TyUint(UintTy::U8)  => PrimVal::U8(self.memory.read_uint(ptr, 1)? as u8),
1064             ty::TyUint(UintTy::U16) => PrimVal::U16(self.memory.read_uint(ptr, 2)? as u16),
1065             ty::TyUint(UintTy::U32) => PrimVal::U32(self.memory.read_uint(ptr, 4)? as u32),
1066             ty::TyUint(UintTy::U64) => PrimVal::U64(self.memory.read_uint(ptr, 8)? as u64),
1067
1068             // TODO(solson): Pick the PrimVal dynamically.
1069             ty::TyInt(IntTy::Is)   => PrimVal::I64(self.memory.read_isize(ptr)?),
1070             ty::TyUint(UintTy::Us) => PrimVal::U64(self.memory.read_usize(ptr)?),
1071
1072             ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
1073             ty::TyRawPtr(ty::TypeAndMut { ty, .. }) => {
1074                 if self.type_is_sized(ty) {
1075                     match self.memory.read_ptr(ptr) {
1076                         Ok(p) => PrimVal::AbstractPtr(p),
1077                         Err(EvalError::ReadBytesAsPointer) => {
1078                             PrimVal::IntegerPtr(self.memory.read_usize(ptr)?)
1079                         }
1080                         Err(e) => return Err(e),
1081                     }
1082                 } else {
1083                     panic!("unimplemented: primitive read of fat pointer type: {:?}", ty);
1084                 }
1085             }
1086
1087             _ => panic!("primitive read of non-primitive type: {:?}", ty),
1088         };
1089         Ok(val)
1090     }
1091
1092     fn frame(&self) -> &Frame<'mir, 'tcx> {
1093         self.stack.last().expect("no call frames exist")
1094     }
1095
1096     fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
1097         self.stack.last_mut().expect("no call frames exist")
1098     }
1099
1100     fn mir(&self) -> CachedMir<'mir, 'tcx> {
1101         self.frame().mir.clone()
1102     }
1103
1104     fn substs(&self) -> &'tcx Substs<'tcx> {
1105         self.substs_stack.last().cloned().unwrap_or_else(|| self.tcx.mk_substs(Substs::empty()))
1106     }
1107
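    /// Looks up the MIR for a function: crate-local items come from the `MirMap`, while items
    /// from other crates are loaded through the crate store and cached in `mir_cache`.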
1108     fn load_mir(&self, def_id: DefId) -> CachedMir<'mir, 'tcx> {
1109         match self.tcx.map.as_local_node_id(def_id) {
1110             Some(node_id) => CachedMir::Ref(self.mir_map.map.get(&node_id).unwrap()),
1111             None => {
1112                 let mut mir_cache = self.mir_cache.borrow_mut();
1113                 if let Some(mir) = mir_cache.get(&def_id) {
1114                     return CachedMir::Owned(mir.clone());
1115                 }
1116
1117                 let cs = &self.tcx.sess.cstore;
1118                 let mir = cs.maybe_get_item_mir(self.tcx, def_id).unwrap_or_else(|| {
1119                     panic!("no mir for {:?}", def_id);
1120                 });
1121                 let cached = Rc::new(mir);
1122                 mir_cache.insert(def_id, cached.clone());
1123                 CachedMir::Owned(cached)
1124             }
1125         }
1126     }
1127
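    /// Runs trait selection for `trait_ref`, fully resolving nested obligations, and returns the
    /// selected vtable (impl, closure, etc.).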
1128     fn fulfill_obligation(&self, trait_ref: ty::PolyTraitRef<'tcx>) -> traits::Vtable<'tcx, ()> {
1129         // Do the initial selection for the obligation. This yields the shallow result we are
1130         // looking for -- that is, what specific impl.
1131         let infcx = infer::normalizing_infer_ctxt(self.tcx, &self.tcx.tables, ProjectionMode::Any);
1132         let mut selcx = traits::SelectionContext::new(&infcx);
1133
1134         let obligation = traits::Obligation::new(
1135             traits::ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID),
1136             trait_ref.to_poly_trait_predicate(),
1137         );
1138         let selection = selcx.select(&obligation).unwrap().unwrap();
1139
1140         // Currently, we use a fulfillment context to completely resolve all nested obligations.
1141         // This is because they can inform the inference of the impl's type parameters.
1142         let mut fulfill_cx = traits::FulfillmentContext::new();
1143         let vtable = selection.map(|predicate| {
1144             fulfill_cx.register_predicate_obligation(&infcx, predicate);
1145         });
1146         infer::drain_fulfillment_cx_or_panic(
1147             DUMMY_SP, &infcx, &mut fulfill_cx, &vtable
1148         )
1149     }
1150
1151     /// Resolves a trait method to the concrete impl method (and substitutions) that will be called.
1152     pub fn trait_method(&self, def_id: DefId, substs: &'tcx Substs<'tcx>)
1153         -> (DefId, &'tcx Substs<'tcx>)
1154     {
1155         let method_item = self.tcx.impl_or_trait_item(def_id);
1156         let trait_id = method_item.container().id();
1157         let trait_ref = ty::Binder(substs.to_trait_ref(self.tcx, trait_id));
1158         match self.fulfill_obligation(trait_ref) {
1159             traits::VtableImpl(vtable_impl) => {
1160                 let impl_did = vtable_impl.impl_def_id;
1161                 let mname = self.tcx.item_name(def_id);
1162                 // Create a concatenated set of substitutions which includes those from the impl
1163                 // and those from the method:
1164                 let impl_substs = vtable_impl.substs.with_method_from(substs);
1165                 let substs = self.tcx.mk_substs(impl_substs);
1166                 let mth = get_impl_method(self.tcx, impl_did, substs, mname);
1167
1168                 (mth.method.def_id, mth.substs)
1169             }
1170
1171             traits::VtableClosure(vtable_closure) =>
1172                 (vtable_closure.closure_def_id, vtable_closure.substs.func_substs),
1173
1174             traits::VtableFnPointer(_fn_ty) => {
1175                 let _trait_closure_kind = self.tcx.lang_items.fn_trait_kind(trait_id).unwrap();
1176                 unimplemented!()
1177                 // let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, fn_ty);
1178
1179                 // let method_ty = def_ty(tcx, def_id, substs);
1180                 // let fn_ptr_ty = match method_ty.sty {
1181                 //     ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)),
1182                 //     _ => unreachable!("expected fn item type, found {}",
1183                 //                       method_ty)
1184                 // };
1185                 // Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty))
1186             }
1187
1188             traits::VtableObject(ref _data) => {
1189                 unimplemented!()
1190                 // Callee {
1191                 //     data: Virtual(traits::get_vtable_index_of_object_method(
1192                 //                   tcx, data, def_id)),
1193                 //                   ty: def_ty(tcx, def_id, substs)
1194                 // }
1195             }
1196             vtable => unreachable!("trait method resolved to unexpected vtable {:?}", vtable),
1197         }
1198     }
1199 }
1200
1201 fn pointee_type(ptr_ty: ty::Ty) -> Option<ty::Ty> {
1202     match ptr_ty.sty {
1203         ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
1204         ty::TyRawPtr(ty::TypeAndMut { ty, .. }) |
1205         ty::TyBox(ty) => {
1206             Some(ty)
1207         }
1208         _ => None,
1209     }
1210 }
1211
1212 impl Lvalue {
1213     fn to_ptr(self) -> Pointer {
1214         assert_eq!(self.extra, LvalueExtra::None);
1215         self.ptr
1216     }
1217 }
1218
1219 impl<'mir, 'tcx: 'mir> Deref for CachedMir<'mir, 'tcx> {
1220     type Target = mir::Mir<'tcx>;
1221     fn deref(&self) -> &mir::Mir<'tcx> {
1222         match *self {
1223             CachedMir::Ref(r) => r,
1224             CachedMir::Owned(ref rc) => &rc,
1225         }
1226     }
1227 }
1228
1229 #[derive(Debug)]
1230 pub struct ImplMethod<'tcx> {
1231     pub method: Rc<ty::Method<'tcx>>,
1232     pub substs: &'tcx Substs<'tcx>,
1233     pub is_provided: bool,
1234 }
1235
1236 /// Locates the applicable definition of a method, given its name.
1237 pub fn get_impl_method<'tcx>(
1238     tcx: &TyCtxt<'tcx>,
1239     impl_def_id: DefId,
1240     substs: &'tcx Substs<'tcx>,
1241     name: ast::Name,
1242 ) -> ImplMethod<'tcx> {
1243     assert!(!substs.types.needs_infer());
1244
1245     let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap();
1246     let trait_def = tcx.lookup_trait_def(trait_def_id);
1247     let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables, ProjectionMode::Any);
1248
1249     match trait_def.ancestors(impl_def_id).fn_defs(tcx, name).next() {
1250         Some(node_item) => {
1251             ImplMethod {
1252                 method: node_item.item,
1253                 substs: traits::translate_substs(&infcx, impl_def_id, substs, node_item.node),
1254                 is_provided: node_item.node.is_from_trait(),
1255             }
1256         }
1257         None => {
1258             bug!("method {:?} not found in {:?}", name, impl_def_id);
1259         }
1260     }
1261 }
1262
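/// Interprets every crate-local function annotated with `#[miri_run]` and dumps the allocation
/// holding its return value.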
1263 pub fn interpret_start_points<'tcx>(tcx: &TyCtxt<'tcx>, mir_map: &MirMap<'tcx>) {
1264     for (&id, mir) in &mir_map.map {
1265         for attr in tcx.map.attrs(id) {
1266             use syntax::attr::AttrMetaMethods;
1267             if attr.check_name("miri_run") {
1268                 let item = tcx.map.expect_item(id);
1269
1270                 println!("Interpreting: {}", item.name);
1271
1272                 let mut gecx = GlobalEvalContext::new(tcx, mir_map);
1273                 let mut fecx = FnEvalContext::new(&mut gecx);
1274                 match fecx.call_nested(mir) {
1275                     Ok(Some(return_ptr)) => fecx.memory.dump(return_ptr.alloc_id),
1276                     Ok(None) => println!("(diverging function returned)"),
1277                     Err(_e) => {
1278                         // TODO(solson): Detect whether the error was already reported or not.
1279                         // tcx.sess.err(&e.to_string());
1280                     }
1281                 }
1282
1283                 println!("");
1284             }
1285         }
1286     }
1287 }
1288
1289 // TODO(solson): Upstream these methods into rustc::ty::layout.
1290
1291 trait IntegerExt {
1292     fn size(self) -> Size;
1293 }
1294
1295 impl IntegerExt for layout::Integer {
1296     fn size(self) -> Size {
1297         use rustc::ty::layout::Integer::*;
1298         match self {
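            // A 1-bit discriminant still occupies a whole byte in memory.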
1299             I1 | I8 => Size::from_bits(8),
1300             I16 => Size::from_bits(16),
1301             I32 => Size::from_bits(32),
1302             I64 => Size::from_bits(64),
1303         }
1304     }
1305 }
1306
1307 trait StructExt {
1308     fn field_offset(&self, index: usize) -> Size;
1309 }
1310
1311 impl StructExt for layout::Struct {
1312     fn field_offset(&self, index: usize) -> Size {
1313         if index == 0 {
1314             Size::from_bytes(0)
1315         } else {
1316             self.offset_after_field[index - 1]
1317         }
1318     }
1319 }