1 // TODO(tsion): Remove this.
2 #![allow(unused_imports, dead_code, unused_variables)]
5 use byteorder::ByteOrder;
6 use rustc::middle::{const_eval, def_id, ty};
7 use rustc::middle::cstore::CrateStore;
8 use rustc::mir::repr::{self as mir, Mir};
9 use rustc::mir::mir_map::MirMap;
10 use std::collections::HashMap;
11 use syntax::ast::Attribute;
12 use syntax::attr::AttrMetaMethods;
// When true, each basic block entry, statement, and terminator is printed as it executes.
16 const TRACE_EXECUTION: bool = true;
20 use byteorder::ByteOrder;
21 use rustc::middle::ty;
22 use std::collections::HashMap;
27 alloc_map: HashMap<u64, Value>,
/// Opaque key identifying an allocation; the inner `u64` indexes `Memory::alloc_map`.
30 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
31 pub struct AllocId(u64);
33 // TODO(tsion): Remove this hack.
/// Builds an `AllocId` directly from a raw integer — temporary escape hatch, see TODO above.
34 pub fn alloc_id_hack(i: u64) -> AllocId {
38 // TODO(tsion): Shouldn't clone values.
39 #[derive(Clone, Debug)]
42 // TODO(tsion): relocations
43 // TODO(tsion): undef mask
46 #[derive(Clone, Debug, PartialEq, Eq)]
48 pub alloc_id: AllocId,
53 #[derive(Clone, Debug, PartialEq, Eq)]
// NOTE(review): this numbered listing elides lines, so the bodies below appear
// truncated (e.g. the `allocate` lines that bump `next_id` and return `id` are not shown).
/// Creates an empty memory: no allocations, ids handed out starting at 0.
63 pub fn new() -> Self {
64 Memory { next_id: 0, alloc_map: HashMap::new() }
/// Allocates `size` zeroed bytes under a fresh id.
67 pub fn allocate(&mut self, size: usize) -> AllocId {
68 let id = AllocId(self.next_id);
69 let val = Value { bytes: vec![0; size] };
70 self.alloc_map.insert(self.next_id, val);
/// Allocates an `i64`-sized cell and stores `n` into it in native byte order.
75 pub fn allocate_int(&mut self, n: i64) -> AllocId {
76 let id = self.allocate(mem::size_of::<i64>());
77 byteorder::NativeEndian::write_i64(&mut self.value_mut(id).unwrap().bytes, n);
/// Looks up the allocation for `id`, if any.
81 pub fn value(&self, id: AllocId) -> Option<&Value> {
82 self.alloc_map.get(&id.0)
/// Mutable counterpart of `value`.
85 pub fn value_mut(&mut self, id: AllocId) -> Option<&mut Value> {
86 self.alloc_map.get_mut(&id.0)
/// Returns this pointer advanced by `i` bytes within the same allocation.
91 pub fn offset(self, i: usize) -> Self {
92 Pointer { offset: self.offset + i, ..self }
97 // TODO(tsion): Cache these outputs.
/// Chooses a memory representation for `ty`; only signed integer types are handled so far.
98 pub fn from_ty(ty: ty::Ty) -> Self {
100 ty::TyInt(_) => Repr::Int,
101 _ => unimplemented!(),
/// Size in bytes occupied by a value of this representation.
105 pub fn size(&self) -> usize {
// A stack frame is its locals laid out back to back, so its size is their sum.
108 Repr::StackFrame { ref locals } =>
109 locals.iter().map(Repr::size).fold(0, |a, b| a + b)
114 use self::memory::{Pointer, Repr, Value};
116 // #[derive(Clone, Debug, PartialEq)]
120 // Int(i64), // FIXME(tsion): Should be bit-width aware.
122 // Adt { variant: usize, data_ptr: Pointer },
123 // Func(def_id::DefId),
129 /// +-----------------------+
131 /// | Arg(1) | arguments
133 /// | Arg(num_args - 1) |
134 /// + - - - - - - - - - - - +
136 /// | Var(1) | variables
138 /// | Var(num_vars - 1) |
139 /// + - - - - - - - - - - - +
141 /// | Temp(1) | temporaries
143 /// | Temp(num_temps - 1) |
144 /// + - - - - - - - - - - - +
145 /// | Aggregates | aggregates
146 /// +-----------------------+
150 // /// A pointer to a stack cell to write the return value of the current call, if it's not a
151 // /// divering call.
152 // return_ptr: Option<Pointer>,
158 // num_aggregate_fields: usize,
162 // fn size(&self) -> usize {
163 // self.num_args + self.num_vars + self.num_temps + self.num_aggregate_fields
166 // fn arg_offset(&self, i: usize) -> usize {
170 // fn var_offset(&self, i: usize) -> usize {
171 // self.offset + self.num_args + i
174 // fn temp_offset(&self, i: usize) -> usize {
175 // self.offset + self.num_args + self.num_vars + i
/// Interpreter state: the compiler's type context, the crate's MIR bodies,
/// and the abstract memory holding all runtime values. The commented-out
/// fields are a stack-frame design that is not active yet.
179 struct Interpreter<'a, 'tcx: 'a> {
180 tcx: &'a ty::ctxt<'tcx>,
181 mir_map: &'a MirMap<'tcx>,
182 // value_stack: Vec<Value>,
183 // call_stack: Vec<Frame>,
184 memory: memory::Memory,
187 impl<'a, 'tcx> Interpreter<'a, 'tcx> {
/// Creates an interpreter over `mir_map` with empty memory.
188 fn new(tcx: &'a ty::ctxt<'tcx>, mir_map: &'a MirMap<'tcx>) -> Self {
192 // value_stack: vec![Value::Uninit], // Allocate a spot for the top-level return value.
193 // call_stack: Vec::new(),
194 memory: memory::Memory::new(),
198 // fn push_stack_frame(&mut self, mir: &Mir, args: &[Value], return_ptr: Option<Pointer>) {
199 // let frame = Frame {
200 // return_ptr: return_ptr,
201 // offset: self.value_stack.len(),
202 // num_args: mir.arg_decls.len(),
203 // num_vars: mir.var_decls.len(),
204 // num_temps: mir.temp_decls.len(),
205 // num_aggregate_fields: 0,
208 // self.value_stack.extend(iter::repeat(Value::Uninit).take(frame.size()));
210 // for (i, arg) in args.iter().enumerate() {
211 // self.value_stack[frame.arg_offset(i)] = arg.clone();
214 // self.call_stack.push(frame);
217 // fn pop_stack_frame(&mut self) {
218 // let frame = self.call_stack.pop().expect("tried to pop stack frame, but there were none");
219 // self.value_stack.truncate(frame.offset);
222 // fn allocate_aggregate(&mut self, size: usize) -> Pointer {
223 // let frame = self.call_stack.last_mut().expect("missing call frame");
224 // frame.num_aggregate_fields += size;
226 // let ptr = Pointer::Stack(self.value_stack.len());
227 // self.value_stack.extend(iter::repeat(Value::Uninit).take(size));
/// Executes `mir` starting at START_BLOCK, running each block's statements and
/// following Goto/Drop edges until a `Return` terminator breaks the loop.
/// `args` and `return_ptr` are largely unused until the stack-frame machinery
/// (commented out below) is reinstated.
231 fn call(&mut self, mir: &Mir, args: &[Value], return_ptr: Option<Pointer>) {
232 // self.push_stack_frame(mir, args, return_ptr);
233 let mut block = mir::START_BLOCK;
236 if TRACE_EXECUTION { println!("Entering block: {:?}", block); }
237 let block_data = mir.basic_block_data(block);
// Execute every statement in the block; only plain assignments are handled.
239 for stmt in &block_data.statements {
240 if TRACE_EXECUTION { println!("{:?}", stmt); }
243 mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
244 let ptr = self.lvalue_to_ptr(lvalue);
245 self.eval_rvalue_into(rvalue, ptr);
250 if TRACE_EXECUTION { println!("{:?}", block_data.terminator()); }
// Decide where control flow goes next based on the block's terminator.
252 match *block_data.terminator() {
253 mir::Terminator::Return => break,
254 mir::Terminator::Goto { target } => block = target,
256 // mir::Terminator::Call { ref func, ref args, ref destination, .. } => {
257 // let ptr = destination.as_ref().map(|&(ref lv, _)| self.lvalue_to_ptr(lv));
258 // let func_val = self.operand_to_ptr(func);
260 // if let Value::Func(def_id) = func_val {
262 // let mir = match self.tcx.map.as_local_node_id(def_id) {
263 // Some(node_id) => self.mir_map.map.get(&node_id).unwrap(),
265 // let cstore = &self.tcx.sess.cstore;
266 // mir_data = cstore.maybe_get_item_mir(self.tcx, def_id).unwrap();
271 // let arg_vals: Vec<Value> =
272 // args.iter().map(|arg| self.operand_to_ptr(arg)).collect();
274 // self.call(mir, &arg_vals, ptr);
276 // if let Some((_, target)) = *destination {
280 // panic!("tried to call a non-function value: {:?}", func_val);
284 // mir::Terminator::If { ref cond, targets: (then_target, else_target) } => {
285 // match self.operand_to_ptr(cond) {
286 // Value::Bool(true) => block = then_target,
287 // Value::Bool(false) => block = else_target,
288 // cond_val => panic!("Non-boolean `if` condition value: {:?}", cond_val),
292 // mir::Terminator::SwitchInt { ref discr, ref values, ref targets, .. } => {
293 // let discr_val = self.read_lvalue(discr);
295 // let index = values.iter().position(|v| discr_val == self.eval_constant(v))
296 // .expect("discriminant matched no values");
298 // block = targets[index];
301 // mir::Terminator::Switch { ref discr, ref targets, .. } => {
302 // let discr_val = self.read_lvalue(discr);
304 // if let Value::Adt { variant, .. } = discr_val {
305 // block = targets[variant];
307 // panic!("Switch on non-Adt value: {:?}", discr_val);
// Drop currently just falls through to its target; no destructors run.
311 mir::Terminator::Drop { target, .. } => {
312 // TODO: Handle destructors and dynamic drop.
316 mir::Terminator::Resume => unimplemented!(),
317 _ => unimplemented!(),
321 // self.pop_stack_frame();
/// Resolves an MIR lvalue to a pointer into `self.memory`.
/// Only `ReturnPointer` is handled so far, and it is hard-wired to allocation 0 —
/// NOTE(review): this assumes the return slot is always the first allocation
/// made; confirm against the caller before relying on it.
324 fn lvalue_to_ptr(&self, lvalue: &mir::Lvalue) -> Pointer {
326 mir::Lvalue::ReturnPointer => Pointer {
327 alloc_id: self::memory::alloc_id_hack(0),
332 _ => unimplemented!(),
335 // let frame = self.call_stack.last().expect("missing call frame");
338 // mir::Lvalue::ReturnPointer =>
339 // frame.return_ptr.expect("ReturnPointer used in a function with no return value"),
340 // mir::Lvalue::Arg(i) => Pointer::Stack(frame.arg_offset(i as usize)),
341 // mir::Lvalue::Var(i) => Pointer::Stack(frame.var_offset(i as usize)),
342 // mir::Lvalue::Temp(i) => Pointer::Stack(frame.temp_offset(i as usize)),
344 // mir::Lvalue::Projection(ref proj) => {
345 // let base_ptr = self.lvalue_to_ptr(&proj.base);
348 // mir::ProjectionElem::Field(field, _) => {
349 // base_ptr.offset(field.index())
352 // mir::ProjectionElem::Downcast(_, variant) => {
353 // let adt_val = self.read_pointer(base_ptr);
354 // if let Value::Adt { variant: actual_variant, data_ptr } = adt_val {
355 // debug_assert_eq!(variant, actual_variant);
358 // panic!("Downcast attempted on non-ADT: {:?}", adt_val)
362 // mir::ProjectionElem::Deref => {
363 // let ptr_val = self.read_pointer(base_ptr);
364 // if let Value::Pointer(ptr) = ptr_val {
367 // panic!("Deref attempted on non-pointer: {:?}", ptr_val)
371 // mir::ProjectionElem::Index(ref _operand) => unimplemented!(),
372 // mir::ProjectionElem::ConstantIndex { .. } => unimplemented!(),
376 // _ => unimplemented!(),
/// Evaluates `left <bin_op> right` for integer operands, writing the i64
/// result into `out`. Comparison operators (which would produce bools) are
/// still commented out below.
380 fn eval_binary_op(&mut self, bin_op: mir::BinOp, left: Pointer, right: Pointer, out: Pointer) {
381 match (left.repr, right.repr, out.repr) {
382 (Repr::Int, Repr::Int, Repr::Int) => {
// NOTE(review): these reads (and the write below) start at byte 0 of each
// allocation and ignore the pointers' `offset` fields — confirm operand
// pointers always have offset 0 here, unlike `write_pointer` which honors it.
383 let l = byteorder::NativeEndian::read_i64(&self.memory.value(left.alloc_id).unwrap().bytes);
384 let r = byteorder::NativeEndian::read_i64(&self.memory.value(right.alloc_id).unwrap().bytes);
// NOTE(review): Div/Rem panic when r == 0, Shl/Shr panic for shift amounts
// >= 64, and +,-,* panic on overflow in debug builds — no interpreter-level
// error handling yet.
385 let n = match bin_op {
386 mir::BinOp::Add => l + r,
387 mir::BinOp::Sub => l - r,
388 mir::BinOp::Mul => l * r,
389 mir::BinOp::Div => l / r,
390 mir::BinOp::Rem => l % r,
391 mir::BinOp::BitXor => l ^ r,
392 mir::BinOp::BitAnd => l & r,
393 mir::BinOp::BitOr => l | r,
394 mir::BinOp::Shl => l << r,
395 mir::BinOp::Shr => l >> r,
396 _ => unimplemented!(),
397 // mir::BinOp::Eq => Value::Bool(l == r),
398 // mir::BinOp::Lt => Value::Bool(l < r),
399 // mir::BinOp::Le => Value::Bool(l <= r),
400 // mir::BinOp::Ne => Value::Bool(l != r),
401 // mir::BinOp::Ge => Value::Bool(l >= r),
402 // mir::BinOp::Gt => Value::Bool(l > r),
404 byteorder::NativeEndian::write_i64(&mut self.memory.value_mut(out.alloc_id).unwrap().bytes, n);
407 _ => unimplemented!(),
/// Evaluates `rvalue` and stores the resulting bytes at `out`.
/// Handles plain `Use` copies and integer `BinaryOp`s; everything else panics
/// or is still sketched in commented-out code.
411 fn eval_rvalue_into(&mut self, rvalue: &mir::Rvalue, out: Pointer) {
// A plain use copies the operand's current value to the destination.
413 mir::Rvalue::Use(ref operand) => {
414 let ptr = self.operand_to_ptr(operand);
415 let val = self.read_pointer(ptr);
416 self.write_pointer(out, val);
// Binary ops write their result directly via eval_binary_op.
419 mir::Rvalue::BinaryOp(bin_op, ref left, ref right) => {
420 let left_ptr = self.operand_to_ptr(left);
421 let right_ptr = self.operand_to_ptr(right);
422 self.eval_binary_op(bin_op, left_ptr, right_ptr, out)
// Unary ops are not implemented yet; the old Value-based sketch follows.
425 mir::Rvalue::UnaryOp(un_op, ref operand) => {
427 // match (un_op, self.operand_to_ptr(operand)) {
428 // (mir::UnOp::Not, Value::Int(n)) => Value::Int(!n),
429 // (mir::UnOp::Neg, Value::Int(n)) => Value::Int(-n),
430 // _ => unimplemented!(),
434 // mir::Rvalue::Ref(_region, _kind, ref lvalue) => {
435 // Value::Pointer(self.lvalue_to_ptr(lvalue))
438 // mir::Rvalue::Aggregate(mir::AggregateKind::Adt(ref adt_def, variant, _substs),
439 // ref operands) => {
440 // let max_fields = adt_def.variants
442 // .map(|v| v.fields.len())
446 // let ptr = self.allocate_aggregate(max_fields);
448 // for (i, operand) in operands.iter().enumerate() {
449 // let val = self.operand_to_ptr(operand);
450 // self.write_pointer(ptr.offset(i), val);
453 // Value::Adt { variant: variant, data_ptr: ptr }
456 ref r => panic!("can't handle rvalue: {:?}", r),
/// Resolves an operand to a pointer at which its value lives: lvalues are
/// looked up in place, constant literals are materialized into fresh memory.
460 fn operand_to_ptr(&mut self, op: &mir::Operand) -> Pointer {
462 mir::Operand::Consume(ref lvalue) => self.lvalue_to_ptr(lvalue),
464 mir::Operand::Constant(ref constant) => {
465 match constant.literal {
466 mir::Literal::Value { ref value } => self.eval_constant(value),
// Item literals (e.g. function references) are not supported yet.
468 mir::Literal::Item { def_id, kind, .. } => match kind {
469 // mir::ItemKind::Function | mir::ItemKind::Method => Value::Func(def_id),
470 _ => panic!("can't handle item literal: {:?}", constant.literal),
/// Materializes a constant into freshly allocated memory and returns a pointer
/// to it. Only signed integer constants are supported so far.
477 fn eval_constant(&mut self, const_val: &const_eval::ConstVal) -> Pointer {
479 const_eval::ConstVal::Float(_f) => unimplemented!(),
480 // const_eval::ConstVal::Int(i) => Value::new_int(i),
481 const_eval::ConstVal::Int(i) => Pointer {
482 alloc_id: self.memory.allocate_int(i),
486 const_eval::ConstVal::Uint(_u) => unimplemented!(),
487 const_eval::ConstVal::Str(ref _s) => unimplemented!(),
488 const_eval::ConstVal::ByteStr(ref _bs) => unimplemented!(),
// NOTE(review): unused binding — rename `b` to `_b` for consistency with the other arms.
489 const_eval::ConstVal::Bool(b) => unimplemented!(),
490 const_eval::ConstVal::Struct(_node_id) => unimplemented!(),
491 const_eval::ConstVal::Tuple(_node_id) => unimplemented!(),
492 const_eval::ConstVal::Function(_def_id) => unimplemented!(),
493 const_eval::ConstVal::Array(_, _) => unimplemented!(),
494 const_eval::ConstVal::Repeat(_, _) => unimplemented!(),
498 // fn read_lvalue(&self, lvalue: &mir::Lvalue) -> Value {
499 // self.read_pointer(self.lvalue_to_ptr(lvalue))
/// Reads the value behind `p`, panicking if the allocation is missing.
/// NOTE(review): clones the entire allocation and ignores `p.offset` —
/// confirm callers only pass offset-0 pointers here.
502 fn read_pointer(&self, p: Pointer) -> Value {
503 self.memory.value(p.alloc_id).unwrap().clone()
/// Copies `val`'s bytes into the allocation behind `p`, starting at `p.offset`.
/// Panics if the destination allocation is missing or too small for the copy.
506 fn write_pointer(&mut self, p: Pointer, val: Value) {
507 // TODO(tsion): Remove panics.
508 let alloc = self.memory.value_mut(p.alloc_id).unwrap();
509 for (i, byte) in val.bytes.into_iter().enumerate() {
510 alloc.bytes[p.offset + i] = byte;
/// Entry point: interprets every function in `mir_map` that carries a
/// `#[miri_run]` attribute, then prints the returned value (or checks it
/// against the attribute's `expected` string via `check_expected`).
515 pub fn interpret_start_points<'tcx>(tcx: &ty::ctxt<'tcx>, mir_map: &MirMap<'tcx>) {
516 for (&id, mir) in &mir_map.map {
517 for attr in tcx.map.attrs(id) {
518 if attr.check_name("miri_run") {
519 let item = tcx.map.expect_item(id);
521 println!("Interpreting: {}", item.name);
523 let mut interpreter = Interpreter::new(tcx, mir_map);
// Converging fns get a return slot sized from their return type's repr;
// diverging fns get none.
524 let return_ptr = match mir.return_ty {
525 ty::FnOutput::FnConverging(ty) => {
526 let repr = Repr::from_ty(ty);
528 alloc_id: interpreter.memory.allocate(repr.size()),
533 ty::FnOutput::FnDiverging => None,
535 interpreter.call(mir, &[], return_ptr.clone());
// NOTE(review): this unwrap panics for diverging fns (return_ptr is None) —
// confirm elided lines guard this, or that #[miri_run] fns always converge.
537 let val_str = format!("{:?}", interpreter.read_pointer(return_ptr.unwrap()));
538 if !check_expected(&val_str, attr) {
539 println!("=> {}\n", val_str);
/// Compares the interpreter's actual output string against the value given in
/// a `#[miri_run(expected = "...")]` attribute, printing pass/fail to stdout.
/// (Function continues past the visible portion of this listing.)
546 fn check_expected(actual: &str, attr: &Attribute) -> bool {
547 if let Some(meta_items) = attr.meta_item_list() {
548 for meta_item in meta_items {
549 if meta_item.check_name("expected") {
// NOTE(review): value_str().unwrap() panics if `expected` has no string value.
550 let expected = meta_item.value_str().unwrap();
552 if actual == &expected[..] {
553 println!("Test passed!\n");
555 println!("Actual value:\t{}\nExpected value:\t{}\n", actual, expected);