1 use std::collections::HashMap;
4 use rustc::hir::def_id::DefId;
5 use rustc::hir::map::definitions::DefPathData;
6 use rustc::middle::const_val::ConstVal;
7 use rustc_const_math::{ConstInt, ConstUsize};
9 use rustc::traits::Reveal;
10 use rustc::ty::layout::{self, Layout, Size};
11 use rustc::ty::subst::{Subst, Substs, Kind};
12 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable, Binder};
14 use rustc_data_structures::indexed_vec::Idx;
15 use syntax::codemap::{self, DUMMY_SP, Span};
18 use syntax::symbol::Symbol;
20 use error::{EvalError, EvalResult};
21 use lvalue::{Global, GlobalId, Lvalue, LvalueExtra};
22 use memory::{Memory, Pointer};
24 use value::{PrimVal, PrimValKind, Value};
// Core state of the MIR interpreter: the rustc type context, a virtual memory
// system, cached evaluation results for globals, the virtual call stack with
// its depth/step limits, and shared drop glue for sequences.
// NOTE(review): this chunk is truncated — the stale inline line numbers skip
// and the struct's closing brace is not visible; confirm against the full file.
26 pub struct EvalContext<'a, 'tcx: 'a> {
27 /// The results of the type checker, from rustc.
28 pub(crate) tcx: TyCtxt<'a, 'tcx, 'tcx>,
30 /// The virtual memory system.
31 pub(crate) memory: Memory<'a, 'tcx>,
33 /// Precomputed statics, constants and promoteds.
34 pub(crate) globals: HashMap<GlobalId<'tcx>, Global<'tcx>>,
36 /// The virtual call stack.
37 pub(crate) stack: Vec<Frame<'tcx>>,
39 /// The maximum number of stack frames allowed
40 pub(crate) stack_limit: usize,
42 /// The maximum number of operations that may be executed.
43 /// This prevents infinite loops and huge computations from freezing up const eval.
44 /// Remove once halting problem is solved.
45 pub(crate) steps_remaining: u64,
47 /// Drop glue for arrays and slices
48 pub(crate) seq_drop_glue: &'tcx mir::Mir<'tcx>,
// One virtual call-stack frame: the callee's MIR, its instance (def_id +
// substs), call-site span, where/how to return, the flat locals array, and the
// current execution position within the function.
// NOTE(review): truncated chunk — the `stmt` field (current statement index,
// referenced by the doc on the stale line 91) and the closing brace are elided.
52 pub struct Frame<'tcx> {
53 ////////////////////////////////////////////////////////////////////////////////
54 // Function and callsite information
55 ////////////////////////////////////////////////////////////////////////////////
57 /// The MIR for the function called on this frame.
58 pub mir: &'tcx mir::Mir<'tcx>,
60 /// The def_id and substs of the current function
61 pub instance: ty::Instance<'tcx>,
63 /// The span of the call site.
64 pub span: codemap::Span,
66 ////////////////////////////////////////////////////////////////////////////////
67 // Return lvalue and locals
68 ////////////////////////////////////////////////////////////////////////////////
70 /// The block to return to when returning from the current stack frame
71 pub return_to_block: StackPopCleanup,
73 /// The location where the result of the current stack frame should be written to.
74 pub return_lvalue: Lvalue<'tcx>,
76 /// The list of locals for this stack frame, stored in order as
77 /// `[arguments..., variables..., temporaries...]`. The locals are stored as `Value`s, which
78 /// can either directly contain `PrimVal` or refer to some part of an `Allocation`.
80 /// Before being initialized, all locals are `Value::ByVal(PrimVal::Undef)`.
81 pub locals: Vec<Value>,
83 ////////////////////////////////////////////////////////////////////////////////
84 // Current position within the function
85 ////////////////////////////////////////////////////////////////////////////////
87 /// The block that is currently executed (or will be executed after the above call stacks
89 pub block: mir::BasicBlock,
91 /// The index of the currently evaluated statment.
// What to do when the current stack frame is popped.
// NOTE(review): truncated — the `MarkStatic(bool)` and `None` variants used by
// `pop_stack_frame` below are elided from this view, as is the closing brace.
95 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
96 pub enum StackPopCleanup {
97 /// The stackframe existed to compute the initial value of a static/constant, make sure it
98 /// isn't modifyable afterwards in case of constants.
99 /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
100 /// references or deallocated
101 /// The bool decides whether the value is mutable (true) or not (false)
103 /// A regular stackframe added due to a function call will need to get forwarded to the next
105 Goto(mir::BasicBlock),
106 /// The main function and diverging functions have nowhere to return to
// Caps on interpreter resource usage (memory, stack depth, and — per the
// Default impl below — a step limit whose field declaration is elided here).
110 #[derive(Copy, Clone, Debug)]
111 pub struct ResourceLimits {
112 pub memory_size: u64,
114 pub stack_limit: usize,
// Default limits: 100 MB of virtual memory and one million interpreter steps.
// NOTE(review): truncated — the struct-literal opener, the `stack_limit`
// default, and the closing braces are elided from this view.
117 impl Default for ResourceLimits {
118 fn default() -> Self {
120 memory_size: 100 * 1024 * 1024, // 100 MB
121 step_limit: 1_000_000,
// Constructs a fresh `EvalContext`. The bulk of this function hand-builds the
// MIR for generic sequence drop glue — a counted loop over `*a0: &mut [T]`:
//   bb0 (start): i = 0; len = Len(*a0); goto bb1
//   bb1 (head):  done = (i == len); switch done { false => bb2, true => bb4 }
//   bb2 (loop):  Drop((*a0)[i]) -> bb3
//   bb3 (inc):   i = i + 1; goto bb1
//   bb4 (ret):   Return
// The glue is arena-allocated via `tcx.alloc_mir` and stored as
// `seq_drop_glue`. NOTE(review): many interior lines (struct-literal closers,
// several field initializers, local-decl types) are elided in this view.
127 impl<'a, 'tcx> EvalContext<'a, 'tcx> {
128 pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, limits: ResourceLimits) -> Self {
129 let source_info = mir::SourceInfo {
131 scope: mir::ARGUMENT_VISIBILITY_SCOPE
133 // i = 0; len = Len(*a0); goto head;
134 let start_block = mir::BasicBlockData {
138 kind: mir::StatementKind::Assign(
139 mir::Lvalue::Local(mir::Local::new(2)),
140 mir::Rvalue::Use(mir::Operand::Constant(mir::Constant {
143 literal: mir::Literal::Value {
144 value: ConstVal::Integral(ConstInt::Usize(ConstUsize::new(0, tcx.sess.target.uint_type).unwrap())),
151 kind: mir::StatementKind::Assign(
152 mir::Lvalue::Local(mir::Local::new(3)),
153 mir::Rvalue::Len(mir::Lvalue::Projection(Box::new(mir::LvalueProjection {
154 base: mir::Lvalue::Local(mir::Local::new(1)),
155 elem: mir::ProjectionElem::Deref,
160 terminator: Some(mir::Terminator {
161 source_info: source_info,
162 kind: mir::TerminatorKind::Goto { target: mir::BasicBlock::new(1) },
166 // head: done = i == len; switch done { 1 => ret, 0 => loop }
167 let head = mir::BasicBlockData {
171 kind: mir::StatementKind::Assign(
172 mir::Lvalue::Local(mir::Local::new(4)),
173 mir::Rvalue::BinaryOp(
175 mir::Operand::Consume(mir::Lvalue::Local(mir::Local::new(2))),
176 mir::Operand::Consume(mir::Lvalue::Local(mir::Local::new(3))),
181 terminator: Some(mir::Terminator {
182 source_info: source_info,
183 kind: mir::TerminatorKind::SwitchInt {
185 mir::BasicBlock::new(2),
186 mir::BasicBlock::new(4),
188 discr: mir::Operand::Consume(mir::Lvalue::Local(mir::Local::new(4))),
189 switch_ty: tcx.types.bool,
190 values: vec![ConstInt::U8(0)].into(),
195 // loop: drop (*a0)[i]; goto inc;
196 let loop_ = mir::BasicBlockData {
197 statements: Vec::new(),
198 terminator: Some(mir::Terminator {
199 source_info: source_info,
200 kind: mir::TerminatorKind::Drop {
201 target: mir::BasicBlock::new(3),
203 location: mir::Lvalue::Projection(Box::new(
204 mir::LvalueProjection {
205 base: mir::Lvalue::Projection(Box::new(
206 mir::LvalueProjection {
207 base: mir::Lvalue::Local(mir::Local::new(1)),
208 elem: mir::ProjectionElem::Deref,
211 elem: mir::ProjectionElem::Index(mir::Operand::Consume(mir::Lvalue::Local(mir::Local::new(2)))),
218 // inc: i++; goto head;
219 let inc = mir::BasicBlockData {
223 kind: mir::StatementKind::Assign(
224 mir::Lvalue::Local(mir::Local::new(2)),
225 mir::Rvalue::BinaryOp(
227 mir::Operand::Consume(mir::Lvalue::Local(mir::Local::new(2))),
228 mir::Operand::Constant(mir::Constant {
231 literal: mir::Literal::Value {
232 value: ConstVal::Integral(ConstInt::Usize(ConstUsize::new(1, tcx.sess.target.uint_type).unwrap())),
239 terminator: Some(mir::Terminator {
240 source_info: source_info,
241 kind: mir::TerminatorKind::Goto { target: mir::BasicBlock::new(1) },
// ret: plain Return terminator, no statements.
246 let ret = mir::BasicBlockData {
247 statements: Vec::new(),
248 terminator: Some(mir::Terminator {
249 source_info: source_info,
250 kind: mir::TerminatorKind::Return,
// Local declarations: _0 return slot, _1 = a0: *mut [T], _2 = i, _3 = len,
// _4 = done. Types for most decls are elided in this view.
256 mutability: mir::Mutability::Mut,
260 is_user_variable: false,
263 mutability: mir::Mutability::Mut,
264 ty: tcx.mk_mut_ptr(tcx.mk_slice(tcx.mk_param(0, Symbol::intern("T")))),
267 is_user_variable: false,
270 mutability: mir::Mutability::Mut,
274 is_user_variable: false,
277 mutability: mir::Mutability::Mut,
281 is_user_variable: false,
284 mutability: mir::Mutability::Mut,
288 is_user_variable: false,
291 let seq_drop_glue = mir::Mir::new(
292 vec![start_block, head, loop_, inc, ret].into_iter().collect(),
293 Vec::new().into_iter().collect(), // vis scopes
294 Vec::new().into_iter().collect(), // promoted
295 tcx.mk_nil(), // return type
296 locals.into_iter().collect(),
298 Vec::new(), // upvars
// Arena-allocate the glue so it lives as long as the tcx ('tcx lifetime).
301 let seq_drop_glue = tcx.alloc_mir(seq_drop_glue);
304 memory: Memory::new(&tcx.data_layout, limits.memory_size),
305 globals: HashMap::new(),
307 stack_limit: limits.stack_limit,
308 steps_remaining: limits.step_limit,
309 seq_drop_glue: seq_drop_glue,
// Allocates memory for a value of type `ty`, using the substs of the current
// stack frame (convenience wrapper over `alloc_ptr_with_substs`).
313 pub fn alloc_ptr(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, Pointer> {
314 let substs = self.substs();
315 self.alloc_ptr_with_substs(ty, substs)
// Allocates memory sized/aligned for `ty` after substituting `substs`.
// Panics (via expect) if `ty` is unsized — callers must pre-check sizedness.
318 pub fn alloc_ptr_with_substs(
321 substs: &'tcx Substs<'tcx>
322 ) -> EvalResult<'tcx, Pointer> {
323 let size = self.type_size_with_substs(ty, substs)?.expect("cannot alloc memory for unsized type");
324 let align = self.type_align_with_substs(ty, substs)?;
325 self.memory.allocate(size, align)
// Simple accessors for the memory system and the call stack.
// NOTE(review): the one-line bodies of all three are elided in this view.
328 pub fn memory(&self) -> &Memory<'a, 'tcx> {
332 pub fn memory_mut(&mut self) -> &mut Memory<'a, 'tcx> {
336 pub fn stack(&self) -> &[Frame<'tcx>] {
// Interns a string literal into (cached) memory and returns it as a fat
// pointer value: (data ptr, length in bytes).
340 pub(crate) fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
341 let ptr = self.memory.allocate_cached(s.as_bytes())?;
342 Ok(Value::ByValPair(PrimVal::Ptr(ptr), PrimVal::from_u128(s.len() as u128)))
// Converts a rustc `ConstVal` into an interpreter `Value`. Scalars become
// `PrimVal::Bytes`/bools/chars; strings and byte strings allocate cached
// memory; aggregate constants (struct/tuple/array/...) are unimplemented.
345 pub(super) fn const_to_value(&mut self, const_val: &ConstVal<'tcx>) -> EvalResult<'tcx, Value> {
346 use rustc::middle::const_val::ConstVal::*;
347 use rustc_const_math::ConstFloat;
349 let primval = match *const_val {
350 Integral(const_int) => PrimVal::Bytes(const_int.to_u128_unchecked()),
352 Float(ConstFloat::F32(f)) => PrimVal::from_f32(f),
353 Float(ConstFloat::F64(f)) => PrimVal::from_f64(f),
355 Bool(b) => PrimVal::from_bool(b),
356 Char(c) => PrimVal::from_char(c),
358 Str(ref s) => return self.str_to_value(s),
// ByteStr arm: allocate the bytes in cached memory (pattern line elided).
361 let ptr = self.memory.allocate_cached(bs)?;
365 Variant(_) => unimplemented!(),
366 Struct(_) => unimplemented!(),
367 Tuple(_) => unimplemented!(),
368 // function items are zero sized and thus have no readable value
369 Function(..) => PrimVal::Undef,
370 Array(_) => unimplemented!(),
371 Repeat(_, _) => unimplemented!(),
374 Ok(Value::ByVal(primval))
// Whether `ty` is `Sized`. Must only be called on fully-monomorphic types —
// asserts that no substitution is still needed.
377 pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
378 // generics are weird, don't run this function on a generic
379 assert!(!ty.needs_subst());
380 ty.is_sized(self.tcx, &self.tcx.empty_parameter_environment(), DUMMY_SP)
// Looks up the MIR for an instance: plain items go through
// `maybe_optimized_mir` (failing with `NoMirFor` when unavailable, e.g. for
// extern crates without MIR); shims/glue use `instance_mir`.
383 pub fn load_mir(&self, instance: ty::InstanceDef<'tcx>) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
384 trace!("load mir {:?}", instance);
386 ty::InstanceDef::Item(def_id) => self.tcx.maybe_optimized_mir(def_id).ok_or_else(|| EvalError::NoMirFor(self.tcx.item_path_str(def_id))),
387 _ => Ok(self.tcx.instance_mir(instance)),
// Fully monomorphizes `ty`: erase regions, apply `substs`, then normalize
// associated types so downstream layout queries see a concrete type.
391 pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
392 // miri doesn't care about lifetimes, and will choke on some crazy ones
393 // let's simply get rid of them
394 let without_lifetimes = self.tcx.erase_regions(&ty);
395 let substituted = without_lifetimes.subst(self.tcx, substs);
396 self.tcx.normalize_associated_type(&substituted)
// Strips a `Binder`: erases late-bound regions, then all remaining regions.
399 pub fn erase_lifetimes<T>(&self, value: &Binder<T>) -> T
400 where T : TypeFoldable<'tcx>
402 let value = self.tcx.erase_late_bound_regions(value);
403 self.tcx.erase_regions(&value)
// Size/align of `ty` under the current frame's substs. `type_size` returns
// `None` for unsized types.
406 pub(super) fn type_size(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<u64>> {
407 self.type_size_with_substs(ty, self.substs())
410 pub(super) fn type_align(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, u64> {
411 self.type_align_with_substs(ty, self.substs())
// Size in bytes of `ty` after substitution; `None` when the layout reports
// the type as unsized.
414 fn type_size_with_substs(
417 substs: &'tcx Substs<'tcx>,
418 ) -> EvalResult<'tcx, Option<u64>> {
419 let layout = self.type_layout_with_substs(ty, substs)?;
420 if layout.is_unsized() {
// (then-branch returning Ok(None) is elided in this view)
423 Ok(Some(layout.size(&self.tcx.data_layout).bytes()))
// ABI alignment of `ty` after substitution, and a layout shortcut using the
// current frame's substs.
427 fn type_align_with_substs(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, u64> {
428 self.type_layout_with_substs(ty, substs).map(|layout| layout.align(&self.tcx.data_layout).abi())
431 pub(super) fn type_layout(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, &'tcx Layout> {
432 self.type_layout_with_substs(ty, self.substs())
// Computes the layout of the monomorphized type by spinning up a fresh
// inference context with `Reveal::All`; layout errors map to
// `EvalError::Layout`.
435 fn type_layout_with_substs(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, &'tcx Layout> {
436 // TODO(solson): Is this inefficient? Needs investigation.
437 let ty = self.monomorphize(ty, substs);
439 self.tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
440 ty.layout(&infcx).map_err(EvalError::Layout)
// Pushes a new frame onto the virtual call stack: all locals start as
// `ByVal(Undef)`, execution begins at START_BLOCK. Errors with
// `StackFrameLimitReached` when the configured stack limit is exceeded.
444 pub fn push_stack_frame(
446 instance: ty::Instance<'tcx>,
448 mir: &'tcx mir::Mir<'tcx>,
449 return_lvalue: Lvalue<'tcx>,
450 return_to_block: StackPopCleanup,
451 ) -> EvalResult<'tcx> {
452 ::log_settings::settings().indentation += 1;
454 // Subtract 1 because `local_decls` includes the ReturnPointer, but we don't store a local
456 let num_locals = mir.local_decls.len() - 1;
457 let locals = vec![Value::ByVal(PrimVal::Undef); num_locals];
459 self.stack.push(Frame {
461 block: mir::START_BLOCK,
// (remaining Frame field initializers elided in this view)
470 if self.stack.len() > self.stack_limit {
471 Err(EvalError::StackFrameLimitReached)
// Pops the top frame and performs its cleanup action:
// - MarkStatic: freeze the computed global's backing memory (and any inner
//   allocations reachable through PrimVal::Ptr), flip `initialized`, and
//   record the final mutability.
// - Goto: jump to the caller's continuation block.
// - None: nothing (main / diverging functions).
// Finally deallocates every ByRef local, tolerating statics that must not be
// deallocated.
477 pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
478 ::log_settings::settings().indentation -= 1;
479 let frame = self.stack.pop().expect("tried to pop a stack frame, but there were none");
480 match frame.return_to_block {
481 StackPopCleanup::MarkStatic(mutable) => if let Lvalue::Global(id) = frame.return_lvalue {
482 let global_value = self.globals.get_mut(&id)
483 .expect("global should have been cached (static)");
484 match global_value.value {
485 Value::ByRef(ptr) => self.memory.mark_static_initalized(ptr.alloc_id, mutable)?,
486 Value::ByVal(val) => if let PrimVal::Ptr(ptr) = val {
487 self.memory.mark_inner_allocation(ptr.alloc_id, mutable)?;
489 Value::ByValPair(val1, val2) => {
490 if let PrimVal::Ptr(ptr) = val1 {
491 self.memory.mark_inner_allocation(ptr.alloc_id, mutable)?;
493 if let PrimVal::Ptr(ptr) = val2 {
494 self.memory.mark_inner_allocation(ptr.alloc_id, mutable)?;
498 // see comment on `initialized` field
499 assert!(!global_value.initialized);
500 global_value.initialized = true;
501 assert!(global_value.mutable);
502 global_value.mutable = mutable;
// (else-branch: MarkStatic on a non-global return lvalue is a bug)
504 bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_lvalue);
506 StackPopCleanup::Goto(target) => self.goto_block(target),
507 StackPopCleanup::None => {},
509 // deallocate all locals that are backed by an allocation
510 for local in frame.locals {
511 if let Value::ByRef(ptr) = local {
512 trace!("deallocating local");
513 self.memory.dump_alloc(ptr.alloc_id);
514 match self.memory.deallocate(ptr) {
515 // We could alternatively check whether the alloc_id is static before calling
516 // deallocate, but this is much simpler and is probably the rare case.
517 Ok(()) | Err(EvalError::DeallocatedStaticMemory) => {},
518 other => return other,
// Writes an enum discriminant at `discr_offset` into the (forced-to-memory)
// destination, then delegates the variant's fields to `assign_fields`, using
// a DowncastVariant lvalue so field offsets resolve within the variant.
// NOTE(review): several parameter declarations (discr_offset, discr_val,
// variant_idx, ...) are elided in this view.
526 pub fn assign_discr_and_fields<
527 V: IntoValTyPair<'tcx>,
528 J: IntoIterator<Item = V>,
538 ) -> EvalResult<'tcx>
539 where J::IntoIter: ExactSizeIterator,
542 let dest_ptr = self.force_allocation(dest)?.to_ptr();
544 let discr_dest = dest_ptr.offset(discr_offset);
545 self.memory.write_uint(discr_dest, discr_val, discr_size)?;
547 let dest = Lvalue::Ptr {
549 extra: LvalueExtra::DowncastVariant(variant_idx),
552 self.assign_fields(dest, dest_ty, operands)
// Writes a sequence of (value, type) operands into the fields of `dest`.
// Fast paths: ZST destinations are a no-op; destinations that are a single
// primitive take exactly one operand written directly. Otherwise each operand
// is written to its projected field lvalue.
555 pub fn assign_fields<
556 V: IntoValTyPair<'tcx>,
557 J: IntoIterator<Item = V>,
563 ) -> EvalResult<'tcx>
564 where J::IntoIter: ExactSizeIterator,
566 if self.type_size(dest_ty)? == Some(0) {
567 // zst assigning is a nop
570 if self.ty_to_primval_kind(dest_ty).is_ok() {
571 let mut iter = operands.into_iter();
572 assert_eq!(iter.len(), 1);
573 let (value, value_ty) = iter.next().unwrap().into_val_ty_pair(self)?;
574 return self.write_value(value, dest, value_ty);
576 for (field_index, operand) in operands.into_iter().enumerate() {
577 let (value, value_ty) = operand.into_val_ty_pair(self)?;
578 let field_dest = self.lvalue_field(dest, field_index, dest_ty, value_ty)?;
579 self.write_value(value, field_dest, value_ty)?;
// NOTE(review): this is the interpreter's main rvalue dispatcher; many
// closing braces and a few match-arm headers are elided in this view —
// confirm structure against the full file before editing.
584 /// Evaluate an assignment statement.
586 /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
587 /// type writes its results directly into the memory specified by the lvalue.
588 pub(super) fn eval_rvalue_into_lvalue(
590 rvalue: &mir::Rvalue<'tcx>,
591 lvalue: &mir::Lvalue<'tcx>,
592 ) -> EvalResult<'tcx> {
593 let dest = self.eval_lvalue(lvalue)?;
594 let dest_ty = self.lvalue_ty(lvalue);
595 let dest_layout = self.type_layout(dest_ty)?;
597 use rustc::mir::Rvalue::*;
// Plain copy/move of an operand into the destination.
599 Use(ref operand) => {
600 let value = self.eval_operand(operand)?;
601 self.write_value(value, dest, dest_ty)?;
604 BinaryOp(bin_op, ref left, ref right) => {
605 // ignore overflow bit, rustc inserts check branches for us
606 self.intrinsic_overflowing(bin_op, left, right, dest, dest_ty)?;
609 CheckedBinaryOp(bin_op, ref left, ref right) => {
610 self.intrinsic_with_overflow(bin_op, left, right, dest, dest_ty)?;
613 UnaryOp(un_op, ref operand) => {
614 let val = self.eval_operand_to_primval(operand)?;
615 let kind = self.ty_to_primval_kind(dest_ty)?;
616 self.write_primval(dest, operator::unary_op(un_op, val, kind)?, dest_ty)?;
619 // Skip everything for zsts
620 Aggregate(..) if self.type_size(dest_ty)? == Some(0) => {}
// Aggregates dispatch on the destination's layout kind.
622 Aggregate(ref kind, ref operands) => {
623 self.inc_step_counter_and_check_limit(operands.len() as u64)?;
624 use rustc::ty::layout::Layout::*;
626 Univariant { ref variant, .. } => {
628 let ptr = self.force_allocation(dest)?.to_ptr_and_extra().0;
629 self.memory.mark_packed(ptr, variant.stride().bytes());
631 self.assign_fields(dest, dest_ty, operands)?;
635 self.assign_fields(dest, dest_ty, operands)?;
// Multi-variant enums: write the discriminant then the variant's fields.
638 General { discr, ref variants, .. } => {
639 if let mir::AggregateKind::Adt(adt_def, variant, _, _) = *kind {
640 let discr_val = adt_def.discriminants(self.tcx)
642 .expect("broken mir: Adt variant id invalid")
643 .to_u128_unchecked();
644 let discr_size = discr.size().bytes();
645 if variants[variant].packed {
646 let ptr = self.force_allocation(dest)?.to_ptr_and_extra().0;
647 self.memory.mark_packed(ptr, variants[variant].stride().bytes());
650 self.assign_discr_and_fields(
653 variants[variant].offsets[0].bytes(),
660 bug!("tried to assign {:?} to Layout::General", kind);
// Option-like enums where the pointer itself encodes the discriminant.
664 RawNullablePointer { nndiscr, .. } => {
665 if let mir::AggregateKind::Adt(_, variant, _, _) = *kind {
666 if nndiscr == variant as u64 {
667 assert_eq!(operands.len(), 1);
668 let operand = &operands[0];
669 let value = self.eval_operand(operand)?;
670 let value_ty = self.operand_ty(operand);
671 self.write_value(value, dest, value_ty)?;
673 if let Some(operand) = operands.get(0) {
674 assert_eq!(operands.len(), 1);
675 let operand_ty = self.operand_ty(operand);
676 assert_eq!(self.type_size(operand_ty)?, Some(0));
// The "null" variant is represented as an all-zero pointer.
678 self.write_primval(dest, PrimVal::Bytes(0), dest_ty)?;
681 bug!("tried to assign {:?} to Layout::RawNullablePointer", kind);
// Option-like enums where a non-null field inside a struct is the niche.
685 StructWrappedNullablePointer { nndiscr, ref nonnull, ref discrfield, .. } => {
686 if let mir::AggregateKind::Adt(_, variant, _, _) = *kind {
688 let ptr = self.force_allocation(dest)?.to_ptr_and_extra().0;
689 self.memory.mark_packed(ptr, nonnull.stride().bytes());
691 if nndiscr == variant as u64 {
692 self.assign_fields(dest, dest_ty, operands)?;
694 for operand in operands {
695 let operand_ty = self.operand_ty(operand);
696 assert_eq!(self.type_size(operand_ty)?, Some(0));
698 let (offset, ty) = self.nonnull_offset_and_ty(dest_ty, nndiscr, discrfield)?;
// Zero out the discriminant niche to represent the "null" variant.
701 let dest = self.force_allocation(dest)?.to_ptr();
703 let dest = dest.offset(offset.bytes());
704 let dest_size = self.type_size(ty)?
705 .expect("bad StructWrappedNullablePointer discrfield");
706 self.memory.write_int(dest, 0, dest_size)?;
709 bug!("tried to assign {:?} to Layout::RawNullablePointer", kind);
// CEnum: fieldless enums stored as their discriminant value directly.
714 assert_eq!(operands.len(), 0);
715 if let mir::AggregateKind::Adt(adt_def, variant, _, _) = *kind {
716 let n = adt_def.discriminants(self.tcx)
718 .expect("broken mir: Adt variant index invalid")
719 .to_u128_unchecked();
720 self.write_primval(dest, PrimVal::Bytes(n), dest_ty)?;
722 bug!("tried to assign {:?} to Layout::CEnum", kind);
726 Vector { count, .. } => {
727 debug_assert_eq!(count, operands.len() as u64);
728 self.assign_fields(dest, dest_ty, operands)?;
731 UntaggedUnion { .. } => {
732 assert_eq!(operands.len(), 1);
733 let operand = &operands[0];
734 let value = self.eval_operand(operand)?;
735 let value_ty = self.operand_ty(operand);
736 self.write_value(value, dest, value_ty)?;
740 return Err(EvalError::Unimplemented(format!(
741 "can't handle destination layout {:?} when assigning {:?}",
// `[x; N]`: evaluate the element once, then copy it into each slot.
749 Repeat(ref operand, _) => {
750 let (elem_ty, length) = match dest_ty.sty {
751 ty::TyArray(elem_ty, n) => (elem_ty, n as u64),
752 _ => bug!("tried to assign array-repeat to non-array type {:?}", dest_ty),
754 self.inc_step_counter_and_check_limit(length)?;
755 let elem_size = self.type_size(elem_ty)?
756 .expect("repeat element type must be sized");
757 let value = self.eval_operand(operand)?;
760 let dest = self.force_allocation(dest)?.to_ptr();
763 let elem_dest = dest.offset(i * elem_size);
764 self.write_value_to_ptr(value, elem_dest, elem_ty)?;
// Len: length of an array/slice lvalue (Len arm header elided in this view).
769 let src = self.eval_lvalue(lvalue)?;
770 let ty = self.lvalue_ty(lvalue);
771 let (_, len) = src.elem_ty_and_len(ty);
772 self.write_primval(dest, PrimVal::from_u128(len as u128), dest_ty)?;
// Taking a reference: force the source into memory, then produce a thin or
// fat pointer depending on the lvalue's extra data.
775 Ref(_, _, ref lvalue) => {
776 let src = self.eval_lvalue(lvalue)?;
777 let (raw_ptr, extra) = self.force_allocation(src)?.to_ptr_and_extra();
778 let ptr = PrimVal::Ptr(raw_ptr);
780 let val = match extra {
781 LvalueExtra::None => Value::ByVal(ptr),
782 LvalueExtra::Length(len) => Value::ByValPair(ptr, PrimVal::from_u128(len as u128)),
783 LvalueExtra::Vtable(vtable) => Value::ByValPair(ptr, PrimVal::Ptr(vtable)),
784 LvalueExtra::DowncastVariant(..) =>
785 bug!("attempted to take a reference to an enum downcast lvalue"),
788 self.write_value(val, dest, dest_ty)?;
// Box allocation (arm header elided): allocate and store the raw pointer.
792 let ptr = self.alloc_ptr(ty)?;
793 self.write_primval(dest, PrimVal::Ptr(ptr), dest_ty)?;
796 Cast(kind, ref operand, cast_ty) => {
797 debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
798 use rustc::mir::CastKind::*;
// Unsize casts (e.g. &[T; N] -> &[T]) delegate to unsize_into.
801 let src = self.eval_operand(operand)?;
802 let src_ty = self.operand_ty(operand);
803 self.unsize_into(src, src_ty, dest, dest_ty)?;
// Misc casts: fat-pointer-aware, otherwise a primitive value cast.
807 let src = self.eval_operand(operand)?;
808 let src_ty = self.operand_ty(operand);
809 if self.type_is_fat_ptr(src_ty) {
810 trace!("misc cast: {:?}", src);
811 match (src, self.type_is_fat_ptr(dest_ty)) {
812 (Value::ByRef(_), _) |
813 (Value::ByValPair(..), true) => {
814 self.write_value(src, dest, dest_ty)?;
// fat -> thin pointer cast: keep only the data half of the pair.
816 (Value::ByValPair(data, _), false) => {
817 self.write_value(Value::ByVal(data), dest, dest_ty)?;
819 (Value::ByVal(_), _) => bug!("expected fat ptr"),
822 let src_val = self.value_to_primval(src, src_ty)?;
823 let dest_val = self.cast_primval(src_val, src_ty, dest_ty)?;
824 self.write_value(Value::ByVal(dest_val), dest, dest_ty)?;
// fn item -> fn pointer: materialize a function allocation.
828 ReifyFnPointer => match self.operand_ty(operand).sty {
829 ty::TyFnDef(def_id, substs, _) => {
830 let instance = resolve(self.tcx, def_id, substs);
831 let fn_ptr = self.memory.create_fn_alloc(instance);
832 self.write_value(Value::ByVal(PrimVal::Ptr(fn_ptr)), dest, dest_ty)?;
834 ref other => bug!("reify fn pointer on {:?}", other),
837 UnsafeFnPointer => match dest_ty.sty {
839 let src = self.eval_operand(operand)?;
840 self.write_value(src, dest, dest_ty)?;
842 ref other => bug!("fn to unsafe fn cast on {:?}", other),
// non-capturing closure -> fn pointer, via the FnOnce shim.
845 ClosureFnPointer => match self.operand_ty(operand).sty {
846 ty::TyClosure(def_id, substs) => {
847 let instance = resolve_closure(self.tcx, def_id, substs, ty::ClosureKind::FnOnce);
848 let fn_ptr = self.memory.create_fn_alloc(instance);
849 self.write_value(Value::ByVal(PrimVal::Ptr(fn_ptr)), dest, dest_ty)?;
851 ref other => bug!("reify fn pointer on {:?}", other),
// Read an enum's discriminant; reject values not matching any variant.
856 Discriminant(ref lvalue) => {
857 let lval = self.eval_lvalue(lvalue)?;
858 let ty = self.lvalue_ty(lvalue);
859 let ptr = self.force_allocation(lval)?.to_ptr();
860 let discr_val = self.read_discriminant_value(ptr, ty)?;
861 if let ty::TyAdt(adt_def, _) = ty.sty {
862 if adt_def.discriminants(self.tcx).all(|v| discr_val != v.to_u128_unchecked()) {
863 return Err(EvalError::InvalidDiscriminant);
866 bug!("rustc only generates Rvalue::Discriminant for enums");
868 self.write_primval(dest, PrimVal::Bytes(discr_val), dest_ty)?;
872 if log_enabled!(::log::LogLevel::Trace) {
873 self.dump_local(dest);
// Whether `ty` is a fat (two-word) pointer: a raw pointer, reference, or Box
// whose pointee is unsized. (Catch-all false arm elided in this view.)
879 fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
881 ty::TyRawPtr(ref tam) |
882 ty::TyRef(_, ref tam) => !self.type_is_sized(tam.ty),
883 ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
// For a StructWrappedNullablePointer enum, walks the layout's `discrfield`
// path to find the byte offset and type of the non-null discriminant niche:
// resolves the outer field in the non-null variant, then follows the rest of
// the path via `field_path_offset_and_ty`.
888 pub(super) fn nonnull_offset_and_ty(
893 ) -> EvalResult<'tcx, (Size, Ty<'tcx>)> {
894 // Skip the constant 0 at the start meant for LLVM GEP and the outer non-null variant
895 let path = discrfield.iter().skip(2).map(|&i| i as usize);
897 // Handle the field index for the outer non-null variant.
898 let (inner_offset, inner_ty) = match ty.sty {
899 ty::TyAdt(adt_def, substs) => {
900 let variant = &adt_def.variants[nndiscr as usize];
901 let index = discrfield[1];
902 let field = &variant.fields[index as usize];
903 (self.get_field_offset(ty, index as usize)?, field.ty(self.tcx, substs))
905 _ => bug!("non-enum for StructWrappedNullablePointer: {}", ty),
908 self.field_path_offset_and_ty(inner_offset, inner_ty, path)
// Accumulates the byte offset and resulting type of following a chain of
// field indices starting from (`offset`, `ty`).
911 fn field_path_offset_and_ty<I: Iterator<Item = usize>>(
916 ) -> EvalResult<'tcx, (Size, Ty<'tcx>)> {
917 // Skip the initial 0 intended for LLVM GEP.
918 for field_index in path {
919 let field_offset = self.get_field_offset(ty, field_index)?;
920 trace!("field_path_offset_and_ty: {}, {}, {:?}, {:?}", field_index, ty, field_offset, offset);
921 ty = self.get_field_ty(ty, field_index)?;
922 offset = offset.checked_add(field_offset, &self.tcx.data_layout).unwrap();
// Field types of a fat pointer viewed as a pair: field 0 is the data pointer
// (modelled as *const u8), field 1 is the metadata (usize length for slices;
// vtable pointer for trait objects).
927 fn get_fat_field(&self, pointee_ty: Ty<'tcx>, field_index: usize) -> EvalResult<'tcx, Ty<'tcx>> {
928 match (field_index, &self.tcx.struct_tail(pointee_ty).sty) {
930 (1, &ty::TySlice(_)) => Ok(self.tcx.types.usize),
931 (1, &ty::TyDynamic(..)) |
932 (0, _) => Ok(self.tcx.mk_imm_ptr(self.tcx.types.u8)),
933 _ => bug!("invalid fat pointee type: {}", pointee_ty),
// Type of field `field_index` of `ty`: struct fields via the struct variant,
// tuples by position, references/raw pointers/Box via their fat-pointer
// decomposition; anything else is Unimplemented.
937 pub fn get_field_ty(&self, ty: Ty<'tcx>, field_index: usize) -> EvalResult<'tcx, Ty<'tcx>> {
939 ty::TyAdt(adt_def, _) if adt_def.is_box() => self.get_fat_field(ty.boxed_ty(), field_index),
940 ty::TyAdt(adt_def, substs) => {
941 Ok(adt_def.struct_variant().fields[field_index].ty(self.tcx, substs))
944 ty::TyTuple(fields, _) => Ok(fields[field_index]),
946 ty::TyRef(_, ref tam) |
947 ty::TyRawPtr(ref tam) => self.get_fat_field(tam.ty, field_index),
948 _ => Err(EvalError::Unimplemented(format!("can't handle type: {:?}, {:?}", ty, ty.sty))),
// Byte offset of field `field_index` according to `ty`'s layout. Only
// Univariant, FatPointer, and StructWrappedNullablePointer layouts are
// supported; others are Unimplemented.
952 fn get_field_offset(&self, ty: Ty<'tcx>, field_index: usize) -> EvalResult<'tcx, Size> {
953 let layout = self.type_layout(ty)?;
955 use rustc::ty::layout::Layout::*;
957 Univariant { ref variant, .. } => {
958 Ok(variant.offsets[field_index])
960 FatPointer { .. } => {
// Fat pointers are two pointer-sized words: data at 0, metadata at ptr-size.
961 let bytes = field_index as u64 * self.memory.pointer_size();
962 Ok(Size::from_bytes(bytes))
964 StructWrappedNullablePointer { ref nonnull, .. } => {
965 Ok(nonnull.offsets[field_index])
968 let msg = format!("can't handle type: {:?}, with layout: {:?}", ty, layout);
969 Err(EvalError::Unimplemented(msg))
// Number of fields in `ty`'s layout; fat pointers always count as 2
// (data + metadata). Mirrors the layout support of `get_field_offset`.
974 pub fn get_field_count(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, usize> {
975 let layout = self.type_layout(ty)?;
977 use rustc::ty::layout::Layout::*;
979 Univariant { ref variant, .. } => Ok(variant.offsets.len()),
980 FatPointer { .. } => Ok(2),
981 StructWrappedNullablePointer { ref nonnull, .. } => Ok(nonnull.offsets.len()),
983 let msg = format!("can't handle type: {:?}, with layout: {:?}", ty, layout);
984 Err(EvalError::Unimplemented(msg))
// Evaluates an operand and collapses the result to a single `PrimVal`.
989 pub(super) fn eval_operand_to_primval(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, PrimVal> {
990 let value = self.eval_operand(op)?;
991 let ty = self.operand_ty(op);
992 self.value_to_primval(value, ty)
// Evaluates a MIR operand: Consume reads an lvalue; Constant dispatches on
// the literal kind — inline values convert directly, items and promoteds are
// looked up in the `globals` cache (they must already be evaluated).
995 pub(super) fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, Value> {
996 use rustc::mir::Operand::*;
998 Consume(ref lvalue) => self.eval_and_read_lvalue(lvalue),
1000 Constant(mir::Constant { ref literal, .. }) => {
1001 use rustc::mir::Literal;
1002 let value = match *literal {
1003 Literal::Value { ref value } => self.const_to_value(value)?,
1005 Literal::Item { def_id, substs } => {
1006 let instance = self.resolve_associated_const(def_id, substs);
1007 let cid = GlobalId { instance, promoted: None };
1008 self.globals.get(&cid).expect("static/const not cached").value
1011 Literal::Promoted { index } => {
1012 let cid = GlobalId {
1013 instance: self.frame().instance,
1014 promoted: Some(index),
1016 self.globals.get(&cid).expect("promoted not cached").value
// Monomorphized type of an operand in the current frame.
1025 pub(super) fn operand_ty(&self, operand: &mir::Operand<'tcx>) -> Ty<'tcx> {
1026 self.monomorphize(operand.ty(&self.mir(), self.tcx), self.substs())
// Memcpy of a sized value of type `ty` from `src` to `dest`, using the
// type's size and alignment. Panics (expect) on unsized types.
1029 fn copy(&mut self, src: Pointer, dest: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx> {
1030 let size = self.type_size(ty)?.expect("cannot copy from an unsized type");
1031 let align = self.type_align(ty)?;
1032 self.memory.copy(src, dest, size, align)?;
// Ensures an lvalue is backed by real memory, returning a Ptr lvalue:
// - Local: if already ByRef, reuse its allocation; otherwise allocate,
//   spill the local's value into memory, and (if a field projection was
//   requested) project into the new allocation.
// - Ptr: already in memory, returned unchanged.
// - Global: if not ByRef, allocate static memory, write the cached value
//   into it, propagate the `initialized`/`mutable` state, and update the
//   globals cache to point at the new allocation.
// NOTE(review): several closing braces and a couple of match-arm headers
// (e.g. the non-ByRef local binding of `val`) are elided in this view.
1036 pub(super) fn force_allocation(
1038 lvalue: Lvalue<'tcx>,
1039 ) -> EvalResult<'tcx, Lvalue<'tcx>> {
1040 let new_lvalue = match lvalue {
1041 Lvalue::Local { frame, local, field } => {
1042 // -1 since we don't store the return value
1043 match self.stack[frame].locals[local.index() - 1] {
1044 Value::ByRef(ptr) => {
1045 assert!(field.is_none());
1046 Lvalue::from_ptr(ptr)
// Non-ByRef local: spill it to a fresh allocation so it has an address.
1049 let ty = self.stack[frame].mir.local_decls[local].ty;
1050 let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
1051 let substs = self.stack[frame].instance.substs;
1052 let ptr = self.alloc_ptr_with_substs(ty, substs)?;
1053 self.stack[frame].locals[local.index() - 1] = Value::ByRef(ptr);
1054 self.write_value_to_ptr(val, ptr, ty)?;
1055 let lval = Lvalue::from_ptr(ptr);
1056 if let Some((field, field_ty)) = field {
1057 self.lvalue_field(lval, field, ty, field_ty)?
1064 Lvalue::Ptr { .. } => lvalue,
1065 Lvalue::Global(cid) => {
1066 let global_val = *self.globals.get(&cid).expect("global not cached");
1067 match global_val.value {
1068 Value::ByRef(ptr) => Lvalue::from_ptr(ptr),
1070 let ptr = self.alloc_ptr_with_substs(global_val.ty, cid.instance.substs)?;
1071 self.memory.mark_static(ptr.alloc_id);
1072 self.write_value_to_ptr(global_val.value, ptr, global_val.ty)?;
1073 // see comment on `initialized` field
1074 if global_val.initialized {
1075 self.memory.mark_static_initalized(ptr.alloc_id, global_val.mutable)?;
1077 let lval = self.globals.get_mut(&cid).expect("already checked");
1079 value: Value::ByRef(ptr),
1082 Lvalue::from_ptr(ptr)
// Dereferences a `ByRef` into a direct value; other Value forms pass through
// (pass-through arm elided in this view).
1090 /// ensures this Value is not a ByRef
1091 pub(super) fn follow_by_ref_value(&mut self, value: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1093 Value::ByRef(ptr) => self.read_value(ptr, ty),
// Collapses a Value to a single validated PrimVal. ByRef is resolved first;
// fat-pointer pairs are a caller bug.
1098 pub(super) fn value_to_primval(&mut self, value: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, PrimVal> {
1099 match self.follow_by_ref_value(value, ty)? {
1100 Value::ByRef(_) => bug!("follow_by_ref_value can't result in `ByRef`"),
1102 Value::ByVal(primval) => {
1103 self.ensure_valid_value(primval, ty)?;
// (Ok(primval) return line elided in this view)
1107 Value::ByValPair(..) => bug!("value_to_primval can't work with fat pointers"),
// Thin wrapper: writes a single PrimVal via `write_value`.
1111 pub(super) fn write_primval(
1116 ) -> EvalResult<'tcx> {
1117 self.write_value(Value::ByVal(val), dest, dest_ty)
// Writes a value into a destination lvalue:
// - Global: reject writes to initialized (frozen) constants, otherwise
//   update the globals cache through a write-back closure.
// - Ptr: direct memory write (no lvalue extra allowed).
// - Local: update the frame's locals array through a write-back closure.
// Both cache/local paths go through `write_value_possibly_by_val` to handle
// ByRef aliasing correctly.
1120 pub(super) fn write_value(
1125 ) -> EvalResult<'tcx> {
1127 Lvalue::Global(cid) => {
1128 let dest = *self.globals.get_mut(&cid).expect("global should be cached");
// (mutability guard elided: initialized constants must not be modified)
1130 return Err(EvalError::ModifiedConstantMemory);
1132 let write_dest = |this: &mut Self, val| {
1133 *this.globals.get_mut(&cid).expect("already checked") = Global {
1138 self.write_value_possibly_by_val(src_val, write_dest, dest.value, dest_ty)
1141 Lvalue::Ptr { ptr, extra } => {
1142 assert_eq!(extra, LvalueExtra::None);
1143 self.write_value_to_ptr(src_val, ptr, dest_ty)
1146 Lvalue::Local { frame, local, field } => {
1147 let dest = self.stack[frame].get_local(local, field.map(|(i, _)| i));
1148 self.write_value_possibly_by_val(
1150 |this, val| this.stack[frame].set_local(local, field.map(|(i, _)| i), val),
// Core aliasing-aware write: if the old destination is ByRef, write through
// its allocation (other pointers may alias it); if the source is ByRef,
// either read it out as a primitive or duplicate its allocation (by-value
// semantics); otherwise store the value directly via `write_dest`.
1158 // The cases here can be a bit subtle. Read carefully!
1159 fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value)>(
1163 old_dest_val: Value,
1165 ) -> EvalResult<'tcx> {
1166 if let Value::ByRef(dest_ptr) = old_dest_val {
1167 // If the value is already `ByRef` (that is, backed by an `Allocation`),
1168 // then we must write the new value into this allocation, because there may be
1169 // other pointers into the allocation. These other pointers are logically
1170 // pointers into the local variable, and must be able to observe the change.
1172 // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
1173 // knew for certain that there were no outstanding pointers to this allocation.
1174 self.write_value_to_ptr(src_val, dest_ptr, dest_ty)?;
1176 } else if let Value::ByRef(src_ptr) = src_val {
1177 // If the value is not `ByRef`, then we know there are no pointers to it
1178 // and we can simply overwrite the `Value` in the locals array directly.
1180 // In this specific case, where the source value is `ByRef`, we must duplicate
1181 // the allocation, because this is a by-value operation. It would be incorrect
1182 // if they referred to the same allocation, since then a change to one would
1183 // implicitly change the other.
1185 // It is a valid optimization to attempt reading a primitive value out of the
1186 // source and write that into the destination without making an allocation, so
1188 if let Ok(Some(src_val)) = self.try_read_value(src_ptr, dest_ty) {
1189 write_dest(self, src_val);
// Not a readable primitive: clone the allocation and point dest at the copy.
1191 let dest_ptr = self.alloc_ptr(dest_ty)?;
1192 self.copy(src_ptr, dest_ptr, dest_ty)?;
1193 write_dest(self, Value::ByRef(dest_ptr));
1197 // Finally, we have the simple case where neither source nor destination are
1198 // `ByRef`. We may simply copy the source value over the the destintion.
1199 write_dest(self, src_val);
/// Writes any `Value` into raw memory at `dest`, dispatching on its representation.
1204 pub(super) fn write_value_to_ptr(
1209 ) -> EvalResult<'tcx> {
// A ByRef source is a memory-to-memory copy of the whole type.
1211 Value::ByRef(ptr) => self.copy(ptr, dest, dest_ty),
1212 Value::ByVal(primval) => {
1213 let size = self.type_size(dest_ty)?.expect("dest type must be sized");
1214 self.memory.write_primval(dest, primval, size)
1216 Value::ByValPair(a, b) => self.write_pair_to_ptr(a, b, dest, dest_ty),
/// Writes the two halves of a `ByValPair` to the field offsets of `ty`.
1220 pub(super) fn write_pair_to_ptr(
1226 ) -> EvalResult<'tcx> {
// Peel single-field wrappers (e.g. newtypes) until the two-field layout is reached.
1227 while self.get_field_count(ty)? == 1 {
1228 ty = self.get_field_ty(ty, 0)?;
1230 assert_eq!(self.get_field_count(ty)?, 2);
1231 let field_0 = self.get_field_offset(ty, 0)?.bytes();
1232 let field_1 = self.get_field_offset(ty, 1)?.bytes();
1233 let field_0_ty = self.get_field_ty(ty, 0)?;
1234 let field_1_ty = self.get_field_ty(ty, 1)?;
1235 let field_0_size = self.type_size(field_0_ty)?.expect("pair element type must be sized");
1236 let field_1_size = self.type_size(field_1_ty)?.expect("pair element type must be sized");
// Each half is written at its own layout-derived offset.
1237 self.memory.write_primval(ptr.offset(field_0), a, field_0_size)?;
1238 self.memory.write_primval(ptr.offset(field_1), b, field_1_size)?;
/// Maps a Rust type to the `PrimValKind` used to represent it, or
/// `EvalError::TypeNotPrimitive` when it has no single-primval representation.
1242 pub fn ty_to_primval_kind(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, PrimValKind> {
1243 use syntax::ast::FloatTy;
1245 let kind = match ty.sty {
1246 ty::TyBool => PrimValKind::Bool,
1247 ty::TyChar => PrimValKind::Char,
1249 ty::TyInt(int_ty) => {
1250 use syntax::ast::IntTy::*;
1251 let size = match int_ty {
// isize takes the target's pointer width.
1257 Is => self.memory.pointer_size(),
1259 PrimValKind::from_int_size(size)
1262 ty::TyUint(uint_ty) => {
1263 use syntax::ast::UintTy::*;
1264 let size = match uint_ty {
// usize takes the target's pointer width.
1270 Us => self.memory.pointer_size(),
1272 PrimValKind::from_uint_size(size)
1275 ty::TyFloat(FloatTy::F32) => PrimValKind::F32,
1276 ty::TyFloat(FloatTy::F64) => PrimValKind::F64,
1278 ty::TyFnPtr(_) => PrimValKind::FnPtr,
// Only thin refs/raw pointers qualify; fat pointers are not one primval.
1280 ty::TyRef(_, ref tam) |
1281 ty::TyRawPtr(ref tam) if self.type_is_sized(tam.ty) => PrimValKind::Ptr,
1283 ty::TyAdt(def, _) if def.is_box() => PrimValKind::Ptr,
1285 ty::TyAdt(def, substs) => {
1286 use rustc::ty::layout::Layout::*;
1287 match *self.type_layout(ty)? {
// C-like enums are represented by their discriminant's integer type.
1288 CEnum { discr, signed, .. } => {
1289 let size = discr.size().bytes();
1291 PrimValKind::from_int_size(size)
1293 PrimValKind::from_uint_size(size)
1297 RawNullablePointer { value, .. } => {
1298 use rustc::ty::layout::Primitive::*;
1300 // TODO(solson): Does signedness matter here? What should the sign be?
1301 Int(int) => PrimValKind::from_uint_size(int.size().bytes()),
1302 F32 => PrimValKind::F32,
1303 F64 => PrimValKind::F64,
1304 Pointer => PrimValKind::Ptr,
1308 // represent single field structs as their single field
1309 Univariant { .. } => {
1310 // enums with just one variant are no different, but `.struct_variant()` doesn't work for enums
1311 let variant = &def.variants[0];
1312 // FIXME: also allow structs with only a single non zst field
1313 if variant.fields.len() == 1 {
1314 return self.ty_to_primval_kind(variant.fields[0].ty(self.tcx, substs));
1316 return Err(EvalError::TypeNotPrimitive(ty));
1320 _ => return Err(EvalError::TypeNotPrimitive(ty)),
1324 _ => return Err(EvalError::TypeNotPrimitive(ty)),
/// Rejects primitive values invalid for `ty`: bools must be 0 or 1,
/// chars must be valid Unicode scalar values.
1330 fn ensure_valid_value(&self, val: PrimVal, ty: Ty<'tcx>) -> EvalResult<'tcx> {
1332 ty::TyBool if val.to_bytes()? > 1 => Err(EvalError::InvalidBool),
1334 ty::TyChar if ::std::char::from_u32(val.to_bytes()? as u32).is_none()
1335 => Err(EvalError::InvalidChar(val.to_bytes()? as u32 as u128)),
/// Like `try_read_value`, but a non-primitive `ty` is a bug rather than `None`.
1341 pub(super) fn read_value(&mut self, ptr: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1342 if let Some(val) = self.try_read_value(ptr, ty)? {
1345 bug!("primitive read failed for type: {:?}", ty);
/// Reads a (possibly fat) pointer to `pointee_ty` from memory at `ptr`.
1349 fn read_ptr(&mut self, ptr: Pointer, pointee_ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1350 let p = self.memory.read_ptr(ptr)?;
1351 if self.type_is_sized(pointee_ty) {
// Thin pointer: the address alone is the whole value.
1352 Ok(Value::ByVal(PrimVal::Ptr(p)))
1354 trace!("reading fat pointer extra of type {}", pointee_ty);
// The metadata word sits directly after the address.
1355 let extra = ptr.offset(self.memory.pointer_size());
1356 let extra = match self.tcx.struct_tail(pointee_ty).sty {
// Trait objects carry a vtable pointer; str carries a length.
1357 ty::TyDynamic(..) => PrimVal::Ptr(self.memory.read_ptr(extra)?),
1359 ty::TyStr => PrimVal::from_u128(self.memory.read_usize(extra)? as u128),
1360 _ => bug!("unsized primval ptr read from {:?}", pointee_ty),
1362 Ok(Value::ByValPair(PrimVal::Ptr(p), extra))
/// Attempts to read a primitive `Value` of type `ty` from memory at `ptr`.
/// Returns `Ok(None)` for types with no primitive representation.
1366 fn try_read_value(&mut self, ptr: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
1367 use syntax::ast::FloatTy;
1369 let val = match ty.sty {
1370 ty::TyBool => PrimVal::from_bool(self.memory.read_bool(ptr)?),
// chars are 4 bytes and must be valid Unicode scalar values.
1372 let c = self.memory.read_uint(ptr, 4)? as u32;
1373 match ::std::char::from_u32(c) {
1374 Some(ch) => PrimVal::from_char(ch),
1375 None => return Err(EvalError::InvalidChar(c as u128)),
1379 ty::TyInt(int_ty) => {
1380 use syntax::ast::IntTy::*;
1381 let size = match int_ty {
1387 Is => self.memory.pointer_size(),
1389 PrimVal::from_i128(self.memory.read_int(ptr, size)?)
1392 ty::TyUint(uint_ty) => {
1393 use syntax::ast::UintTy::*;
1394 let size = match uint_ty {
1400 Us => self.memory.pointer_size(),
1402 PrimVal::from_u128(self.memory.read_uint(ptr, size)?)
1405 ty::TyFloat(FloatTy::F32) => PrimVal::from_f32(self.memory.read_f32(ptr)?),
1406 ty::TyFloat(FloatTy::F64) => PrimVal::from_f64(self.memory.read_f64(ptr)?),
1408 ty::TyFnPtr(_) => self.memory.read_ptr(ptr).map(PrimVal::Ptr)?,
// References and raw pointers may be fat; `read_ptr` handles both.
1409 ty::TyRef(_, ref tam) |
1410 ty::TyRawPtr(ref tam) => return self.read_ptr(ptr, tam.ty).map(Some),
1412 ty::TyAdt(def, _) => {
// A Box reads like a pointer to its contents.
1414 return self.read_ptr(ptr, ty.boxed_ty()).map(Some);
1416 use rustc::ty::layout::Layout::*;
1417 if let CEnum { discr, signed, .. } = *self.type_layout(ty)? {
1418 let size = discr.size().bytes();
1420 PrimVal::from_i128(self.memory.read_int(ptr, size)?)
1422 PrimVal::from_u128(self.memory.read_uint(ptr, size)?)
// Non-primitive types cannot be read as a single value.
1429 _ => return Ok(None),
1432 Ok(Some(Value::ByVal(val)))
/// The topmost (currently executing) stack frame.
1435 pub(super) fn frame(&self) -> &Frame<'tcx> {
1436 self.stack.last().expect("no call frames exist")
/// Mutable access to the topmost (currently executing) stack frame.
1439 pub(super) fn frame_mut(&mut self) -> &mut Frame<'tcx> {
1440 self.stack.last_mut().expect("no call frames exist")
/// The MIR of the function executing in the current frame.
1443 pub(super) fn mir(&self) -> &'tcx mir::Mir<'tcx> {
/// The substitutions of the instance in the current frame.
1447 pub(super) fn substs(&self) -> &'tcx Substs<'tcx> {
1448 self.frame().instance.substs
1459 ) -> EvalResult<'tcx> {
1460 // A<Struct> -> A<Trait> conversion
// Coerces a thin pointer to a fat one by attaching the required metadata:
// the array length for array -> slice, a vtable pointer for concrete -> dyn.
1461 let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);
1463 match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
1464 (&ty::TyArray(_, length), &ty::TySlice(_)) => {
1465 let ptr = src.read_ptr(&self.memory)?;
// The statically known array length becomes the slice's length metadata.
1466 let len = PrimVal::from_u128(length as u128);
1467 let ptr = PrimVal::Ptr(ptr);
1468 self.write_value(Value::ByValPair(ptr, len), dest, dest_ty)
1470 (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
1471 // For now, upcasts are limited to changes in marker
1472 // traits, and hence never actually require an actual
1473 // change to the vtable.
1474 self.write_value(src, dest, dest_ty)
1476 (_, &ty::TyDynamic(ref data, _)) => {
1477 let trait_ref = data.principal().unwrap().with_self_ty(self.tcx, src_pointee_ty);
1478 let trait_ref = self.tcx.erase_regions(&trait_ref);
// Build (or fetch) the vtable and attach it as the fat-pointer metadata.
1479 let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
1480 let ptr = src.read_ptr(&self.memory)?;
1481 let ptr = PrimVal::Ptr(ptr);
1482 let extra = PrimVal::Ptr(vtable);
1483 self.write_value(Value::ByValPair(ptr, extra), dest, dest_ty)
1486 _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
1496 ) -> EvalResult<'tcx> {
// Performs an unsizing coercion from `src_ty` to `dest_ty`: directly for
// pointer-like types, or field-wise for ADTs like `Arc<T> -> Arc<Trait>`.
1497 match (&src_ty.sty, &dest_ty.sty) {
1498 (&ty::TyRef(_, ref s), &ty::TyRef(_, ref d)) |
1499 (&ty::TyRef(_, ref s), &ty::TyRawPtr(ref d)) |
1500 (&ty::TyRawPtr(ref s), &ty::TyRawPtr(ref d)) => self.unsize_into_ptr(src, src_ty, dest, dest_ty, s.ty, d.ty),
1501 (&ty::TyAdt(def_a, substs_a), &ty::TyAdt(def_b, substs_b)) => {
// Boxes unsize like their contained pointer; both sides must be boxes.
1502 if def_a.is_box() || def_b.is_box() {
1503 if !def_a.is_box() || !def_b.is_box() {
1504 panic!("invalid unsizing between {:?} -> {:?}", src_ty, dest_ty);
1506 return self.unsize_into_ptr(src, src_ty, dest, dest_ty, src_ty.boxed_ty(), dest_ty.boxed_ty());
// Wrappers with a primitive representation recurse on their single field.
1508 if self.ty_to_primval_kind(src_ty).is_ok() {
1509 let sty = self.get_field_ty(src_ty, 0)?;
1510 let dty = self.get_field_ty(dest_ty, 0)?;
1511 return self.unsize_into(src, sty, dest, dty);
1513 // unsizing of generic struct with pointer fields
1514 // Example: `Arc<T>` -> `Arc<Trait>`
1515 // here we need to increase the size of every &T thin ptr field to a fat ptr
1517 assert_eq!(def_a, def_b);
1519 let src_fields = def_a.variants[0].fields.iter();
1520 let dst_fields = def_b.variants[0].fields.iter();
1522 //let src = adt::MaybeSizedValue::sized(src);
1523 //let dst = adt::MaybeSizedValue::sized(dst);
1524 let src_ptr = match src {
1525 Value::ByRef(ptr) => ptr,
1526 _ => bug!("expected pointer, got {:?}", src),
// The destination must live in memory so fields can be written at offsets.
1530 let dest = self.force_allocation(dest)?.to_ptr();
1531 let iter = src_fields.zip(dst_fields).enumerate();
1532 for (i, (src_f, dst_f)) in iter {
1533 let src_fty = monomorphize_field_ty(self.tcx, src_f, substs_a);
1534 let dst_fty = monomorphize_field_ty(self.tcx, dst_f, substs_b);
// Zero-sized destination fields need no data written.
1535 if self.type_size(dst_fty)? == Some(0) {
1538 let src_field_offset = self.get_field_offset(src_ty, i)?.bytes();
1539 let dst_field_offset = self.get_field_offset(dest_ty, i)?.bytes();
1540 let src_f_ptr = src_ptr.offset(src_field_offset);
1541 let dst_f_ptr = dest.offset(dst_field_offset);
1542 if src_fty == dst_fty {
// Field type unchanged: a plain byte copy suffices.
1543 self.copy(src_f_ptr, dst_f_ptr, src_fty)?;
// Field type changed: this is the field being unsized; recurse.
1545 self.unsize_into(Value::ByRef(src_f_ptr), src_fty, Lvalue::from_ptr(dst_f_ptr), dst_fty)?;
1550 _ => bug!("unsize_into: invalid conversion: {:?} -> {:?}", src_ty, dest_ty),
/// Debug helper: logs a local's value and dumps any allocations it points to.
1554 pub(super) fn dump_local(&self, lvalue: Lvalue<'tcx>) {
1555 if let Lvalue::Local { frame, local, field } = lvalue {
1556 let mut allocs = Vec::new();
1557 let mut msg = format!("{:?}", local);
1558 if let Some((field, _)) = field {
1559 write!(msg, ".{}", field).unwrap();
// Note when the local belongs to a frame below the top of the stack.
1561 let last_frame = self.stack.len() - 1;
1562 if frame != last_frame {
1563 write!(msg, " ({} frames up)", last_frame - frame).unwrap();
1565 write!(msg, ":").unwrap();
1567 match self.stack[frame].get_local(local, field.map(|(i, _)| i)) {
1568 Value::ByRef(ptr) => {
1569 allocs.push(ptr.alloc_id);
1571 Value::ByVal(val) => {
1572 write!(msg, " {:?}", val).unwrap();
// Collect any embedded pointer so its allocation is dumped below.
1573 if let PrimVal::Ptr(ptr) = val { allocs.push(ptr.alloc_id); }
1575 Value::ByValPair(val1, val2) => {
1576 write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
1577 if let PrimVal::Ptr(ptr) = val1 { allocs.push(ptr.alloc_id); }
1578 if let PrimVal::Ptr(ptr) = val2 { allocs.push(ptr.alloc_id); }
1583 self.memory.dump_allocs(allocs);
1587 /// Convenience function to ensure correct usage of globals and code-sharing with locals.
/// Applies `f` to the cached value of `cid` and writes the result back.
/// Fails with `ModifiedConstantMemory` if the global may no longer be mutated.
1588 pub fn modify_global<F>(&mut self, cid: GlobalId<'tcx>, f: F) -> EvalResult<'tcx>
1589 where F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
1591 let mut val = *self.globals.get(&cid).expect("global not cached");
1593 return Err(EvalError::ModifiedConstantMemory);
1595 val.value = f(self, val.value)?;
1596 *self.globals.get_mut(&cid).expect("already checked") = val;
1600 /// Convenience function to ensure correct usage of locals and code-sharing with globals.
/// Applies `f` to a stack local (or pair-field of it) and stores the result in place.
1601 pub fn modify_local<F>(
1605 field: Option<usize>,
1607 ) -> EvalResult<'tcx>
1608 where F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
1610 let val = self.stack[frame].get_local(local, field);
1611 let new_val = f(self, val)?;
1612 self.stack[frame].set_local(local, field, new_val);
1613 // FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
1614 // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
1615 // self.memory.deallocate(ptr)?;
1621 impl<'tcx> Frame<'tcx> {
/// Reads a local, or one half of a `ByValPair` local when `field` is given.
1622 pub fn get_local(&self, local: mir::Local, field: Option<usize>) -> Value {
1623 // Subtract 1 because we don't store a value for the ReturnPointer, the local with index 0.
1624 if let Some(field) = field {
1625 match self.locals[local.index() - 1] {
1626 Value::ByRef(_) => bug!("can't have lvalue fields for ByRef"),
// A single ByVal only has field 0, which is the value itself.
1627 val @ Value::ByVal(_) => {
1628 assert_eq!(field, 0);
1631 Value::ByValPair(a, b) => {
1633 0 => Value::ByVal(a),
1634 1 => Value::ByVal(b),
1635 _ => bug!("ByValPair has only two fields, tried to access {}", field),
1640 self.locals[local.index() - 1]
/// Writes a local, updating just one half of a `ByValPair` when `field` is given.
1644 fn set_local(&mut self, local: mir::Local, field: Option<usize>, value: Value) {
1645 // Subtract 1 because we don't store a value for the ReturnPointer, the local with index 0.
1646 if let Some(field) = field {
1647 match self.locals[local.index() - 1] {
1648 Value::ByRef(_) => bug!("can't have lvalue fields for ByRef"),
1649 Value::ByVal(_) => {
1650 assert_eq!(field, 0);
1651 self.set_local(local, None, value);
1653 Value::ByValPair(a, b) => {
// Only a plain primval can replace one half of the pair.
1654 let prim = match value {
1655 Value::ByRef(_) => bug!("can't set ValPair field to ByRef"),
1656 Value::ByVal(val) => val,
1657 Value::ByValPair(_, _) => bug!("can't set ValPair field to ValPair"),
1660 0 => self.set_local(local, None, Value::ByValPair(prim, b)),
1661 1 => self.set_local(local, None, Value::ByValPair(a, prim)),
1662 _ => bug!("ByValPair has only two fields, tried to access {}", field),
1667 self.locals[local.index() - 1] = value;
/// Entry point: evaluates the program's `main` (which must be a zero-argument
/// function returning unit), then reports leaked allocations or errors.
1672 pub fn eval_main<'a, 'tcx: 'a>(
1673 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1675 limits: ResourceLimits,
1677 let mut ecx = EvalContext::new(tcx, limits);
1678 let instance = ty::Instance::mono(tcx, def_id);
1679 let mir = ecx.load_mir(instance.def).expect("main function's MIR not found");
// Reject any main that takes arguments or returns a non-unit value.
1681 if !mir.return_ty.is_nil() || mir.arg_count != 0 {
1682 let msg = "miri does not support main functions without `fn()` type signatures";
1683 tcx.sess.err(&EvalError::Unimplemented(String::from(msg)).to_string());
1687 ecx.push_stack_frame(
// main's unit return value needs no storage.
1691 Lvalue::from_ptr(Pointer::zst_ptr()),
1692 StackPopCleanup::None,
1693 ).expect("could not allocate first stack frame");
1699 let leaks = ecx.memory.leak_report();
1701 tcx.sess.err("the evaluated program leaked memory");
1706 report(tcx, &ecx, e);
/// Emits a compiler diagnostic for an evaluation error, pointing at the
/// statement (or terminator) being executed and noting every enclosing call.
1713 fn report(tcx: TyCtxt, ecx: &EvalContext, e: EvalError) {
1714 let frame = ecx.stack().last().expect("stackframe was empty");
1715 let block = &frame.mir.basic_blocks()[frame.block];
// `stmt` past the statement list means we were at the block terminator.
1716 let span = if frame.stmt < block.statements.len() {
1717 block.statements[frame.stmt].source_info.span
1719 block.terminator().source_info.span
1721 let mut err = tcx.sess.struct_span_err(span, &e.to_string());
1722 for &Frame { instance, span, .. } in ecx.stack().iter().rev() {
// Closures get a dedicated note since their instances print poorly.
1723 if tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
1724 err.span_note(span, "inside call to closure");
1727 err.span_note(span, &format!("inside call to {}", instance));
1732 // TODO(solson): Upstream these methods into rustc::ty::layout.
/// Helper trait for querying the byte size of a layout integer.
1734 pub(super) trait IntegerExt {
1735 fn size(self) -> Size;
1738 impl IntegerExt for layout::Integer {
1739 fn size(self) -> Size {
1740 use rustc::ty::layout::Integer::*;
// I1 (a one-bit discriminant) still occupies a full byte.
1742 I1 | I8 => Size::from_bits(8),
1743 I16 => Size::from_bits(16),
1744 I32 => Size::from_bits(32),
1745 I64 => Size::from_bits(64),
1746 I128 => Size::from_bits(128),
/// Substitutes `substs` into a field's declared type and normalizes away
/// associated types.
1752 pub fn monomorphize_field_ty<'a, 'tcx:'a >(tcx: TyCtxt<'a, 'tcx, 'tcx>, f: &ty::FieldDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1753 let substituted = f.ty(tcx, substs);
1754 tcx.normalize_associated_type(&substituted)
/// Whether `ty` has at least one inhabitant (i.e. is not uninhabited like `!`).
1757 pub fn is_inhabited<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
1758 ty.uninhabited_from(&mut HashMap::default(), tcx).is_empty()
/// Abstraction over things that yield a `(Value, Ty)` pair, so callers can
/// accept either an already-evaluated pair or a MIR operand.
1761 pub trait IntoValTyPair<'tcx> {
1762 fn into_val_ty_pair<'a>(self, ecx: &mut EvalContext<'a, 'tcx>) -> EvalResult<'tcx, (Value, Ty<'tcx>)> where 'tcx: 'a;
1765 impl<'tcx> IntoValTyPair<'tcx> for (Value, Ty<'tcx>) {
// An existing pair is returned as-is; the evaluation context is unused.
1766 fn into_val_ty_pair<'a>(self, _: &mut EvalContext<'a, 'tcx>) -> EvalResult<'tcx, (Value, Ty<'tcx>)> where 'tcx: 'a {
1771 impl<'b, 'tcx: 'b> IntoValTyPair<'tcx> for &'b mir::Operand<'tcx> {
// A MIR operand is evaluated on demand, together with its type.
1772 fn into_val_ty_pair<'a>(self, ecx: &mut EvalContext<'a, 'tcx>) -> EvalResult<'tcx, (Value, Ty<'tcx>)> where 'tcx: 'a {
1773 let value = ecx.eval_operand(self)?;
1774 let value_ty = ecx.operand_ty(self);
1775 Ok((value, value_ty))
1780 /// FIXME: expose trans::monomorphize::resolve_closure
/// Resolves a closure to an instance, inserting a `FnOnce` adapter shim when
/// the closure is invoked through a looser trait than the one it implements.
1781 pub fn resolve_closure<'a, 'tcx> (
1782 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1784 substs: ty::ClosureSubsts<'tcx>,
1785 requested_kind: ty::ClosureKind,
1786 ) -> ty::Instance<'tcx> {
1787 let actual_kind = tcx.closure_kind(def_id);
1788 match needs_fn_once_adapter_shim(actual_kind, requested_kind) {
1789 Ok(true) => fn_once_adapter_instance(tcx, def_id, substs),
1790 _ => ty::Instance::new(def_id, substs.substs)
/// Builds the `ClosureOnceShim` instance that forwards a `call_once`
/// invocation to a `Fn`/`FnMut` closure.
1794 fn fn_once_adapter_instance<'a, 'tcx>(
1795 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1797 substs: ty::ClosureSubsts<'tcx>,
1798 ) -> ty::Instance<'tcx> {
1799 debug!("fn_once_adapter_shim({:?}, {:?})",
// The shim is keyed on FnOnce's method item (call_once).
1802 let fn_once = tcx.lang_items.fn_once_trait().unwrap();
1803 let call_once = tcx.associated_items(fn_once)
1804 .find(|it| it.kind == ty::AssociatedKind::Method)
1806 let def = ty::InstanceDef::ClosureOnceShim { call_once };
1808 let self_ty = tcx.mk_closure_from_closure_substs(
1809 closure_did, substs);
1811 let sig = tcx.closure_type(closure_did).subst(tcx, substs.substs);
1812 let sig = tcx.erase_late_bound_regions_and_normalize(&sig);
// Closure signatures take a single tupled argument.
1813 assert_eq!(sig.inputs().len(), 1);
1814 let substs = tcx.mk_substs([
1815 Kind::from(self_ty),
1816 Kind::from(sig.inputs()[0]),
1819 debug!("fn_once_adapter_shim: self_ty={:?} sig={:?}", self_ty, sig);
1820 ty::Instance { def, substs }
/// Whether calling a closure of `actual_closure_kind` through
/// `trait_closure_kind` requires the `call_once` adapter shim.
1823 fn needs_fn_once_adapter_shim(actual_closure_kind: ty::ClosureKind,
1824 trait_closure_kind: ty::ClosureKind)
1827 match (actual_closure_kind, trait_closure_kind) {
1828 (ty::ClosureKind::Fn, ty::ClosureKind::Fn) |
1829 (ty::ClosureKind::FnMut, ty::ClosureKind::FnMut) |
1830 (ty::ClosureKind::FnOnce, ty::ClosureKind::FnOnce) => {
1831 // No adapter needed.
1834 (ty::ClosureKind::Fn, ty::ClosureKind::FnMut) => {
1835 // The closure fn `llfn` is a `fn(&self, ...)`. We want a
1836 // `fn(&mut self, ...)`. In fact, at trans time, these are
1837 // basically the same thing, so we can just return llfn.
1840 (ty::ClosureKind::Fn, ty::ClosureKind::FnOnce) |
1841 (ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => {
1842 // The closure fn `llfn` is a `fn(&self, ...)` or `fn(&mut
1843 // self, ...)`. We want a `fn(self, ...)`. We can produce
1844 // this by doing something like:
1846 // fn call_once(self, ...) { call_mut(&self, ...) }
1847 // fn call_once(mut self, ...) { call_mut(&mut self, ...) }
1849 // These are both the same at trans time.
1856 /// The point where linking happens. Resolve a (def_id, substs)
1857 /// pair to an instance.
1858 pub fn resolve<'a, 'tcx>(
1859 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1861 substs: &'tcx Substs<'tcx>
1862 ) -> ty::Instance<'tcx> {
1863 debug!("resolve(def_id={:?}, substs={:?})",
// Trait items go through trait resolution to find the implementing item.
1865 let result = if let Some(trait_def_id) = tcx.trait_of_item(def_id) {
1866 debug!(" => associated item, attempting to find impl");
1867 let item = tcx.associated_item(def_id);
1868 resolve_associated_item(tcx, &item, trait_def_id, substs)
1870 let item_type = def_ty(tcx, def_id, substs);
1871 let def = match item_type.sty {
// Intrinsics keep their own InstanceDef so callers can special-case them.
1872 ty::TyFnDef(_, _, f) if
1873 f.abi() == Abi::RustIntrinsic ||
1874 f.abi() == Abi::PlatformIntrinsic =>
1876 debug!(" => intrinsic");
1877 ty::InstanceDef::Intrinsic(def_id)
// drop_in_place resolves to drop glue, carrying the dropped type only
// when that type actually needs glue.
1880 if Some(def_id) == tcx.lang_items.drop_in_place_fn() {
1881 let ty = substs.type_at(0);
1882 if needs_drop_glue(tcx, ty) {
1883 debug!(" => nontrivial drop glue");
1884 ty::InstanceDef::DropGlue(def_id, Some(ty))
1886 debug!(" => trivial drop glue");
1887 ty::InstanceDef::DropGlue(def_id, None)
1890 debug!(" => free item");
1891 ty::InstanceDef::Item(def_id)
1895 ty::Instance { def, substs }
1897 debug!("resolve(def_id={:?}, substs={:?}) = {}",
1898 def_id, substs, result);
/// Whether dropping a value of type `t` runs any code (conservative).
/// `Box<T>` of a no-drop sized `T` only needs glue to free the allocation,
/// and not even that when the boxed layout is zero-sized.
1902 pub fn needs_drop_glue<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, t: Ty<'tcx>) -> bool {
1903 assert!(t.is_normalized_for_trans());
1905 let t = tcx.erase_regions(&t);
1907 // FIXME (#22815): note that type_needs_drop conservatively
1908 // approximates in some cases and may say a type expression
1909 // requires drop glue when it actually does not.
1911 // (In this case it is not clear whether any harm is done, i.e.
1912 // erroneously returning `true` in some cases where we could have
1913 // returned `false` does not appear unsound. The impact on
1914 // code quality is unknown at this time.)
1916 let env = tcx.empty_parameter_environment();
1917 if !t.needs_drop(tcx, &env) {
1921 ty::TyAdt(def, _) if def.is_box() => {
1922 let typ = t.boxed_ty();
1923 if !typ.needs_drop(tcx, &env) && type_is_sized(tcx, typ) {
1924 tcx.infer_ctxt((), traits::Reveal::All).enter(|infcx| {
1925 let layout = t.layout(&infcx).unwrap();
1926 // `Box<ZeroSizeType>` does not allocate.
1927 layout.size(&tcx.data_layout).bytes() != 0
/// Resolves a trait item to the concrete instance implementing it, by
/// running trait selection on the receiver's substitutions.
1937 fn resolve_associated_item<'a, 'tcx>(
1938 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1939 trait_item: &ty::AssociatedItem,
1941 rcvr_substs: &'tcx Substs<'tcx>
1942 ) -> ty::Instance<'tcx> {
1943 let def_id = trait_item.def_id;
1944 debug!("resolve_associated_item(trait_item={:?}, \
1947 def_id, trait_id, rcvr_substs);
1949 let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_substs);
1950 let vtbl = fulfill_obligation(tcx, DUMMY_SP, ty::Binder(trait_ref));
1952 // Now that we know which impl is being used, we can dispatch to
1953 // the actual function:
1955 ::rustc::traits::VtableImpl(impl_data) => {
1956 let (def_id, substs) = ::rustc::traits::find_associated_item(
1957 tcx, trait_item, rcvr_substs, &impl_data);
1958 let substs = tcx.erase_regions(&substs);
1959 ty::Instance::new(def_id, substs)
1961 ::rustc::traits::VtableClosure(closure_data) => {
// Closures called through Fn* traits may need an adapter shim.
1962 let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
1963 resolve_closure(tcx, closure_data.closure_def_id, closure_data.substs,
1966 ::rustc::traits::VtableFnPointer(ref data) => {
1968 def: ty::InstanceDef::FnPtrShim(trait_item.def_id, data.fn_ty),
1972 ::rustc::traits::VtableObject(ref data) => {
// Dynamic dispatch: record the method's slot index in the vtable.
1973 let index = tcx.get_vtable_index_of_object_method(data, def_id);
1975 def: ty::InstanceDef::Virtual(def_id, index),
1980 bug!("static call to invalid vtable: {:?}", vtbl)
/// The type of `def_id` with `substs` applied and associated types normalized.
1985 pub fn def_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1987 substs: &'tcx Substs<'tcx>)
1990 let ty = tcx.type_of(def_id);
1991 apply_param_substs(tcx, substs, &ty)
1994 /// Monomorphizes a type from the AST by first applying the in-scope
1995 /// substitutions and then normalizing any associated types.
1996 pub fn apply_param_substs<'a, 'tcx, T>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1997 param_substs: &Substs<'tcx>,
2000 where T: ::rustc::infer::TransNormalize<'tcx>
2002 debug!("apply_param_substs(param_substs={:?}, value={:?})", param_substs, value);
2003 let substituted = value.subst(tcx, param_substs);
// Regions are erased before normalization.
2004 let substituted = tcx.erase_regions(&substituted);
2005 AssociatedTypeNormalizer{ tcx }.fold(&substituted)
/// Type folder that replaces projection types with their normalized form.
2009 struct AssociatedTypeNormalizer<'a, 'tcx: 'a> {
2010 tcx: TyCtxt<'a, 'tcx, 'tcx>,
2013 impl<'a, 'tcx> AssociatedTypeNormalizer<'a, 'tcx> {
// Fast path: skip folding entirely when no projections are present.
2014 fn fold<T: TypeFoldable<'tcx>>(&mut self, value: &T) -> T {
2015 if !value.has_projection_types() {
2018 value.fold_with(self)
2023 impl<'a, 'tcx> ::rustc::ty::fold::TypeFolder<'tcx, 'tcx> for AssociatedTypeNormalizer<'a, 'tcx> {
2024 fn tcx<'c>(&'c self) -> TyCtxt<'c, 'tcx, 'tcx> {
2028 fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
// Only types containing projections need normalization.
2029 if !ty.has_projection_types() {
2032 self.tcx.normalize_associated_type(&ty)
/// Whether `ty` is `Sized` in an empty parameter environment.
2037 fn type_is_sized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
2038 // generics are weird, don't run this function on a generic
2039 assert!(!ty.needs_subst());
2040 ty.is_sized(tcx, &tcx.empty_parameter_environment(), DUMMY_SP)
2043 /// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
2044 /// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
2045 /// guarantee to us that all nested obligations *could be* resolved if we wanted to.
2046 fn fulfill_obligation<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
2048 trait_ref: ty::PolyTraitRef<'tcx>)
2049 -> traits::Vtable<'tcx, ()>
2051 // Remove any references to regions; this helps improve caching.
2052 let trait_ref = tcx.erase_regions(&trait_ref);
2054 debug!("trans::fulfill_obligation(trait_ref={:?}, def_id={:?})",
2055 trait_ref, trait_ref.def_id());
2057 // Do the initial selection for the obligation. This yields the
2058 // shallow result we are looking for -- that is, what specific impl.
2059 tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
2060 let mut selcx = traits::SelectionContext::new(&infcx);
2062 let obligation_cause = traits::ObligationCause::misc(span,
2063 ast::DUMMY_NODE_ID);
2064 let obligation = traits::Obligation::new(obligation_cause,
2065 trait_ref.to_poly_trait_predicate());
2067 let selection = match selcx.select(&obligation) {
2068 Ok(Some(selection)) => selection,
2070 // Ambiguity can happen when monomorphizing during trans
2071 // expands to some humongo type that never occurred
2072 // statically -- this humongo type can then overflow,
2073 // leading to an ambiguous result. So report this as an
2074 // overflow bug, since I believe this is the only case
2075 // where ambiguity can result.
2076 debug!("Encountered ambiguity selecting `{:?}` during trans, \
2077 presuming due to overflow",
2079 tcx.sess.span_fatal(span,
2080 "reached the recursion limit during monomorphization \
2081 (selection ambiguity)");
// Selection errors at this point indicate a compiler bug, not a user error.
2084 span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans",
2089 debug!("fulfill_obligation: selection={:?}", selection);
2091 // Currently, we use a fulfillment context to completely resolve
2092 // all nested obligations. This is because they can inform the
2093 // inference of the impl's type parameters.
2094 let mut fulfill_cx = traits::FulfillmentContext::new();
2095 let vtable = selection.map(|predicate| {
2096 debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate);
2097 fulfill_cx.register_predicate_obligation(&infcx, predicate);
2099 let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);
2101 debug!("Cache miss: {:?} => {:?}", trait_ref, vtable);
2106 pub fn resolve_drop_in_place<'a, 'tcx>(
2107 tcx: TyCtxt<'a, 'tcx, 'tcx>,
2109 ) -> ty::Instance<'tcx>
2111 let def_id = tcx.require_lang_item(::rustc::middle::lang_items::DropInPlaceFnLangItem);
2112 let substs = tcx.intern_substs(&[Kind::from(ty)]);
2113 resolve(tcx, def_id, substs)