2 use std::hash::{Hash, Hasher};
5 use rustc::hir::def_id::DefId;
6 use rustc::hir::def::Def;
7 use rustc::hir::map::definitions::DefPathData;
9 use rustc::ty::layout::{
10 self, Size, Align, HasDataLayout, LayoutOf, TyLayout, Primitive
12 use rustc::ty::subst::{Subst, Substs};
13 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
14 use rustc::ty::query::TyCtxtAt;
15 use rustc_data_structures::fx::{FxHashSet, FxHasher};
16 use rustc_data_structures::indexed_vec::IndexVec;
17 use rustc::mir::interpret::{
18 GlobalId, Scalar, FrameInfo, AllocType,
19 EvalResult, EvalErrorKind,
21 truncate, sign_extend,
24 use syntax::source_map::{self, Span};
25 use syntax::ast::Mutability;
28 Value, Operand, MemPlace, MPlaceTy, Place, PlaceExtra,
// Builds a `ValidationFailure` error with a message of the form
// "encountered <what>[ at <where>], but expected <details>".
// The second arm omits the expected-value description.
32 macro_rules! validation_failure{
33 ($what:expr, $where:expr, $details:expr) => {{
// Only mention the path when it is non-empty.
34 let where_ = if $where.is_empty() {
37 format!(" at {}", $where)
39 err!(ValidationFailure(format!(
40 "encountered {}{}, but expected {}",
41 $what, where_, $details,
// Arm without an `expected` description.
44 ($what:expr, $where:expr) => {{
45 let where_ = if $where.is_empty() {
48 format!(" at {}", $where)
50 err!(ValidationFailure(format!(
/// The complete state of the MIR interpreter during const-evaluation:
/// type context, parameter environment, virtual memory, virtual call
/// stack, and the infinite-loop detector.
57 pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
58 /// Stores the `Machine` instance.
61 /// The results of the type checker, from rustc.
62 pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
64 /// Bounds in scope for polymorphic evaluations.
65 pub param_env: ty::ParamEnv<'tcx>,
67 /// The virtual memory system.
68 pub memory: Memory<'a, 'mir, 'tcx, M>,
70 /// The virtual call stack.
71 pub(crate) stack: Vec<Frame<'mir, 'tcx>>,
73 /// The maximum number of stack frames allowed
74 pub(crate) stack_limit: usize,
76 /// When this value is negative, it indicates the number of interpreter
77 /// steps *until* the loop detector is enabled. When it is positive, it is
78 /// the number of steps after the detector has been enabled modulo the loop
80 pub(crate) steps_since_detector_enabled: isize,
82 pub(crate) loop_detector: InfiniteLoopDetector<'a, 'mir, 'tcx, M>,
/// One frame of the interpreter's virtual call stack: everything needed to
/// execute (and later return from) a single monomorphized function call.
87 pub struct Frame<'mir, 'tcx: 'mir> {
88 ////////////////////////////////////////////////////////////////////////////////
89 // Function and callsite information
90 ////////////////////////////////////////////////////////////////////////////////
91 /// The MIR for the function called on this frame.
92 pub mir: &'mir mir::Mir<'tcx>,
94 /// The def_id and substs of the current function
95 pub instance: ty::Instance<'tcx>,
97 /// The span of the call site.
98 pub span: source_map::Span,
100 ////////////////////////////////////////////////////////////////////////////////
101 // Return place and locals
102 ////////////////////////////////////////////////////////////////////////////////
103 /// The block to return to when returning from the current stack frame
104 pub return_to_block: StackPopCleanup,
106 /// The location where the result of the current stack frame should be written to.
107 pub return_place: Place,
109 /// The list of locals for this stack frame, stored in order as
110 /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
111 /// `None` represents a local that is currently dead, while a live local
112 /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
113 pub locals: IndexVec<mir::Local, LocalValue>,
115 ////////////////////////////////////////////////////////////////////////////////
116 // Current position within the function
117 ////////////////////////////////////////////////////////////////////////////////
118 /// The block that is currently executed (or will be executed after the above call stacks
120 pub block: mir::BasicBlock,
122 /// The index of the currently evaluated statement.
// `Frame` participates in full-equality hash sets (the loop detector's
// snapshots), so it needs `Eq` on top of the manual `PartialEq` below.
126 impl<'mir, 'tcx: 'mir> Eq for Frame<'mir, 'tcx> {}
// Structural equality over the frame's dynamic state. Only the fields
// compared below participate; `mir` and `span` are not compared
// (NOTE(review): presumably because they are determined by `instance` —
// confirm against the elided destructuring above).
128 impl<'mir, 'tcx: 'mir> PartialEq for Frame<'mir, 'tcx> {
129 fn eq(&self, other: &Self) -> bool {
141 // Some of these are constant during evaluation, but are included
142 // anyways for correctness.
143 *instance == other.instance
144 && *return_to_block == other.return_to_block
145 && *return_place == other.return_place
146 && *locals == other.locals
147 && *block == other.block
148 && *stmt == other.stmt
// Manual `Hash` feeding the same state fields as `PartialEq` above into the
// hasher, preserving the `Hash`/`Eq` contract required by `FxHashSet`.
// (NOTE(review): remaining fields hashed in the elided lines should mirror
// those compared in `eq` — confirm.)
152 impl<'mir, 'tcx: 'mir> Hash for Frame<'mir, 'tcx> {
153 fn hash<H: Hasher>(&self, state: &mut H) {
165 instance.hash(state);
166 return_to_block.hash(state);
167 return_place.hash(state);
174 // State of a local variable
// A local is either `Dead` (accessing it errors — see `access` below) or
// `Live`, holding an `Operand`:
175 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
176 pub enum LocalValue {
178 // Mostly for convenience, we re-use the `Operand` type here.
179 // This is an optimization over just always having a pointer here;
180 // we can thus avoid doing an allocation when the local just stores
181 // immediate values *and* never has its address taken.
185 impl<'tcx> LocalValue {
/// Shared access to the local's `Operand`; errors with `DeadLocal` when
/// the local is dead.
186 pub fn access(&self) -> EvalResult<'tcx, &Operand> {
188 LocalValue::Dead => err!(DeadLocal),
189 LocalValue::Live(ref val) => Ok(val),
/// Mutable access to the local's `Operand`; errors with `DeadLocal` when
/// the local is dead.
193 pub fn access_mut(&mut self) -> EvalResult<'tcx, &mut Operand> {
195 LocalValue::Dead => err!(DeadLocal),
196 LocalValue::Live(ref mut val) => Ok(val),
201 /// The virtual machine state during const-evaluation at a given point in time.
// Captures the machine-specific state, the full call stack, and all memory.
202 type EvalSnapshot<'a, 'mir, 'tcx, M>
203 = (M, Vec<Frame<'mir, 'tcx>>, Memory<'a, 'mir, 'tcx, M>);
/// Detects infinite loops during const-evaluation by recording interpreter
/// state snapshots and erroring when the exact same state recurs.
205 pub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
206 /// The set of all `EvalSnapshot` *hashes* observed by this detector.
208 /// When a collision occurs in this table, we store the full snapshot in
210 hashes: FxHashSet<u64>,
212 /// The set of all `EvalSnapshot`s observed by this detector.
214 /// An `EvalSnapshot` will only be fully cloned once it has caused a
215 /// collision in `hashes`. As a result, the detector must observe at least
216 /// *two* full cycles of an infinite loop before it triggers.
217 snapshots: FxHashSet<EvalSnapshot<'a, 'mir, 'tcx, M>>,
220 impl<'a, 'mir, 'tcx, M> Default for InfiniteLoopDetector<'a, 'mir, 'tcx, M>
221 where M: Machine<'mir, 'tcx>,
// A fresh detector has observed no hashes and holds no snapshots.
224 fn default() -> Self {
225 InfiniteLoopDetector {
226 hashes: FxHashSet::default(),
227 snapshots: FxHashSet::default(),
232 impl<'a, 'mir, 'tcx, M> InfiniteLoopDetector<'a, 'mir, 'tcx, M>
233 where M: Machine<'mir, 'tcx>,
236 /// Returns `true` if the loop detector has not yet observed a snapshot.
237 pub fn is_empty(&self) -> bool {
238 self.hashes.is_empty()
/// Records the current interpreter state; errors with `InfiniteLoop` once
/// the identical full state has been observed before.
241 pub fn observe_and_analyze(
244 stack: &Vec<Frame<'mir, 'tcx>>,
245 memory: &Memory<'a, 'mir, 'tcx, M>,
246 ) -> EvalResult<'tcx, ()> {
// Cheap first pass: hash the (machine, stack, memory) triple by reference.
247 let snapshot = (machine, stack, memory);
249 let mut fx = FxHasher::default();
250 snapshot.hash(&mut fx);
251 let hash = fx.finish();
// A freshly-inserted hash means this state was definitely not seen before.
253 if self.hashes.insert(hash) {
// Hash collision: only now pay for a full clone of the state.
258 if self.snapshots.insert((machine.clone(), stack.clone(), memory.clone())) {
259 // Spurious collision or first cycle
// The exact same state recurred: the program cannot make progress.
264 Err(EvalErrorKind::InfiniteLoop.into())
/// What to do with the return place/block when a stack frame is popped.
268 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
269 pub enum StackPopCleanup {
270 /// The stackframe existed to compute the initial value of a static/constant, make sure it
271 /// isn't modifiable afterwards in case of constants.
272 /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
273 /// references or deallocated
274 MarkStatic(Mutability),
275 /// A regular stackframe added due to a function call will need to get forwarded to the next
277 Goto(mir::BasicBlock),
278 /// The main function and diverging functions have nowhere to return to
// The interpreter knows the target's data layout via the type context.
282 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
284 fn data_layout(&self) -> &layout::TargetDataLayout {
285 &self.tcx.data_layout
// Same as above, for the doubly-indirect `&&mut` form used where the
// context is held by mutable reference.
289 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
290 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
292 fn data_layout(&self) -> &layout::TargetDataLayout {
293 &self.tcx.data_layout
// Gives layout code access to the `TyCtxt` (shared-reference context).
297 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
299 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
// Gives layout code access to the `TyCtxt` (mutable-context form).
304 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
305 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
307 fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
// Layout computation can fail (e.g. for too-generic types); surface that
// as an `EvalResult` instead of panicking.
312 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for &'a EvalContext<'a, 'mir, 'tcx, M> {
314 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
316 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
317 self.tcx.layout_of(self.param_env.and(ty))
318 .map_err(|layout| EvalErrorKind::Layout(layout).into())
322 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf
323 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
325 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
// Delegate to the shared-reference impl above.
328 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
329 (&**self).layout_of(ty)
// Number of interpreter steps executed before the infinite-loop detector
// kicks in (stored negated in `steps_since_detector_enabled`).
333 const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000;
335 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// (new) Constructs a fresh interpreter with an empty call stack; the loop
// detector starts disabled for `STEPS_UNTIL_DETECTOR_ENABLED` steps.
337 tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
338 param_env: ty::ParamEnv<'tcx>,
340 memory_data: M::MemoryData,
346 memory: Memory::new(tcx, memory_data),
348 stack_limit: tcx.sess.const_eval_stack_frame_limit,
349 loop_detector: Default::default(),
350 steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED,
/// Runs `f` with a temporarily empty call stack and a reset loop-detector
/// step counter, restoring the saved state afterwards.
354 pub(crate) fn with_fresh_body<F: FnOnce(&mut Self) -> R, R>(&mut self, f: F) -> R {
355 let stack = mem::replace(&mut self.stack, Vec::new());
356 let steps = mem::replace(&mut self.steps_since_detector_enabled, -STEPS_UNTIL_DETECTOR_ENABLED);
// Restore the saved step counter after `f` has run.
359 self.steps_since_detector_enabled = steps;
/// Shared access to the interpreter's virtual memory.
363 pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
/// Mutable access to the interpreter's virtual memory.
367 pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
/// The current virtual call stack.
371 pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
/// Index of the currently executing frame. Panics if the stack is empty.
376 pub fn cur_frame(&self) -> usize {
377 assert!(self.stack.len() > 0);
381 /// Mark a storage as live, killing the previous content and returning it.
382 /// Remember to deallocate that!
383 pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, LocalValue> {
384 trace!("{:?} is now live", local);
// Build a fresh uninitialized operand of the local's monomorphized layout.
386 let layout = self.layout_of_local(self.cur_frame(), local)?;
387 let init = LocalValue::Live(self.uninit_operand(layout)?);
388 // StorageLive *always* kills the value that's currently stored
389 Ok(mem::replace(&mut self.frame_mut().locals[local], init))
/// Marks a local as dead.
392 /// Returns the old value of the local.
393 /// Remember to deallocate that!
394 pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue {
395 trace!("{:?} is now dead", local);
397 mem::replace(&mut self.frame_mut().locals[local], LocalValue::Dead)
/// Allocates `s` in interpreter memory and returns it as a (ptr, len)
/// slice value.
400 pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
401 let ptr = self.memory.allocate_bytes(s.as_bytes());
402 Ok(Value::new_slice(Scalar::Ptr(ptr), s.len() as u64, self.tcx.tcx))
/// Resolves `def_id` + `substs` to a concrete `Instance`, substituting and
/// normalizing against the current frame first. Errors with `TooGeneric`
/// when the instance still depends on unresolved type parameters.
405 pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
406 trace!("resolve: {:?}, {:#?}", def_id, substs);
407 trace!("substs: {:#?}", self.substs());
408 trace!("param_env: {:#?}", self.param_env);
409 let substs = self.tcx.subst_and_normalize_erasing_regions(
414 ty::Instance::resolve(
419 ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
/// Whether `ty` is `Sized` under the current parameter environment.
422 pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
423 ty.is_sized(self.tcx, self.param_env)
// (load_mir) Fetches the MIR body for `instance`, refusing to run code whose
// type-checking produced errors and reporting `NoMirFor` when no optimized
// MIR is available for an `Item`.
428 instance: ty::InstanceDef<'tcx>,
429 ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
430 // do not continue if typeck errors occurred (can only occur in local crate)
431 let did = instance.def_id();
432 if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
433 return err!(TypeckError);
435 trace!("load mir {:?}", instance);
437 ty::InstanceDef::Item(def_id) => {
438 self.tcx.maybe_optimized_mir(def_id).ok_or_else(||
439 EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
// Non-`Item` instances (shims etc.) get their MIR from the tcx directly.
442 _ => Ok(self.tcx.instance_mir(instance)),
/// Substitutes `substs` into `t`, then normalizes while erasing regions.
446 pub fn monomorphize<T: TypeFoldable<'tcx> + Subst<'tcx>>(
449 substs: &'tcx Substs<'tcx>
451 // miri doesn't care about lifetimes, and will choke on some crazy ones
452 // let's simply get rid of them
453 let substituted = t.subst(*self.tcx, substs);
454 self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
/// Computes the layout of `local` in stack frame `frame`, monomorphized
/// with that frame's substs.
457 pub fn layout_of_local(
461 ) -> EvalResult<'tcx, TyLayout<'tcx>> {
462 let local_ty = self.stack[frame].mir.local_decls[local].ty;
463 let local_ty = self.monomorphize(
465 self.stack[frame].instance.substs
467 self.layout_of(local_ty)
470 /// Return the actual dynamic size and alignment of the place at the given type.
471 /// Note that the value does not matter if the type is sized. For unsized types,
472 /// the value has to be a fat pointer, and we only care about the "extra" data in it.
473 pub fn size_and_align_of_mplace(
475 mplace: MPlaceTy<'tcx>,
476 ) -> EvalResult<'tcx, (Size, Align)> {
// No "extra" data means the place is statically sized: use the layout.
477 if let PlaceExtra::None = mplace.extra {
478 assert!(!mplace.layout.is_unsized());
479 Ok(mplace.layout.size_and_align())
481 let layout = mplace.layout;
482 assert!(layout.is_unsized());
483 match layout.ty.sty {
484 ty::TyAdt(..) | ty::TyTuple(..) => {
485 // First get the size of all statically known fields.
486 // Don't use type_of::sizing_type_of because that expects t to be sized,
487 // and it also rounds up to alignment, which we want to avoid,
488 // as the unsized field's alignment could be smaller.
489 assert!(!layout.ty.is_simd());
490 debug!("DST layout: {:?}", layout);
// The sized prefix ends at the offset of the last (unsized) field.
492 let sized_size = layout.fields.offset(layout.fields.count() - 1);
493 let sized_align = layout.align;
495 "DST {} statically sized prefix size: {:?} align: {:?}",
501 // Recurse to get the size of the dynamically sized field (must be
503 let field = self.mplace_field(mplace, layout.fields.count() as u64 - 1)?;
504 let (unsized_size, unsized_align) = self.size_and_align_of_mplace(field)?;
506 // FIXME (#26403, #27023): We should be adding padding
507 // to `sized_size` (to accommodate the `unsized_align`
508 // required of the unsized field that follows) before
509 // summing it with `sized_size`. (Note that since #26403
510 // is unfixed, we do not yet add the necessary padding
511 // here. But this is where the add would go.)
513 // Return the sum of sizes and max of aligns.
514 let size = sized_size + unsized_size;
516 // Choose max of two known alignments (combined value must
517 // be aligned according to more restrictive of the two).
518 let align = sized_align.max(unsized_align);
520 // Issue #27023: must add any necessary padding to `size`
521 // (to make it a multiple of `align`) before returning it.
523 // Namely, the returned size should be, in C notation:
525 // `size + ((size & (align-1)) ? align : 0)`
527 // emulated via the semi-standard fast bit trick:
529 // `(size + (align-1)) & -align`
531 Ok((size.abi_align(align), align))
// Trait objects: size/align live in the vtable carried by the fat pointer.
533 ty::TyDynamic(..) => {
534 let vtable = match mplace.extra {
535 PlaceExtra::Vtable(vtable) => vtable,
536 _ => bug!("Expected vtable"),
538 // the second entry in the vtable is the dynamic size of the object.
539 self.read_size_and_align_from_vtable(vtable)
// Slices/strs: size = element size * length from the fat pointer.
542 ty::TySlice(_) | ty::TyStr => {
543 let len = match mplace.extra {
544 PlaceExtra::Length(len) => len,
545 _ => bug!("Expected length"),
547 let (elem_size, align) = layout.field(self, 0)?.size_and_align();
548 Ok((elem_size * len, align))
551 _ => bug!("size_of_val::<{:?}> not supported", layout.ty),
/// Pushes a new frame for `instance` onto the virtual call stack and
/// initializes its locals. Errors with `StackFrameLimitReached` when the
/// configured stack limit is exceeded.
556 pub fn push_stack_frame(
558 instance: ty::Instance<'tcx>,
559 span: source_map::Span,
560 mir: &'mir mir::Mir<'tcx>,
562 return_to_block: StackPopCleanup,
563 ) -> EvalResult<'tcx> {
564 ::log_settings::settings().indentation += 1;
566 // first push a stack frame so we have access to the local substs
567 self.stack.push(Frame {
569 block: mir::START_BLOCK,
572 // empty local array, we fill it in below, after we are inside the stack frame and
573 // all methods actually know about the frame
574 locals: IndexVec::new(),
580 // don't allocate at all for trivial constants
581 if mir.local_decls.len() > 1 {
582 // We put some marker value into the locals that we later want to initialize.
583 // This can be anything except for LocalValue::Dead -- because *that* is the
584 // value we use for things that we know are initially dead.
586 LocalValue::Live(Operand::Immediate(Value::Scalar(ScalarMaybeUndef::Undef)));
587 self.frame_mut().locals = IndexVec::from_elem(dummy, &mir.local_decls);
588 // Now mark those locals as dead that we do not want to initialize
589 match self.tcx.describe_def(instance.def_id()) {
590 // statics and constants don't have `Storage*` statements, no need to look for them
591 Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
// Everything else: scan the MIR for `Storage*` statements.
593 trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
594 for block in mir.basic_blocks() {
595 for stmt in block.statements.iter() {
596 use rustc::mir::StatementKind::{StorageDead, StorageLive};
599 StorageDead(local) => {
600 // Worst case we are overwriting a dummy, no deallocation needed
601 self.storage_dead(local);
609 // Finally, properly initialize all those that still have the dummy value
610 for local in mir.local_decls.indices() {
611 if self.frame().locals[local] == dummy {
612 self.storage_live(local)?;
// Enforce the recursion/stack-depth limit.
617 if self.stack.len() > self.stack_limit {
618 err!(StackFrameLimitReached)
/// Pops the current frame, performs its `return_to_block` cleanup, and
/// deallocates the locals owned by the popped frame.
624 pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
625 ::log_settings::settings().indentation -= 1;
626 M::end_region(self, None)?;
627 let frame = self.stack.pop().expect(
628 "tried to pop a stack frame, but there were none",
630 match frame.return_to_block {
// Freeze the computed static/const so it cannot be mutated afterwards.
631 StackPopCleanup::MarkStatic(mutable) => {
632 if let Place::Ptr(MemPlace { ptr, .. }) = frame.return_place {
633 // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
634 self.memory.mark_static_initialized(
635 ptr.to_ptr()?.alloc_id,
639 bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
642 StackPopCleanup::Goto(target) => self.goto_block(target),
643 StackPopCleanup::None => {}
645 // deallocate all locals that are backed by an allocation
646 for local in frame.locals {
647 self.deallocate_local(local)?;
/// Frees the memory backing `local`, if any: only live locals stored
/// indirectly (via a `MemPlace`) own an allocation.
653 crate fn deallocate_local(&mut self, local: LocalValue) -> EvalResult<'tcx> {
654 // FIXME: should we tell the user that there was a local which was never written to?
655 if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
656 trace!("deallocating local");
657 let ptr = ptr.to_ptr()?;
658 self.memory.dump_alloc(ptr.alloc_id);
659 self.memory.deallocate_local(ptr)?;
/// Queries the tcx for the evaluated value of the constant `gid`, wrapping
/// failures as `ReferencedConstant`. Statics are evaluated with all
/// parameters revealed.
664 pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
665 let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
666 ty::ParamEnv::reveal_all()
670 self.tcx.const_eval(param_env.and(gid)).map_err(|err| EvalErrorKind::ReferencedConstant(err).into())
// (validate_scalar) Checks that `value` is fully defined and lies within
// the valid range described by `scalar`, with special handling for
// pointer values and `char`.
675 value: ScalarMaybeUndef,
677 scalar: &layout::Scalar,
680 ) -> EvalResult<'tcx> {
681 trace!("validate scalar: {:#?}, {:#?}, {:#?}, {}", value, size, scalar, ty);
682 let (lo, hi) = scalar.valid_range.clone().into_inner();
// Undefined bytes are never a valid scalar.
684 let value = match value {
685 ScalarMaybeUndef::Scalar(scalar) => scalar,
686 ScalarMaybeUndef::Undef => return validation_failure!("undefined bytes", path),
689 let bits = match value {
690 Scalar::Bits { bits, size: value_size } => {
691 assert_eq!(value_size as u64, size.bytes());
// NOTE(review): the following lines appear to belong to the `Scalar::Ptr`
// arm (elided above): a pointer's numeric value is unknown, so it can only
// be rejected when the valid range excludes *all* possible pointer values.
695 let ptr_size = self.memory.pointer_size();
696 let ptr_max = u128::max_value() >> (128 - ptr_size.bits());
699 // no gap, all values are ok
701 } else if hi < ptr_max || lo > 1 {
702 let max = u128::max_value() >> (128 - size.bits());
706 format!("something in the range {:?} or {:?}", 0..=lo, hi..=max)
711 } else if hi < ptr_max || lo > 1 {
715 format!("something in the range {:?}", scalar.valid_range)
723 // char gets a special treatment, because its number space is not contiguous so `TyLayout`
724 // has no special checks for chars
727 debug_assert_eq!(size.bytes(), 4);
728 if ::std::char::from_u32(bits as u32).is_none() {
729 return err!(InvalidChar(bits));
// General case: a wrapping range (lo > hi) accepts values in either
// `[0, hi]` or `[lo, MAX]`.
735 use std::ops::RangeInclusive;
736 let in_range = |bound: RangeInclusive<u128>| bound.contains(&bits);
738 if in_range(0..=hi) || in_range(lo..=u128::max_value()) {
744 format!("something in the range {:?} or {:?}", ..=hi, lo..)
748 if in_range(scalar.valid_range.clone()) {
754 format!("something in the range {:?}", scalar.valid_range)
760 /// This function checks the memory where `ptr` points to.
761 /// It will error if the bits at the destination do not match the ones described by the layout.
/// `seen` and `todo` form a worklist: pointers discovered during validation
/// are queued rather than recursed into, so cyclic data terminates.
762 pub fn validate_mplace(
764 dest: MPlaceTy<'tcx>,
766 seen: &mut FxHashSet<(MPlaceTy<'tcx>)>,
767 todo: &mut Vec<(MPlaceTy<'tcx>, String)>,
768 ) -> EvalResult<'tcx> {
769 self.memory.dump_alloc(dest.to_ptr()?.alloc_id);
770 trace!("validate_mplace: {:?}, {:#?}", *dest, dest.layout);
772 // Find the right variant
773 let (variant, dest) = match dest.layout.variants {
774 layout::Variants::NicheFilling { niche: ref tag, .. } |
775 layout::Variants::Tagged { ref tag, .. } => {
776 let size = tag.value.size(self);
777 // we first read the tag value as scalar, to be able to validate it
778 let tag_mplace = self.mplace_field(dest, 0)?;
779 let tag_value = self.read_scalar(tag_mplace.into())?;
780 let path = format!("{}.TAG", path);
781 self.validate_scalar(
782 tag_value, size, tag, &path, tag_mplace.layout.ty
784 // then we read it again to get the index, to continue
785 let variant = self.read_discriminant_as_variant_index(dest.into())?;
786 let dest = self.mplace_downcast(dest, variant)?;
787 trace!("variant layout: {:#?}", dest.layout);
790 layout::Variants::Single { index } => {
795 // Validate all fields
796 match dest.layout.fields {
797 // primitives are unions with zero fields
798 layout::FieldPlacement::Union(0) => {
799 match dest.layout.abi {
800 // nothing to do, whatever the pointer points to, it is never going to be read
801 layout::Abi::Uninhabited => validation_failure!("a value of an uninhabited type", path),
802 // check that the scalar is a valid pointer or that its bit range matches the
804 layout::Abi::Scalar(ref scalar_layout) => {
805 let size = scalar_layout.value.size(self);
806 let value = self.read_value(dest.into())?;
807 let scalar = value.to_scalar_or_undef();
808 self.validate_scalar(scalar, size, scalar_layout, &path, dest.layout.ty)?;
809 if scalar_layout.value == Primitive::Pointer {
810 // ignore integer pointers, we can't reason about the final hardware
811 if let Scalar::Ptr(ptr) = scalar.not_undef()? {
812 let alloc_kind = self.tcx.alloc_map.lock().get(ptr.alloc_id);
813 if let Some(AllocType::Static(did)) = alloc_kind {
814 // statics from other crates are already checked
815 // extern statics should not be validated as they have no body
816 if !did.is_local() || self.tcx.is_foreign_item(did) {
// Queue the pointee for validation instead of recursing.
820 if value.layout.ty.builtin_deref(false).is_some() {
821 trace!("Recursing below ptr {:#?}", value);
822 let ptr_place = self.ref_to_mplace(value)?;
823 // we have not encountered this pointer+layout combination before
824 if seen.insert(ptr_place) {
825 todo.push((ptr_place, format!("(*{})", path)))
832 _ => bug!("bad abi for FieldPlacement::Union(0): {:#?}", dest.layout.abi),
835 layout::FieldPlacement::Union(_) => {
836 // We can't check unions, their bits are allowed to be anything.
837 // The fields don't need to correspond to any bit pattern of the union's fields.
838 // See https://github.com/rust-lang/rust/issues/32836#issuecomment-406875389
841 layout::FieldPlacement::Array { count, .. } => {
// Validate each element, extending the path with its index.
843 let mut path = path.clone();
844 self.dump_field_name(&mut path, dest.layout.ty, i as usize, variant).unwrap();
845 let field = self.mplace_field(dest, i)?;
846 self.validate_mplace(field, path, seen, todo)?;
850 layout::FieldPlacement::Arbitrary { ref offsets, .. } => {
851 // fat pointers need special treatment
852 match dest.layout.ty.builtin_deref(false).map(|tam| &tam.ty.sty) {
854 | Some(ty::TySlice(_)) => {
855 // check the length (for nicer error messages)
856 let len_mplace = self.mplace_field(dest, 1)?;
857 let len = self.read_scalar(len_mplace.into())?;
858 let len = match len.to_bits(len_mplace.layout.size) {
859 Err(_) => return validation_failure!("length is not a valid integer", path),
860 Ok(len) => len as u64,
862 // get the fat ptr, and recursively check it
863 let ptr = self.ref_to_mplace(self.read_value(dest.into())?)?;
864 assert_eq!(ptr.extra, PlaceExtra::Length(len));
865 let unpacked_ptr = self.unpack_unsized_mplace(ptr)?;
866 if seen.insert(unpacked_ptr) {
867 let mut path = path.clone();
868 self.dump_field_name(&mut path, dest.layout.ty, 0, 0).unwrap();
869 todo.push((unpacked_ptr, path))
872 Some(ty::TyDynamic(..)) => {
873 // check the vtable (for nicer error messages)
874 let vtable = self.read_scalar(self.mplace_field(dest, 1)?.into())?;
875 let vtable = match vtable.to_ptr() {
876 Err(_) => return validation_failure!("vtable address is not a pointer", path),
877 Ok(vtable) => vtable,
879 // get the fat ptr, and recursively check it
880 let ptr = self.ref_to_mplace(self.read_value(dest.into())?)?;
881 assert_eq!(ptr.extra, PlaceExtra::Vtable(vtable));
882 let unpacked_ptr = self.unpack_unsized_mplace(ptr)?;
883 if seen.insert(unpacked_ptr) {
884 let mut path = path.clone();
885 self.dump_field_name(&mut path, dest.layout.ty, 0, 0).unwrap();
886 todo.push((unpacked_ptr, path))
888 // FIXME: More checks for the vtable... making sure it is exactly
889 // the one we would expect for this type.
892 bug!("Unexpected fat pointer target type {:?}", ty),
894 // Not a pointer, perform regular aggregate handling below
895 for i in 0..offsets.len() {
896 let mut path = path.clone();
897 self.dump_field_name(&mut path, dest.layout.ty, i, variant).unwrap();
898 let field = self.mplace_field(dest, i as u64)?;
899 self.validate_mplace(field, path, seen, todo)?;
901 // FIXME: For a TyStr, check that this is valid UTF-8.
/// The innermost (current) stack frame. Panics when the stack is empty.
911 pub fn frame(&self) -> &Frame<'mir, 'tcx> {
912 self.stack.last().expect("no call frames exist")
/// Mutable access to the innermost stack frame. Panics when the stack is empty.
916 pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
917 self.stack.last_mut().expect("no call frames exist")
/// The MIR of the function currently being executed.
920 pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
/// The substs of the current frame, if any.
/// (NOTE(review): the elided else-branch presumably returns empty substs — confirm.)
924 pub fn substs(&self) -> &'tcx Substs<'tcx> {
925 if let Some(frame) = self.stack.last() {
926 frame.instance.substs
/// Debugging helper: logs the contents of `place` and dumps every
/// allocation it references. Early-returns unless trace logging is enabled.
932 pub fn dump_place(&self, place: Place) {
934 if !log_enabled!(::log::Level::Trace) {
938 Place::Local { frame, local } => {
939 let mut allocs = Vec::new();
940 let mut msg = format!("{:?}", local);
// Note when the local lives in an outer frame.
941 if frame != self.cur_frame() {
942 write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
944 write!(msg, ":").unwrap();
946 match self.stack[frame].locals[local].access() {
// Dead locals are reported, any other access error is a bug.
948 if let EvalErrorKind::DeadLocal = err.kind {
949 write!(msg, " is dead").unwrap();
951 panic!("Failed to access local: {:?}", err);
954 Ok(Operand::Indirect(mplace)) => {
955 let (ptr, align) = mplace.to_scalar_ptr_align();
957 Scalar::Ptr(ptr) => {
958 write!(msg, " by align({}) ref:", align.abi()).unwrap();
959 allocs.push(ptr.alloc_id);
961 ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
964 Ok(Operand::Immediate(Value::Scalar(val))) => {
965 write!(msg, " {:?}", val).unwrap();
966 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
967 allocs.push(ptr.alloc_id);
970 Ok(Operand::Immediate(Value::ScalarPair(val1, val2))) => {
971 write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
972 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
973 allocs.push(ptr.alloc_id);
975 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 {
976 allocs.push(ptr.alloc_id);
// Dump every allocation collected above.
982 self.memory.dump_allocs(allocs);
984 Place::Ptr(mplace) => {
986 Scalar::Ptr(ptr) => {
987 trace!("by align({}) ref:", mplace.align.abi());
988 self.memory.dump_alloc(ptr.alloc_id);
990 ptr => trace!(" integral by ref: {:?}", ptr),
/// Collects one `FrameInfo` (span, location, lint root) per stack frame
/// for diagnostics, skipping the constant's environment frame and frames
/// whose span duplicates `explicit_span` or the previously seen span.
996 pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
997 let mut last_span = None;
998 let mut frames = Vec::new();
999 // skip 1 because the last frame is just the environment of the constant
1000 for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().skip(1).rev() {
1001 // make sure we don't emit frames that are duplicates of the previous
1002 if explicit_span == Some(span) {
1003 last_span = Some(span);
1006 if let Some(last) = last_span {
1011 last_span = Some(span);
// Closures have no meaningful path name; call them "closure".
1013 let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
1014 "closure".to_owned()
1016 instance.to_string()
// Resolve the current position to a statement or, past the last
// statement, the block terminator.
1018 let block = &mir.basic_blocks()[block];
1019 let source_info = if stmt < block.statements.len() {
1020 block.statements[stmt].source_info
1022 block.terminator().source_info
1024 let lint_root = match mir.source_scope_local_data {
1025 mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
1026 mir::ClearCrossCrate::Clear => None,
1028 frames.push(FrameInfo { span, location, lint_root });
1030 trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
1031 (frames, self.tcx.span)
/// Sign-extends `value` from the bit width of `ty` to 128 bits.
/// `ty` must be a signed type (asserted).
1035 pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
1036 assert!(ty.abi.is_signed());
1037 sign_extend(value, ty.size)
/// Truncates `value` to the bit width of `ty`.
1041 pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
1042 truncate(value, ty.size)
/// Appends a human-readable path component for field `i` of `ty` (using
/// `variant` for enums) to `s`; used to build validation error paths.
1045 fn dump_field_name(&self, s: &mut String, ty: Ty<'tcx>, i: usize, variant: usize) -> ::std::fmt::Result {
// Types without fields: reaching here is a caller bug.
1055 ty::TyGeneratorWitness(..) |
1057 ty::TyDynamic(..) => {
1058 bug!("field_name({:?}): not applicable", ty)
1061 // Potentially-fat pointers.
1062 ty::TyRef(_, pointee, _) |
1063 ty::TyRawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
1066 // Reuse the fat *T type as its own thin pointer data field.
1067 // This provides information about e.g. DST struct pointees
1068 // (which may have no non-DST form), and will work as long
1069 // as the `Abi` or `FieldPlacement` is checked by users.
1071 return write!(s, ".data_ptr");
// The second fat-pointer field depends on the unsized tail of the pointee.
1074 match self.tcx.struct_tail(pointee).sty {
1076 ty::TyStr => write!(s, ".len"),
1077 ty::TyDynamic(..) => write!(s, ".vtable_ptr"),
1078 _ => bug!("field_name({:?}): not applicable", ty)
1082 // Arrays and slices.
1085 ty::TyStr => write!(s, "[{}]", i),
1087 // generators and closures.
1088 ty::TyClosure(def_id, _) | ty::TyGenerator(def_id, _, _) => {
// Name the captured variable via the HIR free-variable list.
1089 let node_id = self.tcx.hir.as_local_node_id(def_id).unwrap();
1090 let freevar = self.tcx.with_freevars(node_id, |fv| fv[i]);
1091 write!(s, ".upvar({})", self.tcx.hir.name(freevar.var_id()))
1094 ty::TyTuple(_) => write!(s, ".{}", i),
1097 ty::TyAdt(def, ..) if def.is_enum() => {
1098 let variant = &def.variants[variant];
1099 write!(s, ".{}::{}", variant.name, variant.fields[i].ident)
1103 ty::TyAdt(def, _) => write!(s, ".{}", def.non_enum_variant().fields[i].ident),
// Unnormalized/inference/error types should never reach validation.
1105 ty::TyProjection(_) | ty::TyAnon(..) | ty::TyParam(_) |
1106 ty::TyInfer(_) | ty::TyError => {
1107 bug!("dump_field_name: unexpected type `{}`", ty)