use std::fmt::Write;
use std::hash::{Hash, Hasher};
use std::mem;

use rustc::hir::def_id::DefId;
use rustc::hir::def::Def;
use rustc::hir::map::definitions::DefPathData;
use rustc::mir;
use rustc::ty::layout::{
    self, Size, Align, HasDataLayout, LayoutOf, TyLayout, Primitive
};
use rustc::ty::subst::{Subst, Substs};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::query::TyCtxtAt;
use rustc_data_structures::fx::{FxHashSet, FxHasher};
use rustc_data_structures::indexed_vec::IndexVec;
use rustc::mir::interpret::{
    GlobalId, Scalar, FrameInfo, AllocType,
    EvalResult, EvalErrorKind,
    ScalarMaybeUndef,
};

use syntax::source_map::{self, Span};
use syntax::ast::Mutability;

use super::{
    Value, ValTy, Operand, MemPlace, MPlaceTy, Place,
    Memory, Machine
};
macro_rules! validation_failure {
    ($what:expr, $where:expr, $details:expr) => {{
        let where_ = if $where.is_empty() {
            String::new()
        } else {
            format!(" at {}", $where)
        };
        err!(ValidationFailure(format!(
            "encountered {}{}, but expected {}",
            $what, where_, $details,
        )))
    }};
    ($what:expr, $where:expr) => {{
        let where_ = if $where.is_empty() {
            String::new()
        } else {
            format!(" at {}", $where)
        };
        err!(ValidationFailure(format!(
            "encountered {}{}",
            $what, where_,
        )))
    }};
}
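// A hedged sketch of how the macro above is used (illustrative, not original
// code): callers pass a value description, an access path, and optionally the
// expected contents.
//
//     validation_failure!("undefined bytes", path)
//     // -> Err(ValidationFailure("encountered undefined bytes at .field"))
//
//     validation_failure!("pointer", path, "something in the range 0..=1")
//     // -> Err(ValidationFailure(
//     //        "encountered pointer at .field, but expected something in the range 0..=1"))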
pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    pub machine: M,

    /// The results of the type checker, from rustc.
    pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'a, 'mir, 'tcx, M>,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx>>,

    /// The maximum number of stack frames allowed.
    pub(crate) stack_limit: usize,

    /// When this value is negative, it indicates the number of interpreter
    /// steps *until* the loop detector is enabled. When it is positive, it is
    /// the number of steps after the detector has been enabled, modulo the loop
    /// detector period.
    pub(crate) steps_since_detector_enabled: isize,

    pub(crate) loop_detector: InfiniteLoopDetector<'a, 'mir, 'tcx, M>,
}
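// Illustrative timeline for `steps_since_detector_enabled` (an assumption
// about the intended reading of the doc comment above, not original code):
//
//     at startup:       -STEPS_UNTIL_DETECTOR_ENABLED    (detector off)
//     after 1M steps:    0                               (detector turns on)
//     afterwards:        wraps modulo the detector period; a snapshot is
//                        observed each time the counter passes zero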
#[derive(Clone)]
pub struct Frame<'mir, 'tcx: 'mir> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub mir: &'mir mir::Mir<'tcx>,

    /// The def_id and substs of the current function.
    pub instance: ty::Instance<'tcx>,

    /// The span of the call site.
    pub span: source_map::Span,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// The block to return to when returning from the current stack frame.
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to.
    pub return_place: Place,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored
    /// as `LocalValue`s: `LocalValue::Dead` represents a local that is currently dead,
    /// while a live local can either directly contain a `Scalar` or refer to some part
    /// of an `Allocation`.
    pub locals: IndexVec<mir::Local, LocalValue>,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// The block that is currently executed (or will be executed after the above call
    /// stack returns).
    pub block: mir::BasicBlock,

    /// The index of the currently evaluated statement.
    pub stmt: usize,
}
impl<'mir, 'tcx: 'mir> Eq for Frame<'mir, 'tcx> {}

impl<'mir, 'tcx: 'mir> PartialEq for Frame<'mir, 'tcx> {
    fn eq(&self, other: &Self) -> bool {
        // `mir` and `span` are deliberately left out of the comparison.
        // Some of the compared fields are constant during evaluation, but are
        // included anyways for correctness.
        self.instance == other.instance
            && self.return_to_block == other.return_to_block
            && self.return_place == other.return_place
            && self.locals == other.locals
            && self.block == other.block
            && self.stmt == other.stmt
    }
}

impl<'mir, 'tcx: 'mir> Hash for Frame<'mir, 'tcx> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash exactly the fields that `PartialEq` compares, so that equal
        // frames hash identically.
        self.instance.hash(state);
        self.return_to_block.hash(state);
        self.return_place.hash(state);
        self.locals.hash(state);
        self.block.hash(state);
        self.stmt.hash(state);
    }
}
/// State of a local variable.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum LocalValue {
    Dead,
    // Mostly for convenience, we re-use the `Operand` type here.
    // This is an optimization over just always having a pointer here;
    // we can thus avoid doing an allocation when the local just stores
    // immediate values *and* never has its address taken.
    Live(Operand),
}

impl<'tcx> LocalValue {
    pub fn access(&self) -> EvalResult<'tcx, &Operand> {
        match self {
            LocalValue::Dead => err!(DeadLocal),
            LocalValue::Live(ref val) => Ok(val),
        }
    }

    pub fn access_mut(&mut self) -> EvalResult<'tcx, &mut Operand> {
        match self {
            LocalValue::Dead => err!(DeadLocal),
            LocalValue::Live(ref mut val) => Ok(val),
        }
    }
}
/// The virtual machine state during const-evaluation at a given point in time.
type EvalSnapshot<'a, 'mir, 'tcx, M>
    = (M, Vec<Frame<'mir, 'tcx>>, Memory<'a, 'mir, 'tcx, M>);

pub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
    /// The set of all `EvalSnapshot` *hashes* observed by this detector.
    ///
    /// When a collision occurs in this table, we store the full snapshot in
    /// `snapshots`.
    hashes: FxHashSet<u64>,

    /// The set of all `EvalSnapshot`s observed by this detector.
    ///
    /// An `EvalSnapshot` will only be fully cloned once it has caused a
    /// collision in `hashes`. As a result, the detector must observe at least
    /// *two* full cycles of an infinite loop before it triggers.
    snapshots: FxHashSet<EvalSnapshot<'a, 'mir, 'tcx, M>>,
}
impl<'a, 'mir, 'tcx, M> Default for InfiniteLoopDetector<'a, 'mir, 'tcx, M>
    where M: Machine<'mir, 'tcx>,
          'tcx: 'a + 'mir,
{
    fn default() -> Self {
        InfiniteLoopDetector {
            hashes: FxHashSet::default(),
            snapshots: FxHashSet::default(),
        }
    }
}

impl<'a, 'mir, 'tcx, M> InfiniteLoopDetector<'a, 'mir, 'tcx, M>
    where M: Machine<'mir, 'tcx>,
          'tcx: 'a + 'mir,
{
    /// Returns `true` if the loop detector has not yet observed a snapshot.
    pub fn is_empty(&self) -> bool {
        self.hashes.is_empty()
    }

    pub fn observe_and_analyze(
        &mut self,
        machine: &M,
        stack: &Vec<Frame<'mir, 'tcx>>,
        memory: &Memory<'a, 'mir, 'tcx, M>,
    ) -> EvalResult<'tcx, ()> {
        let snapshot = (machine, stack, memory);

        let mut fx = FxHasher::default();
        snapshot.hash(&mut fx);
        let hash = fx.finish();

        if self.hashes.insert(hash) {
            // No collision
            return Ok(())
        }

        if self.snapshots.insert((machine.clone(), stack.clone(), memory.clone())) {
            // Spurious collision or first cycle
            return Ok(())
        }

        // Second cycle
        Err(EvalErrorKind::InfiniteLoop.into())
    }
}
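// A hedged sketch of the two-level scheme above (illustrative, not original
// code): the stepping loop is expected to call the detector roughly like
//
//     if ecx.steps_since_detector_enabled >= 0 {
//         ecx.loop_detector.observe_and_analyze(
//             &ecx.machine, &ecx.stack, &ecx.memory,
//         )?;
//     }
//
// The first time a given machine state is seen, only its cheap 64-bit hash is
// recorded. Seeing the same hash again triggers a full (expensive) clone into
// `snapshots`; only a *third* observation of an identical state reports
// `InfiniteLoop`.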
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum StackPopCleanup {
    /// The stackframe existed to compute the initial value of a static/constant; make sure it
    /// isn't modifiable afterwards in case of constants.
    /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
    /// references or deallocated.
    MarkStatic(Mutability),
    /// A regular stackframe added due to a function call will need to get forwarded to the next
    /// block.
    Goto(mir::BasicBlock),
    /// The main function and diverging functions have nowhere to return to.
    None,
}
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &layout::TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &layout::TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
        *self.tcx
    }
}

impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
        *self.tcx
    }
}

impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for &'a EvalContext<'a, 'mir, 'tcx, M> {
    type Ty = Ty<'tcx>;
    type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;

    #[inline]
    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
        self.tcx.layout_of(self.param_env.and(ty))
            .map_err(|layout| EvalErrorKind::Layout(layout).into())
    }
}

impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf
    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
    type Ty = Ty<'tcx>;
    type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;

    #[inline]
    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
        (&**self).layout_of(ty)
    }
}
const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000;

impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
    pub fn new(
        tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
        param_env: ty::ParamEnv<'tcx>,
        machine: M,
        memory_data: M::MemoryData,
    ) -> Self {
        EvalContext {
            machine,
            tcx,
            param_env,
            memory: Memory::new(tcx, memory_data),
            stack: Vec::new(),
            stack_limit: tcx.sess.const_eval_stack_frame_limit,
            loop_detector: Default::default(),
            steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED,
        }
    }

    pub(crate) fn with_fresh_body<F: FnOnce(&mut Self) -> R, R>(&mut self, f: F) -> R {
        let stack = mem::replace(&mut self.stack, Vec::new());
        let steps = mem::replace(
            &mut self.steps_since_detector_enabled,
            -STEPS_UNTIL_DETECTOR_ENABLED,
        );
        let r = f(self);
        self.stack = stack;
        self.steps_since_detector_enabled = steps;
        r
    }

    pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
        &self.memory
    }

    pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
        &mut self.memory
    }

    pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
        &self.stack
    }

    #[inline]
    pub fn cur_frame(&self) -> usize {
        assert!(self.stack.len() > 0);
        self.stack.len() - 1
    }
    /// Mark a storage as live, killing the previous content and returning it.
    /// Remember to deallocate that!
    pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, LocalValue> {
        trace!("{:?} is now live", local);

        let layout = self.layout_of_local(self.cur_frame(), local)?;
        let init = LocalValue::Live(self.uninit_operand(layout)?);
        // StorageLive *always* kills the value that's currently stored
        Ok(mem::replace(&mut self.frame_mut().locals[local], init))
    }

    /// Returns the old value of the local.
    /// Remember to deallocate that!
    pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue {
        trace!("{:?} is now dead", local);

        mem::replace(&mut self.frame_mut().locals[local], LocalValue::Dead)
    }

    pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
        let ptr = self.memory.allocate_bytes(s.as_bytes());
        Ok(Value::new_slice(Scalar::Ptr(ptr), s.len() as u64, self.tcx.tcx))
    }
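    // Hedged example of what `str_to_value` produces (illustrative comment,
    // not original code): for `s = "hi"`, the bytes are interned in the
    // interpreter's memory and the result is a fat-pointer value,
    // conceptually `Value::ScalarPair(ptr_to_bytes, len = 2)`, matching the
    // layout of `&str`.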
    pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
        trace!("resolve: {:?}, {:#?}", def_id, substs);
        trace!("substs: {:#?}", self.substs());
        trace!("param_env: {:#?}", self.param_env);
        let substs = self.tcx.subst_and_normalize_erasing_regions(
            self.substs(),
            self.param_env,
            &substs,
        );
        ty::Instance::resolve(
            *self.tcx,
            self.param_env,
            def_id,
            substs,
        ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
    }

    pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
        ty.is_sized(self.tcx, self.param_env)
    }

    pub fn load_mir(
        &self,
        instance: ty::InstanceDef<'tcx>,
    ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
        // do not continue if typeck errors occurred (can only occur in local crate)
        let did = instance.def_id();
        if did.is_local()
            && self.tcx.has_typeck_tables(did)
            && self.tcx.typeck_tables_of(did).tainted_by_errors
        {
            return err!(TypeckError);
        }
        trace!("load mir {:?}", instance);
        match instance {
            ty::InstanceDef::Item(def_id) => {
                self.tcx.maybe_optimized_mir(def_id).ok_or_else(||
                    EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
                )
            }
            _ => Ok(self.tcx.instance_mir(instance)),
        }
    }
    pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
        // miri doesn't care about lifetimes, and will choke on some crazy ones;
        // let's simply get rid of them
        let substituted = ty.subst(*self.tcx, substs);
        self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
    }

    pub fn layout_of_local(
        &self,
        frame: usize,
        local: mir::Local,
    ) -> EvalResult<'tcx, TyLayout<'tcx>> {
        let local_ty = self.stack[frame].mir.local_decls[local].ty;
        let local_ty = self.monomorphize(
            local_ty,
            self.stack[frame].instance.substs,
        );
        self.layout_of(local_ty)
    }
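    // Hedged example (illustrative comment, not original code): inside a
    // frame executing `fn first<T>(v: Vec<T>) -> Option<T>` instantiated with
    // `T = u32`, the declared type of `v` is `Vec<T>`. Calling
    // `monomorphize(Vec<T>, frame_substs)` substitutes and normalizes it to
    // the concrete `Vec<u32>`, which `layout_of_local` can then lay out.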
    /// Return the size and alignment of the value at the given type.
    /// Note that the value does not matter if the type is sized. For unsized types,
    /// the value has to be a fat pointer, and we only care about the "extra" data in it.
    pub fn size_and_align_of_dst(
        &self,
        val: ValTy<'tcx>,
    ) -> EvalResult<'tcx, (Size, Align)> {
        if !val.layout.is_unsized() {
            Ok(val.layout.size_and_align())
        } else {
            match val.layout.ty.sty {
                ty::TyAdt(..) | ty::TyTuple(..) => {
                    // First get the size of all statically known fields.
                    // Don't use type_of::sizing_type_of because that expects t to be sized,
                    // and it also rounds up to alignment, which we want to avoid,
                    // as the unsized field's alignment could be smaller.
                    assert!(!val.layout.ty.is_simd());
                    debug!("DST layout: {:?}", val.layout);

                    let sized_size = val.layout.fields.offset(val.layout.fields.count() - 1);
                    let sized_align = val.layout.align;
                    debug!(
                        "DST {} statically sized prefix size: {:?} align: {:?}",
                        val.layout.ty,
                        sized_size,
                        sized_align
                    );

                    // Recurse to get the size of the dynamically sized field (must be
                    // the last field).
                    let field_layout = val.layout.field(self, val.layout.fields.count() - 1)?;
                    let (unsized_size, unsized_align) =
                        self.size_and_align_of_dst(ValTy {
                            value: val.value,
                            layout: field_layout,
                        })?;

                    // FIXME (#26403, #27023): We should be adding padding
                    // to `sized_size` (to accommodate the `unsized_align`
                    // required of the unsized field that follows) before
                    // summing it with `unsized_size`. (Note that since #26403
                    // is unfixed, we do not yet add the necessary padding
                    // here. But this is where the add would go.)

                    // Return the sum of sizes and max of aligns.
                    let size = sized_size + unsized_size;

                    // Choose max of two known alignments (combined value must
                    // be aligned according to more restrictive of the two).
                    let align = sized_align.max(unsized_align);

                    // Issue #27023: must add any necessary padding to `size`
                    // (to make it a multiple of `align`) before returning it.
                    //
                    // Namely, the returned size should be, in C notation:
                    //
                    //   `size + ((size & (align-1)) ? align : 0)`
                    //
                    // emulated via the semi-standard fast bit trick:
                    //
                    //   `(size + (align-1)) & -align`
                    Ok((size.abi_align(align), align))
                }
                ty::TyDynamic(..) => {
                    let (_, vtable) = val.to_scalar_dyn_trait()?;
                    // the second entry in the vtable is the dynamic size of the object.
                    self.read_size_and_align_from_vtable(vtable)
                }

                ty::TySlice(_) | ty::TyStr => {
                    let (elem_size, align) = val.layout.field(self, 0)?.size_and_align();
                    let (_, len) = val.to_scalar_slice(self)?;
                    Ok((elem_size * len, align))
                }

                _ => bug!("size_of_val::<{:?}>", val.layout.ty),
            }
        }
    }
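    // Worked example for the rounding computation in `size_and_align_of_dst`
    // above (hedged, illustrative comment only): for
    // `struct Foo { a: u16, tail: [u8] }` with a 3-element tail, the sized
    // prefix occupies 2 bytes at align 2, the tail adds 3 bytes at align 1,
    // so `size = 5` and `align = max(2, 1) = 2`; the bit trick gives
    // `(5 + (2 - 1)) & !(2 - 1) = 6`, i.e. `size.abi_align(align)` rounds
    // the total up to 6 bytes.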
    pub fn push_stack_frame(
        &mut self,
        instance: ty::Instance<'tcx>,
        span: source_map::Span,
        mir: &'mir mir::Mir<'tcx>,
        return_place: Place,
        return_to_block: StackPopCleanup,
    ) -> EvalResult<'tcx> {
        ::log_settings::settings().indentation += 1;

        // first push a stack frame so we have access to the local substs
        self.stack.push(Frame {
            mir,
            block: mir::START_BLOCK,
            return_to_block,
            return_place,
            // empty local array, we fill it in below, after we are inside the stack frame and
            // all methods actually know about the frame
            locals: IndexVec::new(),
            span,
            instance,
            stmt: 0,
        });

        // don't allocate at all for trivial constants
        if mir.local_decls.len() > 1 {
            // We put some marker value into the locals that we later want to initialize.
            // This can be anything except for LocalValue::Dead -- because *that* is the
            // value we use for things that we know are initially dead.
            let dummy =
                LocalValue::Live(Operand::Immediate(Value::Scalar(ScalarMaybeUndef::Undef)));
            self.frame_mut().locals = IndexVec::from_elem(dummy, &mir.local_decls);
            // Now mark those locals as dead that we do not want to initialize
            match self.tcx.describe_def(instance.def_id()) {
                // statics and constants don't have `Storage*` statements, no need to look for them
                Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
                _ => {
                    trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
                    for block in mir.basic_blocks() {
                        for stmt in block.statements.iter() {
                            use rustc::mir::StatementKind::{StorageDead, StorageLive};
                            match stmt.kind {
                                StorageLive(local) |
                                StorageDead(local) => {
                                    // Worst case we are overwriting a dummy, no deallocation needed
                                    self.storage_dead(local);
                                }
                                _ => {}
                            }
                        }
                    }
                },
            }
            // Finally, properly initialize all those that still have the dummy value
            for local in mir.local_decls.indices() {
                if self.frame().locals[local] == dummy {
                    self.storage_live(local)?;
                }
            }
        }

        self.memory.cur_frame = self.cur_frame();

        if self.stack.len() > self.stack_limit {
            err!(StackFrameLimitReached)
        } else {
            Ok(())
        }
    }
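    // Hedged walk-through of the locals setup above (illustrative comment,
    // not original code): for `fn f() { let x = 0; }` the locals are
    // `[_0 (return place), _1 (x)]`. Both start out as the `Undef` dummy;
    // scanning the MIR finds `StorageLive(_1)`/`StorageDead(_1)`, so `_1` is
    // marked `Dead` (it only becomes live when its `StorageLive` statement is
    // actually executed), while `_0` still holds the dummy and is therefore
    // initialized via `storage_live` in the final loop.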
    pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
        ::log_settings::settings().indentation -= 1;
        M::end_region(self, None)?;
        let frame = self.stack.pop().expect(
            "tried to pop a stack frame, but there were none",
        );
        if !self.stack.is_empty() {
            // TODO: Is this the correct time to start considering these
            // accesses as originating from the returned-to stack frame?
            self.memory.cur_frame = self.cur_frame();
        }
        match frame.return_to_block {
            StackPopCleanup::MarkStatic(mutable) => {
                if let Place::Ptr(MemPlace { ptr, .. }) = frame.return_place {
                    // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
                    self.memory.mark_static_initialized(
                        ptr.to_ptr()?.alloc_id,
                        mutable,
                    )?
                } else {
                    bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
                }
            }
            StackPopCleanup::Goto(target) => self.goto_block(target),
            StackPopCleanup::None => {}
        }
        // deallocate all locals that are backed by an allocation
        for local in frame.locals {
            self.deallocate_local(local)?;
        }

        Ok(())
    }
    crate fn deallocate_local(&mut self, local: LocalValue) -> EvalResult<'tcx> {
        // FIXME: should we tell the user that there was a local which was never written to?
        if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
            trace!("deallocating local");
            let ptr = ptr.to_ptr()?;
            self.memory.dump_alloc(ptr.alloc_id);
            self.memory.deallocate_local(ptr)?;
        };
        Ok(())
    }

    pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
        let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
            ty::ParamEnv::reveal_all()
        } else {
            self.param_env
        };
        self.tcx.const_eval(param_env.and(gid))
            .map_err(|err| EvalErrorKind::ReferencedConstant(err).into())
    }
    fn validate_scalar(
        &self,
        value: ScalarMaybeUndef,
        size: Size,
        scalar: &layout::Scalar,
        path: &str,
        ty: Ty,
    ) -> EvalResult<'tcx> {
        trace!("validate scalar: {:#?}, {:#?}, {:#?}, {}", value, size, scalar, ty);
        let (lo, hi) = scalar.valid_range.clone().into_inner();

        let value = match value {
            ScalarMaybeUndef::Scalar(scalar) => scalar,
            ScalarMaybeUndef::Undef => return validation_failure!("undefined bytes", path),
        };

        let bits = match value {
            Scalar::Bits { bits, size: value_size } => {
                assert_eq!(value_size as u64, size.bytes());
                bits
            },
            Scalar::Ptr(_) => {
                // A pointer has no concrete bits to compare, so it is only
                // acceptable when the valid range covers everything a pointer
                // could possibly be.
                let ptr_size = self.memory.pointer_size();
                let ptr_max = u128::max_value() >> (128 - ptr_size.bits());
                return if lo > hi {
                    // wrapping range
                    if lo - hi == 1 {
                        // no gap, all values are ok
                        Ok(())
                    } else if hi < ptr_max || lo > 1 {
                        let max = u128::max_value() >> (128 - size.bits());
                        validation_failure!(
                            "pointer",
                            path,
                            format!("something in the range {:?} or {:?}", 0..=hi, lo..=max)
                        )
                    } else {
                        Ok(())
                    }
                } else if hi < ptr_max || lo > 1 {
                    validation_failure!(
                        "pointer",
                        path,
                        format!("something in the range {:?}", scalar.valid_range)
                    )
                } else {
                    Ok(())
                };
            },
        };

        // char gets a special treatment, because its number space is not contiguous so `TyLayout`
        // has no special checks for chars
        if let ty::TyChar = ty.sty {
            debug_assert_eq!(size.bytes(), 4);
            if ::std::char::from_u32(bits as u32).is_none() {
                return err!(InvalidChar(bits));
            }
        }

        use std::ops::RangeInclusive;
        let in_range = |bound: RangeInclusive<u128>| bound.contains(&bits);
        if lo > hi {
            // wrapping range check
            if in_range(0..=hi) || in_range(lo..=u128::max_value()) {
                Ok(())
            } else {
                validation_failure!(
                    bits,
                    path,
                    format!("something in the range {:?} or {:?}", ..=hi, lo..)
                )
            }
        } else if in_range(scalar.valid_range.clone()) {
            Ok(())
        } else {
            validation_failure!(
                bits,
                path,
                format!("something in the range {:?}", scalar.valid_range)
            )
        }
    }
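    // Hedged example of a range check `validate_scalar` performs
    // (illustrative comment, not original code): `bool` is laid out as a
    // scalar with `valid_range = 0..=1`. For a byte holding `2`,
    // `in_range(0..=1)` is false, so the reported error is roughly
    //
    //     encountered 2 at .field, but expected something in the range 0..=1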
    /// This function checks the memory where `dest` points to.
    /// It will error if the bits at the destination do not match the ones described by the layout.
    pub fn validate_mplace(
        &self,
        dest: MPlaceTy<'tcx>,
        path: String,
        seen: &mut FxHashSet<MPlaceTy<'tcx>>,
        todo: &mut Vec<(MPlaceTy<'tcx>, String)>,
    ) -> EvalResult<'tcx> {
        self.memory.dump_alloc(dest.to_ptr()?.alloc_id);
        trace!("validate_mplace: {:?}, {:#?}", *dest, dest.layout);

        // Find the right variant
        let (variant, dest) = match dest.layout.variants {
            layout::Variants::NicheFilling { niche: ref tag, .. } |
            layout::Variants::Tagged { ref tag, .. } => {
                let size = tag.value.size(self);
                // we first read the tag value as scalar, to be able to validate it
                let tag_mplace = self.mplace_field(dest, 0)?;
                let tag_value = self.read_scalar(tag_mplace.into())?;
                let path = format!("{}.TAG", path);
                self.validate_scalar(
                    tag_value, size, tag, &path, tag_mplace.layout.ty
                )?;
                // then we read it again to get the index, to continue
                let variant = self.read_discriminant_as_variant_index(dest.into())?;
                let dest = self.mplace_downcast(dest, variant)?;
                trace!("variant layout: {:#?}", dest.layout);
                (variant, dest)
            },
            layout::Variants::Single { index } => (index, dest),
        };

        // Validate all fields
        match dest.layout.fields {
            // primitives are unions with zero fields
            layout::FieldPlacement::Union(0) => {
                match dest.layout.abi {
                    // nothing to do, whatever the pointer points to, it is never going to be read
                    layout::Abi::Uninhabited =>
                        validation_failure!("a value of an uninhabited type", path),
                    // check that the scalar is a valid pointer or that its bit range matches the
                    // expectation.
                    layout::Abi::Scalar(ref scalar_layout) => {
                        let size = scalar_layout.value.size(self);
                        let value = self.read_value(dest.into())?;
                        let scalar = value.to_scalar_or_undef();
                        self.validate_scalar(scalar, size, scalar_layout, &path, dest.layout.ty)?;
                        if scalar_layout.value == Primitive::Pointer {
                            // ignore integer pointers, we can't reason about the final hardware
                            if let Scalar::Ptr(ptr) = scalar.not_undef()? {
                                let alloc_kind = self.tcx.alloc_map.lock().get(ptr.alloc_id);
                                if let Some(AllocType::Static(did)) = alloc_kind {
                                    // statics from other crates are already checked;
                                    // extern statics should not be validated as they have no body
                                    if !did.is_local() || self.tcx.is_foreign_item(did) {
                                        return Ok(());
                                    }
                                }
                                if value.layout.ty.builtin_deref(false).is_some() {
                                    trace!("Recursing below ptr {:#?}", value);
                                    let ptr_place = self.ref_to_mplace(value)?;
                                    // we have not encountered this pointer+layout combination before
                                    if seen.insert(ptr_place) {
                                        todo.push((ptr_place, format!("(*{})", path)))
                                    }
                                }
                            }
                        }
                        Ok(())
                    },
                    _ => bug!("bad abi for FieldPlacement::Union(0): {:#?}", dest.layout.abi),
                }
            }
            layout::FieldPlacement::Union(_) => {
                // We can't check unions, their bits are allowed to be anything.
                // The fields don't need to correspond to any bit pattern of the union's fields.
                // See https://github.com/rust-lang/rust/issues/32836#issuecomment-406875389
                Ok(())
            },
            layout::FieldPlacement::Array { count, .. } => {
                for i in 0..count {
                    let mut path = path.clone();
                    self.dump_field_name(&mut path, dest.layout.ty, i as usize, variant).unwrap();
                    let field = self.mplace_field(dest, i)?;
                    self.validate_mplace(field, path, seen, todo)?;
                }
                Ok(())
            },
            layout::FieldPlacement::Arbitrary { ref offsets, .. } => {
                // fat pointers need special treatment
                match dest.layout.ty.builtin_deref(false).map(|tam| &tam.ty.sty) {
                    | Some(ty::TyStr)
                    | Some(ty::TySlice(_)) => {
                        // check the length
                        let len_mplace = self.mplace_field(dest, 1)?;
                        let len = self.read_scalar(len_mplace.into())?;
                        let len = match len.to_bits(len_mplace.layout.size) {
                            Err(_) => return validation_failure!("length is not a valid integer", path),
                            Ok(len) => len as u64,
                        };
                        // get the fat pointer, and recursively check the data it points to
                        let ptr = self.ref_to_mplace(self.read_value(dest.into())?)?;
                        let mut path = path.clone();
                        self.dump_field_name(&mut path, dest.layout.ty, 0, variant).unwrap();
                        for i in 0..len {
                            let mut path = path.clone();
                            self.dump_field_name(&mut path, ptr.layout.ty, i as usize, 0).unwrap();
                            let field = self.mplace_field(ptr, i)?;
                            self.validate_mplace(field, path, seen, todo)?;
                        }
                        // FIXME: For a TyStr, check that this is valid UTF-8
                        Ok(())
                    },
                    Some(ty::TyDynamic(..)) => {
                        let vtable_mplace = self.mplace_field(dest, 1)?;
                        let vtable = self.read_scalar(vtable_mplace.into())?;
                        if vtable.to_ptr().is_err() {
                            return validation_failure!("vtable address is not a pointer", path);
                        }
                        let _ptr = self.ref_to_mplace(self.read_value(dest.into())?)?;
                        // FIXME: What can we verify about this?
                        Ok(())
                    },
                    Some(ty) =>
                        bug!("Unexpected fat pointer target type {:?}", ty),
                    None => {
                        // Not a pointer, perform regular aggregate handling
                        for i in 0..offsets.len() {
                            let mut path = path.clone();
                            self.dump_field_name(&mut path, dest.layout.ty, i, variant).unwrap();
                            let field = self.mplace_field(dest, i as u64)?;
                            self.validate_mplace(field, path, seen, todo)?;
                        }
                        Ok(())
                    },
                }
            }
        }
    }
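    // Hedged sketch of the worklist protocol around `validate_mplace`
    // (illustrative comment, not original code) -- recursion through
    // references goes via `seen`/`todo` so that cyclic data does not loop
    // forever; the driver lives in the callers and looks roughly like
    //
    //     let mut seen = FxHashSet::default();
    //     let mut todo = vec![(mplace, String::new())];
    //     while let Some((mplace, path)) = todo.pop() {
    //         ecx.validate_mplace(mplace, path, &mut seen, &mut todo)?;
    //     }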
    pub fn frame(&self) -> &Frame<'mir, 'tcx> {
        self.stack.last().expect("no call frames exist")
    }

    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
        self.stack.last_mut().expect("no call frames exist")
    }

    pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
        self.frame().mir
    }

    pub fn substs(&self) -> &'tcx Substs<'tcx> {
        if let Some(frame) = self.stack.last() {
            frame.instance.substs
        } else {
            Substs::empty()
        }
    }

    pub fn dump_place(&self, place: Place) {
        // Debug output
        if !log_enabled!(::log::Level::Trace) {
            return;
        }
        match place {
            Place::Local { frame, local } => {
                let mut allocs = Vec::new();
                let mut msg = format!("{:?}", local);
                if frame != self.cur_frame() {
                    write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
                }
                write!(msg, ":").unwrap();

                match self.stack[frame].locals[local].access() {
                    Err(err) => {
                        if let EvalErrorKind::DeadLocal = err.kind {
                            write!(msg, " is dead").unwrap();
                        } else {
                            panic!("Failed to access local: {:?}", err);
                        }
                    }
                    Ok(Operand::Indirect(mplace)) => {
                        let (ptr, align) = mplace.to_scalar_ptr_align();
                        match ptr {
                            Scalar::Ptr(ptr) => {
                                write!(msg, " by align({}) ref:", align.abi()).unwrap();
                                allocs.push(ptr.alloc_id);
                            }
                            ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
                        }
                    }
                    Ok(Operand::Immediate(Value::Scalar(val))) => {
                        write!(msg, " {:?}", val).unwrap();
                        if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
                            allocs.push(ptr.alloc_id);
                        }
                    }
                    Ok(Operand::Immediate(Value::ScalarPair(val1, val2))) => {
                        write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
                        if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
                            allocs.push(ptr.alloc_id);
                        }
                        if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 {
                            allocs.push(ptr.alloc_id);
                        }
                    }
                }

                trace!("{}", msg);
                self.memory.dump_allocs(allocs);
            }
            Place::Ptr(mplace) => {
                let (ptr, align) = mplace.to_scalar_ptr_align();
                match ptr {
                    Scalar::Ptr(ptr) => {
                        trace!("by align({}) ref:", align.abi());
                        self.memory.dump_alloc(ptr.alloc_id);
                    }
                    ptr => trace!(" integral by ref: {:?}", ptr),
                }
            }
        }
    }
    pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
        let mut last_span = None;
        let mut frames = Vec::new();
        // skip 1 because the last frame is just the environment of the constant
        for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().skip(1).rev() {
            // make sure we don't emit frames that are duplicates of the previous
            if explicit_span == Some(span) {
                last_span = Some(span);
                continue;
            }
            if let Some(last) = last_span {
                if last == span {
                    continue;
                }
            } else {
                last_span = Some(span);
            }
            let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data
                == DefPathData::ClosureExpr
            {
                "closure".to_owned()
            } else {
                instance.to_string()
            };
            let block = &mir.basic_blocks()[block];
            let source_info = if stmt < block.statements.len() {
                block.statements[stmt].source_info
            } else {
                block.terminator().source_info
            };
            let lint_root = match mir.source_scope_local_data {
                mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
                mir::ClearCrossCrate::Clear => None,
            };
            frames.push(FrameInfo { span, location, lint_root });
        }
        trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
        (frames, self.tcx.span)
    }
    pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
        assert!(ty.abi.is_signed());
        super::sign_extend(value, ty.size)
    }

    pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
        super::truncate(value, ty.size)
    }
    fn dump_field_name(&self, s: &mut String, ty: Ty<'tcx>, i: usize, variant: usize) -> ::std::fmt::Result {
        match ty.sty {
            ty::TyBool |
            ty::TyChar |
            ty::TyInt(_) |
            ty::TyUint(_) |
            ty::TyFloat(_) |
            ty::TyFnPtr(_) |
            ty::TyNever |
            ty::TyFnDef(..) |
            ty::TyGeneratorWitness(..) |
            ty::TyForeign(..) |
            ty::TyDynamic(..) => {
                bug!("field_name({:?}): not applicable", ty)
            }

            // Potentially-fat pointers.
            ty::TyRef(_, pointee, _) |
            ty::TyRawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
                if i == 0 {
                    // Reuse the fat `*T` type as its own thin pointer data field.
                    // This provides information about e.g. DST struct pointees
                    // (which may have no non-DST form), and will work as long
                    // as the `Abi` or `FieldPlacement` is checked by users.
                    return write!(s, ".data_ptr");
                }

                match self.tcx.struct_tail(pointee).sty {
                    ty::TySlice(_) |
                    ty::TyStr => write!(s, ".len"),
                    ty::TyDynamic(..) => write!(s, ".vtable_ptr"),
                    _ => bug!("field_name({:?}): not applicable", ty),
                }
            }

            // Arrays and slices.
            ty::TyArray(..) |
            ty::TySlice(_) |
            ty::TyStr => write!(s, "[{}]", i),

            // generators and closures.
            ty::TyClosure(def_id, _) | ty::TyGenerator(def_id, _, _) => {
                let node_id = self.tcx.hir.as_local_node_id(def_id).unwrap();
                let freevar = self.tcx.with_freevars(node_id, |fv| fv[i]);
                write!(s, ".upvar({})", self.tcx.hir.name(freevar.var_id()))
            }

            ty::TyTuple(_) => write!(s, ".{}", i),

            // enums
            ty::TyAdt(def, ..) if def.is_enum() => {
                let variant = &def.variants[variant];
                write!(s, ".{}::{}", variant.name, variant.fields[i].ident)
            }

            // other ADTs
            ty::TyAdt(def, _) => write!(s, ".{}", def.non_enum_variant().fields[i].ident),

            ty::TyProjection(_) | ty::TyAnon(..) | ty::TyParam(_) |
            ty::TyInfer(_) | ty::TyError => {
                bug!("dump_field_name: unexpected type `{}`", ty)
            }
        }
    }
}
pub fn sign_extend(value: u128, size: Size) -> u128 {
    let size = size.bits();
    // sign extend
    let shift = 128 - size;
    // shift the unsigned value to the left,
    // and back to the right as signed (essentially fills with FF on the left)
    (((value << shift) as i128) >> shift) as u128
}

pub fn truncate(value: u128, size: Size) -> u128 {
    let size = size.bits();
    let shift = 128 - size;
    // truncate (shift left to drop out leftover values, shift right to fill with zeroes)
    (value << shift) >> shift
}
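
// Hedged worked examples for the two bit-twiddling helpers above (these tests
// are illustrative additions, not part of the original file; they assume the
// `Size::from_bits` constructor from `rustc::ty::layout`).
#[cfg(test)]
mod sign_extend_truncate_examples {
    use super::{sign_extend, truncate};
    use rustc::ty::layout::Size;

    #[test]
    fn examples() {
        // 0xFF as an 8-bit value is -1; sign-extended to 128 bits it is all ones.
        assert_eq!(sign_extend(0xFF, Size::from_bits(8)), u128::max_value());
        // 0x7F has a clear sign bit, so sign extension leaves it unchanged.
        assert_eq!(sign_extend(0x7F, Size::from_bits(8)), 0x7F);
        // Truncating 0x1FF to 8 bits drops the ninth bit.
        assert_eq!(truncate(0x1FF, Size::from_bits(8)), 0xFF);
    }
}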