1 //! Check the validity invariant of a given value, and tell the user
2 //! where in the value it got violated.
3 //! In const context, this goes even further and tries to approximate const safety.
4 //! That's useful because it means other passes (e.g. promotion) can rely on `const`s
8 use std::ops::RangeInclusive;
11 use rustc::ty::layout::{self, LayoutOf, TyLayout, VariantIdx};
12 use rustc_data_structures::fx::FxHashSet;
14 use rustc_span::symbol::{sym, Symbol};
19 CheckInAllocMsg, GlobalAlloc, InterpCx, InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy,
// Builds and throws a `ValidationFailure` error whose message reads
// "encountered <what>[ at <path>][, but expected <details>]".
// NOTE(review): several interior lines of this macro are elided in this
// excerpt; `where_` is presumably bound from `$where` on an elided line — confirm.
23 macro_rules! throw_validation_failure {
// Arm that also states what was expected.
24     ($what:expr, $where:expr, $details:expr) => {{
25         let mut msg = format!("encountered {}", $what);
// Only mention a path when there is one (the top-level value has an empty path).
27         if !where_.is_empty() {
29             write_path(&mut msg, where_);
31         write!(&mut msg, ", but expected {}", $details).unwrap();
32         throw_unsup!(ValidationFailure(msg))
// Arm without an "expected ..." clause.
34     ($what:expr, $where:expr) => {{
35         let mut msg = format!("encountered {}", $what);
37         if !where_.is_empty() {
39             write_path(&mut msg, where_);
41         throw_unsup!(ValidationFailure(msg))
// Evaluates a fallible expression; on `Err`, converts the failure into a
// user-facing validation error via `throw_validation_failure!`.
// NOTE(review): the `Ok` arm and match scaffolding are elided in this excerpt.
45 macro_rules! try_validation {
// Arm with an explicit "expected ..." detail string.
46     ($e:expr, $what:expr, $where:expr, $details:expr) => {{
49             Err(_) => throw_validation_failure!($what, $where, $details),
// Arm without a detail string.
53     ($e:expr, $what:expr, $where:expr) => {{
56             Err(_) => throw_validation_failure!($what, $where),
61 /// We want to show a nice path to the invalid field for diagnostics,
62 /// but avoid string operations in the happy case where no error happens.
63 /// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
64 /// need to later print something for the user.
65 #[derive(Copy, Clone, Debug)]
// One projection step of a diagnostic path. This variant records which
// generator state (by variant index) was entered; the enum's other variants
// (Field, EnumTag, Variant, TupleElem, ArrayElem, Deref, ... — see the match
// in `write_path`) are elided from this excerpt.
69     GeneratorState(VariantIdx),
79 /// State for tracking recursive validation of references
80 pub struct RefTracking<T, PATH = ()> {
    // References already enqueued; prevents revisiting (and thus infinite
    // recursion through cyclic data).
81     pub seen: FxHashSet<T>,
    // Work list of references still to validate, paired with the diagnostic
    // path that reached each one.
82     pub todo: Vec<(T, PATH)>,
85 impl<T: Copy + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
86 pub fn empty() -> Self {
87 RefTracking { seen: FxHashSet::default(), todo: vec![] }
89 pub fn new(op: T) -> Self {
90 let mut ref_tracking_for_consts =
91 RefTracking { seen: FxHashSet::default(), todo: vec![(op, PATH::default())] };
92 ref_tracking_for_consts.seen.insert(op);
93 ref_tracking_for_consts
96 pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
97 if self.seen.insert(op) {
98 trace!("Recursing below ptr {:#?}", op);
100 // Remember to come back to this later.
101 self.todo.push((op, path));
107 fn write_path(out: &mut String, path: &Vec<PathElem>) {
108 use self::PathElem::*;
110 for elem in path.iter() {
112 Field(name) => write!(out, ".{}", name),
113 EnumTag => write!(out, ".<enum-tag>"),
114 Variant(name) => write!(out, ".<enum-variant({})>", name),
115 GeneratorTag => write!(out, ".<generator-tag>"),
116 GeneratorState(idx) => write!(out, ".<generator-state({})>", idx.index()),
117 CapturedVar(name) => write!(out, ".<captured-var({})>", name),
118 TupleElem(idx) => write!(out, ".{}", idx),
119 ArrayElem(idx) => write!(out, "[{}]", idx),
120 // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
121 // some of the other items here also are not Rust syntax. Actually we can't
122 // even use the usual syntax because we are just showing the projections,
124 Deref => write!(out, ".<deref>"),
125 DynDowncast => write!(out, ".<dyn-downcast>"),
// Test if a range that wraps at overflow contains `test`.
//
// A "wrapping" range has `lo > hi` and denotes the set `..=hi` union `lo..`;
// a normal range (`lo <= hi`) is an ordinary inclusive interval. As
// transcribed (without the branch), the disjunction would be trivially true
// for every `test` whenever `lo <= hi`, so the non-wrapping case must be
// handled separately.
fn wrapping_range_contains(r: &RangeInclusive<u128>, test: u128) -> bool {
    let (lo, hi) = r.clone().into_inner();
    if lo > hi {
        // Wrapped: the valid values are everything *outside* (hi, lo).
        (..=hi).contains(&test) || (lo..).contains(&test)
    } else {
        // Normal inclusive range.
        r.contains(&test)
    }
}
// Formats such that a sentence like "expected something {}" to mean
// "expected something <in the given range>" makes sense.
//
// `max_hi` is the largest value representable at the scalar's size; callers
// must not ask us to print a range that covers every value (see the
// debug_asserts below). The branch conditions are chosen to match the
// visible messages: wrapped range, single value, bounded-above, bounded-below,
// and the general case.
fn wrapping_range_format(r: &RangeInclusive<u128>, max_hi: u128) -> String {
    let (lo, hi) = r.clone().into_inner();
    debug_assert!(hi <= max_hi);
    if lo > hi {
        // Wrapped range: valid values are outside (hi, lo).
        format!("less or equal to {}, or greater or equal to {}", hi, lo)
    } else if lo == hi {
        // Exactly one valid value.
        format!("equal to {}", lo)
    } else if lo == 0 {
        // Only bounded above.
        debug_assert!(hi < max_hi, "should not be printing if the range covers everything");
        format!("less or equal to {}", hi)
    } else if hi == max_hi {
        // Only bounded below.
        debug_assert!(lo > 0, "should not be printing if the range covers everything");
        format!("greater or equal to {}", lo)
    } else {
        format!("in the range {:?}", r)
    }
}
// Visitor that walks a value and checks the validity invariant at every level.
// NOTE(review): the `path` field declaration itself is elided in this excerpt;
// the doc comment below refers to it.
163 struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
164     /// The `path` may be pushed to, but the part that is present when a function
165     /// starts must not be changed! `visit_fields` and `visit_array` rely on
166     /// this stack discipline.
    // `Some(..)` means const-mode validation: encountered references are
    // recorded here for recursive checking. `None` means run-time validation
    // (more lenient, no recursion below references).
168     ref_tracking_for_consts:
169         Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
    // The interpreter context this validation runs in.
170     ecx: &'rt InterpCx<'mir, 'tcx, M>,
173 impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
    /// Maps field index `field` within `layout` to the `PathElem` shown in
    /// diagnostics: the enum/generator tag, a captured closure/generator
    /// variable, a named ADT field, a tuple/array index, or a dyn downcast.
    /// NOTE(review): several lines of this method are elided in this excerpt.
174     fn aggregate_field_path_elem(&mut self, layout: TyLayout<'tcx>, field: usize) -> PathElem {
175         // First, check if we are projecting to a variant.
176         match layout.variants {
177             layout::Variants::Multiple { discr_index, .. } => {
                // The discriminant itself lives at `discr_index`; projecting to
                // it is reported as the enum/generator tag, not as a field.
178                 if discr_index == field {
179                     return match layout.ty.kind {
180                         ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
181                         ty::Generator(..) => PathElem::GeneratorTag,
182                         _ => bug!("non-variant type {:?}", layout.ty),
186             layout::Variants::Single { .. } => {}
189         // Now we know we are projecting to a field, so figure out which one.
190         match layout.ty.kind {
191             // generators and closures.
192             ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => {
                // Try to recover the user-visible name of the captured
                // variable from the HIR (only possible for local items).
194                 if def_id.is_local() {
195                     let tables = self.ecx.tcx.typeck_tables_of(def_id);
196                     if let Some(upvars) = tables.upvar_list.get(&def_id) {
197                         // Sometimes the index is beyond the number of upvars (seen
199                         if let Some((&var_hir_id, _)) = upvars.get_index(field) {
200                             let node = self.ecx.tcx.hir().get(var_hir_id);
201                             if let hir::Node::Binding(pat) = node {
202                                 if let hir::PatKind::Binding(_, _, ident, _) = pat.kind {
203                                     name = Some(ident.name);
210                 PathElem::CapturedVar(name.unwrap_or_else(|| {
211                     // Fall back to showing the field index.
217             ty::Tuple(_) => PathElem::TupleElem(field),
220             ty::Adt(def, ..) if def.is_enum() => {
221                 // we might be projecting *to* a variant, or to a field *in* a variant.
222                 match layout.variants {
223                     layout::Variants::Single { index } => {
                        // Inside a known variant: name the field of that variant.
225                         PathElem::Field(def.variants[index].fields[field].ident.name)
227                     layout::Variants::Multiple { .. } => bug!("we handled variants above"),
232             ty::Adt(def, _) => PathElem::Field(def.non_enum_variant().fields[field].ident.name),
235             ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),
238             ty::Dynamic(..) => PathElem::DynDowncast,
240             // nothing else has an aggregate layout
241             _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
// NOTE(review): the start of this method's signature (its name, `&mut self`,
// and the `elem: PathElem` parameter) is elided in this excerpt. It visits a
// projected value while temporarily extending the diagnostic path.
247         new_op: OpTy<'tcx, M::PointerTag>,
249     ) -> InterpResult<'tcx> {
250         // Remember the old state
251         let path_len = self.path.len();
        // Push the path element, recurse into the projected value, then restore
        // the path to its previous length (the stack discipline documented on
        // the `path` field).
253         self.path.push(elem);
254         self.visit_value(new_op)?;
256         self.path.truncate(path_len);
    /// Validates the metadata half of a wide pointer: the vtable for trait
    /// objects, and the length for slices/str. NOTE(review): several lines
    /// (including the match on `tail.kind` and closing braces) are elided in
    /// this excerpt.
260     fn check_wide_ptr_meta(
262         meta: MemPlaceMeta<M::PointerTag>,
263         pointee: TyLayout<'tcx>,
264     ) -> InterpResult<'tcx> {
        // The unsized tail determines what kind of metadata to expect.
265         let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env);
        // Trait-object case (presumably under a `ty::Dynamic` arm elided here):
        // the metadata is a vtable pointer.
268                 let vtable = meta.unwrap_meta();
                // The vtable must be dereferenceable and aligned for at least
                // its three mandatory pointer-sized entries.
270                     self.ecx.memory.check_ptr_access(
272                         3 * self.ecx.tcx.data_layout.pointer_size, // drop, size, align
273                         self.ecx.tcx.data_layout.pointer_align.abi,
275                     "dangling or unaligned vtable pointer in wide pointer or too small vtable",
                // The drop fn entry must decode to a valid instance.
279                     self.ecx.read_drop_type_from_vtable(vtable),
280                     "invalid drop fn in vtable",
                // The size/align entries must decode sensibly.
284                     self.ecx.read_size_and_align_from_vtable(vtable),
285                     "invalid size or align in vtable",
288                 // FIXME: More checks for the vtable.
290             ty::Slice(..) | ty::Str => {
                // Slice/str metadata is the length; it must be an integer, not
                // a pointer. The value itself is otherwise unconstrained here.
291                 let _len = try_validation!(
292                     meta.unwrap_meta().to_machine_usize(self.ecx),
293                     "non-integer slice length in wide pointer",
296                 // We do not check that `len * elem_size <= isize::MAX`:
297                 // that is only required for references, and there it falls out of the
298                 // "dereferenceable" check performed by Stacked Borrows.
301                 // Unsized, but not wide.
303             _ => bug!("Unexpected unsized type tail: {:?}", tail),
    /// Checks a leaf (primitive) value: bool, char, int/float, raw pointer,
    /// reference/box, or fn pointer. NOTE(review): many lines (match
    /// scaffolding, some arms, closing braces) are elided in this excerpt.
309     fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
310         let value = self.ecx.read_immediate(value)?;
311         // Go over all the primitive types
312         let ty = value.layout.ty;
        // bool: must decode via `to_bool`.
315                 let value = value.to_scalar_or_undef();
316                 try_validation!(value.to_bool(), value, self.path, "a boolean");
        // char: must be a valid unicode scalar value.
319                 let value = value.to_scalar_or_undef();
320                 try_validation!(value.to_char(), value, self.path, "a valid unicode codepoint");
322             ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
323                 // NOTE: Keep this in sync with the array optimization for int/float
325                 let size = value.layout.size;
326                 let value = value.to_scalar_or_undef();
                // Const mode is strict: no undef, no pointer values in
                // integers/floats. Run-time mode accepts anything here.
327                 if self.ref_tracking_for_consts.is_some() {
328                     // Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
333                         "initialized plain (non-pointer) bytes"
336                     // At run-time, for now, we accept *anything* for these types, including
337                     // undef. We should fix that, but let's start low.
                // Raw pointers (presumably a `ty::RawPtr` arm elided here):
341                 // We are conservative with undef for integers, but try to
342                 // actually enforce our current rules for raw pointers.
344                     try_validation!(self.ecx.ref_to_mplace(value), "undefined pointer", self.path);
345                 if place.layout.is_unsized() {
346                     self.check_wide_ptr_meta(place.meta, place.layout)?;
349             _ if ty.is_box() || ty.is_region_ptr() => {
350                 // Handle wide pointers.
351                 // Check metadata early, for better diagnostics
353                     try_validation!(self.ecx.ref_to_mplace(value), "undefined pointer", self.path);
354                 if place.layout.is_unsized() {
355                     self.check_wide_ptr_meta(place.meta, place.layout)?;
357                 // Make sure this is dereferenceable and all.
358                 let (size, align) = self
360                     .size_and_align_of(place.meta, place.layout)?
361                     // for the purpose of validity, consider foreign types to have
362                     // alignment and size determined by the layout (size will be 0,
363                     // alignment should take attributes into account).
364                     .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
                // Translate low-level access errors into user-facing
                // validation failures (NULL, misaligned, dangling, ...).
365                 let ptr: Option<_> = match self.ecx.memory.check_ptr_access_align(
369                     CheckInAllocMsg::InboundsTest,
374                             "{:?} did not pass access check for size {:?}, align {:?}",
375                             place.ptr, size, align
378                         err_unsup!(InvalidNullPointerUsage) => {
379                             throw_validation_failure!("a NULL reference", self.path)
381                         err_unsup!(AlignmentCheckFailed { required, has }) => {
382                             throw_validation_failure!(
384                                     "an unaligned reference \
385                                     (required {} byte alignment but found {})",
392                         err_unsup!(ReadBytesAsPointer) => throw_validation_failure!(
393                             "a dangling reference (created from integer)",
396                         _ => throw_validation_failure!(
397                             "a dangling reference (not entirely in bounds)",
403                 // Recursive checking
404                 if let Some(ref mut ref_tracking) = self.ref_tracking_for_consts {
405                     if let Some(ptr) = ptr {
407                         // Skip validation entirely for some external statics
408                         let alloc_kind = self.ecx.tcx.alloc_map.lock().get(ptr.alloc_id);
409                         if let Some(GlobalAlloc::Static(did)) = alloc_kind {
410                             // `extern static` cannot be validated as they have no body.
411                             // FIXME: Statics from other crates are also skipped.
412                             // They might be checked at a different type, but for now we
413                             // want to avoid recursing too deeply. This is not sound!
414                             if !did.is_local() || self.ecx.tcx.is_foreign_item(did) {
419                     // Proceed recursively even for ZST, no reason to skip them!
420                     // `!` is a ZST and we want to validate it.
421                     // Normalize before handing `place` to tracking because that will
422                     // check for duplicates.
423                     let place = if size.bytes() > 0 {
424                         self.ecx.force_mplace_ptr(place).expect("we already bounds-checked")
428                     let path = &self.path;
429                     ref_tracking.track(place, || {
430                         // We need to clone the path anyway, make sure it gets created
431                         // with enough space for the additional `Deref`.
432                         let mut new_path = Vec::with_capacity(path.len() + 1);
433                         new_path.clone_from(path);
434                         new_path.push(PathElem::Deref);
        // fn pointers (presumably a `ty::FnPtr` arm elided here): must be
        // non-undef and resolve to an actual function.
440                 let value = value.to_scalar_or_undef();
441                 let _fn = try_validation!(
442                     value.not_undef().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
447                 // FIXME: Check if the signature matches
449             // This should be all the (inhabited) primitive types
450             _ => bug!("Unexpected primitive type {}", value.layout.ty),
    // NOTE(review): the start of this method's signature (its name and
    // `&mut self`) is elided in this excerpt. It enforces the layout's
    // `valid_range` (a possibly wrap-around niche range) on a scalar value.
457         op: OpTy<'tcx, M::PointerTag>,
458         scalar_layout: &layout::Scalar,
459     ) -> InterpResult<'tcx> {
460         let value = self.ecx.read_scalar(op)?;
461         let valid_range = &scalar_layout.valid_range;
462         let (lo, hi) = valid_range.clone().into_inner();
463         // Determine the allowed range
464         // `max_hi` is as big as the size fits
465         let max_hi = u128::max_value() >> (128 - op.layout.size.bits());
466         assert!(hi <= max_hi);
467         // We could also write `(hi + 1) % (max_hi + 1) == lo` but `max_hi + 1` overflows for `u128`
        // If the range admits every value, there is nothing to check.
468         if (lo == 0 && hi == max_hi) || (hi + 1 == lo) {
472         // At least one value is excluded. Get the bits.
473         let value = try_validation!(
477             format_args!("something {}", wrapping_range_format(valid_range, max_hi),)
        // The value may be bits or a pointer; pointers need special treatment
        // because their numeric value is not known.
479         let bits = match value.to_bits_or_ptr(op.layout.size, self.ecx) {
481                 if lo == 1 && hi == max_hi {
482                     // Only NULL is the niche. So make sure the ptr is NOT NULL.
483                     if self.ecx.memory.ptr_may_be_null(ptr) {
484                         throw_validation_failure!(
485                             "a potentially NULL pointer",
488                                 "something that cannot possibly fail to be {}",
489                                 wrapping_range_format(valid_range, max_hi)
                // Any other restricted range cannot be satisfied by a pointer
                // whose numeric value is unknown:
495                     // Conservatively, we reject, because the pointer *could* have a bad
497                     throw_validation_failure!(
501                             "something that cannot possibly fail to be {}",
502                             wrapping_range_format(valid_range, max_hi)
509         // Now compare. This is slightly subtle because this is a special "wrap-around" range.
510         if wrapping_range_contains(&valid_range, bits) {
513             throw_validation_failure!(
516                 format_args!("something {}", wrapping_range_format(valid_range, max_hi))
// The generic value-walking framework drives validation through this impl.
// NOTE(review): parts of the method signatures and bodies are elided.
522 impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
523     for ValidityVisitor<'rt, 'mir, 'tcx, M>
525     type V = OpTy<'tcx, M::PointerTag>;
    // Hands the walker our interpreter context.
528     fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
    // visit_field (signature start elided): extend the path with the field's
    // diagnostic element, then recurse into the field's value.
535         old_op: OpTy<'tcx, M::PointerTag>,
537         new_op: OpTy<'tcx, M::PointerTag>,
538     ) -> InterpResult<'tcx> {
539         let elem = self.aggregate_field_path_elem(old_op.layout, field);
540         self.visit_elem(new_op, elem)
    // visit_variant (signature start elided): record which enum/generator
    // variant was entered, then recurse into the downcast value.
546         old_op: OpTy<'tcx, M::PointerTag>,
547         variant_id: VariantIdx,
548         new_op: OpTy<'tcx, M::PointerTag>,
549     ) -> InterpResult<'tcx> {
550         let name = match old_op.layout.ty.kind {
551             ty::Adt(adt, _) => PathElem::Variant(adt.variants[variant_id].ident.name),
552             // Generators also have variants
553             ty::Generator(..) => PathElem::GeneratorState(variant_id),
554             _ => bug!("Unexpected type with variant: {:?}", old_op.layout.ty),
556         self.visit_elem(new_op, name)
    // Unions are not recursed into; their bytes can be anything.
    // NOTE(review): the body after this comment is elided in this excerpt.
560     fn visit_union(&mut self, _v: Self::V, fields: usize) -> InterpResult<'tcx> {
561         // Empty unions are not accepted by rustc. That's great, it means we can
562         // use that as a signal for detecting primitives. Make sure
563         // we did not miss any primitive.
    /// Entry point for checking one value: rejects uninhabited types, routes
    /// primitives to `visit_primitive`, otherwise walks the fields and then
    /// enforces the scalar-level ABI restrictions (e.g. niches like `NonNull`).
    /// NOTE(review): several lines are elided in this excerpt.
569     fn visit_value(&mut self, op: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
570         trace!("visit_value: {:?}, {:?}", *op, op.layout);
572         if op.layout.abi.is_uninhabited() {
573             // Uninhabited types do not have sensible layout, stop right here.
574             throw_validation_failure!(
575                 format_args!("a value of uninhabited type {:?}", op.layout.ty),
580         // Check primitive types. We do this after checking for uninhabited types,
581         // to exclude fieldless enums (that also appear as fieldless unions here).
582         // Primitives can have varying layout, so we check them separately and before aggregate
584         // It is CRITICAL that we get this check right, or we might be validating the wrong thing!
585         let primitive = match op.layout.fields {
586             // Primitives appear as Union with 0 fields - except for Boxes and fat pointers.
587             // (Fieldless enums also appear here, but they are uninhabited and thus handled above.)
588             layout::FieldPlacement::Union(0) => true,
589             _ => op.layout.ty.builtin_deref(true).is_some(),
            // (the `if primitive` test is elided in this excerpt)
592             // No need to recurse further or check scalar layout, this is a leaf type.
593             return self.visit_primitive(op);
596         // Recursively walk the type. Translate some possible errors to something nicer.
597         match self.walk_value(op) {
599             Err(err) => match err.kind {
600                 err_ub!(InvalidDiscriminant(val)) => {
601                     throw_validation_failure!(val, self.path, "a valid enum discriminant")
603                 err_unsup!(ReadPointerAsBytes) => {
604                     throw_validation_failure!("a pointer", self.path, "plain (non-pointer) bytes")
606                 _ => return Err(err),
610         // *After* all of this, check the ABI. We need to check the ABI to handle
611         // types like `NonNull` where the `Scalar` info is more restrictive than what
612         // the fields say (`rustc_layout_scalar_valid_range_start`).
613         // But in most cases, this will just propagate what the fields say,
614         // and then we want the error to point at the field -- so, first recurse,
617         // FIXME: We could avoid some redundant checks here. For newtypes wrapping
618         // scalars, we do the same check on every "level" (e.g., first we check
619         // MyNewtype and then the scalar in there).
620         match op.layout.abi {
621             layout::Abi::Uninhabited => unreachable!(), // checked above
622             layout::Abi::Scalar(ref scalar_layout) => {
623                 self.visit_scalar(op, scalar_layout)?;
625             layout::Abi::ScalarPair { .. } | layout::Abi::Vector { .. } => {
626                 // These have fields that we already visited above, so we already checked
627                 // all their scalar-level restrictions.
628                 // There is also no equivalent to `rustc_layout_scalar_valid_range_start`
629                 // that would make skipping them here an issue.
631             layout::Abi::Aggregate { .. } => {
    // visit_aggregate (signature start elided in this excerpt): walks the
    // fields of an aggregate, with fast paths for `str`, for arrays/slices of
    // plain int/float, and for arrays/slices of ZSTs.
641         op: OpTy<'tcx, M::PointerTag>,
642         fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
643     ) -> InterpResult<'tcx> {
644         match op.layout.ty.kind {
            // str (arm head elided): check the whole buffer is valid UTF-8.
646                 let mplace = op.assert_mem_place(self.ecx); // strings are never immediate
648                     self.ecx.read_str(mplace),
649                     "uninitialized or non-UTF-8 data in str",
653             ty::Array(tys, ..) | ty::Slice(tys)
655                 // This optimization applies for types that can hold arbitrary bytes (such as
656                 // integer and floating point types) or for structs or tuples with no fields.
657                 // FIXME(wesleywiser) This logic could be extended further to arbitrary structs
658                 // or tuples made up of integer/floating point types or inhabited ZSTs with no
661                         ty::Int(..) | ty::Uint(..) | ty::Float(..) => true,
666                 // Optimized handling for arrays of integer/float type.
668                 // Arrays cannot be immediate, slices are never immediate.
669                 let mplace = op.assert_mem_place(self.ecx);
670                 // This is the length of the array/slice.
671                 let len = mplace.len(self.ecx)?;
672                 // Zero length slices have nothing to be checked.
676                 // This is the element type size.
677                 let layout = self.ecx.layout_of(tys)?;
678                 // This is the size in bytes of the whole array.
679                 let size = layout.size * len;
680                 // Size is not 0, get a pointer.
681                 let ptr = self.ecx.force_ptr(mplace.ptr)?;
683                 // Optimization: we just check the entire range at once.
684                 // NOTE: Keep this in sync with the handling of integer and float
685                 // types above, in `visit_primitive`.
686                 // In run-time mode, we accept pointers in here. This is actually more
687                 // permissive than a per-element check would be, e.g., we accept
688                 // an &[u8] that contains a pointer even though bytewise checking would
689                 // reject it. However, that's good: We don't inherently want
690                 // to reject those pointers, we just do not have the machinery to
691                 // talk about parts of a pointer.
692                 // We also accept undef, for consistency with the slow path.
                // Const mode forbids pointers/undef in the bytes; run-time mode
                // allows them (see the flag below).
693                 match self.ecx.memory.get_raw(ptr.alloc_id)?.check_bytes(
697                     /*allow_ptr_and_undef*/ self.ref_tracking_for_consts.is_none(),
699                     // In the happy case, we needn't check anything else.
701                     // Some error happened, try to provide a more detailed description.
703                         // For some errors we might be able to provide extra information
705                             err_unsup!(ReadUndefBytes(offset)) => {
706                                 // Some byte was undefined, determine which
707                                 // element that byte belongs to so we can
                                // Integer division maps the byte offset back to
                                // the index of the offending element.
709                                 let i = (offset.bytes() / layout.size.bytes()) as usize;
710                                 self.path.push(PathElem::ArrayElem(i));
712                                 throw_validation_failure!("undefined bytes", self.path)
714                             // Other errors shouldn't be possible
715                             _ => return Err(err),
720             // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
721             // of an array and not all of them, because there's only a single value of a specific
722             // ZST type, so either validation fails for all elements or none.
723             ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(tys)?.is_zst() => {
724                 // Validate just the first element
725                 self.walk_aggregate(op, fields.take(1))?
            // All other aggregates: visit every field normally.
728                 self.walk_aggregate(op, fields)? // default handler
735 impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
736     /// This function checks the data at `op`. `op` is assumed to cover valid memory if it
737     /// is an indirect operand.
738     /// It will error if the bits at the destination do not match the ones described by the layout.
740     /// `ref_tracking_for_consts` can be `None` to avoid recursive checking below references.
741     /// This also toggles between "run-time" (no recursion) and "compile-time" (with recursion)
742     /// validation (e.g., pointer values are fine in integers at runtime) and various other const
743     /// specific validation checks.
    // NOTE(review): the `path` parameter declaration is elided in this excerpt;
    // it is the initial diagnostic path handed to the visitor below.
744     pub fn validate_operand(
746         op: OpTy<'tcx, M::PointerTag>,
748         ref_tracking_for_consts: Option<
749             &mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>,
751     ) -> InterpResult<'tcx> {
752         trace!("validate_operand: {:?}, {:?}", *op, op.layout.ty);
754         // Construct a visitor
755         let mut visitor = ValidityVisitor { path, ref_tracking_for_consts, ecx: self };
757         // Try to cast to ptr *once* instead of all the time.
        // If forcing to a pointer fails, fall back to the original operand.
758         let op = self.force_op_ptr(op).unwrap_or(op);
        // Drive the visitor over the whole value.
761         visitor.visit_value(op)