1 //! Check the validity invariant of a given value, and tell the user
2 //! where in the value it got violated.
3 //! In const context, this goes even further and tries to approximate const safety.
4 //! That's useful because it means other passes (e.g. promotion) can rely on `const`s
8 use std::ops::RangeInclusive;
11 use rustc::ty::layout::{self, LayoutOf, TyLayout, VariantIdx};
12 use rustc_data_structures::fx::FxHashSet;
14 use rustc_span::symbol::{sym, Symbol};
19 CheckInAllocMsg, GlobalAlloc, InterpCx, InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy,
// Macro: build a "validation failure" message describing what invalid value
// was encountered and (optionally) where in the value, then bail out of the
// enclosing function with a `ValidationFailure` interpreter error.
23 macro_rules! throw_validation_failure {
// Arm with expected-value details: "encountered <what> at <path>, but expected <details>".
24 ($what:expr, $where:expr, $details:expr) => {{
25 let mut msg = format!("encountered {}", $what);
// NOTE(review): `where_` is bound from `$where` on an elided line; only print
// a path if there is one (the root value has an empty path).
27 if !where_.is_empty() {
29 write_path(&mut msg, where_);
31 write!(&mut msg, ", but expected {}", $details).unwrap();
32 throw_unsup!(ValidationFailure(msg))
// Arm without details: just "encountered <what> at <path>".
34 ($what:expr, $where:expr) => {{
35 let mut msg = format!("encountered {}", $what);
37 if !where_.is_empty() {
39 write_path(&mut msg, where_);
41 throw_unsup!(ValidationFailure(msg))
// Macro: evaluate an interpreter result `$e`; on any `Err`, convert it into a
// validation failure via `throw_validation_failure!`. The `Ok` arm (elided
// here) presumably passes the value through -- TODO confirm against full file.
45 macro_rules! try_validation {
// Arm with details: errors become "encountered <what>, but expected <details>".
46 ($e:expr, $what:expr, $where:expr, $details:expr) => {{
49 Err(_) => throw_validation_failure!($what, $where, $details),
// Arm without details.
53 ($e:expr, $what:expr, $where:expr) => {{
56 Err(_) => throw_validation_failure!($what, $where),
61 /// We want to show a nice path to the invalid field for diagnostics,
62 /// but avoid string operations in the happy case where no error happens.
63 /// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
64 /// need to later print something for the user.
65 #[derive(Copy, Clone, Debug)]
// One projection step of a diagnostic path into a value. Only the
// `GeneratorState` variant (which generator suspension-point variant was
// entered) is visible here; the other variants (Field, EnumTag, Variant,
// GeneratorTag, CapturedVar, TupleElem, ArrayElem, Deref, DynDowncast --
// see `write_path` below) are on elided lines.
69 GeneratorState(VariantIdx),
79 /// State for tracking recursive validation of references
// `seen` de-duplicates places that were already validated or queued, so that
// reference cycles terminate. `todo` is the worklist of places still to be
// validated, each paired with the diagnostic path (`PATH`) it was reached by.
80 pub struct RefTracking<T, PATH = ()> {
81 pub seen: FxHashSet<T>,
82 pub todo: Vec<(T, PATH)>,
85 impl<T: Copy + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
86 pub fn empty() -> Self {
87 RefTracking { seen: FxHashSet::default(), todo: vec![] }
89 pub fn new(op: T) -> Self {
90 let mut ref_tracking_for_consts =
91 RefTracking { seen: FxHashSet::default(), todo: vec![(op, PATH::default())] };
92 ref_tracking_for_consts.seen.insert(op);
93 ref_tracking_for_consts
96 pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
97 if self.seen.insert(op) {
98 trace!("Recursing below ptr {:#?}", op);
100 // Remember to come back to this later.
101 self.todo.push((op, path));
107 fn write_path(out: &mut String, path: &Vec<PathElem>) {
108 use self::PathElem::*;
110 for elem in path.iter() {
112 Field(name) => write!(out, ".{}", name),
113 EnumTag => write!(out, ".<enum-tag>"),
114 Variant(name) => write!(out, ".<enum-variant({})>", name),
115 GeneratorTag => write!(out, ".<generator-tag>"),
116 GeneratorState(idx) => write!(out, ".<generator-state({})>", idx.index()),
117 CapturedVar(name) => write!(out, ".<captured-var({})>", name),
118 TupleElem(idx) => write!(out, ".{}", idx),
119 ArrayElem(idx) => write!(out, "[{}]", idx),
120 // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
121 // some of the other items here also are not Rust syntax. Actually we can't
122 // even use the usual syntax because we are just showing the projections,
124 Deref => write!(out, ".<deref>"),
125 DynDowncast => write!(out, ".<dyn-downcast>"),
/// Does the (possibly overflow-wrapping) inclusive range `r` contain `test`?
/// A range with `start > end` is interpreted as wrapping around the end of
/// the value space, i.e. it covers everything *except* `end+1 ..= start-1`.
fn wrapping_range_contains(r: &RangeInclusive<u128>, test: u128) -> bool {
    let lo = *r.start();
    let hi = *r.end();
    if lo > hi {
        // Wrapping range: membership means being in either of the two halves.
        test <= hi || test >= lo
    } else {
        // Ordinary, non-wrapping range.
        lo <= test && test <= hi
    }
}
// Formats such that a sentence like "expected something {}" to mean
// "expected something <in the given range>" makes sense.
// `max_hi` is the largest value representable at the scalar's size; it is
// used to detect ranges that are open at the top.
fn wrapping_range_format(r: &RangeInclusive<u128>, max_hi: u128) -> String {
    let lo = *r.start();
    let hi = *r.end();
    assert!(hi <= max_hi);
    // Guard-clause style: handle each shape of range and return early.
    if lo > hi {
        // Wrapping range: two disjoint halves.
        return format!("less or equal to {}, or greater or equal to {}", hi, lo);
    }
    if lo == hi {
        // Exactly one admissible value.
        return format!("equal to {}", lo);
    }
    if lo == 0 {
        assert!(hi < max_hi, "should not be printing if the range covers everything");
        return format!("less or equal to {}", hi);
    }
    if hi == max_hi {
        assert!(lo > 0, "should not be printing if the range covers everything");
        return format!("greater or equal to {}", lo);
    }
    format!("in the range {:?}", r)
}
// The visitor that walks a value and checks its validity invariant.
// Generic over the interpreter `Machine`, so it serves both CTFE and
// runtime-style (Miri) validation.
163 struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
164 /// The `path` may be pushed to, but the part that is present when a function
165 /// starts must not be changed!  `visit_fields` and `visit_array` rely on
166 /// this stack discipline.
// `Some` enables const-mode validation, which recurses through references via
// the tracker; `None` means non-recursive, run-time-style validation.
168 ref_tracking_for_consts:
169 Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
170 ecx: &'rt InterpCx<'mir, 'tcx, M>,
173 impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
// Map "field number `field` of a value with layout `layout`" to a diagnostic
// `PathElem`: the enum/generator discriminant, a captured closure/generator
// upvar, a named ADT field, a tuple/array index, or a dyn-trait downcast.
174 fn aggregate_field_path_elem(&mut self, layout: TyLayout<'tcx>, field: usize) -> PathElem {
175 // First, check if we are projecting to a variant.
176 match layout.variants {
177 layout::Variants::Multiple { discr_index, .. } => {
// The discriminant is stored as one of the "fields"; label it specially.
178 if discr_index == field {
179 return match layout.ty.kind {
180 ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
181 ty::Generator(..) => PathElem::GeneratorTag,
182 _ => bug!("non-variant type {:?}", layout.ty),
186 layout::Variants::Single { .. } => {}
189 // Now we know we are projecting to a field, so figure out which one.
190 match layout.ty.kind {
191 // generators and closures.
192 ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => {
// Try to recover the source-level name of the captured variable from HIR.
// Only possible for local definitions (we have no HIR for other crates).
194 if def_id.is_local() {
195 let tables = self.ecx.tcx.typeck_tables_of(def_id);
196 if let Some(upvars) = tables.upvar_list.get(&def_id) {
197 // Sometimes the index is beyond the number of upvars (seen
199 if let Some((&var_hir_id, _)) = upvars.get_index(field) {
200 let node = self.ecx.tcx.hir().get(var_hir_id);
201 if let hir::Node::Binding(pat) = node {
202 if let hir::PatKind::Binding(_, _, ident, _) = pat.kind {
203 name = Some(ident.name);
210 PathElem::CapturedVar(name.unwrap_or_else(|| {
211 // Fall back to showing the field index.
217 ty::Tuple(_) => PathElem::TupleElem(field),
220 ty::Adt(def, ..) if def.is_enum() => {
221 // we might be projecting *to* a variant, or to a field *in* a variant.
222 match layout.variants {
223 layout::Variants::Single { index } => {
// Inside a single known variant: name the field within that variant.
225 PathElem::Field(def.variants[index].fields[field].ident.name)
227 layout::Variants::Multiple { .. } => bug!("we handled variants above"),
// Non-enum ADTs (structs/unions) have exactly one variant to take fields from.
232 ty::Adt(def, _) => PathElem::Field(def.non_enum_variant().fields[field].ident.name),
235 ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),
238 ty::Dynamic(..) => PathElem::DynDowncast,
240 // nothing else has an aggregate layout
241 _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
// NOTE(review): the `fn visit_elem(&mut self, ..., elem: PathElem)` signature
// line is elided here; what follows is the tail of its parameter list and its
// body. It visits `new_op` with `elem` pushed onto the diagnostic path and
// restores the path afterwards.
247 new_op: OpTy<'tcx, M::PointerTag>,
249 ) -> InterpResult<'tcx> {
250 // Remember the old state
251 let path_len = self.path.len();
// Perform the actual visit; on error, `?` returns with the extended path
// still in place so error messages can include it.
253 self.path.push(elem);
254 self.visit_value(new_op)?;
// Undo the push on success, upholding the stack discipline documented on
// the `path` field of `ValidityVisitor`.
256 self.path.truncate(path_len);
// Check the metadata part of a wide pointer: for trait objects the vtable
// must be dereferenceable, aligned, and contain a sane drop fn and
// size/align entries; for slices/str the length must be an integer.
// Any other unsized tail is a bug.
260 fn check_wide_ptr_meta(
262 meta: MemPlaceMeta<M::PointerTag>,
263 pointee: TyLayout<'tcx>,
264 ) -> InterpResult<'tcx> {
// The unsized "tail" of the pointee type determines what the metadata means.
265 let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env);
// (match on the tail's kind is elided) -- trait-object case:
268 let vtable = meta.unwrap_meta();
// The vtable must be accessible for at least 3 pointer-sized/aligned slots.
270 self.ecx.memory.check_ptr_access(
272 3 * self.ecx.tcx.data_layout.pointer_size, // drop, size, align
273 self.ecx.tcx.data_layout.pointer_align.abi,
275 "dangling or unaligned vtable pointer in wide pointer or too small vtable",
// The first slot must decode to a valid drop function...
279 self.ecx.read_drop_type_from_vtable(vtable),
280 "invalid drop fn in vtable",
// ...and the size/align slots must decode to valid values.
284 self.ecx.read_size_and_align_from_vtable(vtable),
285 "invalid size or align in vtable",
288 // FIXME: More checks for the vtable.
290 ty::Slice(..) | ty::Str => {
291 let _len = try_validation!(
292 meta.unwrap_meta().to_machine_usize(self.ecx),
293 "non-integer slice length in wide pointer",
296 // We do not check that `len * elem_size <= isize::MAX`:
297 // that is only required for references, and there it falls out of the
298 // "dereferenceable" check performed by Stacked Borrows.
301 // Unsized, but not wide.
303 _ => bug!("Unexpected unsized type tail: {:?}", tail),
// Validate a leaf (primitive) value: bool, char, int/float, raw pointer,
// reference/Box, or fn pointer. Reads the value as an immediate and then
// dispatches on its type (several match-arm headers are on elided lines).
309 fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
310 let value = self.ecx.read_immediate(value)?;
311 // Go over all the primitive types
312 let ty = value.layout.ty;
// (elided `ty::Bool` arm) booleans must be 0 or 1:
315 let value = value.to_scalar_or_undef();
316 try_validation!(value.to_bool(), value, self.path, "a boolean");
// (elided `ty::Char` arm) chars must be valid unicode scalar values:
319 let value = value.to_scalar_or_undef();
320 try_validation!(value.to_char(), value, self.path, "a valid unicode codepoint");
322 ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
323 // NOTE: Keep this in sync with the array optimization for int/float
325 let value = value.to_scalar_or_undef();
// In const mode, integers/floats must be fully initialized non-pointer bits.
326 if self.ref_tracking_for_consts.is_some() {
327 // Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
328 let is_bits = value.not_undef().map_or(false, |v| v.is_bits());
330 throw_validation_failure!(
333 "initialized plain (non-pointer) bytes"
337 // At run-time, for now, we accept *anything* for these types, including
338 // undef. We should fix that, but let's start low.
// (elided `ty::RawPtr` arm) raw pointers: check only that the metadata of
// wide raw pointers is valid -- the pointer itself may dangle.
342 // We are conservative with undef for integers, but try to
343 // actually enforce our current rules for raw pointers.
345 try_validation!(self.ecx.ref_to_mplace(value), "undefined pointer", self.path);
346 if place.layout.is_unsized() {
347 self.check_wide_ptr_meta(place.meta, place.layout)?;
// References and boxes: full dereferenceability/alignment checks.
350 _ if ty.is_box() || ty.is_region_ptr() => {
351 // Handle wide pointers.
352 // Check metadata early, for better diagnostics
354 try_validation!(self.ecx.ref_to_mplace(value), "undefined pointer", self.path);
355 if place.layout.is_unsized() {
356 self.check_wide_ptr_meta(place.meta, place.layout)?;
358 // Make sure this is dereferenceable and all.
359 let (size, align) = self
361 .size_and_align_of(place.meta, place.layout)?
362 // for the purpose of validity, consider foreign types to have
363 // alignment and size determined by the layout (size will be 0,
364 // alignment should take attributes into account).
365 .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
// Bounds/alignment check; on failure, translate each low-level error into a
// user-facing validation failure below.
366 let ptr: Option<_> = match self.ecx.memory.check_ptr_access_align(
370 CheckInAllocMsg::InboundsTest,
375 "{:?} did not pass access check for size {:?}, align {:?}",
376 place.ptr, size, align
379 err_unsup!(InvalidNullPointerUsage) => {
380 throw_validation_failure!("a NULL reference", self.path)
382 err_unsup!(AlignmentCheckFailed { required, has }) => {
383 throw_validation_failure!(
385 "an unaligned reference \
386 (required {} byte alignment but found {})",
393 err_unsup!(ReadBytesAsPointer) => throw_validation_failure!(
394 "a dangling reference (created from integer)",
397 _ => throw_validation_failure!(
398 "a dangling reference (not entirely in bounds)",
404 // Recursive checking
// Only in const mode (tracker present) do we descend through the reference.
405 if let Some(ref mut ref_tracking) = self.ref_tracking_for_consts {
406 if let Some(ptr) = ptr {
408 // Skip validation entirely for some external statics
409 let alloc_kind = self.ecx.tcx.alloc_map.lock().get(ptr.alloc_id);
410 if let Some(GlobalAlloc::Static(did)) = alloc_kind {
411 // `extern static` cannot be validated as they have no body.
412 // FIXME: Statics from other crates are also skipped.
413 // They might be checked at a different type, but for now we
414 // want to avoid recursing too deeply. This is not sound!
415 if !did.is_local() || self.ecx.tcx.is_foreign_item(did) {
420 // Proceed recursively even for ZST, no reason to skip them!
421 // `!` is a ZST and we want to validate it.
422 // Normalize before handing `place` to tracking because that will
423 // check for duplicates.
424 let place = if size.bytes() > 0 {
425 self.ecx.force_mplace_ptr(place).expect("we already bounds-checked")
429 let path = &self.path;
430 ref_tracking.track(place, || {
431 // We need to clone the path anyway, make sure it gets created
432 // with enough space for the additional `Deref`.
433 let mut new_path = Vec::with_capacity(path.len() + 1);
434 new_path.clone_from(path);
435 new_path.push(PathElem::Deref);
// (elided `ty::FnPtr` arm) function pointers must be defined and point at
// an actual function.
441 let value = value.to_scalar_or_undef();
442 let _fn = try_validation!(
443 value.not_undef().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
448 // FIXME: Check if the signature matches
450 // This should be all the (inhabited) primitive types
451 _ => bug!("Unexpected primitive type {}", value.layout.ty),
// NOTE(review): the `fn visit_scalar(&mut self, ...)` signature line is elided
// here. This method enforces the layout's `valid_range` (niches such as
// `NonNull`/`NonZero*`) on a scalar value.
458 op: OpTy<'tcx, M::PointerTag>,
459 scalar_layout: &layout::Scalar,
460 ) -> InterpResult<'tcx> {
461 let value = self.ecx.read_scalar(op)?;
462 let valid_range = &scalar_layout.valid_range;
463 let (lo, hi) = valid_range.clone().into_inner();
464 // Determine the allowed range
465 // `max_hi` is as big as the size fits
466 let max_hi = u128::max_value() >> (128 - op.layout.size.bits());
467 assert!(hi <= max_hi);
468 // We could also write `(hi + 1) % (max_hi + 1) == lo` but `max_hi + 1` overflows for `u128`
// Full range: every bit pattern is valid, nothing to check.
469 if (lo == 0 && hi == max_hi) || (hi + 1 == lo) {
473 // At least one value is excluded. Get the bits.
474 let value = try_validation!(
478 format_args!("something {}", wrapping_range_format(valid_range, max_hi),)
// The value may be a pointer rather than plain bits; handle both cases.
480 let bits = match value.to_bits_or_ptr(op.layout.size, self.ecx) {
482 if lo == 1 && hi == max_hi {
483 // Only NULL is the niche. So make sure the ptr is NOT NULL.
484 if self.ecx.memory.ptr_may_be_null(ptr) {
485 throw_validation_failure!(
486 "a potentially NULL pointer",
489 "something that cannot possibly fail to be {}",
490 wrapping_range_format(valid_range, max_hi)
// A pointer with a niche other than just-NULL: we cannot know its numeric
// value, so we must reject it.
496 // Conservatively, we reject, because the pointer *could* have a bad
498 throw_validation_failure!(
502 "something that cannot possibly fail to be {}",
503 wrapping_range_format(valid_range, max_hi)
510 // Now compare. This is slightly subtle because this is a special "wrap-around" range.
511 if wrapping_range_contains(&valid_range, bits) {
// Out of range: report what range would have been expected.
514 throw_validation_failure!(
517 format_args!("something {}", wrapping_range_format(valid_range, max_hi))
// Hook the validity checker into the generic value-walking machinery.
523 impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
524 for ValidityVisitor<'rt, 'mir, 'tcx, M>
// Values are visited as operands (immediates or places).
526 type V = OpTy<'tcx, M::PointerTag>;
529 fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
// NOTE(review): the `fn visit_field(...)` signature line is elided; this is
// the tail of its parameters and its body: compute the diagnostic path
// element for the field and recurse via `visit_elem`.
536 old_op: OpTy<'tcx, M::PointerTag>,
538 new_op: OpTy<'tcx, M::PointerTag>,
539 ) -> InterpResult<'tcx> {
540 let elem = self.aggregate_field_path_elem(old_op.layout, field);
541 self.visit_elem(new_op, elem)
// NOTE(review): the `fn visit_variant(...)` signature line is elided; this is
// the tail of its parameters and its body: label the downcast-to-variant step
// for diagnostics, then recurse into the variant's value.
547 old_op: OpTy<'tcx, M::PointerTag>,
548 variant_id: VariantIdx,
549 new_op: OpTy<'tcx, M::PointerTag>,
550 ) -> InterpResult<'tcx> {
551 let name = match old_op.layout.ty.kind {
552 ty::Adt(adt, _) => PathElem::Variant(adt.variants[variant_id].ident.name),
553 // Generators also have variants
554 ty::Generator(..) => PathElem::GeneratorState(variant_id),
// Only ADTs and generators have variants; anything else reaching here is a bug.
555 _ => bug!("Unexpected type with variant: {:?}", old_op.layout.ty),
557 self.visit_elem(new_op, name)
// Unions are not recursed into; the (elided) body presumably only sanity-checks
// the field count -- TODO confirm against the full file.
561 fn visit_union(&mut self, _v: Self::V, fields: usize) -> InterpResult<'tcx> {
562 // Empty unions are not accepted by rustc. That's great, it means we can
563 // use that as a signal for detecting primitives. Make sure
564 // we did not miss any primitive.
// Main entry of the visitor: validate one value, dispatching between the
// primitive leaf check, the recursive aggregate walk, and the final
// scalar-range (ABI) check.
570 fn visit_value(&mut self, op: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
571 trace!("visit_value: {:?}, {:?}", *op, op.layout);
573 if op.layout.abi.is_uninhabited() {
574 // Uninhabited types do not have sensible layout, stop right here.
575 throw_validation_failure!(
576 format_args!("a value of uninhabited type {:?}", op.layout.ty),
581 // Check primitive types. We do this after checking for uninhabited types,
582 // to exclude fieldless enums (that also appear as fieldless unions here).
583 // Primitives can have varying layout, so we check them separately and before aggregate
585 // It is CRITICAL that we get this check right, or we might be validating the wrong thing!
586 let primitive = match op.layout.fields {
587 // Primitives appear as Union with 0 fields - except for Boxes and fat pointers.
588 // (Fieldless enums also appear here, but they are uninhabited and thus handled above.)
589 layout::FieldPlacement::Union(0) => true,
590 _ => op.layout.ty.builtin_deref(true).is_some(),
593 // No need to recurse further or check scalar layout, this is a leaf type.
594 return self.visit_primitive(op);
597 // Recursively walk the type. Translate some possible errors to something nicer.
598 match self.walk_value(op) {
600 Err(err) => match err.kind {
// A bad discriminant or pointer-read during the walk becomes a proper
// validation failure rather than a raw interpreter error.
601 err_ub!(InvalidDiscriminant(val)) => {
602 throw_validation_failure!(val, self.path, "a valid enum discriminant")
604 err_unsup!(ReadPointerAsBytes) => {
605 throw_validation_failure!("a pointer", self.path, "plain (non-pointer) bytes")
607 _ => return Err(err),
611 // *After* all of this, check the ABI. We need to check the ABI to handle
612 // types like `NonNull` where the `Scalar` info is more restrictive than what
613 // the fields say (`rustc_layout_scalar_valid_range_start`).
614 // But in most cases, this will just propagate what the fields say,
615 // and then we want the error to point at the field -- so, first recurse,
618 // FIXME: We could avoid some redundant checks here. For newtypes wrapping
619 // scalars, we do the same check on every "level" (e.g., first we check
620 // MyNewtype and then the scalar in there).
621 match op.layout.abi {
622 layout::Abi::Uninhabited => unreachable!(), // checked above
623 layout::Abi::Scalar(ref scalar_layout) => {
624 self.visit_scalar(op, scalar_layout)?;
626 layout::Abi::ScalarPair { .. } | layout::Abi::Vector { .. } => {
627 // These have fields that we already visited above, so we already checked
628 // all their scalar-level restrictions.
629 // There is also no equivalent to `rustc_layout_scalar_valid_range_start`
630 // that would make skipping them here an issue.
632 layout::Abi::Aggregate { .. } => {
// NOTE(review): the `fn visit_aggregate(...)` signature line is elided; this
// is the tail of its parameters and its body. Validates a compound value,
// with fast paths for `str`, int/float arrays, and ZST arrays; everything
// else falls back to the element-by-element `walk_aggregate`.
642 op: OpTy<'tcx, M::PointerTag>,
643 fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
644 ) -> InterpResult<'tcx> {
645 match op.layout.ty.kind {
// (elided `ty::Str` arm header) strings: one UTF-8/initialization check
// for the whole buffer.
647 let mplace = op.assert_mem_place(self.ecx); // strings are never immediate
649 self.ecx.read_str(mplace),
650 "uninitialized or non-UTF-8 data in str",
654 ty::Array(tys, ..) | ty::Slice(tys)
656 // This optimization applies for types that can hold arbitrary bytes (such as
657 // integer and floating point types) or for structs or tuples with no fields.
658 // FIXME(wesleywiser) This logic could be extended further to arbitrary structs
659 // or tuples made up of integer/floating point types or inhabited ZSTs with no
662 ty::Int(..) | ty::Uint(..) | ty::Float(..) => true,
667 // Optimized handling for arrays of integer/float type.
669 // Arrays cannot be immediate, slices are never immediate.
670 let mplace = op.assert_mem_place(self.ecx);
671 // This is the length of the array/slice.
672 let len = mplace.len(self.ecx)?;
673 // Zero length slices have nothing to be checked.
677 // This is the element type size.
678 let layout = self.ecx.layout_of(tys)?;
679 // This is the size in bytes of the whole array.
680 let size = layout.size * len;
681 // Size is not 0, get a pointer.
682 let ptr = self.ecx.force_ptr(mplace.ptr)?;
684 // Optimization: we just check the entire range at once.
685 // NOTE: Keep this in sync with the handling of integer and float
686 // types above, in `visit_primitive`.
687 // In run-time mode, we accept pointers in here. This is actually more
688 // permissive than a per-element check would be, e.g., we accept
689 // an &[u8] that contains a pointer even though bytewise checking would
690 // reject it. However, that's good: We don't inherently want
691 // to reject those pointers, we just do not have the machinery to
692 // talk about parts of a pointer.
693 // We also accept undef, for consistency with the slow path.
694 match self.ecx.memory.get_raw(ptr.alloc_id)?.check_bytes(
// Pointers/undef are allowed only in run-time mode (no const tracker).
698 /*allow_ptr_and_undef*/ self.ref_tracking_for_consts.is_none(),
700 // In the happy case, we needn't check anything else.
702 // Some error happened, try to provide a more detailed description.
704 // For some errors we might be able to provide extra information
706 err_unsup!(ReadUndefBytes(offset)) => {
707 // Some byte was undefined, determine which
708 // element that byte belongs to so we can
710 let i = (offset.bytes() / layout.size.bytes()) as usize;
711 self.path.push(PathElem::ArrayElem(i));
713 throw_validation_failure!("undefined bytes", self.path)
715 // Other errors shouldn't be possible
716 _ => return Err(err),
721 // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
722 // of an array and not all of them, because there's only a single value of a specific
723 // ZST type, so either validation fails for all elements or none.
724 ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(tys)?.is_zst() => {
725 // Validate just the first element
726 self.walk_aggregate(op, fields.take(1))?
729 self.walk_aggregate(op, fields)? // default handler
// Public entry point: validate the value held by an operand.
736 impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
737 /// This function checks the data at `op`. `op` is assumed to cover valid memory if it
738 /// is an indirect operand.
739 /// It will error if the bits at the destination do not match the ones described by the layout.
741 /// `ref_tracking_for_consts` can be `None` to avoid recursive checking below references.
742 /// This also toggles between "run-time" (no recursion) and "compile-time" (with recursion)
743 /// validation (e.g., pointer values are fine in integers at runtime) and various other const
744 /// specific validation checks.
745 pub fn validate_operand(
747 op: OpTy<'tcx, M::PointerTag>,
749 ref_tracking_for_consts: Option<
750 &mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>,
752 ) -> InterpResult<'tcx> {
753 trace!("validate_operand: {:?}, {:?}", *op, op.layout.ty);
755 // Construct a visitor
// `path` is the starting diagnostic path (a parameter whose declaration is
// on an elided line).
756 let mut visitor = ValidityVisitor { path, ref_tracking_for_consts, ecx: self };
758 // Try to cast to ptr *once* instead of all the time.
759 let op = self.force_op_ptr(op).unwrap_or(op);
// Run the actual validation; errors carry the offending path in the message.
762 visitor.visit_value(op)