1 //! Check the validity invariant of a given value, and tell the user
2 //! where in the value it got violated.
3 //! In const context, this goes even further and tries to approximate const safety.
4 //! That's useful because it means other passes (e.g. promotion) can rely on `const`s
7 use std::convert::TryFrom;
8 use std::fmt::{Display, Write};
9 use std::num::NonZeroUsize;
11 use either::{Left, Right};
13 use rustc_ast::Mutability;
14 use rustc_data_structures::fx::FxHashSet;
16 use rustc_middle::mir::interpret::InterpError;
18 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
19 use rustc_span::symbol::{sym, Symbol};
20 use rustc_target::abi::{Abi, Scalar as ScalarAbi, Size, VariantIdx, Variants, WrappingRange};
24 // for the validation errors
25 use super::UndefinedBehaviorInfo::*;
27 CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine,
28 MemPlaceMeta, OpTy, Scalar, ValueVisitor,
/// Emits a `ValidationFailure` UB error for the value at `$where`, building an
/// "encountered ..., but expected ..." message from the given format args and
/// rendering the access path for the diagnostic.
macro_rules! throw_validation_failure {
    ($where:expr, { $( $what_fmt:expr ),+ } $( expected { $( $expected_fmt:expr ),+ } )?) => {{
        // Build the human-readable message piece by piece.
        let mut msg = String::new();
        msg.push_str("encountered ");
        write!(&mut msg, $($what_fmt),+).unwrap();
        msg.push_str(", but expected ");
        write!(&mut msg, $($expected_fmt),+).unwrap();
        // Use untrimmed paths so type names in the diagnostic are unambiguous.
        let path = rustc_middle::ty::print::with_no_trimmed_paths!({
            // Only render a path if there is one (the root value has an empty path).
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
        // Raise the actual undefined-behavior error carrying path and message.
        throw_ub!(ValidationFailure { path, msg })
54 /// If $e throws an error matching the pattern, throw a validation failure.
55 /// Other errors are passed back to the caller, unchanged -- and if they reach the root of
56 /// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
57 /// This lets you use the patterns as a kind of validation list, asserting which errors
58 /// can possibly happen:
60 /// ```ignore(illustrative)
61 /// let v = try_validation!(some_fn(), some_path, {
62 /// Foo | Bar | Baz => { "some failure" },
66 /// The patterns must be of type `UndefinedBehaviorInfo`.
67 /// An additional expected parameter can also be added to the failure message:
69 /// ```ignore(illustrative)
70 /// let v = try_validation!(some_fn(), some_path, {
71 /// Foo | Bar | Baz => { "some failure" } expected { "something that wasn't a failure" },
75 /// An additional nicety is that both parameters actually take format args, so you can just write
76 /// the format string in directly:
78 /// ```ignore(illustrative)
79 /// let v = try_validation!(some_fn(), some_path, {
80 /// Foo | Bar | Baz => { "{:?}", some_failure } expected { "{}", expected_value },
/// Evaluates `$e`; if it fails with one of the listed `UndefinedBehaviorInfo`
/// patterns, converts the error into a validation failure at `$where` with the
/// given message. All other errors are propagated unchanged to the caller.
macro_rules! try_validation {
    ($e:expr, $where:expr,
    $( $( $p:pat_param )|+ => { $( $what_fmt:expr ),+ } $( expected { $( $expected_fmt:expr ),+ } )? ),+ $(,)?
        // We catch the error and turn it into a validation failure. We are okay with
        // allocation here as this can only slow down builds that fail anyway.
        Err(e) => match e.kind() {
            // Matched UB errors become nicely formatted validation failures.
            InterpError::UndefinedBehavior($($p)|+) =>
                throw_validation_failure!(
                    { $( $what_fmt ),+ } $( expected { $( $expected_fmt ),+ } )?
            // Everything else (e.g. `InvalidProgram`) is passed back unchanged.
            #[allow(unreachable_patterns)]
            _ => Err::<!, _>(e)?,
107 /// We want to show a nice path to the invalid field for diagnostics,
108 /// but avoid string operations in the happy case where no error happens.
109 /// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
110 /// need to later print something for the user.
111 #[derive(Copy, Clone, Debug)]
115 GeneratorState(VariantIdx),
/// Extra things to check for during validation of CTFE results.
pub enum CtfeValidationMode {
    /// Regular validation, nothing special happening.
    /// Validation of a `const`.
    /// `inner` says if this is an inner, indirect allocation (as opposed to the top-level const
    /// allocation). Being an inner allocation makes a difference because the top-level allocation
    /// of a `const` is copied for each use, but the inner allocations are implicitly shared.
    /// `allow_static_ptrs` says if pointers to statics are permitted (which is the case for promoteds in statics).
    Const { inner: bool, allow_static_ptrs: bool },
/// State for tracking recursive validation of references
pub struct RefTracking<T, PATH = ()> {
    // Places we have already encountered; used to break reference cycles.
    pub seen: FxHashSet<T>,
    // Work list of places still to validate, each paired with the path at
    // which it was discovered (for diagnostics).
    pub todo: Vec<(T, PATH)>,
impl<T: Copy + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
    /// Creates an empty tracker with nothing seen and nothing to do.
    pub fn empty() -> Self {
        RefTracking { seen: FxHashSet::default(), todo: vec![] }
    /// Creates a tracker seeded with `op`: it is both marked as seen and
    /// queued for validation (with the default, i.e. empty, path).
    pub fn new(op: T) -> Self {
        let mut ref_tracking_for_consts =
            RefTracking { seen: FxHashSet::default(), todo: vec![(op, PATH::default())] };
        ref_tracking_for_consts.seen.insert(op);
        ref_tracking_for_consts
    /// Queues `op` for validation unless it was seen before. The `path`
    /// closure is only invoked when the element is actually new, avoiding
    /// path construction in the common already-seen case.
    pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
        if self.seen.insert(op) {
            trace!("Recursing below ptr {:#?}", op);
            // Remember to come back to this later.
            self.todo.push((op, path));
/// Renders a field-access path (e.g. `.field[3].<deref>`) into `out` for use
/// in validation error messages.
fn write_path(out: &mut String, path: &[PathElem]) {
    use self::PathElem::*;

    for elem in path.iter() {
        Field(name) => write!(out, ".{}", name),
        EnumTag => write!(out, ".<enum-tag>"),
        Variant(name) => write!(out, ".<enum-variant({})>", name),
        GeneratorTag => write!(out, ".<generator-tag>"),
        GeneratorState(idx) => write!(out, ".<generator-state({})>", idx.index()),
        CapturedVar(name) => write!(out, ".<captured-var({})>", name),
        TupleElem(idx) => write!(out, ".{}", idx),
        ArrayElem(idx) => write!(out, "[{}]", idx),
        // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
        // some of the other items here also are not Rust syntax. Actually we can't
        // even use the usual syntax because we are just showing the projections,
        Deref => write!(out, ".<deref>"),
        DynDowncast => write!(out, ".<dyn-downcast>"),
189 // Formats such that a sentence like "expected something {}" to mean
190 // "expected something <in the given range>" makes sense.
191 fn wrapping_range_format(r: WrappingRange, max_hi: u128) -> String {
192 let WrappingRange { start: lo, end: hi } = r;
193 assert!(hi <= max_hi);
195 format!("less or equal to {}, or greater or equal to {}", hi, lo)
197 format!("equal to {}", lo)
199 assert!(hi < max_hi, "should not be printing if the range covers everything");
200 format!("less or equal to {}", hi)
201 } else if hi == max_hi {
202 assert!(lo > 0, "should not be printing if the range covers everything");
203 format!("greater or equal to {}", lo)
205 format!("in the range {:?}", r)
// The visitor that walks a value and checks the validity invariant at every
// level; shared between CTFE validation and runtime (Miri) validation.
struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// The `path` may be pushed to, but the part that is present when a function
    /// starts must not be changed! `visit_fields` and `visit_array` rely on
    /// this stack discipline.
    // When `Some`, references are recorded here for recursive validation by the caller.
    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
    /// `None` indicates this is not validating for CTFE (but for runtime).
    ctfe_mode: Option<CtfeValidationMode>,
    // The interpreter context we validate within.
    ecx: &'rt InterpCx<'mir, 'tcx, M>,
impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
    /// Computes the diagnostic `PathElem` for projecting `layout` to field
    /// number `field` -- e.g. a named struct field, tuple index, captured
    /// variable, enum tag, or array element.
    fn aggregate_field_path_elem(&mut self, layout: TyAndLayout<'tcx>, field: usize) -> PathElem {
        // First, check if we are projecting to a variant.
        match layout.variants {
            Variants::Multiple { tag_field, .. } => {
                if tag_field == field {
                    return match layout.ty.kind() {
                        ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
                        ty::Generator(..) => PathElem::GeneratorTag,
                        _ => bug!("non-variant type {:?}", layout.ty),
            Variants::Single { .. } => {}

        // Now we know we are projecting to a field, so figure out which one.
        match layout.ty.kind() {
            // generators and closures.
            ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => {
                // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
                // https://github.com/rust-lang/project-rfc-2229/issues/46
                if let Some(local_def_id) = def_id.as_local() {
                    // Look up the closure's capture information to recover the
                    // source-level name of the captured variable.
                    let tables = self.ecx.tcx.typeck(local_def_id);
                    if let Some(captured_place) =
                        tables.closure_min_captures_flattened(local_def_id).nth(field)
                        // Sometimes the index is beyond the number of upvars (seen
                        let var_hir_id = captured_place.get_root_variable();
                        let node = self.ecx.tcx.hir().get(var_hir_id);
                        if let hir::Node::Pat(pat) = node {
                            if let hir::PatKind::Binding(_, _, ident, _) = pat.kind {
                                name = Some(ident.name);
                PathElem::CapturedVar(name.unwrap_or_else(|| {
                    // Fall back to showing the field index.
            ty::Tuple(_) => PathElem::TupleElem(field),
            ty::Adt(def, ..) if def.is_enum() => {
                // we might be projecting *to* a variant, or to a field *in* a variant.
                match layout.variants {
                    Variants::Single { index } => {
                        // Inside a variant: name the field within that variant.
                        PathElem::Field(def.variant(index).fields[field].name)
                    Variants::Multiple { .. } => bug!("we handled variants above"),
            ty::Adt(def, _) => PathElem::Field(def.non_enum_variant().fields[field].name),
            ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),
            ty::Dynamic(..) => PathElem::DynDowncast,
            // nothing else has an aggregate layout
            _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
        // (`with_elem`) Runs `f` with `elem` pushed onto the diagnostic path,
        // restoring the path length afterwards (stack discipline).
        f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
    ) -> InterpResult<'tcx, R> {
        // Remember the old state
        let path_len = self.path.len();
        // Record new element
        self.path.push(elem);
        // Undo the path extension so siblings see the original path.
        self.path.truncate(path_len);

        // (`read_immediate`) Reads `op` as an immediate, turning uninitialized
        // memory into a validation failure mentioning what was `expected`.
        op: &OpTy<'tcx, M::Provenance>,
        expected: impl Display,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        self.ecx.read_immediate(op),
        InvalidUninitBytes(None) => { "uninitialized memory" } expected { "{expected}" }

        // (`read_scalar`) Like `read_immediate`, but extracts the scalar.
        op: &OpTy<'tcx, M::Provenance>,
        expected: impl Display,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        Ok(self.read_immediate(op, expected)?.to_scalar())
    /// Checks the metadata half of a wide pointer: a valid vtable pointer for
    /// `dyn Trait` pointees, a length for slices/str. Foreign (extern type)
    /// tails carry no metadata and need no check.
    fn check_wide_ptr_meta(
        meta: MemPlaceMeta<M::Provenance>,
        pointee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx> {
        // The unsized tail determines what kind of metadata to expect.
        let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env);
        let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
        // Make sure it is a genuine vtable pointer.
        let (_ty, _trait) = try_validation!(
            self.ecx.get_ptr_vtable(vtable),
            DanglingIntPointer(..) |
            InvalidVTablePointer(..) =>
                { "{vtable}" } expected { "a vtable pointer" },
        // FIXME: check if the type/trait match what ty::Dynamic says?
        ty::Slice(..) | ty::Str => {
            // Slice metadata must simply be a readable machine-usize length.
            let _len = meta.unwrap_meta().to_machine_usize(self.ecx)?;
            // We do not check that `len * elem_size <= isize::MAX`:
            // that is only required for references, and there it falls out of the
            // "dereferenceable" check performed by Stacked Borrows.
        // Unsized, but not wide.
        _ => bug!("Unexpected unsized type tail: {:?}", tail),
    /// Check a reference or `Box`.
    //
    // Validates the pointer itself (metadata, dereferenceability, alignment,
    // non-dangling), rejects pointers to uninhabited types, enforces the CTFE
    // rules about statics and mutable memory, and queues the pointee for
    // recursive validation when `ref_tracking` is enabled.
    fn check_safe_pointer(
        value: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.ecx.ref_to_mplace(&self.read_immediate(value, format_args!("a {kind}"))?)?;
        // Handle wide pointers.
        // Check metadata early, for better diagnostics
        if place.layout.is_unsized() {
            self.check_wide_ptr_meta(place.meta, place.layout)?;
        // Make sure this is dereferenceable and all.
        let size_and_align = try_validation!(
            self.ecx.size_and_align_of_mplace(&place),
            InvalidMeta(msg) => { "invalid {} metadata: {}", kind, msg },
        let (size, align) = size_and_align
            // for the purpose of validity, consider foreign types to have
            // alignment and size determined by the layout (size will be 0,
            // alignment should take attributes into account).
            .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
        // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
        self.ecx.check_ptr_access_align(
            CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message
        // Translate each low-level pointer error into a targeted diagnostic.
        AlignmentCheckFailed { required, has } =>
            "an unaligned {kind} (required {} byte alignment but found {})",
        DanglingIntPointer(0, _) =>
        DanglingIntPointer(i, _) =>
            { "a dangling {kind} (address {i:#x} is unallocated)" },
        PointerOutOfBounds { .. } =>
            { "a dangling {kind} (going beyond the bounds of its allocation)" },
        // This cannot happen during const-eval (because interning already detects
        // dangling pointers), but it can happen in Miri.
        PointerUseAfterFree(..) =>
            { "a dangling {kind} (use-after-free)" },
        // Do not allow pointers to uninhabited types.
        if place.layout.abi.is_uninhabited() {
            throw_validation_failure!(self.path,
                { "a {kind} pointing to uninhabited type {}", place.layout.ty }
        // Recursive checking
        if let Some(ref mut ref_tracking) = self.ref_tracking {
            // Proceed recursively even for ZST, no reason to skip them!
            // `!` is a ZST and we want to validate it.
            if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr) {
                // Let's see what kind of memory this points to.
                let alloc_kind = self.ecx.tcx.try_get_global_alloc(alloc_id);
                Some(GlobalAlloc::Static(did)) => {
                    // Special handling for pointers to statics (irrespective of their type).
                    assert!(!self.ecx.tcx.is_thread_local_static(did));
                    assert!(self.ecx.tcx.is_static(did));
                    Some(CtfeValidationMode::Const { allow_static_ptrs: false, .. })
                    // See const_eval::machine::MemoryExtra::can_access_statics for why
                    // this check is so important.
                    // This check is reachable when the const just referenced the static,
                    // but never read it (so we never entered `before_access_global`).
                    throw_validation_failure!(self.path,
                        { "a {} pointing to a static variable in a constant", kind }
                    // We skip recursively checking other statics. These statics must be sound by
                    // themselves, and the only way to get broken statics here is by using
                    // The reasons we don't check other statics is twofold. For one, in all
                    // sound cases, the static was already validated on its own, and second, we
                    // trigger cycle errors if we try to compute the value of the other static
                    // and that static refers back to us.
                    // We might miss const-invalid data,
                    // but things are still sound otherwise (in particular re: consts
                    // referring to statics).
                Some(GlobalAlloc::Memory(alloc)) => {
                    if alloc.inner().mutability == Mutability::Mut
                        && matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. }))
                        // This should be unreachable, but if someone manages to copy a pointer
                        // out of a `static`, then that pointer might point to mutable memory,
                        // and we would catch that here.
                        throw_validation_failure!(self.path,
                            { "a {} pointing to mutable memory in a constant", kind }
                // Nothing to check for these.
                None | Some(GlobalAlloc::Function(..) | GlobalAlloc::VTable(..)) => {}
            // Queue the pointee for later validation, with `.<deref>` appended
            // to the current path.
            let path = &self.path;
            ref_tracking.track(place, || {
                // We need to clone the path anyway, make sure it gets created
                // with enough space for the additional `Deref`.
                let mut new_path = Vec::with_capacity(path.len() + 1);
                new_path.extend(path);
                new_path.push(PathElem::Deref);
    /// Check if this is a value of primitive type, and if yes check the validity of the value
    /// at that type. Return `true` if the type is indeed primitive.
    fn try_visit_primitive(
        value: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, bool> {
        // Go over all the primitive types
        let ty = value.layout.ty;
        // bool: must be exactly 0 or 1.
        let value = self.read_scalar(value, "a boolean")?;
            { "{:x}", value } expected { "a boolean" },
        // char: must be a valid unicode scalar value.
        let value = self.read_scalar(value, "a unicode scalar value")?;
            { "{:x}", value } expected { "a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)" },
        ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
            // NOTE: Keep this in sync with the array optimization for int/float
            let value = self.read_scalar(
                if matches!(ty.kind(), ty::Float(..)) {
                    "a floating point number"
            // As a special exception we *do* match on a `Scalar` here, since we truly want
            // to know its underlying representation (and *not* cast it to an integer).
            if matches!(value, Scalar::Ptr(..)) {
                throw_validation_failure!(self.path,
                    { "{:x}", value } expected { "plain (non-pointer) bytes" }
            // We are conservative with uninit for integers, but try to
            // actually enforce the strict rules for raw pointers (mostly because
            // that lets us re-use `ref_to_mplace`).
            self.ecx.ref_to_mplace(&self.read_immediate(value, "a raw pointer")?)?;
            if place.layout.is_unsized() {
                self.check_wide_ptr_meta(place.meta, place.layout)?;
        ty::Ref(_, ty, mutbl) => {
            if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. }))
                && *mutbl == Mutability::Mut
                // A mutable reference inside a const? That does not seem right (except if it is
                let layout = self.ecx.layout_of(*ty)?;
                if !layout.is_zst() {
                    throw_validation_failure!(self.path, { "mutable reference in a `const`" });
            // References get the full safe-pointer treatment (deref + recurse).
            self.check_safe_pointer(value, "reference")?;
            let value = self.read_scalar(value, "a function pointer")?;
            // If we check references recursively, also check that this points to a function.
            if let Some(_) = self.ref_tracking {
                let ptr = value.to_pointer(self.ecx)?;
                let _fn = try_validation!(
                    self.ecx.get_ptr_fn(ptr),
                    DanglingIntPointer(..) |
                    InvalidFunctionPointer(..) =>
                        { "{ptr}" } expected { "a function pointer" },
                // FIXME: Check if the signature matches
            // Otherwise (for standalone Miri), we have to still check it to be non-null.
            if self.ecx.scalar_may_be_null(value)? {
                throw_validation_failure!(self.path, { "a null function pointer" });
        ty::Never => throw_validation_failure!(self.path, { "a value of the never type `!`" }),
        ty::Foreign(..) | ty::FnDef(..) => {
        // The above should be all the primitive types. The rest is compound, we
        // check them by visiting their fields/variants.
        | ty::Generator(..) => Ok(false),
        // Some types only occur during typechecking, they have no layout.
        // We should not see them here and we could not check them anyway.
        | ty::Placeholder(..)
        | ty::GeneratorWitness(..) => bug!("Encountered invalid type {:?}", ty),
        // (`visit_scalar`) Checks that `scalar` lies in the valid range of
        // `scalar_layout` (e.g. niche ranges like `NonZero*`/`NonNull`).
        scalar: Scalar<M::Provenance>,
        scalar_layout: ScalarAbi,
    ) -> InterpResult<'tcx> {
        let size = scalar_layout.size(self.ecx);
        let valid_range = scalar_layout.valid_range(self.ecx);
        let WrappingRange { start, end } = valid_range;
        let max_value = size.unsigned_int_max();
        assert!(end <= max_value);
        // Try to interpret the scalar as raw bits; pointers cannot be.
        let bits = match scalar.try_to_int() {
            Ok(int) => int.assert_bits(size),
            // So this is a pointer then, and casting to an int failed.
            // Can only happen during CTFE.
            // We support 2 kinds of ranges here: full range, and excluding zero.
            if start == 1 && end == max_value {
                // Only null is the niche. So make sure the ptr is NOT null.
                if self.ecx.scalar_may_be_null(scalar)? {
                    throw_validation_failure!(self.path,
                        { "a potentially null pointer" }
                        "something that cannot possibly fail to be {}",
                        wrapping_range_format(valid_range, max_value)
            } else if scalar_layout.is_always_valid(self.ecx) {
                // Easy. (This is reachable if `enforce_number_validity` is set.)
                // Conservatively, we reject, because the pointer *could* have a bad
                throw_validation_failure!(self.path,
                    "something that cannot possibly fail to be {}",
                    wrapping_range_format(valid_range, max_value)
        // Integer case: simply check range membership.
        if valid_range.contains(bits) {
            throw_validation_failure!(self.path,
                expected { "something {}", wrapping_range_format(valid_range, max_value) }
impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
    for ValidityVisitor<'rt, 'mir, 'tcx, M>
    type V = OpTy<'tcx, M::Provenance>;

    // Gives the visitor framework access to the interpreter context.
    fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {

    // Reads the discriminant of an enum/generator, converting invalid or
    // uninitialized tags into validation failures at the `.<enum-tag>` path.
    fn read_discriminant(
        op: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, VariantIdx> {
        self.with_elem(PathElem::EnumTag, move |this| {
            this.ecx.read_discriminant(op),
            { "{:x}", val } expected { "a valid enum tag" },
            InvalidUninitBytes(None) =>
                { "uninitialized bytes" } expected { "a valid enum tag" },

        // (`visit_field`) Recurses into a field, extending the diagnostic path.
        old_op: &OpTy<'tcx, M::Provenance>,
        new_op: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let elem = self.aggregate_field_path_elem(old_op.layout, field);
        self.with_elem(elem, move |this| this.visit_value(new_op))

        // (`visit_variant`) Recurses into an enum/generator variant, with the
        // variant name (or generator state index) on the path.
        old_op: &OpTy<'tcx, M::Provenance>,
        variant_id: VariantIdx,
        new_op: &OpTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let name = match old_op.layout.ty.kind() {
            ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
            // Generators also have variants
            ty::Generator(..) => PathElem::GeneratorState(variant_id),
            _ => bug!("Unexpected type with variant: {:?}", old_op.layout.ty),
        self.with_elem(name, move |this| this.visit_value(new_op))
        // (`visit_union`) Unions have no validity requirements of their own;
        // we only enforce the const-specific `UnsafeCell` restriction.
        op: &OpTy<'tcx, M::Provenance>,
        _fields: NonZeroUsize,
    ) -> InterpResult<'tcx> {
        // Special check preventing `UnsafeCell` inside unions in the inner part of constants.
        if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { inner: true, .. })) {
            if !op.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.param_env) {
                throw_validation_failure!(self.path, { "`UnsafeCell` in a `const`" });

    // `Box` gets the same pointer checks as a safe reference.
    fn visit_box(&mut self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
        self.check_safe_pointer(op, "box")?;
744 fn visit_value(&mut self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
745 trace!("visit_value: {:?}, {:?}", *op, op.layout);
747 // Check primitive types -- the leaves of our recursive descent.
748 if self.try_visit_primitive(op)? {
752 // Special check preventing `UnsafeCell` in the inner part of constants
753 if let Some(def) = op.layout.ty.ty_adt_def() {
754 if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { inner: true, .. }))
755 && def.is_unsafe_cell()
757 throw_validation_failure!(self.path, { "`UnsafeCell` in a `const`" });
761 // Recursively walk the value at its type.
762 self.walk_value(op)?;
764 // *After* all of this, check the ABI. We need to check the ABI to handle
765 // types like `NonNull` where the `Scalar` info is more restrictive than what
766 // the fields say (`rustc_layout_scalar_valid_range_start`).
767 // But in most cases, this will just propagate what the fields say,
768 // and then we want the error to point at the field -- so, first recurse,
771 // FIXME: We could avoid some redundant checks here. For newtypes wrapping
772 // scalars, we do the same check on every "level" (e.g., first we check
773 // MyNewtype and then the scalar in there).
774 match op.layout.abi {
775 Abi::Uninhabited => {
776 throw_validation_failure!(self.path,
777 { "a value of uninhabited type {:?}", op.layout.ty }
780 Abi::Scalar(scalar_layout) => {
781 if !scalar_layout.is_uninit_valid() {
782 // There is something to check here.
783 let scalar = self.read_scalar(op, "initiailized scalar value")?;
784 self.visit_scalar(scalar, scalar_layout)?;
787 Abi::ScalarPair(a_layout, b_layout) => {
788 // There is no `rustc_layout_scalar_valid_range_start` for pairs, so
789 // we would validate these things as we descend into the fields,
790 // but that can miss bugs in layout computation. Layout computation
791 // is subtle due to enums having ScalarPair layout, where one field
792 // is the discriminant.
793 if cfg!(debug_assertions)
794 && !a_layout.is_uninit_valid()
795 && !b_layout.is_uninit_valid()
797 // We can only proceed if *both* scalars need to be initialized.
798 // FIXME: find a way to also check ScalarPair when one side can be uninit but
799 // the other must be init.
801 self.read_immediate(op, "initiailized scalar value")?.to_scalar_pair();
802 self.visit_scalar(a, a_layout)?;
803 self.visit_scalar(b, b_layout)?;
806 Abi::Vector { .. } => {
807 // No checks here, we assume layout computation gets this right.
808 // (This is harder to check since Miri does not represent these as `Immediate`. We
809 // also cannot use field projections since this might be a newtype around a vector.)
811 Abi::Aggregate { .. } => {
        // (`visit_aggregate`) Validates aggregates, with fast paths for `str`,
        // arrays/slices of integers/floats (checked as one memory range), and
        // arrays/slices of ZSTs (only the first element needs checking).
        op: &OpTy<'tcx, M::Provenance>,
        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
    ) -> InterpResult<'tcx> {
        match op.layout.ty.kind() {
            let mplace = op.assert_mem_place(); // strings are unsized and hence never immediate
            let len = mplace.len(self.ecx)?;
            self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr, Size::from_bytes(len)),
            InvalidUninitBytes(..) => { "uninitialized data in `str`" },
            ty::Array(tys, ..) | ty::Slice(tys)
                // This optimization applies for types that can hold arbitrary bytes (such as
                // integer and floating point types) or for structs or tuples with no fields.
                // FIXME(wesleywiser) This logic could be extended further to arbitrary structs
                // or tuples made up of integer/floating point types or inhabited ZSTs with no
                if matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))
                // Optimized handling for arrays of integer/float type.
                // This is the length of the array/slice.
                let len = op.len(self.ecx)?;
                // This is the element type size.
                let layout = self.ecx.layout_of(*tys)?;
                // This is the size in bytes of the whole array. (This checks for overflow.)
                let size = layout.size * len;
                // If the size is 0, there is nothing to check.
                // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)
                if size == Size::ZERO {
                // Now that we definitely have a non-ZST array, we know it lives in memory.
                let mplace = match op.as_mplace_or_imm() {
                    Left(mplace) => mplace,
                    Right(imm) => match *imm {
                        throw_validation_failure!(self.path, { "uninitialized bytes" }),
                        Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
                            bug!("arrays/slices can never have Scalar/ScalarPair layout"),
                // Optimization: we just check the entire range at once.
                // NOTE: Keep this in sync with the handling of integer and float
                // types above, in `visit_primitive`.
                // In run-time mode, we accept pointers in here. This is actually more
                // permissive than a per-element check would be, e.g., we accept
                // a &[u8] that contains a pointer even though bytewise checking would
                // reject it. However, that's good: We don't inherently want
                // to reject those pointers, we just do not have the machinery to
                // talk about parts of a pointer.
                // We also accept uninit, for consistency with the slow path.
                let alloc = self.ecx.get_ptr_alloc(mplace.ptr, size, mplace.align)?.expect("we already excluded size 0");
                match alloc.get_bytes_strip_provenance() {
                    // In the happy case, we needn't check anything else.
                    // Some error happened, try to provide a more detailed description.
                    // For some errors we might be able to provide extra information.
                    // (This custom logic does not fit the `try_validation!` macro.)
                    err_ub!(InvalidUninitBytes(Some((_alloc_id, access)))) => {
                        // Some byte was uninitialized, determine which
                        // element that byte belongs to so we can
                        let i = usize::try_from(
                            access.uninit.start.bytes() / layout.size.bytes(),
                        self.path.push(PathElem::ArrayElem(i));
                        throw_validation_failure!(self.path, { "uninitialized bytes" })
                    // Propagate upwards (that will also check for unexpected errors).
                    _ => return Err(err),
            // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
            // of an array and not all of them, because there's only a single value of a specific
            // ZST type, so either validation fails for all elements or none.
            ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
                // Validate just the first element (if any).
                self.walk_aggregate(op, fields.take(1))?
            self.walk_aggregate(op, fields)? // default handler
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    // Shared driver for CTFE and runtime validation: constructs the visitor,
    // runs it, and normalizes the resulting errors (only `ValidationFailure`
    // and non-UB errors may escape).
    fn validate_operand_internal(
        op: &OpTy<'tcx, M::Provenance>,
        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
        ctfe_mode: Option<CtfeValidationMode>,
    ) -> InterpResult<'tcx> {
        trace!("validate_operand_internal: {:?}, {:?}", *op, op.layout.ty);

        // Construct a visitor
        let mut visitor = ValidityVisitor { path, ref_tracking, ctfe_mode, ecx: self };

        match visitor.visit_value(&op) {
            // Pass through validation failures.
            Err(err) if matches!(err.kind(), err_ub!(ValidationFailure { .. })) => Err(err),
            // Complain about any other kind of UB error -- those are bad because we'd like to
            // report them in a way that shows *where* in the value the issue lies.
            Err(err) if matches!(err.kind(), InterpError::UndefinedBehavior(_)) => {
                err.print_backtrace();
                bug!("Unexpected Undefined Behavior error during validation: {}", err);
            // Pass through everything else.
            Err(err) => Err(err),
    /// This function checks the data at `op` to be const-valid.
    /// `op` is assumed to cover valid memory if it is an indirect operand.
    /// It will error if the bits at the destination do not match the ones described by the layout.
    /// `ref_tracking` is used to record references that we encounter so that they
    /// can be checked recursively by an outside driving loop.
    /// `constant` controls whether this must satisfy the rules for constants:
    /// - no pointers to statics.
    /// - no `UnsafeCell` or non-ZST `&mut`.
    pub fn const_validate_operand(
        op: &OpTy<'tcx, M::Provenance>,
        ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>,
        ctfe_mode: CtfeValidationMode,
    ) -> InterpResult<'tcx> {
        // Thin wrapper: CTFE validation always tracks references recursively.
        self.validate_operand_internal(op, path, Some(ref_tracking), Some(ctfe_mode))

    /// This function checks the data at `op` to be runtime-valid.
    /// `op` is assumed to cover valid memory if it is an indirect operand.
    /// It will error if the bits at the destination do not match the ones described by the layout.
    pub fn validate_operand(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
        // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
        // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
        // value, it rules out things like `UnsafeCell` in awkward places. It also can make checking
        // recurse through references which, for now, we don't want here, either.
        self.validate_operand_internal(op, vec![], None, None)