1 //! Check the validity invariant of a given value, and tell the user
2 //! where in the value it got violated.
3 //! In const context, this goes even further and tries to approximate const safety.
4 //! That's useful because it means other passes (e.g. promotion) can rely on `const`s
7 use std::convert::TryFrom;
9 use std::num::NonZeroUsize;
11 use rustc_data_structures::fx::FxHashSet;
13 use rustc_middle::mir::interpret::InterpError;
15 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
16 use rustc_span::symbol::{sym, Symbol};
17 use rustc_span::DUMMY_SP;
18 use rustc_target::abi::{Abi, Scalar as ScalarAbi, Size, VariantIdx, Variants, WrappingRange};
23 alloc_range, CheckInAllocMsg, GlobalAlloc, Immediate, InterpCx, InterpResult, MPlaceTy,
24 Machine, MemPlaceMeta, OpTy, Scalar, ScalarMaybeUninit, ValueVisitor,
// Builds the message "encountered <what>, but expected <expected>" plus the path to the
// offending (sub)value, then raises a `ValidationFailure` UB error via `throw_ub!`.
27 macro_rules! throw_validation_failure {
28 ($where:expr, { $( $what_fmt:expr ),+ } $( expected { $( $expected_fmt:expr ),+ } )?) => {{
29 let mut msg = String::new();
30 msg.push_str("encountered ");
31 write!(&mut msg, $($what_fmt),+).unwrap();
// The `expected { .. }` clause is optional; when given, it is appended to the message.
33 msg.push_str(", but expected ");
34 write!(&mut msg, $($expected_fmt),+).unwrap();
// Render the path with untrimmed type paths so diagnostics show full names.
36 let path = rustc_middle::ty::print::with_no_trimmed_paths!({
38 if !where_.is_empty() {
39 let mut path = String::new();
40 write_path(&mut path, where_);
46 throw_ub!(ValidationFailure { path, msg })
50 /// If $e throws an error matching the pattern, throw a validation failure.
51 /// Other errors are passed back to the caller, unchanged -- and if they reach the root of
52 /// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
53 /// This lets you use the patterns as a kind of validation list, asserting which errors
54 /// can possibly happen:
57 /// let v = try_validation!(some_fn(), some_path, {
58 ///     Foo | Bar | Baz => { "some failure" },
62 /// An additional expected parameter can also be added to the failure message:
65 /// let v = try_validation!(some_fn(), some_path, {
66 ///     Foo | Bar | Baz => { "some failure" } expected { "something that wasn't a failure" },
70 /// An additional nicety is that both parameters actually take format args, so you can just write
71 /// the format string in directly:
74 /// let v = try_validation!(some_fn(), some_path, {
75 ///     Foo | Bar | Baz => { "{:?}", some_failure } expected { "{}", expected_value },
79 macro_rules! try_validation {
80 ($e:expr, $where:expr,
// Each arm maps one or more `InterpError` patterns to a failure message (and optional
// "expected" message), mirroring `throw_validation_failure!`'s grammar.
81 $( $( $p:pat_param )|+ => { $( $what_fmt:expr ),+ } $( expected { $( $expected_fmt:expr ),+ } )? ),+ $(,)?
85 // We catch the error and turn it into a validation failure. We are okay with
86 // allocation here as this can only slow down builds that fail anyway.
87 Err(e) => match e.kind() {
90 throw_validation_failure!(
92 { $( $what_fmt ),+ } $( expected { $( $expected_fmt ),+ } )?
// Any error kind not listed in the arms above is passed through unchanged.
95 #[allow(unreachable_patterns)]
102 /// We want to show a nice path to the invalid field for diagnostics,
103 /// but avoid string operations in the happy case where no error happens.
104 /// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
105 /// need to later print something for the user.
106 #[derive(Copy, Clone, Debug)]
// NOTE(review): only part of this enum is visible here; `write_path` below matches on
// Field, EnumTag, Variant, GeneratorTag, GeneratorState, CapturedVar, TupleElem,
// ArrayElem, Deref and DynDowncast, so those variants presumably all exist.
110 GeneratorState(VariantIdx),
120 /// Extra things to check for during validation of CTFE results.
121 /// Stored as `Option<CtfeValidationMode>` in `ValidityVisitor`; `None` there means
121 /// we are validating for runtime (Miri), not CTFE.
121 pub enum CtfeValidationMode {
122 /// Regular validation, nothing special happening.
124 /// Validation of a `const`.
125 /// `inner` says if this is an inner, indirect allocation (as opposed to the top-level const
126 /// allocation). Being an inner allocation makes a difference because the top-level allocation
127 /// of a `const` is copied for each use, but the inner allocations are implicitly shared.
128 /// `allow_static_ptrs` says if pointers to statics are permitted (which is the case for promoteds in statics).
129 Const { inner: bool, allow_static_ptrs: bool },
132 /// State for tracking recursive validation of references
133 pub struct RefTracking<T, PATH = ()> {
/// Places that have already been enqueued; prevents endless recursion through cycles.
134 pub seen: FxHashSet<T>,
/// Work list of places still to be validated, with the path that leads to each.
135 pub todo: Vec<(T, PATH)>,
138 impl<T: Copy + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
/// Creates a tracker with nothing seen and nothing to do.
139 pub fn empty() -> Self {
140 RefTracking { seen: FxHashSet::default(), todo: vec![] }
/// Creates a tracker that starts with `op` enqueued (and already marked as seen).
142 pub fn new(op: T) -> Self {
143 let mut ref_tracking_for_consts =
144 RefTracking { seen: FxHashSet::default(), todo: vec![(op, PATH::default())] };
145 ref_tracking_for_consts.seen.insert(op);
146 ref_tracking_for_consts
/// Enqueues `op` unless it was seen before. `path` is a closure so the (potentially
/// allocating) path is only computed for genuinely new entries.
149 pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
150 if self.seen.insert(op) {
151 trace!("Recursing below ptr {:#?}", op);
153 // Remember to come back to this later.
154 self.todo.push((op, path));
// Renders a path such as `.field.<deref>[3]` into `out` for use in diagnostics.
160 fn write_path(out: &mut String, path: &[PathElem]) {
161 use self::PathElem::*;
163 for elem in path.iter() {
165 Field(name) => write!(out, ".{}", name),
166 EnumTag => write!(out, ".<enum-tag>"),
167 Variant(name) => write!(out, ".<enum-variant({})>", name),
168 GeneratorTag => write!(out, ".<generator-tag>"),
169 GeneratorState(idx) => write!(out, ".<generator-state({})>", idx.index()),
170 CapturedVar(name) => write!(out, ".<captured-var({})>", name),
171 TupleElem(idx) => write!(out, ".{}", idx),
172 ArrayElem(idx) => write!(out, "[{}]", idx),
173 // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
174 // some of the other items here also are not Rust syntax.  Actually we can't
175 // even use the usual syntax because we are just showing the projections,
177 Deref => write!(out, ".<deref>"),
178 DynDowncast => write!(out, ".<dyn-downcast>"),
184 // Formats such that a sentence like "expected something {}" to mean
185 // "expected something <in the given range>" makes sense.
186 fn wrapping_range_format(r: WrappingRange, max_hi: u128) -> String {
187 let WrappingRange { start: lo, end: hi } = r;
188 assert!(hi <= max_hi);
// A wrapping range (presumably the `lo > hi` branch -- condition elided in this view)
// means "everything except (hi, lo)", hence the two-sided phrasing below.
190 format!("less or equal to {}, or greater or equal to {}", hi, lo)
192 format!("equal to {}", lo)
194 assert!(hi < max_hi, "should not be printing if the range covers everything");
195 format!("less or equal to {}", hi)
196 } else if hi == max_hi {
197 assert!(lo > 0, "should not be printing if the range covers everything");
198 format!("greater or equal to {}", lo)
// Fall-through: a plain `lo..=hi` interval.
200 format!("in the range {:?}", r)
/// Visitor that walks a value and checks it against the validity invariant of its type.
204 struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
205 /// The `path` may be pushed to, but the part that is present when a function
206 /// starts must not be changed!  `visit_fields` and `visit_array` rely on
207 /// this stack discipline.
/// `Some` enables recursive checking of references; discovered places are enqueued here.
209 ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
210 /// `None` indicates this is not validating for CTFE (but for runtime).
211 ctfe_mode: Option<CtfeValidationMode>,
/// The interpreter context used for all memory accesses and layout queries.
212 ecx: &'rt InterpCx<'mir, 'tcx, M>,
215 impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
/// Maps a field index within `layout` to the `PathElem` used in diagnostics: the enum
/// tag, a named struct field, a captured variable of a closure/generator, a tuple or
/// array index, etc.
216 fn aggregate_field_path_elem(&mut self, layout: TyAndLayout<'tcx>, field: usize) -> PathElem {
217 // First, check if we are projecting to a variant.
218 match layout.variants {
219 Variants::Multiple { tag_field, .. } => {
220 if tag_field == field {
221 return match layout.ty.kind() {
222 ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
223 ty::Generator(..) => PathElem::GeneratorTag,
224 _ => bug!("non-variant type {:?}", layout.ty),
228 Variants::Single { .. } => {}
231 // Now we know we are projecting to a field, so figure out which one.
232 match layout.ty.kind() {
233 // generators and closures.
234 ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => {
236 // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
237 // https://github.com/rust-lang/project-rfc-2229/issues/46
// Only local definitions have typeck results we can query for capture names.
238 if let Some(local_def_id) = def_id.as_local() {
239 let tables = self.ecx.tcx.typeck(local_def_id);
240 if let Some(captured_place) =
241 tables.closure_min_captures_flattened(*def_id).nth(field)
243 // Sometimes the index is beyond the number of upvars (seen
245 let var_hir_id = captured_place.get_root_variable();
246 let node = self.ecx.tcx.hir().get(var_hir_id);
247 if let hir::Node::Pat(pat) = node {
248 if let hir::PatKind::Binding(_, _, ident, _) = pat.kind {
249 name = Some(ident.name);
255 PathElem::CapturedVar(name.unwrap_or_else(|| {
256 // Fall back to showing the field index.
262 ty::Tuple(_) => PathElem::TupleElem(field),
265 ty::Adt(def, ..) if def.is_enum() => {
266 // we might be projecting *to* a variant, or to a field *in* a variant.
267 match layout.variants {
268 Variants::Single { index } => {
270 PathElem::Field(def.variant(index).fields[field].name)
272 Variants::Multiple { .. } => bug!("we handled variants above"),
277 ty::Adt(def, _) => PathElem::Field(def.non_enum_variant().fields[field].name),
280 ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),
283 ty::Dynamic(..) => PathElem::DynDowncast,
285 // nothing else has an aggregate layout
286 _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
// Runs `f` with an extra element pushed onto `self.path`, then truncates the path back
// to its previous length so the caller's prefix is untouched (see the stack-discipline
// note on the `path` field). NOTE(review): the fn header with the `elem` parameter is
// elided just above this span.
293 f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
294 ) -> InterpResult<'tcx, R> {
295 // Remember the old state
296 let path_len = self.path.len();
297 // Record new element
298 self.path.push(elem);
302 self.path.truncate(path_len);
/// Checks the metadata of a wide pointer: for trait objects, that the vtable pointer is
/// aligned, in-bounds, and carries a plausible drop fn / size / align; for slices and
/// `str`, that the length is a proper integer.
307 fn check_wide_ptr_meta(
309 meta: MemPlaceMeta<M::PointerTag>,
310 pointee: TyAndLayout<'tcx>,
311 ) -> InterpResult<'tcx> {
// The unsized tail determines which kind of metadata this pointer carries.
312 let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env);
315 let vtable = self.ecx.scalar_to_ptr(meta.unwrap_meta())?;
316 // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
318 self.ecx.check_ptr_access_align(
320 3 * self.ecx.tcx.data_layout.pointer_size, // drop, size, align
321 self.ecx.tcx.data_layout.pointer_align.abi,
322 CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message
325 err_ub!(DanglingIntPointer(..)) |
326 err_ub!(PointerUseAfterFree(..)) =>
327 { "dangling vtable pointer in wide pointer" },
328 err_ub!(AlignmentCheckFailed { .. }) =>
329 { "unaligned vtable pointer in wide pointer" },
330 err_ub!(PointerOutOfBounds { .. }) =>
331 { "too small vtable" },
// Sanity-check the drop fn entry of the vtable.
334 self.ecx.read_drop_type_from_vtable(vtable),
336 err_ub!(DanglingIntPointer(..)) |
337 err_ub!(InvalidFunctionPointer(..)) =>
338 { "invalid drop function pointer in vtable (not pointing to a function)" },
339 err_ub!(InvalidVtableDropFn(..)) =>
340 { "invalid drop function pointer in vtable (function has incompatible signature)" },
341 // Stacked Borrows errors can happen here, see https://github.com/rust-lang/miri/issues/2123.
342 // (We assume there are no other MachineStop errors possible here.)
343 InterpError::MachineStop(_) =>
344 { "vtable pointer does not have permission to read drop function pointer" },
// Sanity-check the size/align entries of the vtable.
347 self.ecx.read_size_and_align_from_vtable(vtable),
349 err_ub!(InvalidVtableSize) =>
350 { "invalid vtable: size is bigger than largest supported object" },
351 err_ub!(InvalidVtableAlignment(msg)) =>
352 { "invalid vtable: alignment {}", msg },
353 err_unsup!(ReadPointerAsBytes) => { "invalid size or align in vtable" },
354 // Stacked Borrows errors can happen here, see https://github.com/rust-lang/miri/issues/2123.
355 // (We assume there are no other MachineStop errors possible here.)
356 InterpError::MachineStop(_) =>
357 { "vtable pointer does not have permission to read size and alignment" },
359 // FIXME: More checks for the vtable.
361 ty::Slice(..) | ty::Str => {
362 let _len = try_validation!(
363 meta.unwrap_meta().to_machine_usize(self.ecx),
365 err_unsup!(ReadPointerAsBytes) => { "non-integer slice length in wide pointer" },
367 // We do not check that `len * elem_size <= isize::MAX`:
368 // that is only required for references, and there it falls out of the
369 // "dereferenceable" check performed by Stacked Borrows.
372 // Unsized, but not wide.
374 _ => bug!("Unexpected unsized type tail: {:?}", tail),
380 /// Check a reference or `Box`.
// NOTE(review): a `kind: &str` parameter ("reference"/"box", used in the messages below)
// is elided between the visible signature lines.
381 fn check_safe_pointer(
383 value: &OpTy<'tcx, M::PointerTag>,
385 ) -> InterpResult<'tcx> {
386 let value = try_validation!(
387 self.ecx.read_immediate(value),
389 err_unsup!(ReadPointerAsBytes) => { "part of a pointer" } expected { "a proper pointer or integer value" },
391 // Handle wide pointers.
392 // Check metadata early, for better diagnostics
393 let place = try_validation!(
394 self.ecx.ref_to_mplace(&value),
396 err_ub!(InvalidUninitBytes(None)) => { "uninitialized {}", kind },
398 if place.layout.is_unsized() {
399 self.check_wide_ptr_meta(place.meta, place.layout)?;
401 // Make sure this is dereferenceable and all.
402 let size_and_align = try_validation!(
403 self.ecx.size_and_align_of_mplace(&place),
405 err_ub!(InvalidMeta(msg)) => { "invalid {} metadata: {}", kind, msg },
407 let (size, align) = size_and_align
408 // for the purpose of validity, consider foreign types to have
409 // alignment and size determined by the layout (size will be 0,
410 // alignment should take attributes into account).
411 .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
412 // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
414 self.ecx.check_ptr_access_align(
418 CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message
421 err_ub!(AlignmentCheckFailed { required, has }) =>
423 "an unaligned {kind} (required {} byte alignment but found {})",
427 err_ub!(DanglingIntPointer(0, _)) =>
429 err_ub!(DanglingIntPointer(i, _)) =>
430 { "a dangling {kind} (address 0x{i:x} is unallocated)" },
431 err_ub!(PointerOutOfBounds { .. }) =>
432 { "a dangling {kind} (going beyond the bounds of its allocation)" },
433 // This cannot happen during const-eval (because interning already detects
434 // dangling pointers), but it can happen in Miri.
435 err_ub!(PointerUseAfterFree(..)) =>
436 { "a dangling {kind} (use-after-free)" },
438 // Do not allow pointers to uninhabited types.
439 if place.layout.abi.is_uninhabited() {
440 throw_validation_failure!(self.path,
441 { "a {kind} pointing to uninhabited type {}", place.layout.ty }
444 // Recursive checking
445 if let Some(ref mut ref_tracking) = self.ref_tracking {
446 // Proceed recursively even for ZST, no reason to skip them!
447 // `!` is a ZST and we want to validate it.
448 if let Ok((alloc_id, _offset, _tag)) = self.ecx.ptr_try_get_alloc_id(place.ptr) {
449 // Special handling for pointers to statics (irrespective of their type).
450 let alloc_kind = self.ecx.tcx.get_global_alloc(alloc_id);
451 if let Some(GlobalAlloc::Static(did)) = alloc_kind {
452 assert!(!self.ecx.tcx.is_thread_local_static(did));
453 assert!(self.ecx.tcx.is_static(did));
// In `Const` mode with `allow_static_ptrs: false`, a pointer to a static is itself
// a validation failure.
456 Some(CtfeValidationMode::Const { allow_static_ptrs: false, .. })
458 // See const_eval::machine::MemoryExtra::can_access_statics for why
459 // this check is so important.
460 // This check is reachable when the const just referenced the static,
461 // but never read it (so we never entered `before_access_global`).
462 throw_validation_failure!(self.path,
463 { "a {} pointing to a static variable", kind }
466 // We skip checking other statics. These statics must be sound by
467 // themselves, and the only way to get broken statics here is by using
469 // The reasons we don't check other statics is twofold. For one, in all
470 // sound cases, the static was already validated on its own, and second, we
471 // trigger cycle errors if we try to compute the value of the other static
472 // and that static refers back to us.
473 // We might miss const-invalid data,
474 // but things are still sound otherwise (in particular re: consts
475 // referring to statics).
// Enqueue the pointee for later validation, extending the path with `<deref>`.
479 let path = &self.path;
480 ref_tracking.track(place, || {
481 // We need to clone the path anyway, make sure it gets created
482 // with enough space for the additional `Deref`.
483 let mut new_path = Vec::with_capacity(path.len() + 1);
484 new_path.extend(path);
485 new_path.push(PathElem::Deref);
// Reads a scalar from `op`, translating `ReadPointerAsBytes` into a validation failure.
// NOTE(review): the `fn read_scalar(&self,` header line is elided above this span.
494 op: &OpTy<'tcx, M::PointerTag>,
495 ) -> InterpResult<'tcx, ScalarMaybeUninit<M::PointerTag>> {
497 self.ecx.read_scalar(op),
499 err_unsup!(ReadPointerAsBytes) => { "(potentially part of) a pointer" } expected { "plain (non-pointer) bytes" },
// Like `read_scalar`, but uses a forced raw read -- presumably so that ABI checks in
// `visit_value` can still inspect values (e.g. union scalars) a normal read would reject;
// TODO confirm against `read_immediate_raw`'s documentation.
503 fn read_immediate_forced(
505 op: &OpTy<'tcx, M::PointerTag>,
506 ) -> InterpResult<'tcx, Immediate<M::PointerTag>> {
508 self.ecx.read_immediate_raw(op, /*force*/ true),
510 err_unsup!(ReadPointerAsBytes) => { "(potentially part of) a pointer" } expected { "plain (non-pointer) bytes" },
514 /// Check if this is a value of primitive type, and if yes check the validity of the value
515 /// at that type.  Return `true` if the type is indeed primitive.
516 fn try_visit_primitive(
518 value: &OpTy<'tcx, M::PointerTag>,
519 ) -> InterpResult<'tcx, bool> {
520 // Go over all the primitive types
521 let ty = value.layout.ty;
// bool: must be 0 or 1 and initialized.
524 let value = self.read_scalar(value)?;
528 err_ub!(InvalidBool(..)) | err_ub!(InvalidUninitBytes(None)) =>
529 { "{:x}", value } expected { "a boolean" },
// char: must be a valid Unicode scalar value.
534 let value = self.read_scalar(value)?;
538 err_ub!(InvalidChar(..)) | err_ub!(InvalidUninitBytes(None)) =>
539 { "{:x}", value } expected { "a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)" },
543 ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
544 let value = self.read_scalar(value)?;
545 // NOTE: Keep this in sync with the array optimization for int/float
// Whether uninit numbers are rejected is machine-dependent.
547 if M::enforce_number_init(self.ecx) {
551 err_ub!(InvalidUninitBytes(..)) =>
552 { "{:x}", value } expected { "initialized bytes" }
555 if M::enforce_number_no_provenance(self.ecx) {
556 // As a special exception we *do* match on a `Scalar` here, since we truly want
557 // to know its underlying representation (and *not* cast it to an integer).
558 let is_ptr = value.check_init().map_or(false, |v| matches!(v, Scalar::Ptr(..)));
560 throw_validation_failure!(self.path,
561 { "{:x}", value } expected { "plain (non-pointer) bytes" }
568 // We are conservative with uninit for integers, but try to
569 // actually enforce the strict rules for raw pointers (mostly because
570 // that lets us re-use `ref_to_mplace`).
571 let place = try_validation!(
572 self.ecx.read_immediate(value).and_then(|ref i| self.ecx.ref_to_mplace(i)),
574 err_ub!(InvalidUninitBytes(None)) => { "uninitialized raw pointer" },
575 err_unsup!(ReadPointerAsBytes) => { "part of a pointer" } expected { "a proper pointer or integer value" },
// Even raw wide pointers must carry valid metadata.
577 if place.layout.is_unsized() {
578 self.check_wide_ptr_meta(place.meta, place.layout)?;
582 ty::Ref(_, ty, mutbl) => {
583 if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. }))
584 && *mutbl == hir::Mutability::Mut
586 // A mutable reference inside a const? That does not seem right (except if it is
// ZST pointees are exempt: a `&mut` to a ZST cannot be used to mutate anything.
588 let layout = self.ecx.layout_of(*ty)?;
589 if !layout.is_zst() {
590 throw_validation_failure!(self.path, { "mutable reference in a `const`" });
593 self.check_safe_pointer(value, "reference")?;
// Function pointers: must be initialized and (when recursing) actually point to a fn.
597 let value = try_validation!(
598 self.ecx.read_scalar(value).and_then(|v| v.check_init()),
600 err_unsup!(ReadPointerAsBytes) => { "part of a pointer" } expected { "a proper pointer or integer value" },
601 err_ub!(InvalidUninitBytes(None)) => { "uninitialized bytes" } expected { "a proper pointer or integer value" },
604 // If we check references recursively, also check that this points to a function.
605 if let Some(_) = self.ref_tracking {
606 let ptr = self.ecx.scalar_to_ptr(value)?;
607 let _fn = try_validation!(
608 self.ecx.get_ptr_fn(ptr),
610 err_ub!(DanglingIntPointer(0, _)) =>
611 { "a null function pointer" },
612 err_ub!(DanglingIntPointer(..)) |
613 err_ub!(InvalidFunctionPointer(..)) =>
614 { "{:x}", value } expected { "a function pointer" },
616 // FIXME: Check if the signature matches
618 // Otherwise (for standalone Miri), we have to still check it to be non-null.
619 if self.ecx.scalar_may_be_null(value)? {
620 throw_validation_failure!(self.path, { "a null function pointer" });
625 ty::Never => throw_validation_failure!(self.path, { "a value of the never type `!`" }),
626 ty::Foreign(..) | ty::FnDef(..) => {
630 // The above should be all the primitive types. The rest is compound, we
631 // check them by visiting their fields/variants.
639 | ty::Generator(..) => Ok(false),
640 // Some types only occur during typechecking, they have no layout.
641 // We should not see them here and we could not check them anyway.
644 | ty::Placeholder(..)
649 | ty::GeneratorWitness(..) => bug!("Encountered invalid type {:?}", ty),
// Checks `scalar` against `scalar_layout`: initialization (when the machine enforces it)
// and the layout's valid range. NOTE(review): the `fn visit_scalar(&mut self,` header
// line is elided above this span.
655 scalar: ScalarMaybeUninit<M::PointerTag>,
656 scalar_layout: ScalarAbi,
657 ) -> InterpResult<'tcx> {
658 // We check `is_full_range` in a slightly complicated way because *if* we are checking
659 // number validity, then we want to ensure that `Scalar::Initialized` is indeed initialized,
660 // i.e. that we go over the `check_init` below.
661 let size = scalar_layout.size(self.ecx);
662 let is_full_range = match scalar_layout {
663 ScalarAbi::Initialized { .. } => {
664 if M::enforce_number_init(self.ecx) {
665 false // not "full" since uninit is not accepted
667 scalar_layout.is_always_valid(self.ecx)
670 ScalarAbi::Union { .. } => true,
673 // Nothing to check. Crucially we don't even `read_scalar` until here, since that would
674 // fail for `Union` scalars!
677 // We have something to check: it must at least be initialized.
678 let valid_range = scalar_layout.valid_range(self.ecx);
679 let WrappingRange { start, end } = valid_range;
680 let max_value = size.unsigned_int_max();
681 assert!(end <= max_value);
682 let value = try_validation!(
685 err_ub!(InvalidUninitBytes(None)) => { "{:x}", scalar }
686 expected { "something {}", wrapping_range_format(valid_range, max_value) },
688 let bits = match value.try_to_int() {
689 Ok(int) => int.assert_bits(size),
691 // So this is a pointer then, and casting to an int failed.
692 // Can only happen during CTFE.
693 // We support 2 kinds of ranges here: full range, and excluding zero.
694 if start == 1 && end == max_value {
695 // Only null is the niche. So make sure the ptr is NOT null.
696 if self.ecx.scalar_may_be_null(value)? {
697 throw_validation_failure!(self.path,
698 { "a potentially null pointer" }
700 "something that cannot possibly fail to be {}",
701 wrapping_range_format(valid_range, max_value)
707 } else if scalar_layout.is_always_valid(self.ecx) {
708 // Easy. (This is reachable if `enforce_number_validity` is set.)
711 // Conservatively, we reject, because the pointer *could* have a bad
713 throw_validation_failure!(self.path,
716 "something that cannot possibly fail to be {}",
717 wrapping_range_format(valid_range, max_value)
// The integer case: just compare against the layout's valid range.
724 if valid_range.contains(bits) {
727 throw_validation_failure!(self.path,
729 expected { "something {}", wrapping_range_format(valid_range, max_value) }
// The `ValueVisitor` implementation: drives the recursive descent over a value,
// delegating leaf checks to the methods above.
735 impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
736 for ValidityVisitor<'rt, 'mir, 'tcx, M>
738 type V = OpTy<'tcx, M::PointerTag>;
// Accessor required by the `ValueVisitor` trait.
741 fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
// Reads the enum discriminant under the `<enum-tag>` path element, mapping low-level
// errors to "a valid enum tag" validation failures.
745 fn read_discriminant(
747 op: &OpTy<'tcx, M::PointerTag>,
748 ) -> InterpResult<'tcx, VariantIdx> {
749 self.with_elem(PathElem::EnumTag, move |this| {
751 this.ecx.read_discriminant(op),
753 err_ub!(InvalidTag(val)) =>
754 { "{:x}", val } expected { "a valid enum tag" },
755 err_ub!(InvalidUninitBytes(None)) =>
756 { "uninitialized bytes" } expected { "a valid enum tag" },
757 err_unsup!(ReadPointerAsBytes) =>
758 { "a pointer" } expected { "a valid enum tag" },
// Field visitor: maps the field index to a path element, then validates the field's
// value under that extended path.
767 old_op: &OpTy<'tcx, M::PointerTag>,
769 new_op: &OpTy<'tcx, M::PointerTag>,
770 ) -> InterpResult<'tcx> {
771 let elem = self.aggregate_field_path_elem(old_op.layout, field);
772 self.with_elem(elem, move |this| this.visit_value(new_op))
// Variant visitor: names the variant (enum or generator state) in the path, then
// validates the downcast value.
778 old_op: &OpTy<'tcx, M::PointerTag>,
779 variant_id: VariantIdx,
780 new_op: &OpTy<'tcx, M::PointerTag>,
781 ) -> InterpResult<'tcx> {
782 let name = match old_op.layout.ty.kind() {
783 ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
784 // Generators also have variants
785 ty::Generator(..) => PathElem::GeneratorState(variant_id),
786 _ => bug!("Unexpected type with variant: {:?}", old_op.layout.ty),
788 self.with_elem(name, move |this| this.visit_value(new_op))
// Union visitor: union fields are not validated individually; only the `UnsafeCell`
// restriction for inner const allocations is enforced here.
794 op: &OpTy<'tcx, M::PointerTag>,
795 _fields: NonZeroUsize,
796 ) -> InterpResult<'tcx> {
797 // Special check preventing `UnsafeCell` inside unions in the inner part of constants.
798 if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { inner: true, .. })) {
799 if !op.layout.ty.is_freeze(self.ecx.tcx.at(DUMMY_SP), self.ecx.param_env) {
800 throw_validation_failure!(self.path, { "`UnsafeCell` in a `const`" });
// A `Box` is validated like a safe pointer.
807 fn visit_box(&mut self, op: &OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
808 self.check_safe_pointer(op, "box")?;
// Main entry point of the recursion: primitives first, then the `UnsafeCell`-in-const
// check, then recursive descent, and finally the ABI-level scalar-range checks.
813 fn visit_value(&mut self, op: &OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
814 trace!("visit_value: {:?}, {:?}", *op, op.layout);
816 // Check primitive types -- the leaves of our recursive descent.
817 if self.try_visit_primitive(op)? {
821 // Special check preventing `UnsafeCell` in the inner part of constants
822 if let Some(def) = op.layout.ty.ty_adt_def() {
823 if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { inner: true, .. }))
824 && Some(def.did()) == self.ecx.tcx.lang_items().unsafe_cell_type()
826 throw_validation_failure!(self.path, { "`UnsafeCell` in a `const`" });
830 // Recursively walk the value at its type.
831 self.walk_value(op)?;
833 // *After* all of this, check the ABI.  We need to check the ABI to handle
834 // types like `NonNull` where the `Scalar` info is more restrictive than what
835 // the fields say (`rustc_layout_scalar_valid_range_start`).
836 // But in most cases, this will just propagate what the fields say,
837 // and then we want the error to point at the field -- so, first recurse,
840 // FIXME: We could avoid some redundant checks here. For newtypes wrapping
841 // scalars, we do the same check on every "level" (e.g., first we check
842 // MyNewtype and then the scalar in there).
843 match op.layout.abi {
844 Abi::Uninhabited => {
845 throw_validation_failure!(self.path,
846 { "a value of uninhabited type {:?}", op.layout.ty }
849 Abi::Scalar(scalar_layout) => {
850 let scalar = self.read_immediate_forced(op)?.to_scalar_or_uninit();
851 self.visit_scalar(scalar, scalar_layout)?;
853 Abi::ScalarPair(a_layout, b_layout) => {
854 // We would validate these things as we descend into the fields,
855 // but that can miss bugs in layout computation. Layout computation
856 // is subtle due to enums having ScalarPair layout, where one field
857 // is the discriminant.
858 if cfg!(debug_assertions) {
859 let (a, b) = self.read_immediate_forced(op)?.to_scalar_or_uninit_pair();
860 self.visit_scalar(a, a_layout)?;
861 self.visit_scalar(b, b_layout)?;
864 Abi::Vector { .. } => {
865 // No checks here, we assume layout computation gets this right.
866 // (This is harder to check since Miri does not represent these as `Immediate`.)
868 Abi::Aggregate { .. } => {
// Aggregate visitor with fast paths: `str` bytes are checked in one go, int/float
// arrays are range-checked as a single memory range, ZST arrays check one element.
878 op: &OpTy<'tcx, M::PointerTag>,
879 fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
880 ) -> InterpResult<'tcx> {
881 match op.layout.ty.kind() {
883 let mplace = op.assert_mem_place(); // strings are never immediate
884 let len = mplace.len(self.ecx)?;
886 self.ecx.read_bytes_ptr(mplace.ptr, Size::from_bytes(len)),
888 err_ub!(InvalidUninitBytes(..)) => { "uninitialized data in `str`" },
889 err_unsup!(ReadPointerAsBytes) => { "a pointer in `str`" },
892 ty::Array(tys, ..) | ty::Slice(tys)
893 // This optimization applies for types that can hold arbitrary bytes (such as
894 // integer and floating point types) or for structs or tuples with no fields.
895 // FIXME(wesleywiser) This logic could be extended further to arbitrary structs
896 // or tuples made up of integer/floating point types or inhabited ZSTs with no
898 if matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))
901 // Optimized handling for arrays of integer/float type.
903 // Arrays cannot be immediate, slices are never immediate.
904 let mplace = op.assert_mem_place();
905 // This is the length of the array/slice.
906 let len = mplace.len(self.ecx)?;
907 // This is the element type size.
908 let layout = self.ecx.layout_of(*tys)?;
909 // This is the size in bytes of the whole array. (This checks for overflow.)
910 let size = layout.size * len;
912 // Optimization: we just check the entire range at once.
913 // NOTE: Keep this in sync with the handling of integer and float
914 // types above, in `visit_primitive`.
915 // In run-time mode, we accept pointers in here.  This is actually more
916 // permissive than a per-element check would be, e.g., we accept
917 // a &[u8] that contains a pointer even though bytewise checking would
918 // reject it. However, that's good: We don't inherently want
919 // to reject those pointers, we just do not have the machinery to
920 // talk about parts of a pointer.
921 // We also accept uninit, for consistency with the slow path.
922 let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, mplace.align)? else {
923 // Size 0, nothing more to check.
927 match alloc.check_bytes(
928 alloc_range(Size::ZERO, size),
929 /*allow_uninit*/ !M::enforce_number_init(self.ecx),
930 /*allow_ptr*/ !M::enforce_number_no_provenance(self.ecx),
932 // In the happy case, we needn't check anything else.
934 // Some error happened, try to provide a more detailed description.
936 // For some errors we might be able to provide extra information.
937 // (This custom logic does not fit the `try_validation!` macro.)
939 err_ub!(InvalidUninitBytes(Some((_alloc_id, access)))) => {
940 // Some byte was uninitialized, determine which
941 // element that byte belongs to so we can
943 let i = usize::try_from(
944 access.uninit_offset.bytes() / layout.size.bytes(),
// Point the path at the offending array element for the diagnostic.
947 self.path.push(PathElem::ArrayElem(i));
949 throw_validation_failure!(self.path, { "uninitialized bytes" })
951 err_unsup!(ReadPointerAsBytes) => {
952 throw_validation_failure!(self.path, { "a pointer" } expected { "plain (non-pointer) bytes" })
955 // Propagate upwards (that will also check for unexpected errors).
956 _ => return Err(err),
961 // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
962 // of an array and not all of them, because there's only a single value of a specific
963 // ZST type, so either validation fails for all elements or none.
964 ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
965 // Validate just the first element (if any).
966 self.walk_aggregate(op, fields.take(1))?
969 self.walk_aggregate(op, fields)? // default handler
976 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Shared driver for both CTFE (`const_validate_operand`) and runtime
/// (`validate_operand`) validation. Only `ValidationFailure` and `InvalidProgram`
/// errors may escape; any other error reaching here is an interpreter bug.
977 fn validate_operand_internal(
979 op: &OpTy<'tcx, M::PointerTag>,
981 ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
982 ctfe_mode: Option<CtfeValidationMode>,
983 ) -> InterpResult<'tcx> {
984 trace!("validate_operand_internal: {:?}, {:?}", *op, op.layout.ty);
986 // Construct a visitor
987 let mut visitor = ValidityVisitor { path, ref_tracking, ctfe_mode, ecx: self };
990 match visitor.visit_value(&op) {
992 // Pass through validation failures.
993 Err(err) if matches!(err.kind(), err_ub!(ValidationFailure { .. })) => Err(err),
994 // Also pass through InvalidProgram, those just indicate that we could not
995 // validate and each caller will know best what to do with them.
996 Err(err) if matches!(err.kind(), InterpError::InvalidProgram(_)) => Err(err),
997 // Avoid other errors as those do not show *where* in the value the issue lies.
999 err.print_backtrace();
1000 bug!("Unexpected error during validation: {}", err);
1005 /// This function checks the data at `op` to be const-valid.
1006 /// `op` is assumed to cover valid memory if it is an indirect operand.
1007 /// It will error if the bits at the destination do not match the ones described by the layout.
1009 /// `ref_tracking` is used to record references that we encounter so that they
1010 /// can be checked recursively by an outside driving loop.
1012 /// `constant` controls whether this must satisfy the rules for constants:
1013 /// - no pointers to statics.
1014 /// - no `UnsafeCell` or non-ZST `&mut`.
1016 pub fn const_validate_operand(
1018 op: &OpTy<'tcx, M::PointerTag>,
1019 path: Vec<PathElem>,
1020 ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>,
1021 ctfe_mode: CtfeValidationMode,
1022 ) -> InterpResult<'tcx> {
// Thin wrapper: CTFE validation is the shared driver with tracking and mode enabled.
1023 self.validate_operand_internal(op, path, Some(ref_tracking), Some(ctfe_mode))
1026 /// This function checks the data at `op` to be runtime-valid.
1027 /// `op` is assumed to cover valid memory if it is an indirect operand.
1028 /// It will error if the bits at the destination do not match the ones described by the layout.
1030 pub fn validate_operand(&self, op: &OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
1031 self.validate_operand_internal(op, vec![], None, None)