1 //! Validates the MIR to ensure that invariants are upheld.
3 use rustc_index::bit_set::BitSet;
4 use rustc_infer::infer::TyCtxtInferExt;
5 use rustc_middle::mir::interpret::Scalar;
6 use rustc_middle::mir::traversal;
7 use rustc_middle::mir::visit::{PlaceContext, Visitor};
8 use rustc_middle::mir::{
9 AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPass, MirPhase, Operand,
10 PlaceElem, PlaceRef, ProjectionElem, Rvalue, SourceScope, Statement, StatementKind, Terminator,
11 TerminatorKind, START_BLOCK,
13 use rustc_middle::ty::fold::BottomUpFolder;
14 use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeFoldable};
15 use rustc_mir_dataflow::impls::MaybeStorageLive;
16 use rustc_mir_dataflow::storage::AlwaysLiveLocals;
17 use rustc_mir_dataflow::{Analysis, ResultsCursor};
18 use rustc_target::abi::Size;
20 #[derive(Copy, Clone, Debug)]
// NOTE(review): the item this `derive` was attached to is missing from this
// chunk (inner lines 21-25). Given that `check_edge` below matches on
// `EdgeKind::Normal` / `EdgeKind::Unwind`, it was presumably a small
// `enum EdgeKind { Normal, Unwind }` -- confirm against the full file.
//
// The MIR validation pass: checks that a `Body` upholds the invariants of
// the dialect given by `mir_phase` (see `MirPass::run_pass` below).
26 pub struct Validator {
27 /// Describes at which point in the pipeline this validation is happening.
// NOTE(review): the field documented by the line above is missing here
// (inner line 28). `self.when` is formatted into diagnostics later in this
// file, so it is presumably `pub when: String` -- TODO confirm.
29 /// The phase for which we are upholding the dialect. If the given phase forbids a specific
30 /// element, this validator will now emit errors if that specific element is encountered.
31 /// Note that phases that change the dialect cause all *following* phases to check the
32 /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
// NOTE(review): the sentence above is cut off and the struct's closing brace
// (inner lines 33, 35-36) is missing from this chunk.
34 pub mir_phase: MirPhase,
37 impl<'tcx> MirPass<'tcx> for Validator {
// Entry point of the pass: sets up the data needed by `TypeChecker` and
// (in the missing tail of this function) visits the whole body with it.
38 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
39 let def_id = body.source.def_id();
40 let param_env = tcx.param_env(def_id);
41 let mir_phase = self.mir_phase;
// Compute storage liveness up front: `visit_local` consults this cursor to
// check that every use of a local happens while its storage is live.
43 let always_live_locals = AlwaysLiveLocals::new(body);
44 let storage_liveness = MaybeStorageLive::new(always_live_locals)
45 .into_engine(tcx, body)
46 .iterate_to_fixpoint()
47 .into_results_cursor(body);
// NOTE(review): the start of the `TypeChecker { ... }` construction (inner
// lines 48-54, 56 -- presumably the `when`/`body`/`tcx`/`param_env`/
// `mir_phase`/`storage_liveness` field initializers) and the trailing
// `.visit_body(body)` call (inner lines 59-63) are missing from this chunk.
// Only these three field initializers survived:
55 reachable_blocks: traversal::reachable_as_bitset(body),
57 place_cache: Vec::new(),
58 value_cache: Vec::new(),
64 /// Returns whether the two types are equal up to lifetimes.
65 /// All lifetimes, including higher-ranked ones, get ignored for this comparison.
66 /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.)
68 /// The point of this function is to approximate "equal up to subtyping". However,
69 /// the approximation is incorrect as variance is ignored.
70 pub fn equal_up_to_regions<'tcx>(
// NOTE(review): the remaining parameters and return type of this signature
// (inner lines 71, 73-74) are missing from this chunk. The call site in
// `mir_assign_valid_types` passes `(self.tcx, param_env, src, dest)` and
// uses the result as a bool, so the full signature is presumably
// `(tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, src: Ty<'tcx>,
//  dest: Ty<'tcx>) -> bool` -- confirm against the full file. A fast-path
// `src == dest` check (inner lines 75-80) also appears to be missing.
72 param_env: ParamEnv<'tcx>,
81 // Normalize lifetimes away on both sides, then compare.
82 let normalize = |ty: Ty<'tcx>| {
83 tcx.normalize_erasing_regions(
// NOTE(review): the first argument to `normalize_erasing_regions` (inner
// line 84, presumably `param_env`) and the first `BottomUpFolder` field
// (inner line 86, presumably `tcx`) are missing here.
85 ty.fold_with(&mut BottomUpFolder {
87 // FIXME: We erase all late-bound lifetimes, but this is not fully correct.
88 // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`,
89 // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`,
90 // since one may have an `impl SomeTrait for fn(&32)` and
91 // `impl SomeTrait for fn(&'static u32)` at the same time which
92 // specify distinct values for Assoc. (See also #56105)
93 lt_op: |_| tcx.lifetimes.re_erased,
94 // Leave consts and types unchanged.
// NOTE(review): the identity `ty_op`/`ct_op` folder fields and the closing
// delimiters (inner lines 95-99) are missing from this chunk.
// Ask the inference context whether the two normalized types unify.
100 tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok())
// The visitor that performs the actual per-statement / per-terminator
// validation; constructed once per body in `run_pass`.
103 struct TypeChecker<'a, 'tcx> {
// NOTE(review): several fields are missing from this chunk (inner lines
// 104, 106, 108 and the closing brace at 113) -- presumably `when`, `tcx`,
// and `mir_phase`, since those are read as `self.when`, `self.tcx` and
// `self.mir_phase` throughout the impls below. Confirm against the full file.
105 body: &'a Body<'tcx>,
107 param_env: ParamEnv<'tcx>,
// Blocks reachable from START_BLOCK; storage-liveness checks are only
// meaningful (and only performed) for reachable code.
109 reachable_blocks: BitSet<BasicBlock>,
110 storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
// Scratch buffers reused across `Call` terminators / `SwitchInt` targets
// to detect duplicates without reallocating.
111 place_cache: Vec<PlaceRef<'tcx>>,
112 value_cache: Vec<u128>,
115 impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
// Report a validation failure at `location`. Deliberately a delayed bug,
// not a hard ICE, so that pre-existing compile errors win.
116 fn fail(&self, location: Location, msg: impl AsRef<str>) {
117 let span = self.body.source_info(location).span;
118 // We use `delay_span_bug` as we might see broken MIR when other errors have already
// NOTE(review): the rest of the comment above and parts of the
// `delay_span_bug` call (inner lines 119, 121-122, 125-131, including the
// closing delimiters) are missing from this chunk.
120 self.tcx.sess.diagnostic().delay_span_bug(
123 "broken MIR in {:?} ({}) at {:?}:\n{}",
124 self.body.source.instance,
// Validate a CFG edge from the block at `location` to `bb`: the target
// must exist, must not be the start block, and cleanup-ness must be
// consistent with the edge kind.
132 fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
133 if bb == START_BLOCK {
134 self.fail(location, "start block must not have predecessors")
136 if let Some(bb) = self.body.basic_blocks().get(bb) {
137 let src = self.body.basic_blocks().get(location.block).unwrap();
138 match (src.is_cleanup, bb.is_cleanup, edge_kind) {
139 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
140 (false, false, EdgeKind::Normal)
141 // Non-cleanup blocks can jump to cleanup blocks along unwind edges
142 | (false, true, EdgeKind::Unwind)
143 // Cleanup blocks can jump to cleanup blocks along non-unwind edges
144 | (true, true, EdgeKind::Normal) => {}
145 // All other jumps are invalid
// NOTE(review): the catch-all failing match arm (inner lines 146-149,
// 151-159) is incomplete in this chunk; only its format string survived.
150 "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
// `else` branch of the `basic_blocks().get(bb)` lookup: the target block
// index is out of bounds.
160 self.fail(location, format!("encountered jump to invalid basic block {:?}", bb))
164 /// Check if src can be assigned into dest.
165 /// This is not precise, it will accept some incorrect assignments.
166 fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
167 // Fast path before we normalize.
// NOTE(review): the fast-path `src == dest` check and the early `return`s
// (inner lines 168, 170-171, 175-176) are missing from this chunk.
169 // Equal types, all is good.
172 // Normalization reveals opaque types, but we may be validating MIR while computing
173 // said opaque types, causing cycles.
174 if (src, dest).has_opaque_types() {
177 // Normalize projections and things like that.
178 let param_env = self.param_env.with_reveal_all_normalized(self.tcx);
179 let src = self.tcx.normalize_erasing_regions(param_env, src);
180 let dest = self.tcx.normalize_erasing_regions(param_env, dest);
182 // Type-changing assignments can happen when subtyping is used. While
183 // all normal lifetimes are erased, higher-ranked types with their
184 // late-bound lifetimes are still around and can lead to type
185 // differences. So we compare ignoring lifetimes.
186 equal_up_to_regions(self.tcx, param_env, src, dest)
// The core validation visitor. Each `visit_*` override checks one class of
// invariant and then delegates to the corresponding `super_*` method so the
// default traversal still happens.
// NOTE(review): this impl is heavily truncated in this chunk -- closing
// braces and many interior lines are missing throughout (the inner line
// numbering jumps repeatedly), and the final method is cut off at the end
// of the chunk. The surviving lines are kept verbatim below.
190 impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
// Every mentioned local must have a declaration, and (in reachable code)
// every *use* of a local must happen while its storage is live.
191 fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) {
192 if self.body.local_decls.get(*local).is_none() {
195 format!("local {:?} has no corresponding declaration in `body.local_decls`", local),
199 if self.reachable_blocks.contains(location.block) && context.is_use() {
200 // Uses of locals must occur while the local's storage is allocated.
201 self.storage_liveness.seek_after_primary_effect(location);
202 let locals_with_storage = self.storage_liveness.get();
203 if !locals_with_storage.contains(*local) {
204 self.fail(location, format!("use of local {:?}, which has no storage here", local));
// `Operand::Copy` must only be applied to `Copy` types (opt-in check,
// gated on -Zvalidate-mir because the trait query is expensive).
209 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
210 // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
211 if self.tcx.sess.opts.debugging_opts.validate_mir {
212 // `Operand::Copy` is only supposed to be used with `Copy` types.
213 if let Operand::Copy(place) = operand {
214 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
215 let span = self.body.source_info(location).span;
217 if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) {
218 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty));
223 self.super_operand(operand, location);
// The local used in a `ProjectionElem::Index` must be of type `usize`.
// NOTE(review): this signature is missing lines (inner 227-228, 232-233 --
// presumably `&mut self`, `local: Local` and `location: Location`, given
// how they are used in the body and passed to `super_projection_elem`).
226 fn visit_projection_elem(
229 proj_base: &[PlaceElem<'tcx>],
230 elem: PlaceElem<'tcx>,
231 context: PlaceContext,
234 if let ProjectionElem::Index(index) = elem {
235 let index_ty = self.body.local_decls[index].ty;
236 if index_ty != self.tcx.types.usize {
237 self.fail(location, format!("bad index ({:?} != usize)", index_ty))
240 self.super_projection_elem(local, proj_base, elem, context, location);
// Per-statement invariants: assignment type compatibility and aliasing,
// plus phase-dependent checks that lowered-away statement kinds are gone.
243 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
244 match &statement.kind {
245 StatementKind::Assign(box (dest, rvalue)) => {
246 // LHS and RHS of the assignment must have the same type.
247 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
248 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
249 if !self.mir_assign_valid_types(right_ty, left_ty) {
253 "encountered `{:?}` with incompatible types:\n\
254 left-hand side has type: {}\n\
255 right-hand side has type: {}",
256 statement.kind, left_ty, right_ty,
261 // The sides of an assignment must not alias. Currently this just checks whether the places
// NOTE(review): the rest of the comment above and the actual `dest == src`
// comparison (inner lines 262, 264-266) are missing from this chunk.
263 Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) => {
267 "encountered `Assign` statement with overlapping memory",
// After deaggregation, aggregate rvalues (other than arrays, and
// generators before generator lowering) must no longer appear.
271 Rvalue::Aggregate(agg_kind, _) => {
272 let disallowed = match **agg_kind {
273 AggregateKind::Array(..) => false,
274 AggregateKind::Generator(..) => {
275 self.mir_phase >= MirPhase::GeneratorsLowered
277 _ => self.mir_phase >= MirPhase::Deaggregated,
282 format!("{:?} have been lowered to field assignments", rvalue),
286 Rvalue::Ref(_, BorrowKind::Shallow, _) => {
287 if self.mir_phase >= MirPhase::DropsLowered {
290 "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase",
297 StatementKind::AscribeUserType(..) => {
298 if self.mir_phase >= MirPhase::DropsLowered {
301 "`AscribeUserType` should have been removed after drop lowering phase",
305 StatementKind::FakeRead(..) => {
306 if self.mir_phase >= MirPhase::DropsLowered {
309 "`FakeRead` should have been removed after drop lowering phase",
// `CopyNonOverlapping`: src and dst must both be raw pointers to the same
// pointee type, and count must be a usize.
// NOTE(review): the `src`/`dst`/`count` destructuring fields of this
// pattern (inner lines 314-317) are missing from this chunk.
313 StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping {
318 let src_ty = src.ty(&self.body.local_decls, self.tcx);
319 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
324 format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty),
328 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
329 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
334 format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty),
338 // since CopyNonOverlapping is parametrized by 1 type,
339 // we only need to check that they are equal and not keep an extra parameter.
340 if op_src_ty != op_dst_ty {
341 self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty));
344 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
345 if op_cnt_ty != self.tcx.types.usize {
346 self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty))
349 StatementKind::SetDiscriminant { .. } => {
350 if self.mir_phase < MirPhase::DropsLowered {
351 self.fail(location, "`SetDiscriminant` is not allowed until drop elaboration");
354 StatementKind::Retag(_, _) => {
355 // FIXME(JakobDegen) The validator should check that `self.mir_phase <
356 // DropsLowered`. However, this causes ICEs with generation of drop shims, which
357 // seem to fail to set their `MirPhase` correctly.
// These statement kinds are valid in every phase.
359 StatementKind::StorageLive(..)
360 | StatementKind::StorageDead(..)
361 | StatementKind::Coverage(_)
362 | StatementKind::Nop => {}
365 self.super_statement(statement, location);
// Per-terminator invariants: every successor edge is validated with
// `check_edge`, plus kind-specific type and phase checks.
368 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
369 match &terminator.kind {
370 TerminatorKind::Goto { target } => {
371 self.check_edge(location, *target, EdgeKind::Normal);
373 TerminatorKind::SwitchInt { targets, switch_ty, discr } => {
// The declared switch type must match the discriminant operand's type.
374 let ty = discr.ty(&self.body.local_decls, self.tcx);
375 if ty != *switch_ty {
379 "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}",
// Every switch value must fit in the switch type's bit width.
385 let target_width = self.tcx.sess.target.pointer_width;
387 let size = Size::from_bits(match switch_ty.kind() {
388 ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
389 ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
// NOTE(review): at least one match arm (inner lines 390-391, presumably
// `ty::Char` / `ty::Bool` with fixed sizes) is missing from this chunk.
392 other => bug!("unhandled type: {:?}", other),
395 for (value, target) in targets.iter() {
396 if Scalar::<()>::try_from_uint(value, size).is_none() {
399 format!("the value {:#x} is not a proper {:?}", value, switch_ty),
403 self.check_edge(location, target, EdgeKind::Normal);
405 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
// Switch values must be unique: sort + dedup into the reusable cache and
// compare lengths to detect duplicates.
407 self.value_cache.clear();
408 self.value_cache.extend(targets.iter().map(|(value, _)| value));
409 let all_len = self.value_cache.len();
410 self.value_cache.sort_unstable();
411 self.value_cache.dedup();
412 let has_duplicates = all_len != self.value_cache.len();
417 "duplicated values in `SwitchInt` terminator: {:?}",
423 TerminatorKind::Drop { target, unwind, .. } => {
424 self.check_edge(location, *target, EdgeKind::Normal);
425 if let Some(unwind) = unwind {
426 self.check_edge(location, *unwind, EdgeKind::Unwind);
429 TerminatorKind::DropAndReplace { target, unwind, .. } => {
430 if self.mir_phase >= MirPhase::DropsLowered {
433 "`DropAndReplace` should have been removed during drop elaboration",
436 self.check_edge(location, *target, EdgeKind::Normal);
437 if let Some(unwind) = unwind {
438 self.check_edge(location, *unwind, EdgeKind::Unwind);
441 TerminatorKind::Call { func, args, destination, cleanup, .. } => {
// The callee must be of a callable type.
442 let func_ty = func.ty(&self.body.local_decls, self.tcx);
443 match func_ty.kind() {
444 ty::FnPtr(..) | ty::FnDef(..) => {}
447 format!("encountered non-callable type {} in `Call` terminator", func_ty),
450 if let Some((_, target)) = destination {
451 self.check_edge(location, *target, EdgeKind::Normal);
453 if let Some(cleanup) = cleanup {
454 self.check_edge(location, *cleanup, EdgeKind::Unwind);
457 // The call destination place and Operand::Move place used as an argument might be
458 // passed by a reference to the callee. Consequently they must be non-overlapping.
459 // Currently this simply checks for duplicate places.
460 self.place_cache.clear();
461 if let Some((destination, _)) = destination {
462 self.place_cache.push(destination.as_ref());
// NOTE(review): the loop header over `args` (inner lines 463-464) is
// missing from this chunk; only its body survives.
465 if let Operand::Move(place) = arg {
466 self.place_cache.push(place.as_ref());
469 let all_len = self.place_cache.len();
470 self.place_cache.sort_unstable();
471 self.place_cache.dedup();
472 let has_duplicates = all_len != self.place_cache.len();
477 "encountered overlapping memory in `Call` terminator: {:?}",
483 TerminatorKind::Assert { cond, target, cleanup, .. } => {
484 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
485 if cond_ty != self.tcx.types.bool {
489 "encountered non-boolean condition of type {} in `Assert` terminator",
494 self.check_edge(location, *target, EdgeKind::Normal);
495 if let Some(cleanup) = cleanup {
496 self.check_edge(location, *cleanup, EdgeKind::Unwind);
499 TerminatorKind::Yield { resume, drop, .. } => {
500 if self.mir_phase >= MirPhase::GeneratorsLowered {
501 self.fail(location, "`Yield` should have been replaced by generator lowering");
503 self.check_edge(location, *resume, EdgeKind::Normal);
504 if let Some(drop) = drop {
505 self.check_edge(location, *drop, EdgeKind::Normal);
508 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
509 if self.mir_phase >= MirPhase::DropsLowered {
512 "`FalseEdge` should have been removed after drop elaboration",
515 self.check_edge(location, *real_target, EdgeKind::Normal);
516 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
518 TerminatorKind::FalseUnwind { real_target, unwind } => {
519 if self.mir_phase >= MirPhase::DropsLowered {
522 "`FalseUnwind` should have been removed after drop elaboration",
525 self.check_edge(location, *real_target, EdgeKind::Normal);
526 if let Some(unwind) = unwind {
527 self.check_edge(location, *unwind, EdgeKind::Unwind);
530 TerminatorKind::InlineAsm { destination, cleanup, .. } => {
531 if let Some(destination) = destination {
532 self.check_edge(location, *destination, EdgeKind::Normal);
534 if let Some(cleanup) = cleanup {
535 self.check_edge(location, *cleanup, EdgeKind::Unwind);
538 TerminatorKind::GeneratorDrop => {
539 if self.mir_phase >= MirPhase::GeneratorsLowered {
542 "`GeneratorDrop` should have been replaced by generator lowering",
546 // Nothing to validate for these.
547 TerminatorKind::Resume
548 | TerminatorKind::Abort
549 | TerminatorKind::Return
550 | TerminatorKind::Unreachable => {}
553 self.super_terminator(terminator, location);
// Every referenced source scope must exist in `body.source_scopes`.
// NOTE(review): this method (and the impl) is cut off at the end of the
// chunk; the tail of the `delay_span_bug` call is not visible here.
556 fn visit_source_scope(&mut self, scope: &SourceScope) {
557 if self.body.source_scopes.get(*scope).is_none() {
558 self.tcx.sess.diagnostic().delay_span_bug(
561 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
562 self.body.source.instance, self.when, scope,