// compiler/rustc_mir/src/transform/validate.rs
// (mirrored via git.lizzy.rs; snapshot at rollup merge of #82255 - nhwn:nonzero-err-as-bug, r=davidtwco)
1 //! Validates the MIR to ensure that invariants are upheld.
2
3 use crate::dataflow::impls::MaybeStorageLive;
4 use crate::dataflow::{Analysis, ResultsCursor};
5 use crate::util::storage::AlwaysLiveLocals;
6
7 use super::MirPass;
8 use rustc_index::bit_set::BitSet;
9 use rustc_infer::infer::TyCtxtInferExt;
10 use rustc_middle::mir::interpret::Scalar;
11 use rustc_middle::mir::traversal;
12 use rustc_middle::mir::visit::{PlaceContext, Visitor};
13 use rustc_middle::mir::{
14     AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, PlaceRef,
15     Rvalue, SourceScope, Statement, StatementKind, Terminator, TerminatorKind,
16 };
17 use rustc_middle::ty::fold::BottomUpFolder;
18 use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeFoldable};
19 use rustc_target::abi::Size;
20
/// The kind of control-flow edge a terminator takes to a successor block.
/// Used by `TypeChecker::check_edge` to enforce the cleanup/unwind invariants.
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    /// An edge taken only during unwinding (e.g. a call's `cleanup` target).
    Unwind,
    /// An ordinary (non-unwinding) control-flow edge.
    Normal,
}
26
/// The MIR validation pass. Checks structural invariants of a body (valid
/// block/local/scope references, unwind-edge discipline, assignment type
/// compatibility) and dialect invariants for the current `MirPhase`.
pub struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    pub when: String,
    /// The phase for which we are upholding the dialect. If the given phase forbids a specific
    /// element, this validator will now emit errors if that specific element is encountered.
    /// Note that phases that change the dialect cause all *following* phases to check the
    /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
    /// itself.
    pub mir_phase: MirPhase,
}
37
38 impl<'tcx> MirPass<'tcx> for Validator {
39     fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
40         let def_id = body.source.def_id();
41         let param_env = tcx.param_env(def_id);
42         let mir_phase = self.mir_phase;
43
44         let always_live_locals = AlwaysLiveLocals::new(body);
45         let storage_liveness = MaybeStorageLive::new(always_live_locals)
46             .into_engine(tcx, body)
47             .iterate_to_fixpoint()
48             .into_results_cursor(body);
49
50         TypeChecker {
51             when: &self.when,
52             body,
53             tcx,
54             param_env,
55             mir_phase,
56             reachable_blocks: traversal::reachable_as_bitset(body),
57             storage_liveness,
58             place_cache: Vec::new(),
59         }
60         .visit_body(body);
61     }
62 }
63
64 /// Returns whether the two types are equal up to lifetimes.
65 /// All lifetimes, including higher-ranked ones, get ignored for this comparison.
66 /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.)
67 ///
68 /// The point of this function is to approximate "equal up to subtyping".  However,
69 /// the approximation is incorrect as variance is ignored.
70 pub fn equal_up_to_regions(
71     tcx: TyCtxt<'tcx>,
72     param_env: ParamEnv<'tcx>,
73     src: Ty<'tcx>,
74     dest: Ty<'tcx>,
75 ) -> bool {
76     // Fast path.
77     if src == dest {
78         return true;
79     }
80
81     // Normalize lifetimes away on both sides, then compare.
82     let param_env = param_env.with_reveal_all_normalized(tcx);
83     let normalize = |ty: Ty<'tcx>| {
84         tcx.normalize_erasing_regions(
85             param_env,
86             ty.fold_with(&mut BottomUpFolder {
87                 tcx,
88                 // FIXME: We erase all late-bound lifetimes, but this is not fully correct.
89                 // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`,
90                 // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`,
91                 // since one may have an `impl SomeTrait for fn(&32)` and
92                 // `impl SomeTrait for fn(&'static u32)` at the same time which
93                 // specify distinct values for Assoc. (See also #56105)
94                 lt_op: |_| tcx.lifetimes.re_erased,
95                 // Leave consts and types unchanged.
96                 ct_op: |ct| ct,
97                 ty_op: |ty| ty,
98             }),
99         )
100     };
101     tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok())
102 }
103
/// The visitor that performs the actual validation. One instance is built per
/// `run_pass` invocation and driven over the whole body.
struct TypeChecker<'a, 'tcx> {
    // Pipeline-stage label used in diagnostics (borrowed from `Validator::when`).
    when: &'a str,
    // The body being validated (read-only view).
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    // Dialect phase whose invariants are being enforced (see `Validator::mir_phase`).
    mir_phase: MirPhase,
    // Blocks reachable from the entry block; storage-liveness checks are only
    // meaningful (and only performed) for reachable code.
    reachable_blocks: BitSet<BasicBlock>,
    // Dataflow cursor over `MaybeStorageLive` results, seeked per use-location.
    storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
    // Scratch buffer reused across `Call` terminators to detect duplicate
    // (overlapping) argument/destination places without reallocating.
    place_cache: Vec<PlaceRef<'tcx>>,
}
114
115 impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
116     fn fail(&self, location: Location, msg: impl AsRef<str>) {
117         let span = self.body.source_info(location).span;
118         // We use `delay_span_bug` as we might see broken MIR when other errors have already
119         // occurred.
120         self.tcx.sess.diagnostic().delay_span_bug(
121             span,
122             &format!(
123                 "broken MIR in {:?} ({}) at {:?}:\n{}",
124                 self.body.source.instance,
125                 self.when,
126                 location,
127                 msg.as_ref()
128             ),
129         );
130     }
131
132     fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
133         if let Some(bb) = self.body.basic_blocks().get(bb) {
134             let src = self.body.basic_blocks().get(location.block).unwrap();
135             match (src.is_cleanup, bb.is_cleanup, edge_kind) {
136                 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
137                 (false, false, EdgeKind::Normal)
138                 // Non-cleanup blocks can jump to cleanup blocks along unwind edges
139                 | (false, true, EdgeKind::Unwind)
140                 // Cleanup blocks can jump to cleanup blocks along non-unwind edges
141                 | (true, true, EdgeKind::Normal) => {}
142                 // All other jumps are invalid
143                 _ => {
144                     self.fail(
145                         location,
146                         format!(
147                             "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
148                             edge_kind,
149                             bb,
150                             src.is_cleanup,
151                             bb.is_cleanup,
152                         )
153                     )
154                 }
155             }
156         } else {
157             self.fail(location, format!("encountered jump to invalid basic block {:?}", bb))
158         }
159     }
160
161     /// Check if src can be assigned into dest.
162     /// This is not precise, it will accept some incorrect assignments.
163     fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
164         // Fast path before we normalize.
165         if src == dest {
166             // Equal types, all is good.
167             return true;
168         }
169         // Normalize projections and things like that.
170         // FIXME: We need to reveal_all, as some optimizations change types in ways
171         // that require unfolding opaque types.
172         let param_env = self.param_env.with_reveal_all_normalized(self.tcx);
173         let src = self.tcx.normalize_erasing_regions(param_env, src);
174         let dest = self.tcx.normalize_erasing_regions(param_env, dest);
175
176         // Type-changing assignments can happen when subtyping is used. While
177         // all normal lifetimes are erased, higher-ranked types with their
178         // late-bound lifetimes are still around and can lead to type
179         // differences. So we compare ignoring lifetimes.
180         equal_up_to_regions(self.tcx, param_env, src, dest)
181     }
182 }
183
impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    /// Every mentioned local must have a declaration, and (in reachable code)
    /// every *use* of a local must occur while its storage is live.
    fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) {
        if self.body.local_decls.get(*local).is_none() {
            self.fail(
                location,
                format!("local {:?} has no corresponding declaration in `body.local_decls`", local),
            );
        }

        if self.reachable_blocks.contains(location.block) && context.is_use() {
            // Uses of locals must occur while the local's storage is allocated.
            self.storage_liveness.seek_after_primary_effect(location);
            let locals_with_storage = self.storage_liveness.get();
            if !locals_with_storage.contains(*local) {
                self.fail(location, format!("use of local {:?}, which has no storage here", local));
            }
        }
    }

    /// `Operand::Copy` may only be applied to places whose type is `Copy`
    /// (modulo regions). Gated behind `-Zvalidate-mir` because the trait
    /// query involved is comparatively expensive.
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
        if self.tcx.sess.opts.debugging_opts.validate_mir {
            // `Operand::Copy` is only supposed to be used with `Copy` types.
            if let Operand::Copy(place) = operand {
                let ty = place.ty(&self.body.local_decls, self.tcx).ty;
                let span = self.body.source_info(location).span;

                if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) {
                    self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty));
                }
            }
        }

        self.super_operand(operand, location);
    }

    /// Checks per-statement invariants: assignment type compatibility and
    /// non-aliasing, plus dialect rules for statements that must have been
    /// lowered away by the current `MirPhase` (aggregates, shallow borrows,
    /// `AscribeUserType`, `FakeRead`).
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (dest, rvalue)) => {
                // LHS and RHS of the assignment must have the same type.
                let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
                let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
                if !self.mir_assign_valid_types(right_ty, left_ty) {
                    self.fail(
                        location,
                        format!(
                            "encountered `{:?}` with incompatible types:\n\
                            left-hand side has type: {}\n\
                            right-hand side has type: {}",
                            statement.kind, left_ty, right_ty,
                        ),
                    );
                }
                match rvalue {
                    // The sides of an assignment must not alias. Currently this just checks whether the places
                    // are identical.
                    Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) => {
                        if dest == src {
                            self.fail(
                                location,
                                "encountered `Assign` statement with overlapping memory",
                            );
                        }
                    }
                    // The deaggregator currently does not deaggreagate arrays.
                    // So for now, we ignore them here.
                    Rvalue::Aggregate(box AggregateKind::Array { .. }, _) => {}
                    // All other aggregates must be gone after some phases.
                    Rvalue::Aggregate(box kind, _) => {
                        if self.mir_phase > MirPhase::DropLowering
                            && !matches!(kind, AggregateKind::Generator(..))
                        {
                            // Generators persist until the state machine transformation, but all
                            // other aggregates must have been lowered.
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        } else if self.mir_phase > MirPhase::GeneratorLowering {
                            // No more aggregates after drop and generator lowering.
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        }
                    }
                    Rvalue::Ref(_, BorrowKind::Shallow, _) => {
                        if self.mir_phase > MirPhase::DropLowering {
                            self.fail(
                                location,
                                "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase",
                            );
                        }
                    }
                    _ => {}
                }
            }
            StatementKind::AscribeUserType(..) => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            _ => {}
        }

        self.super_statement(statement, location);
    }

    /// Checks per-terminator invariants: every successor edge is validated via
    /// `check_edge`, discriminant/condition types must match, `SwitchInt`
    /// values must fit the switch type, call argument/destination places must
    /// not overlap, and phase-restricted terminators (`DropAndReplace`,
    /// `Yield`) must be gone after their lowering phase.
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, switch_ty, discr } => {
                let ty = discr.ty(&self.body.local_decls, self.tcx);
                if ty != *switch_ty {
                    self.fail(
                        location,
                        format!(
                            "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}",
                            ty, switch_ty,
                        ),
                    );
                }

                let target_width = self.tcx.sess.target.pointer_width;

                // Width in bits of the switched-on type; `normalize` resolves
                // `usize`/`isize` to the target's pointer width.
                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });

                for (value, target) in targets.iter() {
                    // Each branch value must be representable in the switch type.
                    if Scalar::<()>::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {:#x} is not a proper {:?}", value, switch_ty),
                        )
                    }

                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
            }
            TerminatorKind::Drop { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::DropAndReplace { target, unwind, .. } => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`DropAndReplace` is not permitted to exist after drop elaboration",
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Call { func, args, destination, cleanup, .. } => {
                // The callee must have a function type (pointer or definition).
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!("encountered non-callable type {} in `Call` terminator", func_ty),
                    ),
                }
                if let Some((_, target)) = destination {
                    self.check_edge(location, *target, EdgeKind::Normal);
                }
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }

                // The call destination place and Operand::Move place used as an argument might be
                // passed by a reference to the callee. Consequently they must be non-overlapping.
                // Currently this simply checks for duplicate places.
                self.place_cache.clear();
                if let Some((destination, _)) = destination {
                    self.place_cache.push(destination.as_ref());
                }
                for arg in args {
                    if let Operand::Move(place) = arg {
                        self.place_cache.push(place.as_ref());
                    }
                }
                // Sort + dedup: any shrinkage means at least two places were equal.
                let all_len = self.place_cache.len();
                self.place_cache.sort_unstable();
                self.place_cache.dedup();
                let has_duplicates = all_len != self.place_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "encountered overlapping memory in `Call` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Assert { cond, target, cleanup, .. } => {
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {} in `Assert` terminator",
                            cond_ty
                        ),
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.mir_phase > MirPhase::GeneratorLowering {
                    self.fail(location, "`Yield` should have been replaced by generator lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                self.check_edge(location, *real_target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::InlineAsm { destination, .. } => {
                if let Some(destination) = destination {
                    self.check_edge(location, *destination, EdgeKind::Normal);
                }
            }
            // Nothing to validate for these.
            TerminatorKind::Resume
            | TerminatorKind::Abort
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::GeneratorDrop => {}
        }

        self.super_terminator(terminator, location);
    }

    /// Every referenced source scope must exist in `body.source_scopes`.
    /// Reported directly via `delay_span_bug` (not `fail`) because there is
    /// no statement `Location` available in this callback.
    fn visit_source_scope(&mut self, scope: &SourceScope) {
        if self.body.source_scopes.get(*scope).is_none() {
            self.tcx.sess.diagnostic().delay_span_bug(
                self.body.span,
                &format!(
                    "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
                    self.body.source.instance, self.when, scope,
                ),
            );
        }
    }
}
464 }