]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_const_eval/src/transform/validate.rs
Auto merge of #95456 - RalfJung:size, r=oli-obk
[rust.git] / compiler / rustc_const_eval / src / transform / validate.rs
1 //! Validates the MIR to ensure that invariants are upheld.
2
3 use rustc_index::bit_set::BitSet;
4 use rustc_infer::infer::TyCtxtInferExt;
5 use rustc_middle::mir::interpret::Scalar;
6 use rustc_middle::mir::traversal;
7 use rustc_middle::mir::visit::{PlaceContext, Visitor};
8 use rustc_middle::mir::{
9     AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPass, MirPhase, Operand,
10     PlaceElem, PlaceRef, ProjectionElem, Rvalue, SourceScope, Statement, StatementKind, Terminator,
11     TerminatorKind, START_BLOCK,
12 };
13 use rustc_middle::ty::fold::BottomUpFolder;
14 use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeFoldable};
15 use rustc_mir_dataflow::impls::MaybeStorageLive;
16 use rustc_mir_dataflow::storage::AlwaysLiveLocals;
17 use rustc_mir_dataflow::{Analysis, ResultsCursor};
18 use rustc_target::abi::Size;
19
/// The kind of a control-flow edge, as checked by `TypeChecker::check_edge`.
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    /// An edge taken only when unwinding (e.g. a terminator's `unwind`/`cleanup` target).
    Unwind,
    /// Any non-unwinding edge (goto targets, switch targets, call return edges, ...).
    Normal,
}
25
/// A MIR pass that checks the body against the invariants of its current `MirPhase`.
pub struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    /// Only used in diagnostics, to tell failures from different runs apart.
    pub when: String,
    /// The phase for which we are upholding the dialect. If the given phase forbids a specific
    /// element, this validator will now emit errors if that specific element is encountered.
    /// Note that phases that change the dialect cause all *following* phases to check the
    /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
    /// itself.
    pub mir_phase: MirPhase,
}
36
37 impl<'tcx> MirPass<'tcx> for Validator {
38     fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
39         let def_id = body.source.def_id();
40         let param_env = tcx.param_env(def_id);
41         let mir_phase = self.mir_phase;
42
43         let always_live_locals = AlwaysLiveLocals::new(body);
44         let storage_liveness = MaybeStorageLive::new(always_live_locals)
45             .into_engine(tcx, body)
46             .iterate_to_fixpoint()
47             .into_results_cursor(body);
48
49         TypeChecker {
50             when: &self.when,
51             body,
52             tcx,
53             param_env,
54             mir_phase,
55             reachable_blocks: traversal::reachable_as_bitset(body),
56             storage_liveness,
57             place_cache: Vec::new(),
58             value_cache: Vec::new(),
59         }
60         .visit_body(body);
61     }
62 }
63
64 /// Returns whether the two types are equal up to lifetimes.
65 /// All lifetimes, including higher-ranked ones, get ignored for this comparison.
66 /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.)
67 ///
68 /// The point of this function is to approximate "equal up to subtyping".  However,
69 /// the approximation is incorrect as variance is ignored.
70 pub fn equal_up_to_regions<'tcx>(
71     tcx: TyCtxt<'tcx>,
72     param_env: ParamEnv<'tcx>,
73     src: Ty<'tcx>,
74     dest: Ty<'tcx>,
75 ) -> bool {
76     // Fast path.
77     if src == dest {
78         return true;
79     }
80
81     // Normalize lifetimes away on both sides, then compare.
82     let normalize = |ty: Ty<'tcx>| {
83         tcx.normalize_erasing_regions(
84             param_env,
85             ty.fold_with(&mut BottomUpFolder {
86                 tcx,
87                 // FIXME: We erase all late-bound lifetimes, but this is not fully correct.
88                 // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`,
89                 // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`,
90                 // since one may have an `impl SomeTrait for fn(&32)` and
91                 // `impl SomeTrait for fn(&'static u32)` at the same time which
92                 // specify distinct values for Assoc. (See also #56105)
93                 lt_op: |_| tcx.lifetimes.re_erased,
94                 // Leave consts and types unchanged.
95                 ct_op: |ct| ct,
96                 ty_op: |ty| ty,
97             }),
98         )
99     };
100     tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok())
101 }
102
/// The visitor that walks a MIR body and reports (delayed) bugs for any
/// broken invariants it encounters.
struct TypeChecker<'a, 'tcx> {
    /// Pipeline-position description, forwarded from `Validator::when` for diagnostics.
    when: &'a str,
    /// The body being validated (shared borrow; used for local decls, spans, blocks).
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    /// Dialect being enforced; see the docs on `Validator::mir_phase`.
    mir_phase: MirPhase,
    /// Blocks reachable from `START_BLOCK`; storage-liveness of a used local is
    /// only checked when the use is in a reachable block.
    reachable_blocks: BitSet<BasicBlock>,
    /// Fixpoint dataflow results used to check that locals are only used while
    /// their storage is allocated.
    storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
    /// Scratch buffer reused across `Call` terminators to detect duplicate
    /// (and hence overlapping) places.
    place_cache: Vec<PlaceRef<'tcx>>,
    /// Scratch buffer reused across `SwitchInt` terminators to detect duplicate values.
    value_cache: Vec<u128>,
}
114
115 impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
116     fn fail(&self, location: Location, msg: impl AsRef<str>) {
117         let span = self.body.source_info(location).span;
118         // We use `delay_span_bug` as we might see broken MIR when other errors have already
119         // occurred.
120         self.tcx.sess.diagnostic().delay_span_bug(
121             span,
122             &format!(
123                 "broken MIR in {:?} ({}) at {:?}:\n{}",
124                 self.body.source.instance,
125                 self.when,
126                 location,
127                 msg.as_ref()
128             ),
129         );
130     }
131
132     fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
133         if bb == START_BLOCK {
134             self.fail(location, "start block must not have predecessors")
135         }
136         if let Some(bb) = self.body.basic_blocks().get(bb) {
137             let src = self.body.basic_blocks().get(location.block).unwrap();
138             match (src.is_cleanup, bb.is_cleanup, edge_kind) {
139                 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
140                 (false, false, EdgeKind::Normal)
141                 // Non-cleanup blocks can jump to cleanup blocks along unwind edges
142                 | (false, true, EdgeKind::Unwind)
143                 // Cleanup blocks can jump to cleanup blocks along non-unwind edges
144                 | (true, true, EdgeKind::Normal) => {}
145                 // All other jumps are invalid
146                 _ => {
147                     self.fail(
148                         location,
149                         format!(
150                             "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
151                             edge_kind,
152                             bb,
153                             src.is_cleanup,
154                             bb.is_cleanup,
155                         )
156                     )
157                 }
158             }
159         } else {
160             self.fail(location, format!("encountered jump to invalid basic block {:?}", bb))
161         }
162     }
163
164     /// Check if src can be assigned into dest.
165     /// This is not precise, it will accept some incorrect assignments.
166     fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
167         // Fast path before we normalize.
168         if src == dest {
169             // Equal types, all is good.
170             return true;
171         }
172         // Normalization reveals opaque types, but we may be validating MIR while computing
173         // said opaque types, causing cycles.
174         if (src, dest).has_opaque_types() {
175             return true;
176         }
177         // Normalize projections and things like that.
178         let param_env = self.param_env.with_reveal_all_normalized(self.tcx);
179         let src = self.tcx.normalize_erasing_regions(param_env, src);
180         let dest = self.tcx.normalize_erasing_regions(param_env, dest);
181
182         // Type-changing assignments can happen when subtyping is used. While
183         // all normal lifetimes are erased, higher-ranked types with their
184         // late-bound lifetimes are still around and can lead to type
185         // differences. So we compare ignoring lifetimes.
186         equal_up_to_regions(self.tcx, param_env, src, dest)
187     }
188 }
189
impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    /// Every mentioned local must be declared, and (in reachable code) a use of
    /// a local must happen while the local's storage is live.
    fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) {
        if self.body.local_decls.get(*local).is_none() {
            self.fail(
                location,
                format!("local {:?} has no corresponding declaration in `body.local_decls`", local),
            );
        }

        if self.reachable_blocks.contains(location.block) && context.is_use() {
            // Uses of locals must occur while the local's storage is allocated.
            self.storage_liveness.seek_after_primary_effect(location);
            let locals_with_storage = self.storage_liveness.get();
            if !locals_with_storage.contains(*local) {
                self.fail(location, format!("use of local {:?}, which has no storage here", local));
            }
        }
    }

    /// `Operand::Copy` must only be applied to `Copy` types. Only checked
    /// under `-Zvalidate-mir` because the trait query is expensive.
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
        if self.tcx.sess.opts.debugging_opts.validate_mir {
            // `Operand::Copy` is only supposed to be used with `Copy` types.
            if let Operand::Copy(place) = operand {
                let ty = place.ty(&self.body.local_decls, self.tcx).ty;
                let span = self.body.source_info(location).span;

                if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) {
                    self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty));
                }
            }
        }

        self.super_operand(operand, location);
    }

    /// An `Index` projection must index with a local of type `usize`.
    fn visit_projection_elem(
        &mut self,
        local: Local,
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        if let ProjectionElem::Index(index) = elem {
            let index_ty = self.body.local_decls[index].ty;
            if index_ty != self.tcx.types.usize {
                self.fail(location, format!("bad index ({:?} != usize)", index_ty))
            }
        }
        self.super_projection_elem(local, proj_base, elem, context, location);
    }

    /// Per-statement checks: assignment type compatibility and aliasing,
    /// `CopyNonOverlapping` operand types, and which statement kinds are
    /// permitted in the current `MirPhase`.
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (dest, rvalue)) => {
                // LHS and RHS of the assignment must have the same type.
                let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
                let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
                if !self.mir_assign_valid_types(right_ty, left_ty) {
                    self.fail(
                        location,
                        format!(
                            "encountered `{:?}` with incompatible types:\n\
                            left-hand side has type: {}\n\
                            right-hand side has type: {}",
                            statement.kind, left_ty, right_ty,
                        ),
                    );
                }
                match rvalue {
                    // The sides of an assignment must not alias. Currently this just checks whether the places
                    // are identical.
                    Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) => {
                        if dest == src {
                            self.fail(
                                location,
                                "encountered `Assign` statement with overlapping memory",
                            );
                        }
                    }
                    Rvalue::Aggregate(agg_kind, _) => {
                        // Which aggregate kinds are allowed depends on the phase:
                        // arrays always, generators until generator lowering,
                        // everything else until deaggregation.
                        let disallowed = match **agg_kind {
                            AggregateKind::Array(..) => false,
                            AggregateKind::Generator(..) => {
                                self.mir_phase >= MirPhase::GeneratorsLowered
                            }
                            _ => self.mir_phase >= MirPhase::Deaggregated,
                        };
                        if disallowed {
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        }
                    }
                    Rvalue::Ref(_, BorrowKind::Shallow, _) => {
                        if self.mir_phase >= MirPhase::DropsLowered {
                            self.fail(
                                location,
                                "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase",
                            );
                        }
                    }
                    _ => {}
                }
            }
            StatementKind::AscribeUserType(..) => {
                if self.mir_phase >= MirPhase::DropsLowered {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.mir_phase >= MirPhase::DropsLowered {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping {
                ref src,
                ref dst,
                ref count,
            }) => {
                // Both `src` and `dst` must be raw-pointer-typed operands
                // pointing at the same element type; `count` must be a `usize`.
                let src_ty = src.ty(&self.body.local_decls, self.tcx);
                let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
                    src_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty),
                    );
                    return;
                };
                let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
                let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
                    dst_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty),
                    );
                    return;
                };
                // since CopyNonOverlapping is parametrized by 1 type,
                // we only need to check that they are equal and not keep an extra parameter.
                if op_src_ty != op_dst_ty {
                    self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty));
                }

                let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
                if op_cnt_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty))
                }
            }
            StatementKind::SetDiscriminant { .. } => {
                if self.mir_phase < MirPhase::DropsLowered {
                    self.fail(location, "`SetDiscriminant` is not allowed until drop elaboration");
                }
            }
            StatementKind::Retag(_, _) => {
                // FIXME(JakobDegen) The validator should check that `self.mir_phase <
                // DropsLowered`. However, this causes ICEs with generation of drop shims, which
                // seem to fail to set their `MirPhase` correctly.
            }
            StatementKind::StorageLive(..)
            | StatementKind::StorageDead(..)
            | StatementKind::Coverage(_)
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

    /// Per-terminator checks: every successor edge is validated with
    /// `check_edge`, operand types are checked where the terminator constrains
    /// them, and phase-restricted terminators are rejected past their phase.
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, switch_ty, discr } => {
                // The discriminant operand's type must match the recorded switch type.
                let ty = discr.ty(&self.body.local_decls, self.tcx);
                if ty != *switch_ty {
                    self.fail(
                        location,
                        format!(
                            "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}",
                            ty, switch_ty,
                        ),
                    );
                }

                let target_width = self.tcx.sess.target.pointer_width;

                // Bit-width of the switched-on type, used to check each branch
                // value actually fits in that type.
                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });

                for (value, target) in targets.iter() {
                    if Scalar::<()>::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {:#x} is not a proper {:?}", value, switch_ty),
                        )
                    }

                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);

                // Detect duplicate branch values via sort + dedup on the
                // reusable scratch buffer.
                self.value_cache.clear();
                self.value_cache.extend(targets.iter().map(|(value, _)| value));
                let all_len = self.value_cache.len();
                self.value_cache.sort_unstable();
                self.value_cache.dedup();
                let has_duplicates = all_len != self.value_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "duplicated values in `SwitchInt` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Drop { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::DropAndReplace { target, unwind, .. } => {
                if self.mir_phase >= MirPhase::DropsLowered {
                    self.fail(
                        location,
                        "`DropAndReplace` should have been removed during drop elaboration",
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Call { func, args, destination, cleanup, .. } => {
                // The callee must have a callable (fn pointer or fn item) type.
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!("encountered non-callable type {} in `Call` terminator", func_ty),
                    ),
                }
                if let Some((_, target)) = destination {
                    self.check_edge(location, *target, EdgeKind::Normal);
                }
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }

                // The call destination place and Operand::Move place used as an argument might be
                // passed by a reference to the callee. Consequently they must be non-overlapping.
                // Currently this simply checks for duplicate places.
                self.place_cache.clear();
                if let Some((destination, _)) = destination {
                    self.place_cache.push(destination.as_ref());
                }
                for arg in args {
                    if let Operand::Move(place) = arg {
                        self.place_cache.push(place.as_ref());
                    }
                }
                let all_len = self.place_cache.len();
                self.place_cache.sort_unstable();
                self.place_cache.dedup();
                let has_duplicates = all_len != self.place_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "encountered overlapping memory in `Call` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Assert { cond, target, cleanup, .. } => {
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {} in `Assert` terminator",
                            cond_ty
                        ),
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.mir_phase >= MirPhase::GeneratorsLowered {
                    self.fail(location, "`Yield` should have been replaced by generator lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                if self.mir_phase >= MirPhase::DropsLowered {
                    self.fail(
                        location,
                        "`FalseEdge` should have been removed after drop elaboration",
                    );
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                if self.mir_phase >= MirPhase::DropsLowered {
                    self.fail(
                        location,
                        "`FalseUnwind` should have been removed after drop elaboration",
                    );
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::InlineAsm { destination, cleanup, .. } => {
                if let Some(destination) = destination {
                    self.check_edge(location, *destination, EdgeKind::Normal);
                }
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }
            }
            TerminatorKind::GeneratorDrop => {
                if self.mir_phase >= MirPhase::GeneratorsLowered {
                    self.fail(
                        location,
                        "`GeneratorDrop` should have been replaced by generator lowering",
                    );
                }
            }
            // Nothing to validate for these.
            TerminatorKind::Resume
            | TerminatorKind::Abort
            | TerminatorKind::Return
            | TerminatorKind::Unreachable => {}
        }

        self.super_terminator(terminator, location);
    }

    /// Every mentioned source scope must exist in `body.source_scopes`.
    /// Reported directly (not via `fail`) because there is no `Location` here.
    fn visit_source_scope(&mut self, scope: &SourceScope) {
        if self.body.source_scopes.get(*scope).is_none() {
            self.tcx.sess.diagnostic().delay_span_bug(
                self.body.span,
                &format!(
                    "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
                    self.body.source.instance, self.when, scope,
                ),
            );
        }
    }
}
567 }