]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_mir_dataflow/src/impls/mod.rs
fix most compiler/ doctests
[rust.git] / compiler / rustc_mir_dataflow / src / impls / mod.rs
1 //! Dataflow analyses are built upon some interpretation of the
2 //! bitvectors attached to each basic block, represented via a
3 //! zero-sized structure.
4
5 use rustc_index::bit_set::{BitSet, ChunkedBitSet};
6 use rustc_index::vec::Idx;
7 use rustc_middle::mir::visit::{MirVisitable, Visitor};
8 use rustc_middle::mir::{self, Body, Location};
9 use rustc_middle::ty::{self, TyCtxt};
10
11 use crate::drop_flag_effects_for_function_entry;
12 use crate::drop_flag_effects_for_location;
13 use crate::elaborate_drops::DropFlagState;
14 use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
15 use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
16 use crate::on_lookup_result_bits;
17 use crate::MoveDataParamEnv;
18 use crate::{drop_flag_effects, on_all_children_bits};
19 use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
20
21 mod borrowed_locals;
22 mod init_locals;
23 mod liveness;
24 mod storage_liveness;
25
26 pub use self::borrowed_locals::MaybeBorrowedLocals;
27 pub use self::init_locals::MaybeInitializedLocals;
28 pub use self::liveness::MaybeLiveLocals;
29 pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive};
30
/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // maybe-init:
///                                             // {}
///     let a = S; let mut b = S; let c; let d; // {a, b}
///
///     if pred {
///         drop(a);                            // {   b}
///         b = S;                              // {   b}
///
///     } else {
///         drop(b);                            // {a}
///         d = S;                              // {a,       d}
///
///     }                                       // {a, b,    d}
///
///     c = S;                                  // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place *must* be initialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeUninitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
    // Type context, used to resolve types and session options.
    tcx: TyCtxt<'tcx>,
    // The MIR body being analyzed.
    body: &'a Body<'tcx>,
    // Move data (move paths, lookup tables) for `body`.
    mdpe: &'a MoveDataParamEnv<'tcx>,
}
71
72 impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
73     pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
74         MaybeInitializedPlaces { tcx, body, mdpe }
75     }
76 }
77
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
    /// Exposes the move data this analysis was constructed with.
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
83
/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // maybe-uninit:
///                                             // {a, b, c, d}
///     let a = S; let mut b = S; let c; let d; // {      c, d}
///
///     if pred {
///         drop(a);                            // {a,    c, d}
///         b = S;                              // {a,    c, d}
///
///     } else {
///         drop(b);                            // {   b, c, d}
///         d = S;                              // {   b, c   }
///
///     }                                       // {a, b, c, d}
///
///     c = S;                                  // {a, b,    d}
/// }
/// ```
///
/// To determine whether a place *must* be uninitialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeInitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
    // Type context, used to resolve types and session options.
    tcx: TyCtxt<'tcx>,
    // The MIR body being analyzed.
    body: &'a Body<'tcx>,
    // Move data (move paths, lookup tables) for `body`.
    mdpe: &'a MoveDataParamEnv<'tcx>,

    // When true, `switch_int_edge_effects` marks inactive enum variants as
    // uninitialized along each `SwitchInt` edge. Off by default; see
    // `mark_inactive_variants_as_uninit` below.
    mark_inactive_variants_as_uninit: bool,
}
126
127 impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
128     pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
129         MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
130     }
131
132     /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
133     /// enum discriminant.
134     ///
135     /// This is correct in a vacuum but is not the default because it causes problems in the borrow
136     /// checker, where this information gets propagated along `FakeEdge`s.
137     pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
138         self.mark_inactive_variants_as_uninit = true;
139         self
140     }
141 }
142
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
    /// Exposes the move data this analysis was constructed with.
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
148
/// `DefinitelyInitializedPlaces` tracks all places that are definitely
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // definite-init:
///                                             // {          }
///     let a = S; let mut b = S; let c; let d; // {a, b      }
///
///     if pred {
///         drop(a);                            // {   b,     }
///         b = S;                              // {   b,     }
///
///     } else {
///         drop(b);                            // {a,        }
///         d = S;                              // {a,       d}
///
///     }                                       // {          }
///
///     c = S;                                  // {       c  }
/// }
/// ```
///
/// To determine whether a place *may* be uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of this data.
///
/// Similarly, at a given `drop` statement, the set-difference between
/// this data and `MaybeInitializedPlaces` yields the set of places
/// that would require a dynamic drop-flag at that statement.
pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
    // Type context, used to resolve types and session options.
    tcx: TyCtxt<'tcx>,
    // The MIR body being analyzed.
    body: &'a Body<'tcx>,
    // Move data (move paths, lookup tables) for `body`.
    mdpe: &'a MoveDataParamEnv<'tcx>,
}
188
189 impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
190     pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
191         DefinitelyInitializedPlaces { tcx, body, mdpe }
192     }
193 }
194
impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
    /// Exposes the move data this analysis was constructed with.
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
200
/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // ever-init:
///                                             // {          }
///     let a = S; let mut b = S; let c; let d; // {a, b      }
///
///     if pred {
///         drop(a);                            // {a, b,     }
///         b = S;                              // {a, b,     }
///
///     } else {
///         drop(b);                            // {a, b,      }
///         d = S;                              // {a, b,    d }
///
///     }                                       // {a, b,    d }
///
///     c = S;                                  // {a, b, c, d }
/// }
/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
    // Kept for signature parity with the sibling analyses; currently unused
    // by this analysis's transfer functions.
    #[allow(dead_code)]
    tcx: TyCtxt<'tcx>,
    // The MIR body being analyzed.
    body: &'a Body<'tcx>,
    // Move data (initializations, lookup tables) for `body`.
    mdpe: &'a MoveDataParamEnv<'tcx>,
}
236
237 impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
238     pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
239         EverInitializedPlaces { tcx, body, mdpe }
240     }
241 }
242
impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
    /// Exposes the move data this analysis was constructed with.
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
248
249 impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
250     fn update_bits(
251         trans: &mut impl GenKill<MovePathIndex>,
252         path: MovePathIndex,
253         state: DropFlagState,
254     ) {
255         match state {
256             DropFlagState::Absent => trans.kill(path),
257             DropFlagState::Present => trans.gen(path),
258         }
259     }
260 }
261
262 impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
263     fn update_bits(
264         trans: &mut impl GenKill<MovePathIndex>,
265         path: MovePathIndex,
266         state: DropFlagState,
267     ) {
268         match state {
269             DropFlagState::Absent => trans.gen(path),
270             DropFlagState::Present => trans.kill(path),
271         }
272     }
273 }
274
275 impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
276     fn update_bits(
277         trans: &mut impl GenKill<MovePathIndex>,
278         path: MovePathIndex,
279         state: DropFlagState,
280     ) {
281         match state {
282             DropFlagState::Absent => trans.kill(path),
283             DropFlagState::Present => trans.gen(path),
284         }
285     }
286 }
287
impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    /// One bit per move path; a set bit means "maybe initialized here".
    type Domain = ChunkedBitSet<MovePathIndex>;
    const NAME: &'static str = "maybe_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = uninitialized
        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
    }

    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // On function entry exactly the argument places are initialized;
        // the callback asserts that entry effects are only ever `Present`.
        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.insert(path);
        });
    }
}
304
impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    /// Applies the move/init effects of `statement`, then conservatively
    /// re-gens any place whose address is taken mutably.
    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        // The mut-borrow handling below is gated on the same flag as the
        // edge-specific kills in `switch_int_edge_effects` further down.
        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
        for_each_mut_borrow(statement, location, |place| {
            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
                trans.gen(child);
            })
        })
    }

    /// Same as `statement_effect`, but for the block terminator.
    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        // Mutable borrows in the terminator also make places "maybe init".
        for_each_mut_borrow(terminator, location, |place| {
            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
                trans.gen(child);
            })
        })
    }

    /// On the success edge of a call, the destination place becomes initialized.
    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 1 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.gen(mpi);
                },
            );
        });
    }

    /// On each outgoing edge of a `SwitchInt` over an enum discriminant,
    /// kills the move paths of variants that cannot be active on that edge.
    /// Only runs under `-Zprecise-enum-drop-elaboration`.
    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
        &self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
    ) {
        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        // Only proceed when the switch operand is the discriminant of an enum
        // place computed in this very block.
        let enum_ = discr.place().and_then(|discr| {
            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
        });

        let Some((enum_place, enum_def)) = enum_ else {
            return;
        };

        let mut discriminants = enum_def.discriminants(self.tcx);
        edge_effects.apply(|trans, edge| {
            // The `otherwise` edge (no value) gives us no variant information.
            let Some(value) = edge.value else {
                return;
            };

            // MIR building adds discriminants to the `values` array in the same order as they
            // are yielded by `AdtDef::discriminants`. We rely on this to match each
            // discriminant in `values` to its corresponding variant in linear time.
            let (variant, _) = discriminants
                .find(|&(_, discr)| discr.val == value)
                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");

            // Kill all move paths that correspond to variants we know to be inactive along this
            // particular outgoing edge of a `SwitchInt`.
            drop_flag_effects::on_all_inactive_variants(
                self.tcx,
                self.body,
                self.move_data(),
                enum_place,
                variant,
                |mpi| trans.kill(mpi),
            );
        });
    }
}
418
impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    /// One bit per move path; a set bit means "maybe uninitialized here".
    type Domain = ChunkedBitSet<MovePathIndex>;

    const NAME: &'static str = "maybe_uninit";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (start_block_effect counters this at outset)
        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
    }

    // sets on_entry bits for Arg places
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // set all bits to 1 (uninit) before gathering counter-evidence
        state.insert_all();

        // Function arguments are initialized on entry, so clear their bits.
        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.remove(path);
        });
    }
}
440
impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    /// Applies the move/init effects of the statement at `location`.
    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
        // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
    }

    /// Applies the move/init effects of the terminator at `location`.
    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });
    }

    /// On the success edge of a call, the destination place is no longer
    /// "maybe uninitialized".
    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 0 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.kill(mpi);
                },
            );
        });
    }

    /// On each outgoing edge of a `SwitchInt` over an enum discriminant,
    /// gens the move paths of inactive variants — but only when both
    /// `-Zprecise-enum-drop-elaboration` and the opt-in
    /// `mark_inactive_variants_as_uninit` flag are set.
    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
        &self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
    ) {
        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        if !self.mark_inactive_variants_as_uninit {
            return;
        }

        // Only proceed when the switch operand is the discriminant of an enum
        // place computed in this very block.
        let enum_ = discr.place().and_then(|discr| {
            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
        });

        let Some((enum_place, enum_def)) = enum_ else {
            return;
        };

        let mut discriminants = enum_def.discriminants(self.tcx);
        edge_effects.apply(|trans, edge| {
            // The `otherwise` edge (no value) gives us no variant information.
            let Some(value) = edge.value else {
                return;
            };

            // MIR building adds discriminants to the `values` array in the same order as they
            // are yielded by `AdtDef::discriminants`. We rely on this to match each
            // discriminant in `values` to its corresponding variant in linear time.
            let (variant, _) = discriminants
                .find(|&(_, discr)| discr.val == value)
                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");

            // Mark all move paths that correspond to variants other than this one as maybe
            // uninitialized (in reality, they are *definitely* uninitialized).
            drop_flag_effects::on_all_inactive_variants(
                self.tcx,
                self.body,
                self.move_data(),
                enum_place,
                variant,
                |mpi| trans.gen(mpi),
            );
        });
    }
}
538
impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
    /// Use set intersection as the join operator.
    ///
    /// `Dual` flips the lattice so that joining control-flow paths keeps
    /// only the places initialized on *every* path.
    type Domain = lattice::Dual<BitSet<MovePathIndex>>;

    const NAME: &'static str = "definite_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (start_block_effect counters this at outset)
        lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
    }

    // sets on_entry bits for Arg places
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // Start from "nothing initialized", then add the argument places.
        state.0.clear();

        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.0.insert(path);
        });
    }
}
560
impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    /// Applies the move/init effects of the statement at `location`.
    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        })
    }

    /// Applies the move/init effects of the terminator at `location`.
    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        })
    }

    /// On the success edge of a call, the destination place becomes
    /// definitely initialized.
    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 1 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.gen(mpi);
                },
            );
        });
    }
}
607
impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    /// One bit per *initialization event* (`InitIndex`), not per move path.
    type Domain = ChunkedBitSet<InitIndex>;

    const NAME: &'static str = "ever_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = no initialized variables by default
        ChunkedBitSet::new_empty(self.move_data().inits.len())
    }

    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // Arguments count as already-initialized on entry. This assumes the
        // first `arg_count` entries of `inits` correspond to the arguments —
        // NOTE(review): relies on `MoveData` construction order; confirm there.
        for arg_init in 0..body.arg_count {
            state.insert(InitIndex::new(arg_init));
        }
    }
}
624
impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    type Idx = InitIndex;

    /// Gens every init event recorded at `location`; a `StorageDead` kills
    /// all init events for the dead local so it can be re-initialized later.
    #[instrument(skip(self, trans), level = "debug")]
    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        stmt: &mir::Statement<'tcx>,
        location: Location,
    ) {
        let move_data = self.move_data();
        let init_path_map = &move_data.init_path_map;
        let init_loc_map = &move_data.init_loc_map;
        let rev_lookup = &move_data.rev_lookup;

        debug!("initializes move_indexes {:?}", &init_loc_map[location]);
        trans.gen_all(init_loc_map[location].iter().copied());

        if let mir::StatementKind::StorageDead(local) = stmt.kind {
            // End inits for StorageDead, so that an immutable variable can
            // be reinitialized on the next iteration of the loop.
            let move_path_index = rev_lookup.find_local(local);
            debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
            trans.kill_all(init_path_map[move_path_index].iter().copied());
        }
    }

    /// Gens the init events at the terminator, except those that only occur
    /// on the non-panic path (those are handled by `call_return_effect`).
    #[instrument(skip(self, trans, _terminator), level = "debug")]
    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        let (body, move_data) = (self.body, self.move_data());
        let term = body[location.block].terminator();
        let init_loc_map = &move_data.init_loc_map;
        debug!(?term);
        debug!("initializes move_indexes {:?}", init_loc_map[location]);
        trans.gen_all(
            init_loc_map[location]
                .iter()
                .filter(|init_index| {
                    move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
                })
                .copied(),
        );
    }

    /// On the success edge of a call, gens *all* init events recorded at the
    /// call terminator (including `NonPanicPathOnly` ones skipped above).
    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        block: mir::BasicBlock,
        _return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        let move_data = self.move_data();
        let init_loc_map = &move_data.init_loc_map;

        let call_loc = self.body.terminator_loc(block);
        for init_index in &init_loc_map[call_loc] {
            trans.gen(*init_index);
        }
    }
}
689
/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///
/// ```text
/// ...
/// _42 = discriminant(_1)
/// SwitchInt(_42, ..)
/// ```
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant<'mir, 'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &'mir mir::Body<'tcx>,
    block: &'mir mir::BasicBlockData<'tcx>,
    switch_on: mir::Place<'tcx>,
) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
    // Scan backwards from the end of the block for the assignment that
    // produced the switched-on value.
    for statement in block.statements.iter().rev() {
        match &statement.kind {
            mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
                if *lhs == switch_on =>
            {
                match discriminated.ty(body, tcx).ty.kind() {
                    ty::Adt(def, _) => return Some((*discriminated, *def)),

                    // `Rvalue::Discriminant` is also used to get the active yield point for a
                    // generator, but we do not need edge-specific effects in that case. This may
                    // change in the future.
                    ty::Generator(..) => return None,

                    t => bug!("`discriminant` called on unexpected type {:?}", t),
                }
            }
            // Coverage statements may be interleaved; skip past them.
            mir::StatementKind::Coverage(_) => continue,
            // Any other statement means the pattern does not match.
            _ => return None,
        }
    }
    None
}
731
/// MIR visitor that invokes its wrapped callback on every mutably-borrowed
/// or address-taken place; see `for_each_mut_borrow` below.
struct OnMutBorrow<F>(F);
733
734 impl<F> Visitor<'_> for OnMutBorrow<F>
735 where
736     F: FnMut(&mir::Place<'_>),
737 {
738     fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
739         // FIXME: Does `&raw const foo` allow mutation? See #90413.
740         match rvalue {
741             mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
742             | mir::Rvalue::AddressOf(_, place) => (self.0)(place),
743
744             _ => {}
745         }
746
747         self.super_rvalue(rvalue, location)
748     }
749 }
750
751 /// Calls `f` for each mutable borrow or raw reference in the program.
752 ///
753 /// This DOES NOT call `f` for a shared borrow of a type with interior mutability.  That's okay for
754 /// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
755 /// other analyses will likely need to check for `!Freeze`.
756 fn for_each_mut_borrow<'tcx>(
757     mir: &impl MirVisitable<'tcx>,
758     location: Location,
759     f: impl FnMut(&mir::Place<'_>),
760 ) {
761     let mut vis = OnMutBorrow(f);
762
763     mir.apply(location, &mut vis);
764 }