]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_mir_dataflow/src/impls/mod.rs
Rollup merge of #101997 - cuviper:drop-legacy-pm, r=nikic
[rust.git] / compiler / rustc_mir_dataflow / src / impls / mod.rs
1 //! Dataflow analyses are built upon some interpretation of the
2 //! bitvectors attached to each basic block, represented via a
3 //! zero-sized structure.
4
5 use rustc_index::bit_set::{BitSet, ChunkedBitSet};
6 use rustc_index::vec::Idx;
7 use rustc_middle::mir::visit::{MirVisitable, Visitor};
8 use rustc_middle::mir::{self, Body, Location};
9 use rustc_middle::ty::{self, TyCtxt};
10
11 use crate::drop_flag_effects_for_function_entry;
12 use crate::drop_flag_effects_for_location;
13 use crate::elaborate_drops::DropFlagState;
14 use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
15 use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
16 use crate::on_lookup_result_bits;
17 use crate::MoveDataParamEnv;
18 use crate::{drop_flag_effects, on_all_children_bits};
19 use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
20
21 mod borrowed_locals;
22 mod init_locals;
23 mod liveness;
24 mod storage_liveness;
25
26 pub use self::borrowed_locals::borrowed_locals;
27 pub use self::borrowed_locals::MaybeBorrowedLocals;
28 pub use self::init_locals::MaybeInitializedLocals;
29 pub use self::liveness::MaybeLiveLocals;
30 pub use self::liveness::MaybeTransitiveLiveLocals;
31 pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive};
32
/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // maybe-init:
///                                             // {}
///     let a = S; let mut b = S; let c; let d; // {a, b}
///
///     if pred {
///         drop(a);                            // {   b}
///         b = S;                              // {   b}
///
///     } else {
///         drop(b);                            // {a}
///         d = S;                              // {a,       d}
///
///     }                                       // {a, b,    d}
///
///     c = S;                                  // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place *must* be initialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeUninitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
    // Type context, used to query session options and move-path children.
    tcx: TyCtxt<'tcx>,
    // The MIR body being analyzed.
    body: &'a Body<'tcx>,
    // Move data (and param env) computed for `body`; the analysis domain is
    // indexed by its move paths.
    mdpe: &'a MoveDataParamEnv<'tcx>,
}
73
74 impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
75     pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
76         MaybeInitializedPlaces { tcx, body, mdpe }
77     }
78 }
79
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
    /// Returns the move-path data computed for the analyzed body.
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
85
/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // maybe-uninit:
///                                             // {a, b, c, d}
///     let a = S; let mut b = S; let c; let d; // {      c, d}
///
///     if pred {
///         drop(a);                            // {a,    c, d}
///         b = S;                              // {a,    c, d}
///
///     } else {
///         drop(b);                            // {   b, c, d}
///         d = S;                              // {   b, c   }
///
///     }                                       // {a, b, c, d}
///
///     c = S;                                  // {a, b,    d}
/// }
/// ```
///
/// To determine whether a place *must* be uninitialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeInitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
    // Type context, used to query session options and move-path children.
    tcx: TyCtxt<'tcx>,
    // The MIR body being analyzed.
    body: &'a Body<'tcx>,
    // Move data (and param env) computed for `body`.
    mdpe: &'a MoveDataParamEnv<'tcx>,

    // When `true`, inactive enum variants are marked as "maybe uninitialized"
    // after a switch on an enum discriminant. `false` by default; see
    // `mark_inactive_variants_as_uninit` for why this is opt-in.
    mark_inactive_variants_as_uninit: bool,
}
128
129 impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
130     pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
131         MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
132     }
133
134     /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
135     /// enum discriminant.
136     ///
137     /// This is correct in a vacuum but is not the default because it causes problems in the borrow
138     /// checker, where this information gets propagated along `FakeEdge`s.
139     pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
140         self.mark_inactive_variants_as_uninit = true;
141         self
142     }
143 }
144
impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
    /// Returns the move-path data computed for the analyzed body.
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
150
/// `DefinitelyInitializedPlaces` tracks all places that are definitely
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // definite-init:
///                                             // {          }
///     let a = S; let mut b = S; let c; let d; // {a, b      }
///
///     if pred {
///         drop(a);                            // {   b,     }
///         b = S;                              // {   b,     }
///
///     } else {
///         drop(b);                            // {a,        }
///         d = S;                              // {a,       d}
///
///     }                                       // {          }
///
///     c = S;                                  // {       c  }
/// }
/// ```
///
/// To determine whether a place *may* be uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of this data.
///
/// Similarly, at a given `drop` statement, the set-difference between
/// this data and `MaybeInitializedPlaces` yields the set of places
/// that would require a dynamic drop-flag at that statement.
pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
    // Type context, used to query move-path information.
    tcx: TyCtxt<'tcx>,
    // The MIR body being analyzed.
    body: &'a Body<'tcx>,
    // Move data (and param env) computed for `body`.
    mdpe: &'a MoveDataParamEnv<'tcx>,
}
190
191 impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
192     pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
193         DefinitelyInitializedPlaces { tcx, body, mdpe }
194     }
195 }
196
impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
    /// Returns the move-path data computed for the analyzed body.
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
202
/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // ever-init:
///                                             // {          }
///     let a = S; let mut b = S; let c; let d; // {a, b      }
///
///     if pred {
///         drop(a);                            // {a, b,     }
///         b = S;                              // {a, b,     }
///
///     } else {
///         drop(b);                            // {a, b,      }
///         d = S;                              // {a, b,    d }
///
///     }                                       // {a, b,    d }
///
///     c = S;                                  // {a, b, c, d }
/// }
/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
    // Currently unused by this analysis (hence the `dead_code` allowance);
    // kept so the constructor signature matches the sibling analyses.
    #[allow(dead_code)]
    tcx: TyCtxt<'tcx>,
    // The MIR body being analyzed.
    body: &'a Body<'tcx>,
    // Move data (and param env) computed for `body`; the analysis domain is
    // indexed by its `inits`.
    mdpe: &'a MoveDataParamEnv<'tcx>,
}
238
239 impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
240     pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
241         EverInitializedPlaces { tcx, body, mdpe }
242     }
243 }
244
impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
    /// Returns the move-path data computed for the analyzed body.
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
250
251 impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
252     fn update_bits(
253         trans: &mut impl GenKill<MovePathIndex>,
254         path: MovePathIndex,
255         state: DropFlagState,
256     ) {
257         match state {
258             DropFlagState::Absent => trans.kill(path),
259             DropFlagState::Present => trans.gen(path),
260         }
261     }
262 }
263
264 impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
265     fn update_bits(
266         trans: &mut impl GenKill<MovePathIndex>,
267         path: MovePathIndex,
268         state: DropFlagState,
269     ) {
270         match state {
271             DropFlagState::Absent => trans.gen(path),
272             DropFlagState::Present => trans.kill(path),
273         }
274     }
275 }
276
277 impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
278     fn update_bits(
279         trans: &mut impl GenKill<MovePathIndex>,
280         path: MovePathIndex,
281         state: DropFlagState,
282     ) {
283         match state {
284             DropFlagState::Absent => trans.kill(path),
285             DropFlagState::Present => trans.gen(path),
286         }
287     }
288 }
289
impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    /// One bit per move path; a set bit means "maybe initialized".
    type Domain = ChunkedBitSet<MovePathIndex>;
    const NAME: &'static str = "maybe_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = uninitialized
        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
    }

    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // Function arguments are initialized on entry; insert their move paths.
        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            // Entry effects can only ever make drop flags present.
            assert!(s == DropFlagState::Present);
            state.insert(path);
        });
    }
}
306
impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        // Gen/kill move paths according to how this statement changes each
        // drop flag (initializations gen, moves/deinitializations kill).
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        // The borrow-based refinement below is only needed when precise
        // enum drop elaboration is enabled.
        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
            return;
        }

        // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
        for_each_mut_borrow(statement, location, |place| {
            // Only exact move-path matches are handled; borrows of places with
            // no exact move path are ignored here.
            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
                trans.gen(child);
            })
        })
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
            return;
        }

        // Same mutable-borrow refinement as in `statement_effect`.
        for_each_mut_borrow(terminator, location, |place| {
            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
                trans.gen(child);
            })
        })
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 1 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.gen(mpi);
                },
            );
        });
    }

    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
        &self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
    ) {
        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
            return;
        }

        // Edge-specific effects only apply when this `SwitchInt` reads an
        // enum discriminant computed in this block.
        let enum_ = discr.place().and_then(|discr| {
            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
        });

        let Some((enum_place, enum_def)) = enum_ else {
            return;
        };

        let mut discriminants = enum_def.discriminants(self.tcx);
        edge_effects.apply(|trans, edge| {
            // The "otherwise" edge (no value) gives us no variant information.
            let Some(value) = edge.value else {
                return;
            };

            // MIR building adds discriminants to the `values` array in the same order as they
            // are yielded by `AdtDef::discriminants`. We rely on this to match each
            // discriminant in `values` to its corresponding variant in linear time.
            let (variant, _) = discriminants
                .find(|&(_, discr)| discr.val == value)
                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");

            // Kill all move paths that correspond to variants we know to be inactive along this
            // particular outgoing edge of a `SwitchInt`.
            drop_flag_effects::on_all_inactive_variants(
                self.tcx,
                self.body,
                self.move_data(),
                enum_place,
                variant,
                |mpi| trans.kill(mpi),
            );
        });
    }
}
420
impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    /// One bit per move path; a set bit means "maybe uninitialized".
    type Domain = ChunkedBitSet<MovePathIndex>;

    const NAME: &'static str = "maybe_uninit";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (`initialize_start_block` counteracts this at the outset)
        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
    }

    // sets on_entry bits for Arg places
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // set all bits to 1 (uninit) before gathering counter-evidence
        state.insert_all();

        // Function arguments are initialized on entry; remove their move paths.
        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            // Entry effects can only ever make drop flags present.
            assert!(s == DropFlagState::Present);
            state.remove(path);
        });
    }
}
442
impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        // Gen/kill move paths according to how this statement changes each
        // drop flag (moves/deinitializations gen, initializations kill).
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
        // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 0 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.kill(mpi);
                },
            );
        });
    }

    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
        &self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
    ) {
        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
            return;
        }

        // This refinement is opt-in; see `mark_inactive_variants_as_uninit`
        // for why it is disabled by default.
        if !self.mark_inactive_variants_as_uninit {
            return;
        }

        // Edge-specific effects only apply when this `SwitchInt` reads an
        // enum discriminant computed in this block.
        let enum_ = discr.place().and_then(|discr| {
            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
        });

        let Some((enum_place, enum_def)) = enum_ else {
            return;
        };

        let mut discriminants = enum_def.discriminants(self.tcx);
        edge_effects.apply(|trans, edge| {
            // The "otherwise" edge (no value) gives us no variant information.
            let Some(value) = edge.value else {
                return;
            };

            // MIR building adds discriminants to the `values` array in the same order as they
            // are yielded by `AdtDef::discriminants`. We rely on this to match each
            // discriminant in `values` to its corresponding variant in linear time.
            let (variant, _) = discriminants
                .find(|&(_, discr)| discr.val == value)
                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");

            // Mark all move paths that correspond to variants other than this one as maybe
            // uninitialized (in reality, they are *definitely* uninitialized).
            drop_flag_effects::on_all_inactive_variants(
                self.tcx,
                self.body,
                self.move_data(),
                enum_place,
                variant,
                |mpi| trans.gen(mpi),
            );
        });
    }
}
540
impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
    /// Use set intersection as the join operator.
    type Domain = lattice::Dual<BitSet<MovePathIndex>>;

    const NAME: &'static str = "definite_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (`initialize_start_block` counteracts this at the outset)
        lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
    }

    // sets on_entry bits for Arg places
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // Start from "nothing is definitely initialized" ...
        state.0.clear();

        // ... then mark the function arguments, which *are* initialized on entry.
        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            // Entry effects can only ever make drop flags present.
            assert!(s == DropFlagState::Present);
            state.0.insert(path);
        });
    }
}
562
impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        // Gen/kill move paths according to how this statement changes each
        // drop flag (initializations gen, moves/deinitializations kill).
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        })
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        })
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 1 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.gen(mpi);
                },
            );
        });
    }
}
609
impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    /// One bit per `Init` (initialization site) recorded in the move data.
    type Domain = ChunkedBitSet<InitIndex>;

    const NAME: &'static str = "ever_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = no initialized variables by default
        ChunkedBitSet::new_empty(self.move_data().inits.len())
    }

    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // The first `arg_count` inits correspond to the function arguments,
        // which are initialized on entry.
        for arg_init in 0..body.arg_count {
            state.insert(InitIndex::new(arg_init));
        }
    }
}
626
impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    type Idx = InitIndex;

    #[instrument(skip(self, trans), level = "debug")]
    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        stmt: &mir::Statement<'tcx>,
        location: Location,
    ) {
        let move_data = self.move_data();
        let init_path_map = &move_data.init_path_map;
        let init_loc_map = &move_data.init_loc_map;
        let rev_lookup = &move_data.rev_lookup;

        // Gen every initialization recorded at this statement's location.
        debug!("initializes move_indexes {:?}", &init_loc_map[location]);
        trans.gen_all(init_loc_map[location].iter().copied());

        if let mir::StatementKind::StorageDead(local) = stmt.kind {
            // End inits for StorageDead, so that an immutable variable can
            // be reinitialized on the next iteration of the loop.
            let move_path_index = rev_lookup.find_local(local);
            debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
            trans.kill_all(init_path_map[move_path_index].iter().copied());
        }
    }

    #[instrument(skip(self, trans, _terminator), level = "debug")]
    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        let (body, move_data) = (self.body, self.move_data());
        let term = body[location.block].terminator();
        let init_loc_map = &move_data.init_loc_map;
        debug!(?term);
        debug!("initializes move_indexes {:?}", init_loc_map[location]);
        // Gen this terminator's initializations, excluding those that only
        // happen on the non-panic path (those are handled by
        // `call_return_effect` on the success edge).
        trans.gen_all(
            init_loc_map[location]
                .iter()
                .filter(|init_index| {
                    move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
                })
                .copied(),
        );
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        block: mir::BasicBlock,
        _return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        let move_data = self.move_data();
        let init_loc_map = &move_data.init_loc_map;

        // On the successful-return edge of a call, every initialization
        // recorded at the call terminator takes effect.
        let call_loc = self.body.terminator_loc(block);
        for init_index in &init_loc_map[call_loc] {
            trans.gen(*init_index);
        }
    }
}
691
/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///
/// ```text
/// ...
/// _42 = discriminant(_1)
/// SwitchInt(_42, ..)
/// ```
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant<'mir, 'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &'mir mir::Body<'tcx>,
    block: &'mir mir::BasicBlockData<'tcx>,
    switch_on: mir::Place<'tcx>,
) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
    // Scan backwards from the terminator for the assignment that computed the
    // switched-on discriminant.
    for statement in block.statements.iter().rev() {
        match &statement.kind {
            mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
                if *lhs == switch_on =>
            {
                match discriminated.ty(body, tcx).ty.kind() {
                    ty::Adt(def, _) => return Some((*discriminated, *def)),

                    // `Rvalue::Discriminant` is also used to get the active yield point for a
                    // generator, but we do not need edge-specific effects in that case. This may
                    // change in the future.
                    ty::Generator(..) => return None,

                    t => bug!("`discriminant` called on unexpected type {:?}", t),
                }
            }
            // Coverage statements are instrumentation noise; look past them.
            mir::StatementKind::Coverage(_) => continue,
            // Any other statement breaks the expected pattern.
            _ => return None,
        }
    }
    None
}
733
734 struct OnMutBorrow<F>(F);
735
impl<F> Visitor<'_> for OnMutBorrow<F>
where
    F: FnMut(&mir::Place<'_>),
{
    fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
        // FIXME: Does `&raw const foo` allow mutation? See #90413.
        match rvalue {
            // Invoke the callback on `&mut`-borrowed and address-taken places.
            mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
            | mir::Rvalue::AddressOf(_, place) => (self.0)(place),

            _ => {}
        }

        // Recurse so that nested rvalues (e.g. inside aggregates) are visited too.
        self.super_rvalue(rvalue, location)
    }
}
752
753 /// Calls `f` for each mutable borrow or raw reference in the program.
754 ///
755 /// This DOES NOT call `f` for a shared borrow of a type with interior mutability.  That's okay for
756 /// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
757 /// other analyses will likely need to check for `!Freeze`.
758 fn for_each_mut_borrow<'tcx>(
759     mir: &impl MirVisitable<'tcx>,
760     location: Location,
761     f: impl FnMut(&mir::Place<'_>),
762 ) {
763     let mut vis = OnMutBorrow(f);
764
765     mir.apply(location, &mut vis);
766 }