// compiler/rustc_mir_dataflow/src/impls/mod.rs
//! Dataflow analyses are built upon some interpretation of the
//! bitvectors attached to each basic block, represented via a
//! zero-sized structure.

use rustc_index::bit_set::BitSet;
use rustc_index::vec::Idx;
use rustc_middle::mir::visit::{MirVisitable, Visitor};
use rustc_middle::mir::{self, Body, Location};
use rustc_middle::ty::{self, TyCtxt};

use crate::drop_flag_effects_for_function_entry;
use crate::drop_flag_effects_for_location;
use crate::elaborate_drops::DropFlagState;
use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
use crate::on_lookup_result_bits;
use crate::MoveDataParamEnv;
use crate::{drop_flag_effects, on_all_children_bits};
use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};

mod borrowed_locals;
mod init_locals;
mod liveness;
mod storage_liveness;

pub use self::borrowed_locals::MaybeBorrowedLocals;
pub use self::init_locals::MaybeInitializedLocals;
pub use self::liveness::MaybeLiveLocals;
pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive};

/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // maybe-init:
///                                             // {}
///     let a = S; let mut b = S; let c; let d; // {a, b}
///
///     if pred {
///         drop(a);                            // {   b}
///         b = S;                              // {   b}
///
///     } else {
///         drop(b);                            // {a}
///         d = S;                              // {a,       d}
///
///     }                                       // {a, b,    d}
///
///     c = S;                                  // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place *must* be initialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeUninitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
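///
/// As a non-authoritative sketch, this analysis is typically run through the
/// dataflow framework in this crate and then queried with a results cursor.
/// Here `mdpe`, `local`, and `loc` are assumed to be in scope:
///
/// ```ignore (illustrative sketch, requires compiler internals)
/// let mut maybe_init = MaybeInitializedPlaces::new(tcx, body, &mdpe)
///     .into_engine(tcx, body)
///     .iterate_to_fixpoint()
///     .into_results_cursor(body);
///
/// // Query the state just before the statement/terminator at `loc`.
/// maybe_init.seek_before_primary_effect(loc);
/// let mpi = mdpe.move_data.rev_lookup.find_local(local);
/// let is_maybe_init = maybe_init.get().contains(mpi);
/// ```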
pub struct MaybeInitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    mdpe: &'a MoveDataParamEnv<'tcx>,
}

impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
        MaybeInitializedPlaces { tcx, body, mdpe }
    }
}

impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}

/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // maybe-uninit:
///                                             // {a, b, c, d}
///     let a = S; let mut b = S; let c; let d; // {      c, d}
///
///     if pred {
///         drop(a);                            // {a,    c, d}
///         b = S;                              // {a,    c, d}
///
///     } else {
///         drop(b);                            // {   b, c, d}
///         d = S;                              // {   b, c   }
///
///     }                                       // {a, b, c, d}
///
///     c = S;                                  // {a, b,    d}
/// }
/// ```
///
/// To determine whether a place *must* be uninitialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeInitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
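///
/// For illustration only: a sketch of the "must be initialized" check described
/// above, assuming `maybe_init` and `maybe_uninit` are `ResultsCursor`s over the
/// two analyses, both already positioned at the same location:
///
/// ```ignore (illustrative sketch, requires compiler internals)
/// let mpi = mdpe.move_data.rev_lookup.find_local(local);
/// // Must be initialized = maybe-initialized and not maybe-uninitialized.
/// let must_init = maybe_init.get().contains(mpi) && !maybe_uninit.get().contains(mpi);
/// ```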
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    mdpe: &'a MoveDataParamEnv<'tcx>,

    mark_inactive_variants_as_uninit: bool,
}

impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
        MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
    }

    /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
    /// enum discriminant.
    ///
    /// This is correct in a vacuum but is not the default because it causes problems in the borrow
    /// checker, where this information gets propagated along `FakeEdge`s.
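    ///
    /// A minimal sketch of opting in when constructing the analysis (builder style):
    ///
    /// ```ignore (illustrative sketch, requires compiler internals)
    /// let uninit = MaybeUninitializedPlaces::new(tcx, body, &mdpe)
    ///     .mark_inactive_variants_as_uninit();
    /// ```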
    pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
        self.mark_inactive_variants_as_uninit = true;
        self
    }
}

impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}

/// `DefinitelyInitializedPlaces` tracks all places that are definitely
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // definite-init:
///                                             // {          }
///     let a = S; let mut b = S; let c; let d; // {a, b      }
///
///     if pred {
///         drop(a);                            // {   b,     }
///         b = S;                              // {   b,     }
///
///     } else {
///         drop(b);                            // {a,        }
///         d = S;                              // {a,       d}
///
///     }                                       // {          }
///
///     c = S;                                  // {       c  }
/// }
/// ```
///
/// To determine whether a place *may* be uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of this data.
///
/// Similarly, at a given `drop` statement, the set-difference between
/// `MaybeInitializedPlaces` and this data yields the set of places
/// that would require a dynamic drop-flag at that statement.
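///
/// As a non-authoritative sketch of that drop-flag computation, assuming
/// `maybe_init` and `def_init` are `ResultsCursor`s over the two analyses at the
/// same location (this analysis' domain is the `Dual`-wrapped bitset):
///
/// ```ignore (illustrative sketch, requires compiler internals)
/// let mpi = mdpe.move_data.rev_lookup.find_local(local);
/// // A dynamic drop flag is needed where a place is maybe- but not
/// // definitely-initialized.
/// let needs_drop_flag = maybe_init.get().contains(mpi) && !def_init.get().0.contains(mpi);
/// ```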
pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    mdpe: &'a MoveDataParamEnv<'tcx>,
}

impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
        DefinitelyInitializedPlaces { tcx, body, mdpe }
    }
}

impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}

/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // ever-init:
///                                             // {          }
///     let a = S; let mut b = S; let c; let d; // {a, b      }
///
///     if pred {
///         drop(a);                            // {a, b,     }
///         b = S;                              // {a, b,     }
///
///     } else {
///         drop(b);                            // {a, b,     }
///         d = S;                              // {a, b,    d }
///
///     }                                       // {a, b,    d }
///
///     c = S;                                  // {a, b, c, d }
/// }
/// ```
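///
/// A minimal sketch of the immutable-reassignment check this supports (the real
/// check lives in the borrow checker; `ever_init` is assumed to be a
/// `ResultsCursor` over this analysis at the location of the assignment):
///
/// ```ignore (illustrative sketch, requires compiler internals)
/// let mpi = mdpe.move_data.rev_lookup.find_local(local);
/// // If any initialization of this path may already have happened, a second
/// // assignment to a non-`mut` local is an error.
/// let already_init = mdpe.move_data.init_path_map[mpi]
///     .iter()
///     .any(|&init| ever_init.get().contains(init));
/// ```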
pub struct EverInitializedPlaces<'a, 'tcx> {
    #[allow(dead_code)]
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    mdpe: &'a MoveDataParamEnv<'tcx>,
}

impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
        EverInitializedPlaces { tcx, body, mdpe }
    }
}

impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}

impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
    fn update_bits(
        trans: &mut impl GenKill<MovePathIndex>,
        path: MovePathIndex,
        state: DropFlagState,
    ) {
        match state {
            DropFlagState::Absent => trans.kill(path),
            DropFlagState::Present => trans.gen(path),
        }
    }
}

impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
    fn update_bits(
        trans: &mut impl GenKill<MovePathIndex>,
        path: MovePathIndex,
        state: DropFlagState,
    ) {
        match state {
            DropFlagState::Absent => trans.gen(path),
            DropFlagState::Present => trans.kill(path),
        }
    }
}

impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
    fn update_bits(
        trans: &mut impl GenKill<MovePathIndex>,
        path: MovePathIndex,
        state: DropFlagState,
    ) {
        match state {
            DropFlagState::Absent => trans.kill(path),
            DropFlagState::Present => trans.gen(path),
        }
    }
}

impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    type Domain = BitSet<MovePathIndex>;
    const NAME: &'static str = "maybe_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = uninitialized
        BitSet::new_empty(self.move_data().move_paths.len())
    }

    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.insert(path);
        });
    }
}

impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
        for_each_mut_borrow(statement, location, |place| {
            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
                trans.gen(child);
            })
        })
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        for_each_mut_borrow(terminator, location, |place| {
            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
                trans.gen(child);
            })
        })
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // When a call returns successfully, its destination place is
            // initialized, so set the corresponding bits to 1.
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.gen(mpi);
                },
            );
        });
    }

    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
        &self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
    ) {
        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        let enum_ = discr.place().and_then(|discr| {
            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
        });

        let (enum_place, enum_def) = match enum_ {
            Some(x) => x,
            None => return,
        };

        let mut discriminants = enum_def.discriminants(self.tcx);
        edge_effects.apply(|trans, edge| {
            let value = match edge.value {
                Some(x) => x,
                None => return,
            };

            // MIR building adds discriminants to the `values` array in the same order as they
            // are yielded by `AdtDef::discriminants`. We rely on this to match each
            // discriminant in `values` to its corresponding variant in linear time.
            let (variant, _) = discriminants
                .find(|&(_, discr)| discr.val == value)
                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");

            // Kill all move paths that correspond to variants we know to be inactive along this
            // particular outgoing edge of a `SwitchInt`.
            drop_flag_effects::on_all_inactive_variants(
                self.tcx,
                self.body,
                self.move_data(),
                enum_place,
                variant,
                |mpi| trans.kill(mpi),
            );
        });
    }
}

impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    type Domain = BitSet<MovePathIndex>;

    const NAME: &'static str = "maybe_uninit";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (`initialize_start_block` counters this at the outset)
        BitSet::new_empty(self.move_data().move_paths.len())
    }

    // Sets the entry-state bits for argument places.
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // set all bits to 1 (uninit) before gathering counter-evidence
        state.insert_all();

        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.remove(path);
        });
    }
}

impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
        // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // When a call returns successfully, its destination place is
            // initialized, so clear the corresponding maybe-uninit bits.
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.kill(mpi);
                },
            );
        });
    }

    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
        &self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
    ) {
        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        if !self.mark_inactive_variants_as_uninit {
            return;
        }

        let enum_ = discr.place().and_then(|discr| {
            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
        });

        let (enum_place, enum_def) = match enum_ {
            Some(x) => x,
            None => return,
        };

        let mut discriminants = enum_def.discriminants(self.tcx);
        edge_effects.apply(|trans, edge| {
            let value = match edge.value {
                Some(x) => x,
                None => return,
            };

            // MIR building adds discriminants to the `values` array in the same order as they
            // are yielded by `AdtDef::discriminants`. We rely on this to match each
            // discriminant in `values` to its corresponding variant in linear time.
            let (variant, _) = discriminants
                .find(|&(_, discr)| discr.val == value)
                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");

            // Mark all move paths that correspond to variants other than this one as maybe
            // uninitialized (in reality, they are *definitely* uninitialized).
            drop_flag_effects::on_all_inactive_variants(
                self.tcx,
                self.body,
                self.move_data(),
                enum_place,
                variant,
                |mpi| trans.gen(mpi),
            );
        });
    }
}

impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
    /// Use set intersection as the join operator.
    type Domain = lattice::Dual<BitSet<MovePathIndex>>;

    const NAME: &'static str = "definite_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (`initialize_start_block` counters this at the outset)
        lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
    }

    // Sets the entry-state bits for argument places.
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        state.0.clear();

        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.0.insert(path);
        });
    }
}

impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        })
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        })
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // When a call returns successfully, its destination place is
            // initialized, so set the corresponding bits to 1.
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.gen(mpi);
                },
            );
        });
    }
}

impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    type Domain = BitSet<InitIndex>;

    const NAME: &'static str = "ever_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = no initialized variables by default
        BitSet::new_empty(self.move_data().inits.len())
    }

    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
        for arg_init in 0..body.arg_count {
            state.insert(InitIndex::new(arg_init));
        }
    }
}

impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    type Idx = InitIndex;

    #[instrument(skip(self, trans), level = "debug")]
    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        stmt: &mir::Statement<'tcx>,
        location: Location,
    ) {
        let move_data = self.move_data();
        let init_path_map = &move_data.init_path_map;
        let init_loc_map = &move_data.init_loc_map;
        let rev_lookup = &move_data.rev_lookup;

        debug!("initializes move_indexes {:?}", &init_loc_map[location]);
        trans.gen_all(init_loc_map[location].iter().copied());

        if let mir::StatementKind::StorageDead(local) = stmt.kind {
            // End inits for StorageDead, so that an immutable variable can
            // be reinitialized on the next iteration of the loop.
            let move_path_index = rev_lookup.find_local(local);
            debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
            trans.kill_all(init_path_map[move_path_index].iter().copied());
        }
    }

    #[instrument(skip(self, trans, _terminator), level = "debug")]
    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        let (body, move_data) = (self.body, self.move_data());
        let term = body[location.block].terminator();
        let init_loc_map = &move_data.init_loc_map;
        debug!(?term);
        debug!("initializes move_indexes {:?}", init_loc_map[location]);
        trans.gen_all(
            init_loc_map[location]
                .iter()
                .filter(|init_index| {
                    move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
                })
                .copied(),
        );
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        block: mir::BasicBlock,
        _return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        let move_data = self.move_data();
        let init_loc_map = &move_data.init_loc_map;

        let call_loc = self.body.terminator_loc(block);
        for init_index in &init_loc_map[call_loc] {
            trans.gen(*init_index);
        }
    }
}

/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///
/// ```text
/// ...
/// _42 = discriminant(_1)
/// SwitchInt(_42, ..)
/// ```
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant<'mir, 'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &'mir mir::Body<'tcx>,
    block: &'mir mir::BasicBlockData<'tcx>,
    switch_on: mir::Place<'tcx>,
) -> Option<(mir::Place<'tcx>, &'tcx ty::AdtDef)> {
    match block.statements.last().map(|stmt| &stmt.kind) {
        Some(mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated))))
            if *lhs == switch_on =>
        {
            match &discriminated.ty(body, tcx).ty.kind() {
                ty::Adt(def, _) => Some((*discriminated, def)),

                // `Rvalue::Discriminant` is also used to get the active yield point for a
                // generator, but we do not need edge-specific effects in that case. This may
                // change in the future.
                ty::Generator(..) => None,

                t => bug!("`discriminant` called on unexpected type {:?}", t),
            }
        }

        _ => None,
    }
}

struct OnMutBorrow<F>(F);

impl<F> Visitor<'_> for OnMutBorrow<F>
where
    F: FnMut(&mir::Place<'_>),
{
    fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
        // FIXME: Does `&raw const foo` allow mutation? See #90413.
        match rvalue {
            mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
            | mir::Rvalue::AddressOf(_, place) => (self.0)(place),

            _ => {}
        }

        self.super_rvalue(rvalue, location)
    }
}

/// Calls `f` for each mutable borrow or raw reference in the program.
///
/// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
/// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
/// other analyses will likely need to check for `!Freeze`.
fn for_each_mut_borrow<'tcx>(
    mir: &impl MirVisitable<'tcx>,
    location: Location,
    f: impl FnMut(&mir::Place<'_>),
) {
    let mut vis = OnMutBorrow(f);

    mir.apply(location, &mut vis);
}