1 //! Dataflow analyses are built upon some interpretation of the
2 //! bitvectors attached to each basic block, represented via a
3 //! zero-sized structure.
5 use rustc_index::bit_set::{BitSet, ChunkedBitSet};
6 use rustc_index::vec::Idx;
7 use rustc_middle::mir::visit::{MirVisitable, Visitor};
8 use rustc_middle::mir::{self, Body, Location};
9 use rustc_middle::ty::{self, TyCtxt};
11 use crate::drop_flag_effects_for_function_entry;
12 use crate::drop_flag_effects_for_location;
13 use crate::elaborate_drops::DropFlagState;
14 use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
15 use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
16 use crate::on_lookup_result_bits;
17 use crate::MoveDataParamEnv;
18 use crate::{drop_flag_effects, on_all_children_bits};
19 use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
26 pub use self::borrowed_locals::borrowed_locals;
27 pub use self::borrowed_locals::MaybeBorrowedLocals;
28 pub use self::init_locals::MaybeInitializedLocals;
29 pub use self::liveness::MaybeLiveLocals;
30 pub use self::liveness::MaybeTransitiveLiveLocals;
31 pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive};
33 /// `MaybeInitializedPlaces` tracks all places that might be
34 /// initialized upon reaching a particular point in the control flow
37 /// For example, in code like the following, we have corresponding
38 /// dataflow information shown in the right-hand comments.
42 /// fn foo(pred: bool) { // maybe-init:
44 /// let a = S; let mut b = S; let c; let d; // {a, b}
56 /// c = S; // {a, b, c, d}
60 /// To determine whether a place *must* be initialized at a
61 /// particular control-flow point, one can take the set-difference
62 /// between this data and the data from `MaybeUninitializedPlaces` at the
63 /// corresponding control-flow point.
65 /// Similarly, at a given `drop` statement, the set-intersection
66 /// between this data and `MaybeUninitializedPlaces` yields the set of
67 /// places that would require a dynamic drop-flag at that statement.
68 pub struct MaybeInitializedPlaces<'a, 'tcx> {
// Move data + `ParamEnv` context shared by all drop-flag analyses in this file.
71 mdpe: &'a MoveDataParamEnv<'tcx>,
// NOTE(review): this excerpt elides the other fields (`tcx`, `body`) that
// `MaybeInitializedPlaces::new` below initializes — confirm against the full file.
74 impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
// Constructor: merely bundles the borrowed context; no analysis work happens here.
75 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
76 MaybeInitializedPlaces { tcx, body, mdpe }
80 impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
// Accessor used by the framework to reach the move-path data.
// NOTE(review): body elided in this excerpt — presumably returns `&self.mdpe.move_data`; verify.
81 fn move_data(&self) -> &MoveData<'tcx> {
86 /// `MaybeUninitializedPlaces` tracks all places that might be
87 /// uninitialized upon reaching a particular point in the control flow
90 /// For example, in code like the following, we have corresponding
91 /// dataflow information shown in the right-hand comments.
95 /// fn foo(pred: bool) { // maybe-uninit:
97 /// let a = S; let mut b = S; let c; let d; // { c, d}
100 /// drop(a); // {a, c, d}
101 /// b = S; // {a, c, d}
104 /// drop(b); // { b, c, d}
105 /// d = S; // { b, c }
107 /// } // {a, b, c, d}
109 /// c = S; // {a, b, d}
113 /// To determine whether a place *must* be uninitialized at a
114 /// particular control-flow point, one can take the set-difference
115 /// between this data and the data from `MaybeInitializedPlaces` at the
116 /// corresponding control-flow point.
118 /// Similarly, at a given `drop` statement, the set-intersection
119 /// between this data and `MaybeInitializedPlaces` yields the set of
120 /// places that would require a dynamic drop-flag at that statement.
121 pub struct MaybeUninitializedPlaces<'a, 'tcx> {
123 body: &'a Body<'tcx>,
124 mdpe: &'a MoveDataParamEnv<'tcx>,
// When true, a `SwitchInt` on an enum discriminant marks the move paths of the
// inactive variants as maybe-uninit along each outgoing edge (see the builder
// method `mark_inactive_variants_as_uninit` and `switch_int_edge_effects`).
126 mark_inactive_variants_as_uninit: bool,
129 impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
// Constructor; the inactive-variant marking is opt-in and defaults to `false`.
130 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
131 MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
134 /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
135 /// enum discriminant.
137 /// This is correct in a vacuum but is not the default because it causes problems in the borrow
138 /// checker, where this information gets propagated along `FakeEdge`s.
// Builder-style setter: consumes and returns `self`.
// NOTE(review): the trailing `self` return line is elided in this excerpt.
139 pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
140 self.mark_inactive_variants_as_uninit = true;
145 impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
// Accessor used by the framework to reach the move-path data.
// NOTE(review): body elided in this excerpt — presumably returns `&self.mdpe.move_data`; verify.
146 fn move_data(&self) -> &MoveData<'tcx> {
151 /// `DefinitelyInitializedPlaces` tracks all places that are definitely
152 /// initialized upon reaching a particular point in the control flow
155 /// For example, in code like the following, we have corresponding
156 /// dataflow information shown in the right-hand comments.
160 /// fn foo(pred: bool) { // definite-init:
162 /// let a = S; let mut b = S; let c; let d; // {a, b }
165 /// drop(a); // { b, }
169 /// drop(b); // {a, }
178 /// To determine whether a place *may* be uninitialized at a
179 /// particular control-flow point, one can take the set-complement
182 /// Similarly, at a given `drop` statement, the set-difference between
183 /// this data and `MaybeInitializedPlaces` yields the set of places
184 /// that would require a dynamic drop-flag at that statement.
185 pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
187 body: &'a Body<'tcx>,
188 mdpe: &'a MoveDataParamEnv<'tcx>,
191 impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
// Constructor: merely bundles the borrowed context; no analysis work happens here.
192 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
193 DefinitelyInitializedPlaces { tcx, body, mdpe }
197 impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
// Accessor used by the framework to reach the move-path data.
// NOTE(review): body elided in this excerpt — presumably returns `&self.mdpe.move_data`; verify.
198 fn move_data(&self) -> &MoveData<'tcx> {
203 /// `EverInitializedPlaces` tracks all places that might have ever been
204 /// initialized upon reaching a particular point in the control flow
205 /// for a function, without an intervening `StorageDead`.
207 /// This dataflow is used to determine if an immutable local variable may
210 /// For example, in code like the following, we have corresponding
211 /// dataflow information shown in the right-hand comments.
215 /// fn foo(pred: bool) { // ever-init:
217 /// let a = S; let mut b = S; let c; let d; // {a, b }
220 /// drop(a); // {a, b, }
221 /// b = S; // {a, b, }
224 /// drop(b); // {a, b, }
225 /// d = S; // {a, b, d }
229 /// c = S; // {a, b, c, d }
// Note: unlike the analyses above, this one is indexed by `InitIndex`
// (individual initializations), not by `MovePathIndex` — see its
// `GenKillAnalysis` impl further down.
232 pub struct EverInitializedPlaces<'a, 'tcx> {
235 body: &'a Body<'tcx>,
236 mdpe: &'a MoveDataParamEnv<'tcx>,
239 impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
// Constructor: merely bundles the borrowed context; no analysis work happens here.
240 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
241 EverInitializedPlaces { tcx, body, mdpe }
245 impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
// Accessor used by the framework to reach the move-path data.
// NOTE(review): body elided in this excerpt — presumably returns `&self.mdpe.move_data`; verify.
246 fn move_data(&self) -> &MoveData<'tcx> {
251 impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
// Translates a drop-flag state change for `path` into gen/kill effects:
// a place becoming Present gens the bit (maybe initialized), Absent kills it.
// NOTE(review): the fn header and `path` parameter lines are elided in this excerpt.
253 trans: &mut impl GenKill<MovePathIndex>,
255 state: DropFlagState,
258 DropFlagState::Absent => trans.kill(path),
259 DropFlagState::Present => trans.gen(path),
264 impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
// Mirror image of `MaybeInitializedPlaces::update_bits`: a place becoming
// Absent gens the bit (maybe UNinitialized), Present kills it.
// NOTE(review): the fn header and `path` parameter lines are elided in this excerpt.
266 trans: &mut impl GenKill<MovePathIndex>,
268 state: DropFlagState,
271 DropFlagState::Absent => trans.gen(path),
272 DropFlagState::Present => trans.kill(path),
277 impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
// Same gen/kill polarity as `MaybeInitializedPlaces::update_bits`
// (Present ⇒ gen, Absent ⇒ kill); the "definitely" semantics come from the
// intersection join in this analysis's `Domain` (`lattice::Dual`), not from here.
// NOTE(review): the fn header and `path` parameter lines are elided in this excerpt.
279 trans: &mut impl GenKill<MovePathIndex>,
281 state: DropFlagState,
284 DropFlagState::Absent => trans.kill(path),
285 DropFlagState::Present => trans.gen(path),
290 impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
// One bit per move path; union join (the default for `ChunkedBitSet`) gives "maybe" semantics.
291 type Domain = ChunkedBitSet<MovePathIndex>;
292 const NAME: &'static str = "maybe_init";
294 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
295 // bottom = uninitialized
296 ChunkedBitSet::new_empty(self.move_data().move_paths.len())
// Function arguments start out initialized: the entry-effects helper reports
// each argument path as Present, which the (elided) closure body records in `state`.
299 fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
300 drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
301 assert!(s == DropFlagState::Present);
307 impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
308 type Idx = MovePathIndex;
// Statement effect: apply the drop-flag changes this location produces,
// then (only when precise enum drop elaboration is enabled) treat mutable
// borrows as potentially initializing the borrowed place.
// NOTE(review): the fn header and several interior lines are elided in this excerpt.
312 trans: &mut impl GenKill<Self::Idx>,
313 statement: &mir::Statement<'tcx>,
316 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
317 Self::update_bits(trans, path, s)
// Guard on `-Zprecise-enum-drop-elaboration`; the early-return body is elided here.
320 if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
324 // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
325 for_each_mut_borrow(statement, location, |place| {
326 let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
327 on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
// Terminator effect: identical structure to `statement_effect` above.
333 fn terminator_effect(
335 trans: &mut impl GenKill<Self::Idx>,
336 terminator: &mir::Terminator<'tcx>,
339 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
340 Self::update_bits(trans, path, s)
343 if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
347 for_each_mut_borrow(terminator, location, |place| {
348 let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
349 on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
// A successful call initializes each destination place on the return edge.
355 fn call_return_effect(
357 trans: &mut impl GenKill<Self::Idx>,
358 _block: mir::BasicBlock,
359 return_places: CallReturnPlaces<'_, 'tcx>,
361 return_places.for_each(|place| {
362 // when a call returns successfully, that means we need to set
363 // the bits for that dest_place to 1 (initialized).
364 on_lookup_result_bits(
368 self.move_data().rev_lookup.find(place.as_ref()),
// Edge-specific effects for `SwitchInt` on an enum discriminant: along each
// outgoing edge we learn which variant is active, so the move paths of the
// other (inactive) variants can be killed from the maybe-init set.
376 fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
378 block: mir::BasicBlock,
379 discr: &mir::Operand<'tcx>,
380 edge_effects: &mut impl SwitchIntEdgeEffects<G>,
382 if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
// Only applies when the discriminand is a place whose block matches the
// `_x = discriminant(_y); SwitchInt(_x, ..)` pattern (see helper below).
386 let enum_ = discr.place().and_then(|discr| {
387 switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
390 let Some((enum_place, enum_def)) = enum_ else {
394 let mut discriminants = enum_def.discriminants(self.tcx);
395 edge_effects.apply(|trans, edge| {
// `edge.value` is `None` for the otherwise-branch; nothing is known there.
396 let Some(value) = edge.value else {
400 // MIR building adds discriminants to the `values` array in the same order as they
401 // are yielded by `AdtDef::discriminants`. We rely on this to match each
402 // discriminant in `values` to its corresponding variant in linear time.
403 let (variant, _) = discriminants
404 .find(|&(_, discr)| discr.val == value)
405 .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
407 // Kill all move paths that correspond to variants we know to be inactive along this
408 // particular outgoing edge of a `SwitchInt`.
409 drop_flag_effects::on_all_inactive_variants(
415 |mpi| trans.kill(mpi),
421 impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
422 type Domain = ChunkedBitSet<MovePathIndex>;
424 const NAME: &'static str = "maybe_uninit";
426 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
427 // bottom = initialized (start_block_effect counters this at outset)
428 ChunkedBitSet::new_empty(self.move_data().move_paths.len())
431 // sets on_entry bits for Arg places
432 fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
433 // set all bits to 1 (uninit) before gathering counter-evidence
// Then clear the bits for argument paths, which enter initialized.
// NOTE(review): the `insert_all` / `remove` statements are elided in this excerpt.
436 drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
437 assert!(s == DropFlagState::Present);
443 impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
444 type Idx = MovePathIndex;
// Statement effect: just the drop-flag changes (no mutable-borrow handling — see
// the comment below this method explaining why).
// NOTE(review): the fn header line is elided in this excerpt.
448 trans: &mut impl GenKill<Self::Idx>,
449 _statement: &mir::Statement<'tcx>,
452 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
453 Self::update_bits(trans, path, s)
456 // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
457 // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
460 fn terminator_effect(
462 trans: &mut impl GenKill<Self::Idx>,
463 _terminator: &mir::Terminator<'tcx>,
466 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
467 Self::update_bits(trans, path, s)
// A successful call initializes each destination place, clearing its maybe-uninit bits.
471 fn call_return_effect(
473 trans: &mut impl GenKill<Self::Idx>,
474 _block: mir::BasicBlock,
475 return_places: CallReturnPlaces<'_, 'tcx>,
477 return_places.for_each(|place| {
478 // when a call returns successfully, that means we need to set
479 // the bits for that dest_place to 0 (initialized).
480 on_lookup_result_bits(
484 self.move_data().rev_lookup.find(place.as_ref()),
// Dual of the maybe-init edge effect: along a `SwitchInt` edge the inactive
// variants' paths are *gen*ned (marked maybe-uninit). Opt-in via the
// `mark_inactive_variants_as_uninit` flag (see the builder method above).
492 fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
494 block: mir::BasicBlock,
495 discr: &mir::Operand<'tcx>,
496 edge_effects: &mut impl SwitchIntEdgeEffects<G>,
498 if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
502 if !self.mark_inactive_variants_as_uninit {
506 let enum_ = discr.place().and_then(|discr| {
507 switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
510 let Some((enum_place, enum_def)) = enum_ else {
514 let mut discriminants = enum_def.discriminants(self.tcx);
515 edge_effects.apply(|trans, edge| {
// `edge.value` is `None` for the otherwise-branch; nothing is known there.
516 let Some(value) = edge.value else {
520 // MIR building adds discriminants to the `values` array in the same order as they
521 // are yielded by `AdtDef::discriminants`. We rely on this to match each
522 // discriminant in `values` to its corresponding variant in linear time.
523 let (variant, _) = discriminants
524 .find(|&(_, discr)| discr.val == value)
525 .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
527 // Mark all move paths that correspond to variants other than this one as maybe
528 // uninitialized (in reality, they are *definitely* uninitialized).
529 drop_flag_effects::on_all_inactive_variants(
535 |mpi| trans.gen(mpi),
541 impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
542 /// Use set intersection as the join operator.
// `lattice::Dual` flips the lattice so joining becomes intersection — that is
// what turns "maybe" into "definitely". A dense `BitSet` (not chunked) is used here.
543 type Domain = lattice::Dual<BitSet<MovePathIndex>>;
545 const NAME: &'static str = "definite_init";
547 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
548 // bottom = initialized (start_block_effect counters this at outset)
549 lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
552 // sets on_entry bits for Arg places
553 fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
// Only argument paths are definitely initialized on entry; the (elided) line
// above this closure presumably clears the filled bottom value first — verify.
556 drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
557 assert!(s == DropFlagState::Present);
558 state.0.insert(path);
563 impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
564 type Idx = MovePathIndex;
// Statement effect: apply the drop-flag changes this location produces.
// NOTE(review): the fn header line is elided in this excerpt.
568 trans: &mut impl GenKill<Self::Idx>,
569 _statement: &mir::Statement<'tcx>,
572 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
573 Self::update_bits(trans, path, s)
// Terminator effect: identical structure to `statement_effect` above.
577 fn terminator_effect(
579 trans: &mut impl GenKill<Self::Idx>,
580 _terminator: &mir::Terminator<'tcx>,
583 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
584 Self::update_bits(trans, path, s)
// A successful call definitely initializes each destination place.
588 fn call_return_effect(
590 trans: &mut impl GenKill<Self::Idx>,
591 _block: mir::BasicBlock,
592 return_places: CallReturnPlaces<'_, 'tcx>,
594 return_places.for_each(|place| {
595 // when a call returns successfully, that means we need to set
596 // the bits for that dest_place to 1 (initialized).
597 on_lookup_result_bits(
601 self.move_data().rev_lookup.find(place.as_ref()),
610 impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
// Indexed by `InitIndex` (one bit per recorded initialization), not by move path.
611 type Domain = ChunkedBitSet<InitIndex>;
613 const NAME: &'static str = "ever_init";
615 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
616 // bottom = no initialized variables by default
617 ChunkedBitSet::new_empty(self.move_data().inits.len())
// The first `arg_count` entries of `inits` correspond to the function's
// arguments, which are initialized by the caller.
620 fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
621 for arg_init in 0..body.arg_count {
622 state.insert(InitIndex::new(arg_init));
627 impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
628 type Idx = InitIndex;
630 #[instrument(skip(self, trans), level = "debug")]
// Statement effect: gen every init recorded at this location; a `StorageDead`
// kills all inits of that local so it can be re-initialized on a later loop
// iteration. NOTE(review): the fn header line is elided in this excerpt.
633 trans: &mut impl GenKill<Self::Idx>,
634 stmt: &mir::Statement<'tcx>,
637 let move_data = self.move_data();
638 let init_path_map = &move_data.init_path_map;
639 let init_loc_map = &move_data.init_loc_map;
640 let rev_lookup = &move_data.rev_lookup;
642 debug!("initializes move_indexes {:?}", &init_loc_map[location]);
643 trans.gen_all(init_loc_map[location].iter().copied());
645 if let mir::StatementKind::StorageDead(local) = stmt.kind {
646 // End inits for StorageDead, so that an immutable variable can
647 // be reinitialized on the next iteration of the loop.
648 let move_path_index = rev_lookup.find_local(local);
649 debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
650 trans.kill_all(init_path_map[move_path_index].iter().copied());
654 #[instrument(skip(self, trans, _terminator), level = "debug")]
// Terminator effect: like the statement effect, but filters out inits that
// only happen on the non-panic path (e.g. the destination of a call),
// since a terminator may also unwind.
655 fn terminator_effect(
657 trans: &mut impl GenKill<Self::Idx>,
658 _terminator: &mir::Terminator<'tcx>,
661 let (body, move_data) = (self.body, self.move_data());
662 let term = body[location.block].terminator();
663 let init_loc_map = &move_data.init_loc_map;
665 debug!("initializes move_indexes {:?}", init_loc_map[location]);
667 init_loc_map[location]
669 .filter(|init_index| {
670 move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
// On the successful return edge, the call's inits (including the
// `NonPanicPathOnly` ones filtered above) do take effect.
676 fn call_return_effect(
678 trans: &mut impl GenKill<Self::Idx>,
679 block: mir::BasicBlock,
680 _return_places: CallReturnPlaces<'_, 'tcx>,
682 let move_data = self.move_data();
683 let init_loc_map = &move_data.init_loc_map;
685 let call_loc = self.body.terminator_loc(block);
686 for init_index in &init_loc_map[call_loc] {
687 trans.gen(*init_index);
692 /// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
693 /// an enum discriminant.
695 /// We expect such blocks to have a call to `discriminant` as their last statement like so:
699 /// _42 = discriminant(_1)
700 /// SwitchInt(_42, ..)
703 /// If the basic block matches this pattern, this function returns the place corresponding to the
704 /// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
705 fn switch_on_enum_discriminant<'mir, 'tcx>(
707 body: &'mir mir::Body<'tcx>,
708 block: &'mir mir::BasicBlockData<'tcx>,
709 switch_on: mir::Place<'tcx>,
710 ) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
// Walk the block's statements backwards looking for the assignment that
// produced the switched-on discriminant value.
711 for statement in block.statements.iter().rev() {
712 match &statement.kind {
713 mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
714 if *lhs == switch_on =>
716 match discriminated.ty(body, tcx).ty.kind() {
717 ty::Adt(def, _) => return Some((*discriminated, *def)),
719 // `Rvalue::Discriminant` is also used to get the active yield point for a
720 // generator, but we do not need edge-specific effects in that case. This may
721 // change in the future.
722 ty::Generator(..) => return None,
724 t => bug!("`discriminant` called on unexpected type {:?}", t),
// Coverage statements are instrumentation noise; skip past them.
// NOTE(review): the fallthrough arm(s) for other statement kinds are elided here.
727 mir::StatementKind::Coverage(_) => continue,
// MIR visitor that invokes its wrapped callback on each place that is
// mutably borrowed or taken a raw pointer to (see `for_each_mut_borrow`).
734 struct OnMutBorrow<F>(F);
736 impl<F> Visitor<'_> for OnMutBorrow<F>
738 F: FnMut(&mir::Place<'_>),
740 fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
741 // FIXME: Does `&raw const foo` allow mutation? See #90413.
// NOTE(review): the `match rvalue` opener and non-matching arm are elided in this excerpt.
743 mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
744 | mir::Rvalue::AddressOf(_, place) => (self.0)(place),
// Continue the default traversal so nested rvalues are still visited.
749 self.super_rvalue(rvalue, location)
753 /// Calls `f` for each mutable borrow or raw reference in the program.
755 /// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
756 /// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
757 /// other analyses will likely need to check for `!Freeze`.
758 fn for_each_mut_borrow<'tcx>(
759 mir: &impl MirVisitable<'tcx>,
761 f: impl FnMut(&mir::Place<'_>),
// Drive the `OnMutBorrow` visitor over the given statement/terminator at `location`.
763 let mut vis = OnMutBorrow(f);
765 mir.apply(location, &mut vis);