1 //! Dataflow analyses are built upon some interpretation of the
2 //! bitvectors attached to each basic block, represented via a
3 //! zero-sized structure.
5 use rustc_index::bit_set::{BitSet, ChunkedBitSet};
6 use rustc_index::vec::Idx;
7 use rustc_middle::mir::visit::{MirVisitable, Visitor};
8 use rustc_middle::mir::{self, Body, Location};
9 use rustc_middle::ty::{self, TyCtxt};
11 use crate::drop_flag_effects_for_function_entry;
12 use crate::drop_flag_effects_for_location;
13 use crate::elaborate_drops::DropFlagState;
14 use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
15 use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
16 use crate::on_lookup_result_bits;
17 use crate::MoveDataParamEnv;
18 use crate::{drop_flag_effects, on_all_children_bits};
19 use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
25 pub use self::borrowed_locals::borrowed_locals;
26 pub use self::borrowed_locals::MaybeBorrowedLocals;
27 pub use self::liveness::MaybeLiveLocals;
28 pub use self::liveness::MaybeTransitiveLiveLocals;
29 pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive};
31 /// `MaybeInitializedPlaces` tracks all places that might be
32 /// initialized upon reaching a particular point in the control flow
35 /// For example, in code like the following, we have corresponding
36 /// dataflow information shown in the right-hand comments.
40 /// fn foo(pred: bool) { // maybe-init:
42 /// let a = S; let mut b = S; let c; let d; // {a, b}
54 /// c = S; // {a, b, c, d}
58 /// To determine whether a place *must* be initialized at a
59 /// particular control-flow point, one can take the set-difference
60 /// between this data and the data from `MaybeUninitializedPlaces` at the
61 /// corresponding control-flow point.
63 /// Similarly, at a given `drop` statement, the set-intersection
64 /// between this data and `MaybeUninitializedPlaces` yields the set of
65 /// places that would require a dynamic drop-flag at that statement.
66 pub struct MaybeInitializedPlaces<'a, 'tcx> {
// Move-path / param-env data shared by the drop-flag analyses in this module.
69 mdpe: &'a MoveDataParamEnv<'tcx>,
72 impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
// Constructor just bundles the borrowed context; no per-block state is built here.
73 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
74 MaybeInitializedPlaces { tcx, body, mdpe }
// Lets the dataflow framework reach the `MoveData` this analysis is indexed by.
78 impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
79 fn move_data(&self) -> &MoveData<'tcx> {
84 /// `MaybeUninitializedPlaces` tracks all places that might be
85 /// uninitialized upon reaching a particular point in the control flow
88 /// For example, in code like the following, we have corresponding
89 /// dataflow information shown in the right-hand comments.
93 /// fn foo(pred: bool) { // maybe-uninit:
95 /// let a = S; let mut b = S; let c; let d; // { c, d}
98 /// drop(a); // {a, c, d}
99 /// b = S; // {a, c, d}
102 /// drop(b); // { b, c, d}
103 /// d = S; // { b, c }
105 /// } // {a, b, c, d}
107 /// c = S; // {a, b, d}
111 /// To determine whether a place *must* be uninitialized at a
112 /// particular control-flow point, one can take the set-difference
113 /// between this data and the data from `MaybeInitializedPlaces` at the
114 /// corresponding control-flow point.
116 /// Similarly, at a given `drop` statement, the set-intersection
117 /// between this data and `MaybeInitializedPlaces` yields the set of
118 /// places that would require a dynamic drop-flag at that statement.
119 pub struct MaybeUninitializedPlaces<'a, 'tcx> {
121 body: &'a Body<'tcx>,
122 mdpe: &'a MoveDataParamEnv<'tcx>,
// Off by default (see `new`); enabled via the builder method documented below.
124 mark_inactive_variants_as_uninit: bool,
127 impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
128 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
129 MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
132 /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
133 /// enum discriminant.
135 /// This is correct in a vacuum but is not the default because it causes problems in the borrow
136 /// checker, where this information gets propagated along `FakeEdge`s.
// Builder-style: consumes and returns `self` so it can be chained after `new`.
137 pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
138 self.mark_inactive_variants_as_uninit = true;
// Lets the dataflow framework reach the `MoveData` this analysis is indexed by.
143 impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
144 fn move_data(&self) -> &MoveData<'tcx> {
149 /// `DefinitelyInitializedPlaces` tracks all places that are definitely
150 /// initialized upon reaching a particular point in the control flow
153 /// For example, in code like the following, we have corresponding
154 /// dataflow information shown in the right-hand comments.
158 /// fn foo(pred: bool) { // definite-init:
160 /// let a = S; let mut b = S; let c; let d; // {a, b }
163 /// drop(a); // { b, }
167 /// drop(b); // {a, }
176 /// To determine whether a place *may* be uninitialized at a
177 /// particular control-flow point, one can take the set-complement
180 /// Similarly, at a given `drop` statement, the set-difference between
181 /// this data and `MaybeInitializedPlaces` yields the set of places
182 /// that would require a dynamic drop-flag at that statement.
183 pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
185 body: &'a Body<'tcx>,
// Move-path / param-env data shared by the drop-flag analyses in this module.
186 mdpe: &'a MoveDataParamEnv<'tcx>,
189 impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
190 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
191 DefinitelyInitializedPlaces { tcx, body, mdpe }
// Lets the dataflow framework reach the `MoveData` this analysis is indexed by.
195 impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
196 fn move_data(&self) -> &MoveData<'tcx> {
201 /// `EverInitializedPlaces` tracks all places that might have ever been
202 /// initialized upon reaching a particular point in the control flow
203 /// for a function, without an intervening `StorageDead`.
205 /// This dataflow is used to determine if an immutable local variable may
208 /// For example, in code like the following, we have corresponding
209 /// dataflow information shown in the right-hand comments.
213 /// fn foo(pred: bool) { // ever-init:
215 /// let a = S; let mut b = S; let c; let d; // {a, b }
218 /// drop(a); // {a, b, }
219 /// b = S; // {a, b, }
222 /// drop(b); // {a, b, }
223 /// d = S; // {a, b, d }
227 /// c = S; // {a, b, c, d }
// Note: unlike the analyses above, this one is indexed by `InitIndex`
// (individual initialization events), not by move path.
230 pub struct EverInitializedPlaces<'a, 'tcx> {
233 body: &'a Body<'tcx>,
234 mdpe: &'a MoveDataParamEnv<'tcx>,
237 impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
238 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
239 EverInitializedPlaces { tcx, body, mdpe }
// Lets the dataflow framework reach the `MoveData` this analysis is indexed by.
243 impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
244 fn move_data(&self) -> &MoveData<'tcx> {
249 impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
// Translates a drop-flag transition into a gen/kill effect: a `Present` drop
// flag means the place is (maybe) initialized, so its bit is gen'd; an
// `Absent` flag kills the bit.
251 trans: &mut impl GenKill<MovePathIndex>,
253 state: DropFlagState,
256 DropFlagState::Absent => trans.kill(path),
257 DropFlagState::Present => trans.gen(path),
262 impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
// Mirror image of `MaybeInitializedPlaces::update_bits`: an `Absent` drop flag
// gens the maybe-uninit bit, while `Present` kills it.
264 trans: &mut impl GenKill<MovePathIndex>,
266 state: DropFlagState,
269 DropFlagState::Absent => trans.gen(path),
270 DropFlagState::Present => trans.kill(path),
275 impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
// Same bit polarity as `MaybeInitializedPlaces::update_bits`; the "definitely"
// semantics come from the join operator (set intersection via `lattice::Dual`),
// not from the transfer function itself.
277 trans: &mut impl GenKill<MovePathIndex>,
279 state: DropFlagState,
282 DropFlagState::Absent => trans.kill(path),
283 DropFlagState::Present => trans.gen(path),
288 impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
// One bit per move path; a set bit means "maybe initialized at this point".
289 type Domain = ChunkedBitSet<MovePathIndex>;
290 const NAME: &'static str = "maybe_init";
292 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
293 // bottom = uninitialized
294 ChunkedBitSet::new_empty(self.move_data().move_paths.len())
// Entry state: gen the bit for every place the function-entry drop-flag
// effects report as `Present` (asserted below).
297 fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
298 drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
299 assert!(s == DropFlagState::Present);
// Transfer functions: statements and terminators apply drop-flag effects;
// successful call returns gen the destination place; `SwitchInt` edges kill
// move paths of enum variants known inactive along that edge.
305 impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
306 type Idx = MovePathIndex;
310 trans: &mut impl GenKill<Self::Idx>,
311 statement: &mir::Statement<'tcx>,
314 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
315 Self::update_bits(trans, path, s)
318 if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
322 // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
323 for_each_mut_borrow(statement, location, |place| {
// Only exact move-path matches can be updated; imprecise lookups are skipped.
324 let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
325 on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
331 fn terminator_effect(
333 trans: &mut impl GenKill<Self::Idx>,
334 terminator: &mir::Terminator<'tcx>,
337 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
338 Self::update_bits(trans, path, s)
341 if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
// Same mutable-borrow conservatism as in the statement effect above.
345 for_each_mut_borrow(terminator, location, |place| {
346 let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
347 on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
353 fn call_return_effect(
355 trans: &mut impl GenKill<Self::Idx>,
356 _block: mir::BasicBlock,
357 return_places: CallReturnPlaces<'_, 'tcx>,
359 return_places.for_each(|place| {
360 // when a call returns successfully, that means we need to set
361 // the bits for that dest_place to 1 (initialized).
362 on_lookup_result_bits(
366 self.move_data().rev_lookup.find(place.as_ref()),
374 fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
376 block: mir::BasicBlock,
377 discr: &mir::Operand<'tcx>,
378 edge_effects: &mut impl SwitchIntEdgeEffects<G>,
// Edge-specific effects only apply when switching on an enum discriminant.
380 if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
384 let enum_ = discr.place().and_then(|discr| {
385 switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
388 let Some((enum_place, enum_def)) = enum_ else {
392 let mut discriminants = enum_def.discriminants(self.tcx);
393 edge_effects.apply(|trans, edge| {
// `None` is the `otherwise` edge, which rules out no variant.
394 let Some(value) = edge.value else {
398 // MIR building adds discriminants to the `values` array in the same order as they
399 // are yielded by `AdtDef::discriminants`. We rely on this to match each
400 // discriminant in `values` to its corresponding variant in linear time.
401 let (variant, _) = discriminants
402 .find(|&(_, discr)| discr.val == value)
403 .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
405 // Kill all move paths that correspond to variants we know to be inactive along this
406 // particular outgoing edge of a `SwitchInt`.
407 drop_flag_effects::on_all_inactive_variants(
413 |mpi| trans.kill(mpi),
419 impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
// One bit per move path; a set bit means "maybe uninitialized at this point".
420 type Domain = ChunkedBitSet<MovePathIndex>;
422 const NAME: &'static str = "maybe_uninit";
424 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
425 // bottom = initialized (start_block_effect counters this at outset)
426 ChunkedBitSet::new_empty(self.move_data().move_paths.len())
429 // sets on_entry bits for Arg places
430 fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
431 // set all bits to 1 (uninit) before gathering counter-evidence
// Entry effects then clear the bits of places known initialized at entry
// (asserted `Present` below).
434 drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
435 assert!(s == DropFlagState::Present);
// Transfer functions: drop-flag effects with inverted polarity relative to
// `MaybeInitializedPlaces`; call returns kill the destination; `SwitchInt`
// edges (optionally) gen the move paths of inactive enum variants.
441 impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
442 type Idx = MovePathIndex;
446 trans: &mut impl GenKill<Self::Idx>,
447 _statement: &mir::Statement<'tcx>,
450 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
451 Self::update_bits(trans, path, s)
454 // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
455 // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
458 fn terminator_effect(
460 trans: &mut impl GenKill<Self::Idx>,
461 _terminator: &mir::Terminator<'tcx>,
464 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
465 Self::update_bits(trans, path, s)
469 fn call_return_effect(
471 trans: &mut impl GenKill<Self::Idx>,
472 _block: mir::BasicBlock,
473 return_places: CallReturnPlaces<'_, 'tcx>,
475 return_places.for_each(|place| {
476 // when a call returns successfully, that means we need to set
477 // the bits for that dest_place to 0 (initialized).
478 on_lookup_result_bits(
482 self.move_data().rev_lookup.find(place.as_ref()),
490 fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
492 block: mir::BasicBlock,
493 discr: &mir::Operand<'tcx>,
494 edge_effects: &mut impl SwitchIntEdgeEffects<G>,
496 if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
// Opt-in only: see `mark_inactive_variants_as_uninit` for why this is off by default.
500 if !self.mark_inactive_variants_as_uninit {
504 let enum_ = discr.place().and_then(|discr| {
505 switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
508 let Some((enum_place, enum_def)) = enum_ else {
512 let mut discriminants = enum_def.discriminants(self.tcx);
513 edge_effects.apply(|trans, edge| {
// `None` is the `otherwise` edge, which rules out no variant.
514 let Some(value) = edge.value else {
518 // MIR building adds discriminants to the `values` array in the same order as they
519 // are yielded by `AdtDef::discriminants`. We rely on this to match each
520 // discriminant in `values` to its corresponding variant in linear time.
521 let (variant, _) = discriminants
522 .find(|&(_, discr)| discr.val == value)
523 .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
525 // Mark all move paths that correspond to variants other than this one as maybe
526 // uninitialized (in reality, they are *definitely* uninitialized).
527 drop_flag_effects::on_all_inactive_variants(
533 |mpi| trans.gen(mpi),
539 impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
540 /// Use set intersection as the join operator.
// `lattice::Dual` flips the join of the underlying bitset lattice from union
// to intersection, which is what turns "maybe" into "definitely".
541 type Domain = lattice::Dual<BitSet<MovePathIndex>>;
543 const NAME: &'static str = "definite_init";
545 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
546 // bottom = initialized (start_block_effect counters this at outset)
547 lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
550 // sets on_entry bits for Arg places
551 fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
// Entry effects insert the bits of places initialized at function entry
// (asserted `Present` below).
554 drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
555 assert!(s == DropFlagState::Present);
556 state.0.insert(path);
// Transfer functions mirror `MaybeInitializedPlaces` (same gen/kill polarity);
// only the join operator (intersection) differs. Note there is no
// `switch_int_edge_effects` here and no mutable-borrow handling.
561 impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
562 type Idx = MovePathIndex;
566 trans: &mut impl GenKill<Self::Idx>,
567 _statement: &mir::Statement<'tcx>,
570 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
571 Self::update_bits(trans, path, s)
575 fn terminator_effect(
577 trans: &mut impl GenKill<Self::Idx>,
578 _terminator: &mir::Terminator<'tcx>,
581 drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
582 Self::update_bits(trans, path, s)
586 fn call_return_effect(
588 trans: &mut impl GenKill<Self::Idx>,
589 _block: mir::BasicBlock,
590 return_places: CallReturnPlaces<'_, 'tcx>,
592 return_places.for_each(|place| {
593 // when a call returns successfully, that means we need to set
594 // the bits for that dest_place to 1 (initialized).
595 on_lookup_result_bits(
599 self.move_data().rev_lookup.find(place.as_ref()),
608 impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
// One bit per initialization *event* (`InitIndex`), not per move path.
609 type Domain = ChunkedBitSet<InitIndex>;
611 const NAME: &'static str = "ever_init";
613 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
614 // bottom = no initialized variables by default
615 ChunkedBitSet::new_empty(self.move_data().inits.len())
618 fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
// Relies on the first `body.arg_count` entries of `inits` being the
// argument initializations — TODO confirm against `MoveData` construction.
619 for arg_init in 0..body.arg_count {
620 state.insert(InitIndex::new(arg_init));
// Transfer functions: gen every init event recorded at a location; a
// `StorageDead` kills all init events for that local so an immutable binding
// can be legally re-initialized on a later loop iteration.
625 impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
626 type Idx = InitIndex;
628 #[instrument(skip(self, trans), level = "debug")]
631 trans: &mut impl GenKill<Self::Idx>,
632 stmt: &mir::Statement<'tcx>,
635 let move_data = self.move_data();
636 let init_path_map = &move_data.init_path_map;
637 let init_loc_map = &move_data.init_loc_map;
638 let rev_lookup = &move_data.rev_lookup;
640 debug!("initializes move_indexes {:?}", &init_loc_map[location]);
641 trans.gen_all(init_loc_map[location].iter().copied());
643 if let mir::StatementKind::StorageDead(local) = stmt.kind {
644 // End inits for StorageDead, so that an immutable variable can
645 // be reinitialized on the next iteration of the loop.
646 let move_path_index = rev_lookup.find_local(local);
647 debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
648 trans.kill_all(init_path_map[move_path_index].iter().copied());
652 #[instrument(skip(self, trans, _terminator), level = "debug")]
653 fn terminator_effect(
655 trans: &mut impl GenKill<Self::Idx>,
656 _terminator: &mir::Terminator<'tcx>,
659 let (body, move_data) = (self.body, self.move_data());
660 let term = body[location.block].terminator();
661 let init_loc_map = &move_data.init_loc_map;
663 debug!("initializes move_indexes {:?}", init_loc_map[location]);
// Init events that only happen on the non-panic path are filtered out here;
// they are applied by `call_return_effect` on the success edge instead.
665 init_loc_map[location]
667 .filter(|init_index| {
668 move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
674 fn call_return_effect(
676 trans: &mut impl GenKill<Self::Idx>,
677 block: mir::BasicBlock,
678 _return_places: CallReturnPlaces<'_, 'tcx>,
680 let move_data = self.move_data();
681 let init_loc_map = &move_data.init_loc_map;
// The call's init events are keyed by the terminator location of `block`.
683 let call_loc = self.body.terminator_loc(block);
684 for init_index in &init_loc_map[call_loc] {
685 trans.gen(*init_index);
690 /// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
691 /// an enum discriminant.
693 /// We expect such blocks to have a call to `discriminant` as their last statement like so:
697 /// _42 = discriminant(_1)
698 /// SwitchInt(_42, ..)
701 /// If the basic block matches this pattern, this function returns the place corresponding to the
702 /// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
703 fn switch_on_enum_discriminant<'mir, 'tcx>(
705 body: &'mir mir::Body<'tcx>,
706 block: &'mir mir::BasicBlockData<'tcx>,
707 switch_on: mir::Place<'tcx>,
708 ) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
// Scan backwards so the assignment nearest the `SwitchInt` terminator is
// found first.
709 for statement in block.statements.iter().rev() {
710 match &statement.kind {
711 mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
712 if *lhs == switch_on =>
714 match discriminated.ty(body, tcx).ty.kind() {
715 ty::Adt(def, _) => return Some((*discriminated, *def)),
717 // `Rvalue::Discriminant` is also used to get the active yield point for a
718 // generator, but we do not need edge-specific effects in that case. This may
719 // change in the future.
720 ty::Generator(..) => return None,
722 t => bug!("`discriminant` called on unexpected type {:?}", t),
// Coverage statements are instrumentation noise; skip them and keep scanning.
725 mir::StatementKind::Coverage(_) => continue,
/// MIR visitor that invokes the wrapped callback `F` on the place of every
/// mutable borrow (`&mut`) and every raw-pointer `AddressOf` rvalue.
732 struct OnMutBorrow<F>(F);
734 impl<F> Visitor<'_> for OnMutBorrow<F>
736 F: FnMut(&mir::Place<'_>),
738 fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
739 // FIXME: Does `&raw const foo` allow mutation? See #90413.
741 mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
742 | mir::Rvalue::AddressOf(_, place) => (self.0)(place),
// Recurse so rvalues nested inside this one are visited as well.
747 self.super_rvalue(rvalue, location)
751 /// Calls `f` for each mutable borrow or raw reference in the program.
753 /// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
754 /// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
755 /// other analyses will likely need to check for `!Freeze`.
756 fn for_each_mut_borrow<'tcx>(
757 mir: &impl MirVisitable<'tcx>,
759 f: impl FnMut(&mir::Place<'_>),
// Thin wrapper: wrap the callback in the `OnMutBorrow` visitor and apply it
// to the statement/terminator at `location`.
761 let mut vis = OnMutBorrow(f);
763 mir.apply(location, &mut vis);