//! Dataflow analyses are built upon some interpretation of the
//! bitvectors attached to each basic block, represented via a
//! zero-sized structure.
use rustc::mir::{self, Mir, Location};
use rustc::ty::TyCtxt;
use rustc_data_structures::bit_set::{BitSet, BitSetOperator};
use rustc_data_structures::indexed_vec::Idx;

use crate::util::elaborate_drops::DropFlagState;

use super::MoveDataParamEnv;
use super::move_paths::{HasMoveData, MoveData, MovePathIndex, InitIndex};
use super::move_paths::{LookupResult, InitKind};
use super::{BitDenotation, BlockSets, InitialFlow};
use super::drop_flag_effects_for_function_entry;
use super::drop_flag_effects_for_location;
use super::on_lookup_result_bits;
24 pub use self::storage_liveness::*;
28 pub use self::borrowed_locals::*;
30 pub(super) mod borrows;
32 /// `MaybeInitializedPlaces` tracks all places that might be
33 /// initialized upon reaching a particular point in the control flow
36 /// For example, in code like the following, we have corresponding
37 /// dataflow information shown in the right-hand comments.
41 /// fn foo(pred: bool) { // maybe-init:
43 /// let a = S; let b = S; let c; let d; // {a, b}
55 /// c = S; // {a, b, c, d}
59 /// To determine whether a place *must* be initialized at a
60 /// particular control-flow point, one can take the set-difference
61 /// between this data and the data from `MaybeUninitializedPlaces` at the
62 /// corresponding control-flow point.
64 /// Similarly, at a given `drop` statement, the set-intersection
65 /// between this data and `MaybeUninitializedPlaces` yields the set of
66 /// places that would require a dynamic drop-flag at that statement.
67 pub struct MaybeInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
68 tcx: TyCtxt<'a, 'gcx, 'tcx>,
70 mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
73 impl<'a, 'gcx: 'tcx, 'tcx> MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
74 pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
76 mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
79 MaybeInitializedPlaces { tcx: tcx, mir: mir, mdpe: mdpe }
83 impl<'a, 'gcx, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
84 fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
87 /// `MaybeUninitializedPlaces` tracks all places that might be
88 /// uninitialized upon reaching a particular point in the control flow
91 /// For example, in code like the following, we have corresponding
92 /// dataflow information shown in the right-hand comments.
96 /// fn foo(pred: bool) { // maybe-uninit:
98 /// let a = S; let b = S; let c; let d; // { c, d}
101 /// drop(a); // {a, c, d}
102 /// b = S; // {a, c, d}
105 /// drop(b); // { b, c, d}
106 /// d = S; // { b, c }
108 /// } // {a, b, c, d}
110 /// c = S; // {a, b, d}
114 /// To determine whether a place *must* be uninitialized at a
115 /// particular control-flow point, one can take the set-difference
116 /// between this data and the data from `MaybeInitializedPlaces` at the
117 /// corresponding control-flow point.
119 /// Similarly, at a given `drop` statement, the set-intersection
120 /// between this data and `MaybeInitializedPlaces` yields the set of
121 /// places that would require a dynamic drop-flag at that statement.
122 pub struct MaybeUninitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
123 tcx: TyCtxt<'a, 'gcx, 'tcx>,
125 mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
128 impl<'a, 'gcx, 'tcx> MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
129 pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
131 mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
134 MaybeUninitializedPlaces { tcx: tcx, mir: mir, mdpe: mdpe }
138 impl<'a, 'gcx, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
139 fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
142 /// `DefinitelyInitializedPlaces` tracks all places that are definitely
143 /// initialized upon reaching a particular point in the control flow
146 /// For example, in code like the following, we have corresponding
147 /// dataflow information shown in the right-hand comments.
151 /// fn foo(pred: bool) { // definite-init:
153 /// let a = S; let b = S; let c; let d; // {a, b }
156 /// drop(a); // { b, }
160 /// drop(b); // {a, }
169 /// To determine whether a place *may* be uninitialized at a
170 /// particular control-flow point, one can take the set-complement
173 /// Similarly, at a given `drop` statement, the set-difference between
174 /// this data and `MaybeInitializedPlaces` yields the set of places
175 /// that would require a dynamic drop-flag at that statement.
176 pub struct DefinitelyInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
177 tcx: TyCtxt<'a, 'gcx, 'tcx>,
179 mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
182 impl<'a, 'gcx, 'tcx: 'a> DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
183 pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
185 mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
188 DefinitelyInitializedPlaces { tcx: tcx, mir: mir, mdpe: mdpe }
192 impl<'a, 'gcx, 'tcx: 'a> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
193 fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
196 /// `EverInitializedPlaces` tracks all places that might have ever been
197 /// initialized upon reaching a particular point in the control flow
198 /// for a function, without an intervening `Storage Dead`.
200 /// This dataflow is used to determine if an immutable local variable may
203 /// For example, in code like the following, we have corresponding
204 /// dataflow information shown in the right-hand comments.
208 /// fn foo(pred: bool) { // ever-init:
210 /// let a = S; let b = S; let c; let d; // {a, b }
213 /// drop(a); // {a, b, }
214 /// b = S; // {a, b, }
217 /// drop(b); // {a, b, }
218 /// d = S; // {a, b, d }
222 /// c = S; // {a, b, c, d }
225 pub struct EverInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
226 tcx: TyCtxt<'a, 'gcx, 'tcx>,
228 mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
231 impl<'a, 'gcx: 'tcx, 'tcx: 'a> EverInitializedPlaces<'a, 'gcx, 'tcx> {
232 pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
234 mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
237 EverInitializedPlaces { tcx: tcx, mir: mir, mdpe: mdpe }
241 impl<'a, 'gcx, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'gcx, 'tcx> {
242 fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
246 impl<'a, 'gcx, 'tcx> MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
247 fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex,
248 state: DropFlagState)
251 DropFlagState::Absent => sets.kill(path),
252 DropFlagState::Present => sets.gen(path),
257 impl<'a, 'gcx, 'tcx> MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
258 fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex,
259 state: DropFlagState)
262 DropFlagState::Absent => sets.gen(path),
263 DropFlagState::Present => sets.kill(path),
268 impl<'a, 'gcx, 'tcx> DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
269 fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex,
270 state: DropFlagState)
273 DropFlagState::Absent => sets.kill(path),
274 DropFlagState::Present => sets.gen(path),
279 impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
280 type Idx = MovePathIndex;
281 fn name() -> &'static str { "maybe_init" }
282 fn bits_per_block(&self) -> usize {
283 self.move_data().move_paths.len()
286 fn start_block_effect(&self, entry_set: &mut BitSet<MovePathIndex>) {
287 drop_flag_effects_for_function_entry(
288 self.tcx, self.mir, self.mdpe,
290 assert!(s == DropFlagState::Present);
291 entry_set.insert(path);
295 fn statement_effect(&self,
296 sets: &mut BlockSets<'_, MovePathIndex>,
299 drop_flag_effects_for_location(
300 self.tcx, self.mir, self.mdpe,
302 |path, s| Self::update_bits(sets, path, s)
306 fn terminator_effect(&self,
307 sets: &mut BlockSets<'_, MovePathIndex>,
310 drop_flag_effects_for_location(
311 self.tcx, self.mir, self.mdpe,
313 |path, s| Self::update_bits(sets, path, s)
317 fn propagate_call_return(
319 in_out: &mut BitSet<MovePathIndex>,
320 _call_bb: mir::BasicBlock,
321 _dest_bb: mir::BasicBlock,
322 dest_place: &mir::Place<'tcx>,
324 // when a call returns successfully, that means we need to set
325 // the bits for that dest_place to 1 (initialized).
326 on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
327 self.move_data().rev_lookup.find(dest_place),
328 |mpi| { in_out.insert(mpi); });
332 impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
333 type Idx = MovePathIndex;
334 fn name() -> &'static str { "maybe_uninit" }
335 fn bits_per_block(&self) -> usize {
336 self.move_data().move_paths.len()
339 // sets on_entry bits for Arg places
340 fn start_block_effect(&self, entry_set: &mut BitSet<MovePathIndex>) {
341 // set all bits to 1 (uninit) before gathering counterevidence
342 assert!(self.bits_per_block() == entry_set.domain_size());
343 entry_set.insert_all();
345 drop_flag_effects_for_function_entry(
346 self.tcx, self.mir, self.mdpe,
348 assert!(s == DropFlagState::Present);
349 entry_set.remove(path);
353 fn statement_effect(&self,
354 sets: &mut BlockSets<'_, MovePathIndex>,
357 drop_flag_effects_for_location(
358 self.tcx, self.mir, self.mdpe,
360 |path, s| Self::update_bits(sets, path, s)
364 fn terminator_effect(&self,
365 sets: &mut BlockSets<'_, MovePathIndex>,
368 drop_flag_effects_for_location(
369 self.tcx, self.mir, self.mdpe,
371 |path, s| Self::update_bits(sets, path, s)
375 fn propagate_call_return(
377 in_out: &mut BitSet<MovePathIndex>,
378 _call_bb: mir::BasicBlock,
379 _dest_bb: mir::BasicBlock,
380 dest_place: &mir::Place<'tcx>,
382 // when a call returns successfully, that means we need to set
383 // the bits for that dest_place to 0 (initialized).
384 on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
385 self.move_data().rev_lookup.find(dest_place),
386 |mpi| { in_out.remove(mpi); });
390 impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
391 type Idx = MovePathIndex;
392 fn name() -> &'static str { "definite_init" }
393 fn bits_per_block(&self) -> usize {
394 self.move_data().move_paths.len()
397 // sets on_entry bits for Arg places
398 fn start_block_effect(&self, entry_set: &mut BitSet<MovePathIndex>) {
401 drop_flag_effects_for_function_entry(
402 self.tcx, self.mir, self.mdpe,
404 assert!(s == DropFlagState::Present);
405 entry_set.insert(path);
409 fn statement_effect(&self,
410 sets: &mut BlockSets<'_, MovePathIndex>,
413 drop_flag_effects_for_location(
414 self.tcx, self.mir, self.mdpe,
416 |path, s| Self::update_bits(sets, path, s)
420 fn terminator_effect(&self,
421 sets: &mut BlockSets<'_, MovePathIndex>,
424 drop_flag_effects_for_location(
425 self.tcx, self.mir, self.mdpe,
427 |path, s| Self::update_bits(sets, path, s)
431 fn propagate_call_return(
433 in_out: &mut BitSet<MovePathIndex>,
434 _call_bb: mir::BasicBlock,
435 _dest_bb: mir::BasicBlock,
436 dest_place: &mir::Place<'tcx>,
438 // when a call returns successfully, that means we need to set
439 // the bits for that dest_place to 1 (initialized).
440 on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
441 self.move_data().rev_lookup.find(dest_place),
442 |mpi| { in_out.insert(mpi); });
446 impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for EverInitializedPlaces<'a, 'gcx, 'tcx> {
447 type Idx = InitIndex;
448 fn name() -> &'static str { "ever_init" }
449 fn bits_per_block(&self) -> usize {
450 self.move_data().inits.len()
453 fn start_block_effect(&self, entry_set: &mut BitSet<InitIndex>) {
454 for arg_init in 0..self.mir.arg_count {
455 entry_set.insert(InitIndex::new(arg_init));
459 fn statement_effect(&self,
460 sets: &mut BlockSets<'_, InitIndex>,
461 location: Location) {
462 let (_, mir, move_data) = (self.tcx, self.mir, self.move_data());
463 let stmt = &mir[location.block].statements[location.statement_index];
464 let init_path_map = &move_data.init_path_map;
465 let init_loc_map = &move_data.init_loc_map;
466 let rev_lookup = &move_data.rev_lookup;
468 debug!("statement {:?} at loc {:?} initializes move_indexes {:?}",
469 stmt, location, &init_loc_map[location]);
470 sets.gen_all(&init_loc_map[location]);
473 mir::StatementKind::StorageDead(local) |
474 mir::StatementKind::StorageLive(local) => {
475 // End inits for StorageDead and StorageLive, so that an immutable
476 // variable can be reinitialized on the next iteration of the loop.
478 // FIXME(#46525): We *need* to do this for StorageLive as well as
479 // StorageDead, because lifetimes of match bindings with guards are
480 // weird - i.e., this code
486 // if { println!("a={}", a); false } => {}
492 // runs the guard twice, using the same binding for `a`, and only
493 // storagedeads after everything ends, so if we don't regard the
494 // storagelive as killing storage, we would have a multiple assignment
495 // to immutable data error.
496 if let LookupResult::Exact(mpi) = rev_lookup.find(&mir::Place::Local(local)) {
497 debug!("stmt {:?} at loc {:?} clears the ever initialized status of {:?}",
498 stmt, location, &init_path_map[mpi]);
499 sets.kill_all(&init_path_map[mpi]);
506 fn terminator_effect(&self,
507 sets: &mut BlockSets<'_, InitIndex>,
510 let (mir, move_data) = (self.mir, self.move_data());
511 let term = mir[location.block].terminator();
512 let init_loc_map = &move_data.init_loc_map;
513 debug!("terminator {:?} at loc {:?} initializes move_indexes {:?}",
514 term, location, &init_loc_map[location]);
516 init_loc_map[location].iter().filter(|init_index| {
517 move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
522 fn propagate_call_return(
524 in_out: &mut BitSet<InitIndex>,
525 call_bb: mir::BasicBlock,
526 _dest_bb: mir::BasicBlock,
527 _dest_place: &mir::Place<'tcx>,
529 let move_data = self.move_data();
530 let bits_per_block = self.bits_per_block();
531 let init_loc_map = &move_data.init_loc_map;
533 let call_loc = Location {
535 statement_index: self.mir[call_bb].statements.len(),
537 for init_index in &init_loc_map[call_loc] {
538 assert!(init_index.index() < bits_per_block);
539 in_out.insert(*init_index);
544 impl<'a, 'gcx, 'tcx> BitSetOperator for MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
546 fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
547 inout_set.union(in_set) // "maybe" means we union effects of both preds
551 impl<'a, 'gcx, 'tcx> BitSetOperator for MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
553 fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
554 inout_set.union(in_set) // "maybe" means we union effects of both preds
558 impl<'a, 'gcx, 'tcx> BitSetOperator for DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
560 fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
561 inout_set.intersect(in_set) // "definitely" means we intersect effects of both preds
565 impl<'a, 'gcx, 'tcx> BitSetOperator for EverInitializedPlaces<'a, 'gcx, 'tcx> {
567 fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
568 inout_set.union(in_set) // inits from both preds are in scope
// The way that dataflow fixed point iteration works, you want to
// start at bottom and work your way to a fixed point. Control-flow
// merges will apply the `join` operator to each block entry's current
// state (which starts at that bottom value).
//
// This means, for propagation across the graph, that you either want
// to start at all-zeroes and then use Union as your merge when
// propagating, or you start at all-ones and then use Intersect as
// your merge when propagating.
582 impl<'a, 'gcx, 'tcx> InitialFlow for MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
584 fn bottom_value() -> bool {
585 false // bottom = uninitialized
589 impl<'a, 'gcx, 'tcx> InitialFlow for MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
591 fn bottom_value() -> bool {
592 false // bottom = initialized (start_block_effect counters this at outset)
596 impl<'a, 'gcx, 'tcx> InitialFlow for DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
598 fn bottom_value() -> bool {
599 true // bottom = initialized (start_block_effect counters this at outset)
603 impl<'a, 'gcx, 'tcx> InitialFlow for EverInitializedPlaces<'a, 'gcx, 'tcx> {
605 fn bottom_value() -> bool {
606 false // bottom = no initialized variables by default