1 use syntax::ast::{self, MetaItem};
2 use syntax::symbol::{Symbol, sym};
4 use rustc_data_structures::bit_set::{BitSet, BitSetOperator, HybridBitSet};
5 use rustc_data_structures::indexed_vec::Idx;
6 use rustc_data_structures::work_queue::WorkQueue;
8 use rustc::hir::def_id::DefId;
9 use rustc::ty::{self, TyCtxt};
10 use rustc::mir::{self, Mir, BasicBlock, BasicBlockData, Location, Statement, Terminator};
11 use rustc::mir::traversal;
12 use rustc::session::Session;
14 use std::borrow::Borrow;
17 use std::path::PathBuf;
20 pub use self::impls::{MaybeStorageLive};
21 pub use self::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
22 pub use self::impls::DefinitelyInitializedPlaces;
23 pub use self::impls::EverInitializedPlaces;
24 pub use self::impls::borrows::Borrows;
25 pub use self::impls::HaveBeenBorrowedLocals;
26 pub use self::at_location::{FlowAtLocation, FlowsAtLocation};
27 pub(crate) use self::drop_flag_effects::*;
29 use self::move_paths::MoveData;
32 pub mod drop_flag_effects;
// Convenience module re-exporting the index newtypes used by the various
// dataflow analyses (move paths, move-outs, initializations, borrows)
// under a single path.
37 pub(crate) mod indexes {
38 pub(crate) use super::{
39 move_paths::{MovePathIndex, MoveOutIndex, InitIndex},
40 impls::borrows::BorrowIndex,
// Couples a dataflow analysis with optional graphviz-output destinations
// used by the pre-/post-flow instrumentation hooks below.
44 pub(crate) struct DataflowBuilder<'a, 'tcx: 'a, BD>
46 BD: BitDenotation<'tcx>
// The analysis state (entry/gen/kill sets plus the operator) being driven.
49 flow_state: DataflowAnalysis<'a, 'tcx, BD>,
// If set, render the graph to this path *before* propagation runs.
50 print_preflow_to: Option<String>,
// If set, render the graph to this path *after* propagation runs.
51 print_postflow_to: Option<String>,
54 /// `DebugFormatted` encapsulates the "{:?}" rendering of some
55 /// arbitrary value. This way: you pay cost of allocating an extra
56 /// string (as well as that of rendering up-front); in exchange, you
57 /// don't have to hand over ownership of your value or deal with
59 pub(crate) struct DebugFormatted(String);
// Eagerly renders `input` via its `Debug` impl, capturing the result as a `String`.
62 pub fn new(input: &dyn fmt::Debug) -> DebugFormatted {
63 DebugFormatted(format!("{:?}", input))
// `Debug` for `DebugFormatted` just replays the pre-rendered string verbatim
// (note: `{}` not `{:?}`, so the stored text is not re-escaped/quoted).
67 impl fmt::Debug for DebugFormatted {
68 fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
69 write!(w, "{}", self.0)
// Driver trait: a type implementing this can set up and solve a dataflow
// problem whose transfer functions are described by `BD`.
73 pub(crate) trait Dataflow<'tcx, BD: BitDenotation<'tcx>> {
74 /// Sets up and runs the dataflow problem, using `p` to render results if
75 /// implementation so chooses.
76 fn dataflow<P>(&mut self, p: P) where P: Fn(&BD, BD::Idx) -> DebugFormatted {
77 let _ = p; // default implementation does not instrument process.
82 /// Sets up the entry, gen, and kill sets for this instance of a dataflow problem.
83 fn build_sets(&mut self);
85 /// Finds a fixed-point solution to this instance of a dataflow problem.
86 fn propagate(&mut self);
// `DataflowBuilder` runs the analysis with graphviz instrumentation hooked
// in before and after propagation (each hook is a no-op unless the
// corresponding output path was requested).
89 impl<'a, 'tcx: 'a, BD> Dataflow<'tcx, BD> for DataflowBuilder<'a, 'tcx, BD>
91 BD: BitDenotation<'tcx>
93 fn dataflow<P>(&mut self, p: P) where P: Fn(&BD, BD::Idx) -> DebugFormatted {
94 self.flow_state.build_sets();
// Render the pre-fixed-point state (if requested), then solve, then render again.
95 self.pre_dataflow_instrumentation(|c,i| p(c,i)).unwrap();
96 self.flow_state.propagate();
97 self.post_dataflow_instrumentation(|c,i| p(c,i)).unwrap();
// The uninstrumented entry points simply delegate to the underlying analysis.
100 fn build_sets(&mut self) { self.flow_state.build_sets(); }
101 fn propagate(&mut self) { self.flow_state.propagate(); }
// Scans `attrs` for a `#[rustc_mir(...)]` attribute containing a meta-item
// named `name`, returning a clone of that meta-item if found.
104 pub(crate) fn has_rustc_mir_with(attrs: &[ast::Attribute], name: Symbol) -> Option<MetaItem> {
106 if attr.check_name(sym::rustc_mir) {
// `meta_item_list()` yields the parenthesized list, if any; flat_map
// tolerates attributes written without a list.
107 let items = attr.meta_item_list();
108 for item in items.iter().flat_map(|l| l.iter()) {
109 match item.meta_item() {
110 Some(mi) if mi.check_name(name) => return Some(mi.clone()),
// Pairs the move-path data for a MIR body with the parameter environment
// needed to interpret its types during move/init analyses.
119 pub struct MoveDataParamEnv<'gcx, 'tcx> {
120 pub(crate) move_data: MoveData<'tcx>,
121 pub(crate) param_env: ty::ParamEnv<'gcx>,
// Convenience entry point: constructs a `DataflowAnalysis` for `bd` over
// `mir` and runs it to a fixed point, returning the final results.
// `p` renders individual bits for the optional graphviz instrumentation.
124 pub(crate) fn do_dataflow<'a, 'gcx, 'tcx, BD, P>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
127 attributes: &[ast::Attribute],
// Blocks whose unwind edges should be ignored during propagation.
128 dead_unwinds: &BitSet<BasicBlock>,
131 -> DataflowResults<'tcx, BD>
132 where BD: BitDenotation<'tcx> + InitialFlow,
133 P: Fn(&BD, BD::Idx) -> DebugFormatted
135 let flow_state = DataflowAnalysis::new(mir, dead_unwinds, bd);
136 flow_state.run(tcx, def_id, attributes, p)
139 impl<'a, 'gcx: 'tcx, 'tcx: 'a, BD> DataflowAnalysis<'a, 'tcx, BD> where BD: BitDenotation<'tcx>
// Runs the analysis to completion, honoring `#[rustc_mir(borrowck_graphviz_*)]`
// attributes to optionally dump graphviz output before/after propagation.
141 pub(crate) fn run<P>(self,
142 tcx: TyCtxt<'a, 'gcx, 'tcx>,
144 attributes: &[ast::Attribute],
145 p: P) -> DataflowResults<'tcx, BD>
146 where P: Fn(&BD, BD::Idx) -> DebugFormatted
// Extracts the string value of `#[rustc_mir(<name> = "...")]`, emitting a
// diagnostic when the attribute is present but carries no path.
148 let name_found = |sess: &Session, attrs: &[ast::Attribute], name| -> Option<String> {
149 if let Some(item) = has_rustc_mir_with(attrs, name) {
150 if let Some(s) = item.value_str() {
151 return Some(s.to_string())
155 &format!("{} attribute requires a path", item.path));
162 let print_preflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_preflow);
163 let print_postflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_postflow);
165 let mut mbcx = DataflowBuilder {
167 print_preflow_to, print_postflow_to, flow_state: self,
171 mbcx.flow_state.results()
// Short-lived helper holding a mutable borrow of the analysis while the
// fixed-point worklist loop (`walk_cfg`) runs.
175 struct PropagationContext<'b, 'a: 'b, 'tcx: 'a, O> where O: 'b + BitDenotation<'tcx>
177 builder: &'b mut DataflowAnalysis<'a, 'tcx, O>,
180 impl<'a, 'tcx: 'a, BD> DataflowAnalysis<'a, 'tcx, BD> where BD: BitDenotation<'tcx>
// Iterates to a fixed point; `temp` is a scratch bitset reused for every
// block so the inner loop never allocates.
182 fn propagate(&mut self) {
183 let mut temp = BitSet::new_empty(self.flow_state.sets.bits_per_block);
184 let mut propcx = PropagationContext {
187 propcx.walk_cfg(&mut temp);
// Builds the per-block transfer functions (gen/kill) and the START_BLOCK
// entry state by replaying each block's statements and terminator once.
190 fn build_sets(&mut self) {
191 // First we need to build the entry-, gen- and kill-sets.
194 let sets = &mut self.flow_state.sets.for_block(mir::START_BLOCK.index());
195 self.flow_state.operator.start_block_effect(&mut sets.on_entry);
198 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
199 let &mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = data;
201 let mut interim_state;
202 let sets = &mut self.flow_state.sets.for_block(bb.index());
203 let track_intrablock = BD::accumulates_intrablock_state();
// For analyses that accumulate intrablock state, work on a local clone of
// `on_entry` so effects become visible statement-by-statement without
// disturbing the block's real entry set.
204 if track_intrablock {
205 debug!("swapping in mutable on_entry, initially {:?}", sets.on_entry);
206 interim_state = sets.on_entry.to_owned();
207 sets.on_entry = &mut interim_state;
209 for j_stmt in 0..statements.len() {
210 let location = Location { block: bb, statement_index: j_stmt };
211 self.flow_state.operator.before_statement_effect(sets, location);
212 self.flow_state.operator.statement_effect(sets, location);
213 if track_intrablock {
214 sets.apply_local_effect();
// The terminator is modeled at statement index == statements.len().
218 if terminator.is_some() {
219 let location = Location { block: bb, statement_index: statements.len() };
220 self.flow_state.operator.before_terminator_effect(sets, location);
221 self.flow_state.operator.terminator_effect(sets, location);
222 if track_intrablock {
223 sets.apply_local_effect();
230 impl<'b, 'a: 'b, 'tcx: 'a, BD> PropagationContext<'b, 'a, 'tcx, BD> where BD: BitDenotation<'tcx>
// Classic worklist loop: every block starts dirty; a block is re-queued
// whenever joining new bits into its entry set changes that set.
232 fn walk_cfg(&mut self, in_out: &mut BitSet<BD::Idx>) {
233 let mut dirty_queue: WorkQueue<mir::BasicBlock> =
234 WorkQueue::with_all(self.builder.mir.basic_blocks().len());
235 let mir = self.builder.mir;
236 while let Some(bb) = dirty_queue.pop() {
237 let bb_data = &mir[bb];
239 let sets = self.builder.flow_state.sets.for_block(bb.index());
240 debug_assert!(in_out.words().len() == sets.on_entry.words().len());
// Compute the block's exit state into the scratch set:
// exit = (entry | gen) - kill.
241 in_out.overwrite(sets.on_entry);
242 in_out.union(sets.gen_set);
243 in_out.subtract(sets.kill_set);
245 self.builder.propagate_bits_into_graph_successors_of(
246 in_out, (bb, bb_data), &mut dirty_queue);
// Derives an output path from `path` by prefixing its file name with
// `context` (e.g. "foo.dot" -> "<context>_foo.dot" in the same directory).
// Panics if `path` has no file name or it is not valid UTF-8.
251 fn dataflow_path(context: &str, path: &str) -> PathBuf {
252 let mut path = PathBuf::from(path);
253 let new_file_name = {
254 let orig_file_name = path.file_name().unwrap().to_str().unwrap();
255 format!("{}_{}", context, orig_file_name)
257 path.set_file_name(new_file_name);
261 impl<'a, 'tcx: 'a, BD> DataflowBuilder<'a, 'tcx, BD> where BD: BitDenotation<'tcx>
// Renders the dataflow graph before propagation, but only when a
// `borrowck_graphviz_preflow` path was supplied; otherwise a no-op.
263 fn pre_dataflow_instrumentation<P>(&self, p: P) -> io::Result<()>
264 where P: Fn(&BD, BD::Idx) -> DebugFormatted
266 if let Some(ref path_str) = self.print_preflow_to {
// Analysis name is folded into the file name to disambiguate outputs.
267 let path = dataflow_path(BD::name(), path_str);
268 graphviz::print_borrowck_graph_to(self, &path, p)
// Same as above, but after the fixed point has been reached
// (`borrowck_graphviz_postflow`).
274 fn post_dataflow_instrumentation<P>(&self, p: P) -> io::Result<()>
275 where P: Fn(&BD, BD::Idx) -> DebugFormatted
277 if let Some(ref path_str) = self.print_postflow_to {
278 let path = dataflow_path(BD::name(), path_str);
279 graphviz::print_borrowck_graph_to(self, &path, p)
286 /// DataflowResultsConsumer abstracts over walking the MIR with some
287 /// already constructed dataflow results.
289 /// It abstracts over the FlowState and also completely hides the
290 /// underlying flow analysis results, because it needs to handle cases
291 /// where we are combining the results of *multiple* flow analyses
292 /// (e.g., borrows + inits + uninits).
293 pub(crate) trait DataflowResultsConsumer<'a, 'tcx: 'a> {
294 type FlowState: FlowsAtLocation;
296 // Observation Hooks: override (at least one of) these to get analysis feedback.
297 fn visit_block_entry(&mut self,
299 _flow_state: &Self::FlowState) {}
301 fn visit_statement_entry(&mut self,
303 _stmt: &Statement<'tcx>,
304 _flow_state: &Self::FlowState) {}
306 fn visit_terminator_entry(&mut self,
308 _term: &Terminator<'tcx>,
309 _flow_state: &Self::FlowState) {}
311 // Main entry point: this drives the processing of results.
// Walks blocks in reverse postorder, resetting the cursor to each block's
// entry state and then replaying that block's statements.
313 fn analyze_results(&mut self, flow_uninit: &mut Self::FlowState) {
314 let flow = flow_uninit;
315 for (bb, _) in traversal::reverse_postorder(self.mir()) {
316 flow.reset_to_entry_of(bb);
317 self.process_basic_block(bb, flow);
// Visits every statement (and the terminator, if any) of `bb`, invoking
// the observation hooks with the flow state *on entry* to each location.
321 fn process_basic_block(&mut self, bb: BasicBlock, flow_state: &mut Self::FlowState) {
322 let BasicBlockData { ref statements, ref terminator, is_cleanup: _ } =
324 let mut location = Location { block: bb, statement_index: 0 };
325 for stmt in statements.iter() {
326 flow_state.reconstruct_statement_effect(location);
327 self.visit_statement_entry(location, stmt, flow_state);
328 flow_state.apply_local_effect(location);
329 location.statement_index += 1;
332 if let Some(ref term) = *terminator {
333 flow_state.reconstruct_terminator_effect(location);
334 self.visit_terminator_entry(location, term, flow_state);
336 // We don't need to apply the effect of the terminator,
337 // since we are only visiting dataflow state on control
338 // flow entry to the various nodes. (But we still need to
339 // reconstruct the effect, because the visit method might
344 // Delegated Hooks: Provide access to the MIR and process the flow state.
346 fn mir(&self) -> &'a Mir<'tcx>;
// Reconstructs the dataflow state holding *on entry to* `loc` by starting
// from the containing block's entry set and replaying the effects of every
// earlier statement, plus the pre-effect of the statement/terminator at
// `loc` itself.
349 pub fn state_for_location<'tcx, T: BitDenotation<'tcx>>(loc: Location,
351 result: &DataflowResults<'tcx, T>,
354 let mut on_entry = result.sets().on_entry_set_for(loc.block.index()).to_owned();
// Fresh (empty) hybrid gen/kill scratch sets, sized to match `on_entry`.
355 let mut kill_set = on_entry.to_hybrid();
356 let mut gen_set = kill_set.clone();
359 let mut sets = BlockSets {
360 on_entry: &mut on_entry,
361 kill_set: &mut kill_set,
362 gen_set: &mut gen_set,
365 for stmt in 0..loc.statement_index {
366 let mut stmt_loc = loc;
367 stmt_loc.statement_index = stmt;
368 analysis.before_statement_effect(&mut sets, stmt_loc);
369 analysis.statement_effect(&mut sets, stmt_loc);
372 // Apply the pre-statement effect of the statement we're evaluating.
// An index equal to `statements.len()` denotes the block's terminator.
373 if loc.statement_index == mir[loc.block].statements.len() {
374 analysis.before_terminator_effect(&mut sets, loc);
376 analysis.before_statement_effect(&mut sets, loc);
// The core analysis driver: the flow state being solved plus the set of
// blocks whose unwind edges are known dead (and thus not propagated into).
383 pub struct DataflowAnalysis<'a, 'tcx: 'a, O> where O: BitDenotation<'tcx>
385 flow_state: DataflowState<'tcx, O>,
386 dead_unwinds: &'a BitSet<mir::BasicBlock>,
390 impl<'a, 'tcx: 'a, O> DataflowAnalysis<'a, 'tcx, O> where O: BitDenotation<'tcx>
// Consumes the analysis, yielding the (solved) state as `DataflowResults`.
392 pub fn results(self) -> DataflowResults<'tcx, O> {
393 DataflowResults(self.flow_state)
396 pub fn mir(&self) -> &'a Mir<'tcx> { self.mir }
// Newtype wrapper marking a `DataflowState` as fully solved.
399 pub struct DataflowResults<'tcx, O>(pub(crate) DataflowState<'tcx, O>) where O: BitDenotation<'tcx>;
401 impl<'tcx, O: BitDenotation<'tcx>> DataflowResults<'tcx, O> {
// Read-only access to the per-block bitsets of the solved analysis.
402 pub fn sets(&self) -> &AllSets<O::Idx> {
// The transfer-function operator the analysis was run with.
406 pub fn operator(&self) -> &O {
411 /// State of a dataflow analysis; couples a collection of bit sets
412 /// with operator used to initialize and merge bits during analysis.
413 pub struct DataflowState<'tcx, O: BitDenotation<'tcx>>
415 /// All the sets for the analysis. (Factored into its
416 /// own structure so that we can borrow it mutably
417 /// on its own separate from other fields.)
418 pub sets: AllSets<O::Idx>,
420 /// operator used to initialize, combine, and interpret bits.
421 pub(crate) operator: O,
424 impl<'tcx, O: BitDenotation<'tcx>> DataflowState<'tcx, O> {
// Renders every set bit of `set` via `render_idx` — used by the graphviz
// instrumentation to label nodes with human-readable bit names.
425 pub(crate) fn interpret_set<'c, P>(&self,
427 set: &BitSet<O::Idx>,
429 -> Vec<DebugFormatted>
430 where P: Fn(&O, O::Idx) -> DebugFormatted
432 set.iter().map(|i| render_idx(o, i)).collect()
// Same as `interpret_set`, but for the hybrid (sparse-or-dense) sets used
// for gen/kill.
435 pub(crate) fn interpret_hybrid_set<'c, P>(&self,
437 set: &HybridBitSet<O::Idx>,
439 -> Vec<DebugFormatted>
440 where P: Fn(&O, O::Idx) -> DebugFormatted
442 set.iter().map(|i| render_idx(o, i)).collect()
// Flat storage for every block's entry/gen/kill sets, indexed by the
// block's position in the MIR body.
447 pub struct AllSets<E: Idx> {
448 /// Analysis bitwidth for each block.
449 bits_per_block: usize,
451 /// For each block, bits valid on entry to the block.
452 on_entry_sets: Vec<BitSet<E>>,
454 /// For each block, bits generated by executing the statements +
455 /// terminator in the block -- with one caveat. In particular, for
456 /// *call terminators*, the effect of storing the destination is
457 /// not included, since that only takes effect on the **success**
458 /// edge (and not the unwind edge).
459 gen_sets: Vec<HybridBitSet<E>>,
461 /// For each block, bits killed by executing the statements +
462 /// terminator in the block -- with one caveat. In particular, for
463 /// *call terminators*, the effect of storing the destination is
464 /// not included, since that only takes effect on the **success**
465 /// edge (and not the unwind edge).
466 kill_sets: Vec<HybridBitSet<E>>,
469 /// Triple of sets associated with a given block.
471 /// Generally, one sets up `on_entry`, `gen_set`, and `kill_set` for
472 /// each block individually, and then runs the dataflow analysis which
473 /// iteratively modifies the various `on_entry` sets (but leaves the
474 /// other two sets unchanged, since they represent the effect of the
475 /// block, which should be invariant over the course of the analysis).
477 /// It is best to ensure that the intersection of `gen_set` and
478 /// `kill_set` is empty; otherwise the results of the dataflow will
479 /// have a hidden dependency on what order the bits are generated and
480 /// killed during the iteration. (This is such a good idea that the
481 /// `fn gen` and `fn kill` methods that set their state enforce this
// Borrowed view — `AllSets::for_block` hands out one of these per block.
484 pub struct BlockSets<'a, E: Idx> {
485 /// Dataflow state immediately before control flow enters the given block.
486 pub(crate) on_entry: &'a mut BitSet<E>,
488 /// Bits that are set to 1 by the time we exit the given block. Hybrid
489 /// because it usually contains only 0 or 1 elements.
490 pub(crate) gen_set: &'a mut HybridBitSet<E>,
492 /// Bits that are set to 0 by the time we exit the given block. Hybrid
493 /// because it usually contains only 0 or 1 elements.
494 pub(crate) kill_set: &'a mut HybridBitSet<E>,
497 impl<'a, E:Idx> BlockSets<'a, E> {
// Marks `e` as generated; removing it from `kill_set` keeps the two sets
// disjoint (see the invariant documented on `BlockSets`).
498 fn gen(&mut self, e: E) {
499 self.gen_set.insert(e);
500 self.kill_set.remove(e);
// Bulk form of `gen` over any iterator of borrowable indices.
502 fn gen_all<I>(&mut self, i: I)
503 where I: IntoIterator,
507 self.gen(*j.borrow());
// Marks `e` as killed; the mirror image of `gen`, again preserving
// gen/kill disjointness.
511 fn kill(&mut self, e: E) {
512 self.gen_set.remove(e);
513 self.kill_set.insert(e);
// Bulk form of `kill`.
516 fn kill_all<I>(&mut self, i: I)
517 where I: IntoIterator,
521 self.kill(*j.borrow());
// Folds the accumulated gen/kill effects into `on_entry`:
// on_entry = (on_entry | gen) - kill.
525 fn apply_local_effect(&mut self) {
526 self.on_entry.union(self.gen_set);
527 self.on_entry.subtract(self.kill_set);
531 impl<E:Idx> AllSets<E> {
532 pub fn bits_per_block(&self) -> usize { self.bits_per_block }
// Mutable triple view of block `block_idx`'s sets; panics if out of range.
533 pub fn for_block(&mut self, block_idx: usize) -> BlockSets<'_, E> {
535 on_entry: &mut self.on_entry_sets[block_idx],
536 gen_set: &mut self.gen_sets[block_idx],
537 kill_set: &mut self.kill_sets[block_idx],
// Read-only accessors for the individual per-block sets.
541 pub fn on_entry_set_for(&self, block_idx: usize) -> &BitSet<E> {
542 &self.on_entry_sets[block_idx]
544 pub fn gen_set_for(&self, block_idx: usize) -> &HybridBitSet<E> {
545 &self.gen_sets[block_idx]
547 pub fn kill_set_for(&self, block_idx: usize) -> &HybridBitSet<E> {
548 &self.kill_sets[block_idx]
552 /// Parameterization for the precise form of data flow that is used.
553 /// `InitialFlow` handles initializing the bitvectors before any
554 /// code is inspected by the analysis. Analyses that need more nuanced
555 /// initialization (e.g., they need to consult the results of some other
556 /// dataflow analysis to set up the initial bitvectors) should not
558 pub trait InitialFlow {
559 /// Specifies the initial value for each bit in the `on_entry` set
// `true` => all bits start set (e.g. "maybe" analyses that narrow);
// `false` => all bits start clear. Consumed in `DataflowAnalysis::new`.
560 fn bottom_value() -> bool;
// The transfer-function description for one dataflow analysis: which index
// type the bits use, how big each block's bitvector is, and the effect of
// each statement/terminator on the gen/kill sets.
563 pub trait BitDenotation<'tcx>: BitSetOperator {
564 /// Specifies what index type is used to access the bitvector.
567 /// Some analyses want to accumulate knowledge within a block when
568 /// analyzing its statements for building the gen/kill sets. Override
569 /// this method to return true in such cases.
571 /// When this returns true, the statement-effect (re)construction
572 /// will clone the `on_entry` state and pass along a reference via
573 /// `sets.on_entry` to that local clone into `statement_effect` and
574 /// `terminator_effect`).
576 /// When it's false, no local clone is constructed; instead a
577 /// reference directly into `on_entry` is passed along via
578 /// `sets.on_entry` instead, which represents the flow state at
579 /// the block's start, not necessarily the state immediately prior
580 /// to the statement/terminator under analysis.
582 /// In either case, the passed reference is mutable, but this is a
583 /// wart from using the `BlockSets` type in the API; the intention
584 /// is that the `statement_effect` and `terminator_effect` methods
585 /// mutate only the gen/kill sets.
587 // FIXME: we should consider enforcing the intention described in
588 // the previous paragraph by passing the three sets in separate
589 // parameters to encode their distinct mutabilities.
590 fn accumulates_intrablock_state() -> bool { false }
592 /// A name describing the dataflow analysis that this
593 /// `BitDenotation` is supporting. The name should be something
594 /// suitable for plugging in as part of a filename (i.e., avoid
595 /// space-characters or other things that tend to look bad on a
596 /// file system, like slashes or periods). It is also better for
597 /// the name to be reasonably short, again because it will be
598 /// plugged into a filename.
599 fn name() -> &'static str;
601 /// Size of each bitvector allocated for each block in the analysis.
602 fn bits_per_block(&self) -> usize;
604 /// Mutates the entry set according to the effects that
605 /// have been established *prior* to entering the start
606 /// block. This can't access the gen/kill sets, because
607 /// these won't be accounted for correctly.
609 /// (For example, establishing the call arguments.)
610 fn start_block_effect(&self, entry_set: &mut BitSet<Self::Idx>);
612 /// Similar to `statement_effect`, except it applies
613 /// *just before* the statement rather than *just after* it.
615 /// This matters for "dataflow at location" APIs, because the
616 /// before-statement effect is visible while visiting the
617 /// statement, while the after-statement effect only becomes
618 /// visible at the next statement.
620 /// Both the before-statement and after-statement effects are
621 /// applied, in that order, before moving for the next
// Default: no pre-statement effect.
623 fn before_statement_effect(&self,
624 _sets: &mut BlockSets<'_, Self::Idx>,
625 _location: Location) {}
627 /// Mutates the block-sets (the flow sets for the given
628 /// basic block) according to the effects of evaluating statement.
630 /// This is used, in particular, for building up the
631 /// "transfer-function" representing the overall-effect of the
632 /// block, represented via GEN and KILL sets.
634 /// The statement is identified as `bb_data[idx_stmt]`, where
635 /// `bb_data` is the sequence of statements identified by `bb` in
637 fn statement_effect(&self,
638 sets: &mut BlockSets<'_, Self::Idx>,
641 /// Similar to `terminator_effect`, except it applies
642 /// *just before* the terminator rather than *just after* it.
644 /// This matters for "dataflow at location" APIs, because the
645 /// before-terminator effect is visible while visiting the
646 /// terminator, while the after-terminator effect only becomes
647 /// visible at the terminator's successors.
649 /// Both the before-terminator and after-terminator effects are
650 /// applied, in that order, before moving for the next
// Default: no pre-terminator effect.
652 fn before_terminator_effect(&self,
653 _sets: &mut BlockSets<'_, Self::Idx>,
654 _location: Location) {}
656 /// Mutates the block-sets (the flow sets for the given
657 /// basic block) according to the effects of evaluating
660 /// This is used, in particular, for building up the
661 /// "transfer-function" representing the overall-effect of the
662 /// block, represented via GEN and KILL sets.
664 /// The effects applied here cannot depend on which branch the
666 fn terminator_effect(&self,
667 sets: &mut BlockSets<'_, Self::Idx>,
670 /// Mutates the block-sets according to the (flow-dependent)
671 /// effect of a successful return from a Call terminator.
673 /// If basic-block BB_x ends with a call-instruction that, upon
674 /// successful return, flows to BB_y, then this method will be
675 /// called on the exit flow-state of BB_x in order to set up the
676 /// entry flow-state of BB_y.
678 /// This is used, in particular, as a special case during the
679 /// "propagate" loop where all of the basic blocks are repeatedly
680 /// visited. Since the effects of a Call terminator are
681 /// flow-dependent, the current MIR cannot encode them via just
682 /// GEN and KILL sets attached to the block, and so instead we add
683 /// this extra machinery to represent the flow-dependent effect.
685 // FIXME: right now this is a bit of a wart in the API. It might
686 // be better to represent this as an additional gen- and
687 // kill-sets associated with each edge coming out of the basic
689 fn propagate_call_return(
691 in_out: &mut BitSet<Self::Idx>,
692 call_bb: mir::BasicBlock,
693 dest_bb: mir::BasicBlock,
694 dest_place: &mir::Place<'tcx>,
698 impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation<'tcx>
// Allocates the per-block entry/gen/kill sets, seeding every entry set
// with the analysis's bottom value (`InitialFlow::bottom_value`).
700 pub fn new(mir: &'a Mir<'tcx>,
701 dead_unwinds: &'a BitSet<mir::BasicBlock>,
702 denotation: D) -> Self where D: InitialFlow {
703 let bits_per_block = denotation.bits_per_block();
704 let num_blocks = mir.basic_blocks().len();
// bottom == true => start with every bit set; otherwise all clear.
706 let on_entry_sets = if D::bottom_value() {
707 vec![BitSet::new_filled(bits_per_block); num_blocks]
709 vec![BitSet::new_empty(bits_per_block); num_blocks]
// Gen/kill sets always start empty; `build_sets` fills them in.
711 let gen_sets = vec![HybridBitSet::new_empty(bits_per_block); num_blocks];
712 let kill_sets = gen_sets.clone();
717 flow_state: DataflowState {
724 operator: denotation,
730 impl<'a, 'tcx: 'a, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation<'tcx> {
731 /// Propagates the bits of `in_out` into all the successors of `bb`,
732 /// using bitwise operator denoted by `self.operator`.
734 /// For most blocks, this is entirely uniform. However, for blocks
735 /// that end with a call terminator, the effect of the call on the
736 /// dataflow state may depend on whether the call returned
737 /// successfully or unwound.
739 /// To reflect this, the `propagate_call_return` method of the
740 /// `BitDenotation` mutates `in_out` when propagating `in_out` via
741 /// a call terminator; such mutation is performed *last*, to
742 /// ensure its side-effects do not leak elsewhere (e.g., into
744 fn propagate_bits_into_graph_successors_of(
746 in_out: &mut BitSet<D::Idx>,
747 (bb, bb_data): (mir::BasicBlock, &mir::BasicBlockData<'tcx>),
748 dirty_list: &mut WorkQueue<mir::BasicBlock>)
750 match bb_data.terminator().kind {
// Terminators with no successors: nothing to propagate.
751 mir::TerminatorKind::Return |
752 mir::TerminatorKind::Resume |
753 mir::TerminatorKind::Abort |
754 mir::TerminatorKind::GeneratorDrop |
755 mir::TerminatorKind::Unreachable => {}
// Single-successor terminators (no live unwind/drop edge).
756 mir::TerminatorKind::Goto { target } |
757 mir::TerminatorKind::Assert { target, cleanup: None, .. } |
758 mir::TerminatorKind::Yield { resume: target, drop: None, .. } |
759 mir::TerminatorKind::Drop { target, location: _, unwind: None } |
760 mir::TerminatorKind::DropAndReplace {
761 target, value: _, location: _, unwind: None
763 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
765 mir::TerminatorKind::Yield { resume: target, drop: Some(drop), .. } => {
766 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
767 self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list);
769 mir::TerminatorKind::Assert { target, cleanup: Some(unwind), .. } |
770 mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind) } |
771 mir::TerminatorKind::DropAndReplace {
772 target, value: _, location: _, unwind: Some(unwind)
774 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
// Unwind edges from blocks in `dead_unwinds` are known unreachable
// and deliberately skipped.
775 if !self.dead_unwinds.contains(bb) {
776 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
779 mir::TerminatorKind::SwitchInt { ref targets, .. } => {
780 for target in targets {
781 self.propagate_bits_into_entry_set_for(in_out, *target, dirty_list);
784 mir::TerminatorKind::Call { cleanup, ref destination, .. } => {
785 if let Some(unwind) = cleanup {
786 if !self.dead_unwinds.contains(bb) {
787 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
790 if let Some((ref dest_place, dest_bb)) = *destination {
791 // N.B.: This must be done *last*, after all other
792 // propagation, as documented in comment above.
793 self.flow_state.operator.propagate_call_return(
794 in_out, bb, dest_bb, dest_place);
795 self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
798 mir::TerminatorKind::FalseEdges { real_target, ref imaginary_targets } => {
799 self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
800 for target in imaginary_targets {
801 self.propagate_bits_into_entry_set_for(in_out, *target, dirty_list);
804 mir::TerminatorKind::FalseUnwind { real_target, unwind } => {
805 self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
806 if let Some(unwind) = unwind {
807 if !self.dead_unwinds.contains(bb) {
808 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
// Joins `in_out` into `bb`'s entry set via the operator's `join`; if the
// entry set changed, `bb` is re-queued for another visit.
815 fn propagate_bits_into_entry_set_for(&mut self,
816 in_out: &BitSet<D::Idx>,
818 dirty_queue: &mut WorkQueue<mir::BasicBlock>) {
819 let entry_set = &mut self.flow_state.sets.for_block(bb.index()).on_entry;
820 let set_changed = self.flow_state.operator.join(entry_set, &in_out);
822 dirty_queue.insert(bb);