1 use syntax::ast::{self, MetaItem};
2 use syntax::symbol::{Symbol, sym};
4 use rustc_data_structures::bit_set::{BitSet, BitSetOperator, HybridBitSet};
5 use rustc_data_structures::indexed_vec::Idx;
6 use rustc_data_structures::work_queue::WorkQueue;
8 use rustc::hir::def_id::DefId;
9 use rustc::ty::{self, TyCtxt};
10 use rustc::mir::{self, Body, BasicBlock, BasicBlockData, Location, Statement, Terminator};
11 use rustc::mir::traversal;
12 use rustc::session::Session;
14 use std::borrow::Borrow;
17 use std::path::PathBuf;
20 pub use self::impls::{MaybeStorageLive};
21 pub use self::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
22 pub use self::impls::DefinitelyInitializedPlaces;
23 pub use self::impls::EverInitializedPlaces;
24 pub use self::impls::borrows::Borrows;
25 pub use self::impls::HaveBeenBorrowedLocals;
26 pub use self::at_location::{FlowAtLocation, FlowsAtLocation};
27 pub(crate) use self::drop_flag_effects::*;
29 use self::move_paths::MoveData;
32 pub mod drop_flag_effects;
// Gathers the index newtypes used by the various dataflow analyses
// (move paths, move-outs, initializations, borrows) under one module
// so clients can import them from a single place.
37 pub(crate) mod indexes {
38 pub(crate) use super::{
39 move_paths::{MovePathIndex, MoveOutIndex, InitIndex},
40 impls::borrows::BorrowIndex,
// Couples a `DataflowAnalysis` with the optional graphviz output paths
// used by the pre-/post-flow instrumentation hooks below.
44 pub(crate) struct DataflowBuilder<'a, 'tcx, BD>
46 BD: BitDenotation<'tcx>,
49 flow_state: DataflowAnalysis<'a, 'tcx, BD>,
// File path for rendering the flow state *before* propagation, if requested.
50 print_preflow_to: Option<String>,
// File path for rendering the flow state *after* propagation, if requested.
51 print_postflow_to: Option<String>,
54 /// `DebugFormatted` encapsulates the "{:?}" rendering of some
55 /// arbitrary value. This way: you pay cost of allocating an extra
56 /// string (as well as that of rendering up-front); in exchange, you
57 /// don't have to hand over ownership of your value or deal with
59 pub(crate) struct DebugFormatted(String);
// Eagerly renders `input` via its `Debug` impl into the owned string.
62 pub fn new(input: &dyn fmt::Debug) -> DebugFormatted {
63 DebugFormatted(format!("{:?}", input))
// `Debug` for `DebugFormatted` re-emits the pre-rendered string verbatim
// (note: via "{}", so the text is not quoted or re-escaped).
67 impl fmt::Debug for DebugFormatted {
68 fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
69 write!(w, "{}", self.0)
// Driver interface for a dataflow problem: build the per-block sets,
// then iterate to a fixed point. `dataflow` is the one-shot entry point;
// `p` renders individual bits for (optional) instrumentation output.
73 pub(crate) trait Dataflow<'tcx, BD: BitDenotation<'tcx>> {
74 /// Sets up and runs the dataflow problem, using `p` to render results if
75 /// implementation so chooses.
76 fn dataflow<P>(&mut self, p: P) where P: Fn(&BD, BD::Idx) -> DebugFormatted {
77 let _ = p; // default implementation does not instrument process.
82 /// Sets up the entry, gen, and kill sets for this instance of a dataflow problem.
83 fn build_sets(&mut self);
85 /// Finds a fixed-point solution to this instance of a dataflow problem.
86 fn propagate(&mut self);
// `DataflowBuilder` runs the analysis with graphviz instrumentation
// wrapped around the propagation step: render before, propagate, render
// after. The `.unwrap()`s turn instrumentation I/O errors into panics —
// acceptable here since output is opt-in debugging via attributes.
89 impl<'a, 'tcx, BD> Dataflow<'tcx, BD> for DataflowBuilder<'a, 'tcx, BD>
91 BD: BitDenotation<'tcx>,
93 fn dataflow<P>(&mut self, p: P) where P: Fn(&BD, BD::Idx) -> DebugFormatted {
94 self.flow_state.build_sets();
95 self.pre_dataflow_instrumentation(|c,i| p(c,i)).unwrap();
96 self.flow_state.propagate();
97 self.post_dataflow_instrumentation(|c,i| p(c,i)).unwrap();
// Un-instrumented entry points simply delegate to the inner analysis.
100 fn build_sets(&mut self) { self.flow_state.build_sets(); }
101 fn propagate(&mut self) { self.flow_state.propagate(); }
// Scans `attrs` for a `#[rustc_mir(...)]` attribute containing a nested
// meta item named `name`; returns a clone of that meta item if found.
// Used to read test/debug directives such as `borrowck_graphviz_preflow`.
104 pub(crate) fn has_rustc_mir_with(attrs: &[ast::Attribute], name: Symbol) -> Option<MetaItem> {
106 if attr.check_name(sym::rustc_mir) {
107 let items = attr.meta_item_list();
// `meta_item_list()` is an Option; flat_map flattens the "no list" case
// into an empty iteration.
108 for item in items.iter().flat_map(|l| l.iter()) {
109 match item.meta_item() {
110 Some(mi) if mi.check_name(name) => return Some(mi.clone()),
// Bundles the move-path data for a MIR body together with the parameter
// environment needed to interpret its types during dataflow.
119 pub struct MoveDataParamEnv<'tcx> {
120 pub(crate) move_data: MoveData<'tcx>,
121 pub(crate) param_env: ty::ParamEnv<'tcx>,
// Convenience entry point: construct a `DataflowAnalysis` for `body` with
// transfer function `bd`, run it to fixpoint, and return the results.
// `attributes` lets `run` pick up graphviz-output directives;
// `dead_unwinds` marks blocks whose unwind edges should be ignored.
124 pub(crate) fn do_dataflow<'a, 'tcx, BD, P>(
126 body: &'a Body<'tcx>,
128 attributes: &[ast::Attribute],
129 dead_unwinds: &BitSet<BasicBlock>,
132 ) -> DataflowResults<'tcx, BD>
134 BD: BitDenotation<'tcx>,
135 P: Fn(&BD, BD::Idx) -> DebugFormatted,
137 let flow_state = DataflowAnalysis::new(body, dead_unwinds, bd);
138 flow_state.run(tcx, def_id, attributes, p)
// Runs the analysis via a `DataflowBuilder`, wiring up graphviz output
// paths read from `#[rustc_mir(borrowck_graphviz_{pre,post}flow = "…")]`.
141 impl<'a, 'tcx, BD> DataflowAnalysis<'a, 'tcx, BD>
143 BD: BitDenotation<'tcx>,
145 pub(crate) fn run<P>(
149 attributes: &[ast::Attribute],
151 ) -> DataflowResults<'tcx, BD>
153 P: Fn(&BD, BD::Idx) -> DebugFormatted,
// Helper: look up the named `rustc_mir` sub-attribute and extract its
// string value; an attribute present without a string value is an error
// (reported via the session — the elided lines presumably call
// `sess.span_err`, using the message built below).
155 let name_found = |sess: &Session, attrs: &[ast::Attribute], name| -> Option<String> {
156 if let Some(item) = has_rustc_mir_with(attrs, name) {
157 if let Some(s) = item.value_str() {
158 return Some(s.to_string())
162 &format!("{} attribute requires a path", item.path));
169 let print_preflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_preflow);
170 let print_postflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_postflow);
172 let mut mbcx = DataflowBuilder {
174 print_preflow_to, print_postflow_to, flow_state: self,
// The builder consumes `self`; after the (elided) dataflow call the
// fixpoint state is extracted and returned.
178 mbcx.flow_state.results()
// Short-lived wrapper around a mutable borrow of the analysis, used only
// while walking the CFG to a fixed point in `walk_cfg` below.
182 struct PropagationContext<'b, 'a, 'tcx, O>
184 O: 'b + BitDenotation<'tcx>,
186 builder: &'b mut DataflowAnalysis<'a, 'tcx, O>,
189 impl<'a, 'tcx, BD> DataflowAnalysis<'a, 'tcx, BD>
191 BD: BitDenotation<'tcx>,
// Iterates the analysis to a fixed point. `temp` is a scratch bitset
// (one bit per tracked index) reused for every block visit.
193 fn propagate(&mut self) {
194 let mut temp = BitSet::new_empty(self.flow_state.sets.bits_per_block);
195 let mut propcx = PropagationContext {
198 propcx.walk_cfg(&mut temp);
// Builds the per-block transfer functions (gen/kill sets) by applying
// the operator's statement and terminator effects in program order,
// then seeds the entry set of START_BLOCK.
201 fn build_sets(&mut self) {
202 // Build the transfer function for each block.
203 for (bb, data) in self.body.basic_blocks().iter_enumerated() {
204 let &mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = data;
206 let trans = self.flow_state.sets.trans_mut_for(bb.index());
207 for j_stmt in 0..statements.len() {
208 let location = Location { block: bb, statement_index: j_stmt };
// Before-effect then after-effect, matching the visit order that
// `BitDenotation` documents for each statement.
209 self.flow_state.operator.before_statement_effect(trans, location);
210 self.flow_state.operator.statement_effect(trans, location);
// The terminator's location uses statement_index == statements.len(),
// i.e. one past the last statement.
213 if terminator.is_some() {
214 let location = Location { block: bb, statement_index: statements.len() };
215 self.flow_state.operator.before_terminator_effect(trans, location);
216 self.flow_state.operator.terminator_effect(trans, location);
220 // Initialize the flow state at entry to the start block.
221 let on_entry = self.flow_state.sets.entry_set_mut_for(mir::START_BLOCK.index());
222 self.flow_state.operator.start_block_effect(on_entry);
226 impl<'b, 'a, 'tcx, BD> PropagationContext<'b, 'a, 'tcx, BD>
228 BD: BitDenotation<'tcx>,
// Worklist fixpoint loop: start with every block dirty; for each popped
// block, copy its entry set into `in_out`, (apply the block's transfer
// function — elided here), and push the result into its successors,
// re-enqueueing any successor whose entry set changed.
230 fn walk_cfg(&mut self, in_out: &mut BitSet<BD::Idx>) {
231 let mut dirty_queue: WorkQueue<mir::BasicBlock> =
232 WorkQueue::with_all(self.builder.body.basic_blocks().len());
233 let body = self.builder.body;
234 while let Some(bb) = dirty_queue.pop() {
235 let (on_entry, trans) = self.builder.flow_state.sets.get_mut(bb.index());
// Both sets were allocated with `bits_per_block` bits, so their backing
// word counts must agree before the raw overwrite.
236 debug_assert!(in_out.words().len() == on_entry.words().len());
237 in_out.overwrite(on_entry);
240 let bb_data = &body[bb];
241 self.builder.propagate_bits_into_graph_successors_of(
242 in_out, (bb, bb_data), &mut dirty_queue);
// Derives the instrumentation output path by prefixing the file name of
// `path` with `context` (the analysis name), e.g. "foo.dot" ->
// "borrows_foo.dot". Panics (unwrap) if `path` has no valid UTF-8
// file-name component — acceptable for a debug-only facility.
247 fn dataflow_path(context: &str, path: &str) -> PathBuf {
248 let mut path = PathBuf::from(path);
249 let new_file_name = {
250 let orig_file_name = path.file_name().unwrap().to_str().unwrap();
251 format!("{}_{}", context, orig_file_name)
253 path.set_file_name(new_file_name);
257 impl<'a, 'tcx, BD> DataflowBuilder<'a, 'tcx, BD>
259 BD: BitDenotation<'tcx>,
// Renders the current flow state as a borrowck graphviz graph *before*
// propagation, but only when a preflow output path was configured.
261 fn pre_dataflow_instrumentation<P>(&self, p: P) -> io::Result<()>
262 where P: Fn(&BD, BD::Idx) -> DebugFormatted
264 if let Some(ref path_str) = self.print_preflow_to {
265 let path = dataflow_path(BD::name(), path_str);
266 graphviz::print_borrowck_graph_to(self, &path, p)
// Same as above, but *after* propagation, gated on the postflow path.
272 fn post_dataflow_instrumentation<P>(&self, p: P) -> io::Result<()>
273 where P: Fn(&BD, BD::Idx) -> DebugFormatted
275 if let Some(ref path_str) = self.print_postflow_to {
276 let path = dataflow_path(BD::name(), path_str);
277 graphviz::print_borrowck_graph_to(self, &path, p)
284 /// DataflowResultsConsumer abstracts over walking the MIR with some
285 /// already constructed dataflow results.
287 /// It abstracts over the FlowState and also completely hides the
288 /// underlying flow analysis results, because it needs to handle cases
289 /// where we are combining the results of *multiple* flow analyses
290 /// (e.g., borrows + inits + uninits).
291 pub(crate) trait DataflowResultsConsumer<'a, 'tcx: 'a> {
292 type FlowState: FlowsAtLocation;
294 // Observation Hooks: override (at least one of) these to get analysis feedback.
295 fn visit_block_entry(&mut self,
297 _flow_state: &Self::FlowState) {}
299 fn visit_statement_entry(&mut self,
301 _stmt: &Statement<'tcx>,
302 _flow_state: &Self::FlowState) {}
304 fn visit_terminator_entry(&mut self,
306 _term: &Terminator<'tcx>,
307 _flow_state: &Self::FlowState) {}
309 // Main entry point: this drives the processing of results.
// Walks blocks in reverse postorder, resetting `flow` to each block's
// entry state before processing the block's statements/terminator.
311 fn analyze_results(&mut self, flow_uninit: &mut Self::FlowState) {
312 let flow = flow_uninit;
313 for (bb, _) in traversal::reverse_postorder(self.body()) {
314 flow.reset_to_entry_of(bb);
315 self.process_basic_block(bb, flow);
// For each statement: reconstruct its effect, hand the *entry* state to
// the visitor, then apply the effect so the state is correct for the
// next statement.
319 fn process_basic_block(&mut self, bb: BasicBlock, flow_state: &mut Self::FlowState) {
320 let BasicBlockData { ref statements, ref terminator, is_cleanup: _ } =
322 let mut location = Location { block: bb, statement_index: 0 };
323 for stmt in statements.iter() {
324 flow_state.reconstruct_statement_effect(location);
325 self.visit_statement_entry(location, stmt, flow_state);
326 flow_state.apply_local_effect(location);
327 location.statement_index += 1;
330 if let Some(ref term) = *terminator {
331 flow_state.reconstruct_terminator_effect(location);
332 self.visit_terminator_entry(location, term, flow_state);
334 // We don't need to apply the effect of the terminator,
335 // since we are only visiting dataflow state on control
336 // flow entry to the various nodes. (But we still need to
337 // reconstruct the effect, because the visit method might
342 // Delegated Hooks: Provide access to the MIR and process the flow state.
344 fn body(&self) -> &'a Body<'tcx>;
// Reconstructs the dataflow state valid *on entry to* `loc` (i.e. with
// the pre-statement effect of `loc` itself applied): accumulate the
// transfer function of every preceding statement of the block into
// `trans`, add `loc`'s own "before" effect, then apply that composite
// to the block's fixpoint entry set.
347 pub fn state_for_location<'tcx, T: BitDenotation<'tcx>>(loc: Location,
349 result: &DataflowResults<'tcx, T>,
352 let mut trans = GenKill::from_elem(HybridBitSet::new_empty(analysis.bits_per_block()));
354 for stmt in 0..loc.statement_index {
355 let mut stmt_loc = loc;
356 stmt_loc.statement_index = stmt;
357 analysis.before_statement_effect(&mut trans, stmt_loc);
358 analysis.statement_effect(&mut trans, stmt_loc);
361 // Apply the pre-statement effect of the statement we're evaluating.
// statement_index == statements.len() denotes the terminator position.
362 if loc.statement_index == body[loc.block].statements.len() {
363 analysis.before_terminator_effect(&mut trans, loc);
365 analysis.before_statement_effect(&mut trans, loc);
368 // Apply the transfer function for all preceding statements to the fixpoint
369 // at the start of the block.
370 let mut state = result.sets().entry_set_for(loc.block.index()).to_owned();
371 trans.apply(&mut state);
// Owns the complete state of one dataflow run over `body`: the bitsets
// plus operator (`flow_state`), and the set of blocks whose unwind
// edges should not be propagated into (`dead_unwinds`).
375 pub struct DataflowAnalysis<'a, 'tcx, O>
377 O: BitDenotation<'tcx>,
379 flow_state: DataflowState<'tcx, O>,
380 dead_unwinds: &'a BitSet<mir::BasicBlock>,
381 body: &'a Body<'tcx>,
384 impl<'a, 'tcx, O> DataflowAnalysis<'a, 'tcx, O>
386 O: BitDenotation<'tcx>,
// Consumes the analysis and hands out the final state as results.
388 pub fn results(self) -> DataflowResults<'tcx, O> {
389 DataflowResults(self.flow_state)
// Accessor for the MIR body being analyzed.
392 pub fn body(&self) -> &'a Body<'tcx> { self.body }
// Newtype over the final `DataflowState` of a completed run, exposing
// read-only access to the bitsets and the operator.
395 pub struct DataflowResults<'tcx, O>(pub(crate) DataflowState<'tcx, O>) where O: BitDenotation<'tcx>;
397 impl<'tcx, O: BitDenotation<'tcx>> DataflowResults<'tcx, O> {
398 pub fn sets(&self) -> &AllSets<O::Idx> {
402 pub fn operator(&self) -> &O {
407 /// State of a dataflow analysis; couples a collection of bit sets
408 /// with operator used to initialize and merge bits during analysis.
409 pub struct DataflowState<'tcx, O: BitDenotation<'tcx>>
411 /// All the sets for the analysis. (Factored into its
412 /// own structure so that we can borrow it mutably
413 /// on its own separate from other fields.)
414 pub sets: AllSets<O::Idx>,
416 /// operator used to initialize, combine, and interpret bits.
417 pub(crate) operator: O,
420 impl<'tcx, O: BitDenotation<'tcx>> DataflowState<'tcx, O> {
// Renders every set bit of a dense `BitSet` through `render_idx`
// (used by the graphviz instrumentation to label bits).
421 pub(crate) fn interpret_set<'c, P>(&self,
423 set: &BitSet<O::Idx>,
425 -> Vec<DebugFormatted>
426 where P: Fn(&O, O::Idx) -> DebugFormatted
428 set.iter().map(|i| render_idx(o, i)).collect()
// Same as `interpret_set`, but for the sparse/dense `HybridBitSet`
// representation used by the gen/kill sets.
431 pub(crate) fn interpret_hybrid_set<'c, P>(&self,
433 set: &HybridBitSet<O::Idx>,
435 -> Vec<DebugFormatted>
436 where P: Fn(&O, O::Idx) -> DebugFormatted
438 set.iter().map(|i| render_idx(o, i)).collect()
442 /// A 2-tuple representing the "gen" and "kill" bitsets during
443 /// dataflow analysis.
445 /// It is best to ensure that the intersection of `gen_set` and
446 /// `kill_set` is empty; otherwise the results of the dataflow will
447 /// have a hidden dependency on what order the bits are generated and
448 /// killed during the iteration. (This is such a good idea that the
449 /// `fn gen` and `fn kill` methods that set their state enforce this
451 #[derive(Debug, Clone, Copy)]
452 pub struct GenKill<T> {
453 pub(crate) gen_set: T,
454 pub(crate) kill_set: T,
// The concrete instantiation used by this module: gen/kill over hybrid
// (sparse-or-dense) bitsets indexed by `T`.
457 type GenKillSet<T> = GenKill<HybridBitSet<T>>;
460 /// Creates a new tuple where `gen_set == kill_set == elem`.
461 pub(crate) fn from_elem(elem: T) -> Self
465 gen_set: elem.clone(),
471 impl<E:Idx> GenKillSet<E> {
// Resets this transfer function to the identity (nothing gen'd or killed).
472 pub(crate) fn clear(&mut self) {
473 self.gen_set.clear();
474 self.kill_set.clear();
// Marks `e` as generated; removing it from `kill_set` keeps the two
// sets disjoint (see the struct-level comment) and makes "last write
// wins" semantics for gen-after-kill.
477 fn gen(&mut self, e: E) {
478 self.gen_set.insert(e);
479 self.kill_set.remove(e);
481 fn gen_all<I>(&mut self, i: I)
482 where I: IntoIterator,
486 self.gen(*j.borrow());
// Marks `e` as killed, symmetrically removing it from `gen_set`.
490 fn kill(&mut self, e: E) {
491 self.gen_set.remove(e);
492 self.kill_set.insert(e);
495 fn kill_all<I>(&mut self, i: I)
496 where I: IntoIterator,
500 self.kill(*j.borrow());
504 /// Computes `(set ∪ gen) - kill` and assigns the result to `set`.
505 pub(crate) fn apply(&self, set: &mut BitSet<E>) {
506 set.union(&self.gen_set);
507 set.subtract(&self.kill_set);
// Per-block storage for one analysis: an on-entry bitset and a gen/kill
// transfer function for each basic block, all `bits_per_block` wide.
512 pub struct AllSets<E: Idx> {
513 /// Analysis bitwidth for each block.
514 bits_per_block: usize,
516 /// For each block, bits valid on entry to the block.
517 on_entry: Vec<BitSet<E>>,
519 /// The transfer function of each block expressed as the set of bits
520 /// generated and killed by executing the statements + terminator in the
521 /// block -- with one caveat. In particular, for *call terminators*, the
522 /// effect of storing the destination is not included, since that only takes
523 /// effect on the **success** edge (and not the unwind edge).
524 trans: Vec<GenKillSet<E>>,
527 impl<E:Idx> AllSets<E> {
528 pub fn bits_per_block(&self) -> usize { self.bits_per_block }
// Simultaneous mutable access to a block's entry set and transfer
// function (possible because they live in separate vectors).
530 pub fn get_mut(&mut self, block_idx: usize) -> (&mut BitSet<E>, &mut GenKillSet<E>) {
531 (&mut self.on_entry[block_idx], &mut self.trans[block_idx])
534 pub fn trans_for(&self, block_idx: usize) -> &GenKillSet<E> {
535 &self.trans[block_idx]
537 pub fn trans_mut_for(&mut self, block_idx: usize) -> &mut GenKillSet<E> {
538 &mut self.trans[block_idx]
540 pub fn entry_set_for(&self, block_idx: usize) -> &BitSet<E> {
541 &self.on_entry[block_idx]
543 pub fn entry_set_mut_for(&mut self, block_idx: usize) -> &mut BitSet<E> {
544 &mut self.on_entry[block_idx]
546 pub fn gen_set_for(&self, block_idx: usize) -> &HybridBitSet<E> {
547 &self.trans_for(block_idx).gen_set
549 pub fn kill_set_for(&self, block_idx: usize) -> &HybridBitSet<E> {
550 &self.trans_for(block_idx).kill_set
554 /// Parameterization for the precise form of data flow that is used.
555 /// `InitialFlow` handles initializing the bitvectors before any
556 /// code is inspected by the analysis. Analyses that need more nuanced
557 /// initialization (e.g., they need to consult the results of some other
558 /// dataflow analysis to set up the initial bitvectors) should not
560 pub trait InitialFlow {
561 /// Specifies the initial value for each bit in the `on_entry` set
// `true` => start every block's entry set filled (a "top"/intersection
// style analysis); `false` => start empty (union style). See `new` below.
562 fn bottom_value() -> bool;
565 /// A specific flavor of dataflow analysis.
567 /// To run a dataflow analysis, one sets up an initial state for the
568 /// `START_BLOCK` via `start_block_effect` and a transfer function (`trans`)
569 /// for each block individually. The entry set for all other basic blocks is
570 /// initialized to `InitialFlow::bottom_value`. The dataflow analysis then
571 /// iteratively modifies the various entry sets (but leaves the the transfer
572 /// function unchanged).
573 pub trait BitDenotation<'tcx>: BitSetOperator + InitialFlow {
574 /// Specifies what index type is used to access the bitvector.
577 /// A name describing the dataflow analysis that this
578 /// `BitDenotation` is supporting. The name should be something
579 /// suitable for plugging in as part of a filename (i.e., avoid
580 /// space-characters or other things that tend to look bad on a
581 /// file system, like slashes or periods). It is also better for
582 /// the name to be reasonably short, again because it will be
583 /// plugged into a filename.
584 fn name() -> &'static str;
586 /// Size of each bitvector allocated for each block in the analysis.
587 fn bits_per_block(&self) -> usize;
589 /// Mutates the entry set according to the effects that
590 /// have been established *prior* to entering the start
591 /// block. This can't access the gen/kill sets, because
592 /// these won't be accounted for correctly.
594 /// (For example, establishing the call arguments.)
595 fn start_block_effect(&self, entry_set: &mut BitSet<Self::Idx>);
597 /// Similar to `statement_effect`, except it applies
598 /// *just before* the statement rather than *just after* it.
600 /// This matters for "dataflow at location" APIs, because the
601 /// before-statement effect is visible while visiting the
602 /// statement, while the after-statement effect only becomes
603 /// visible at the next statement.
605 /// Both the before-statement and after-statement effects are
606 /// applied, in that order, before moving for the next
// Default is a no-op; most analyses only need `statement_effect`.
608 fn before_statement_effect(&self,
609 _trans: &mut GenKillSet<Self::Idx>,
610 _location: Location) {}
612 /// Mutates the block-sets (the flow sets for the given
613 /// basic block) according to the effects of evaluating statement.
615 /// This is used, in particular, for building up the
616 /// "transfer-function" representing the overall-effect of the
617 /// block, represented via GEN and KILL sets.
619 /// The statement is identified as `bb_data[idx_stmt]`, where
620 /// `bb_data` is the sequence of statements identified by `bb` in
622 fn statement_effect(&self,
623 trans: &mut GenKillSet<Self::Idx>,
626 /// Similar to `terminator_effect`, except it applies
627 /// *just before* the terminator rather than *just after* it.
629 /// This matters for "dataflow at location" APIs, because the
630 /// before-terminator effect is visible while visiting the
631 /// terminator, while the after-terminator effect only becomes
632 /// visible at the terminator's successors.
634 /// Both the before-terminator and after-terminator effects are
635 /// applied, in that order, before moving for the next
// Default is a no-op, mirroring `before_statement_effect`.
637 fn before_terminator_effect(&self,
638 _trans: &mut GenKillSet<Self::Idx>,
639 _location: Location) {}
641 /// Mutates the block-sets (the flow sets for the given
642 /// basic block) according to the effects of evaluating
645 /// This is used, in particular, for building up the
646 /// "transfer-function" representing the overall-effect of the
647 /// block, represented via GEN and KILL sets.
649 /// The effects applied here cannot depend on which branch the
651 fn terminator_effect(&self,
652 trans: &mut GenKillSet<Self::Idx>,
655 /// Mutates the block-sets according to the (flow-dependent)
656 /// effect of a successful return from a Call terminator.
658 /// If basic-block BB_x ends with a call-instruction that, upon
659 /// successful return, flows to BB_y, then this method will be
660 /// called on the exit flow-state of BB_x in order to set up the
661 /// entry flow-state of BB_y.
663 /// This is used, in particular, as a special case during the
664 /// "propagate" loop where all of the basic blocks are repeatedly
665 /// visited. Since the effects of a Call terminator are
666 /// flow-dependent, the current MIR cannot encode them via just
667 /// GEN and KILL sets attached to the block, and so instead we add
668 /// this extra machinery to represent the flow-dependent effect.
670 // FIXME: right now this is a bit of a wart in the API. It might
671 // be better to represent this as an additional gen- and
672 // kill-sets associated with each edge coming out of the basic
674 fn propagate_call_return(
676 in_out: &mut BitSet<Self::Idx>,
677 call_bb: mir::BasicBlock,
678 dest_bb: mir::BasicBlock,
679 dest_place: &mir::Place<'tcx>,
683 impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation<'tcx>
// Allocates all per-block state for the analysis: one entry bitset per
// block (filled or empty per `D::bottom_value()`) and one identity
// ("nop") transfer function per block, to be populated by `build_sets`.
685 pub fn new(body: &'a Body<'tcx>,
686 dead_unwinds: &'a BitSet<mir::BasicBlock>,
687 denotation: D) -> Self {
688 let bits_per_block = denotation.bits_per_block();
689 let num_blocks = body.basic_blocks().len();
// bottom_value() == true means the lattice bottom is "all bits set"
// (e.g. intersection-style analyses); false means "no bits set".
691 let on_entry = if D::bottom_value() {
692 vec![BitSet::new_filled(bits_per_block); num_blocks]
694 vec![BitSet::new_empty(bits_per_block); num_blocks]
696 let nop = GenKill::from_elem(HybridBitSet::new_empty(bits_per_block));
701 flow_state: DataflowState {
705 trans: vec![nop; num_blocks],
707 operator: denotation,
713 impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D>
715 D: BitDenotation<'tcx>,
717 /// Propagates the bits of `in_out` into all the successors of `bb`,
718 /// using bitwise operator denoted by `self.operator`.
720 /// For most blocks, this is entirely uniform. However, for blocks
721 /// that end with a call terminator, the effect of the call on the
722 /// dataflow state may depend on whether the call returned
723 /// successfully or unwound.
725 /// To reflect this, the `propagate_call_return` method of the
726 /// `BitDenotation` mutates `in_out` when propagating `in_out` via
727 /// a call terminator; such mutation is performed *last*, to
728 /// ensure its side-effects do not leak elsewhere (e.g., into
730 fn propagate_bits_into_graph_successors_of(
732 in_out: &mut BitSet<D::Idx>,
733 (bb, bb_data): (mir::BasicBlock, &mir::BasicBlockData<'tcx>),
734 dirty_list: &mut WorkQueue<mir::BasicBlock>)
// Dispatch on the terminator kind to enumerate successor edges.
736 match bb_data.terminator().kind {
// Terminators with no successors: nothing to propagate.
737 mir::TerminatorKind::Return |
738 mir::TerminatorKind::Resume |
739 mir::TerminatorKind::Abort |
740 mir::TerminatorKind::GeneratorDrop |
741 mir::TerminatorKind::Unreachable => {}
// Single-successor cases (no unwind/cleanup edge present).
742 mir::TerminatorKind::Goto { target } |
743 mir::TerminatorKind::Assert { target, cleanup: None, .. } |
744 mir::TerminatorKind::Yield { resume: target, drop: None, .. } |
745 mir::TerminatorKind::Drop { target, location: _, unwind: None } |
746 mir::TerminatorKind::DropAndReplace {
747 target, value: _, location: _, unwind: None
749 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
// Yield with a drop edge: propagate to both resume and drop targets.
751 mir::TerminatorKind::Yield { resume: target, drop: Some(drop), .. } => {
752 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
753 self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list);
// Successor plus an unwind edge; the unwind edge is skipped entirely
// for blocks recorded in `dead_unwinds`.
755 mir::TerminatorKind::Assert { target, cleanup: Some(unwind), .. } |
756 mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind) } |
757 mir::TerminatorKind::DropAndReplace {
758 target, value: _, location: _, unwind: Some(unwind)
760 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
761 if !self.dead_unwinds.contains(bb) {
762 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
// Multi-way branch: propagate uniformly into every switch target.
765 mir::TerminatorKind::SwitchInt { ref targets, .. } => {
766 for target in targets {
767 self.propagate_bits_into_entry_set_for(in_out, *target, dirty_list);
770 mir::TerminatorKind::Call { cleanup, ref destination, .. } => {
771 if let Some(unwind) = cleanup {
772 if !self.dead_unwinds.contains(bb) {
773 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
776 if let Some((ref dest_place, dest_bb)) = *destination {
777 // N.B.: This must be done *last*, after all other
778 // propagation, as documented in comment above.
779 self.flow_state.operator.propagate_call_return(
780 in_out, bb, dest_bb, dest_place);
781 self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
// FalseEdges/FalseUnwind exist only to constrain the CFG for borrowck;
// both real and imaginary targets receive the flow state.
784 mir::TerminatorKind::FalseEdges { real_target, imaginary_target } => {
785 self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
786 self.propagate_bits_into_entry_set_for(in_out, imaginary_target, dirty_list);
788 mir::TerminatorKind::FalseUnwind { real_target, unwind } => {
789 self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
790 if let Some(unwind) = unwind {
791 if !self.dead_unwinds.contains(bb) {
792 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
// Joins `in_out` into `bb`'s entry set via the operator's `join`; if
// that changed any bits, `bb` is re-enqueued so its successors are
// eventually revisited (the worklist step of the fixpoint loop).
799 fn propagate_bits_into_entry_set_for(&mut self,
800 in_out: &BitSet<D::Idx>,
802 dirty_queue: &mut WorkQueue<mir::BasicBlock>) {
803 let entry_set = self.flow_state.sets.entry_set_mut_for(bb.index());
804 let set_changed = self.flow_state.operator.join(entry_set, &in_out);
806 dirty_queue.insert(bb);