1 use rustc_span::symbol::{sym, Symbol};
2 use syntax::ast::{self, MetaItem};
3 use syntax::print::pprust;
5 use rustc_data_structures::work_queue::WorkQueue;
6 use rustc_index::bit_set::{BitSet, HybridBitSet};
7 use rustc_index::vec::Idx;
9 use rustc::mir::traversal;
10 use rustc::mir::{self, BasicBlock, BasicBlockData, Body, Location, Statement, Terminator};
11 use rustc::session::Session;
12 use rustc::ty::{self, TyCtxt};
13 use rustc_hir::def_id::DefId;
15 use std::borrow::Borrow;
18 use std::path::PathBuf;
21 pub use self::at_location::{FlowAtLocation, FlowsAtLocation};
22 pub(crate) use self::drop_flag_effects::*;
23 pub use self::impls::borrows::Borrows;
24 pub use self::impls::DefinitelyInitializedPlaces;
25 pub use self::impls::EverInitializedPlaces;
26 pub use self::impls::HaveBeenBorrowedLocals;
27 pub use self::impls::IndirectlyMutableLocals;
28 pub use self::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
29 pub use self::impls::{MaybeStorageLive, RequiresStorage};
31 use self::move_paths::MoveData;
34 pub mod drop_flag_effects;
// Convenience re-exports: gathers the index newtypes used by the various
// dataflow analyses (borrow indices and move-path indices) under one path.
pub(crate) mod indexes {
    pub(crate) use super::{
        impls::borrows::BorrowIndex,
        move_paths::{InitIndex, MoveOutIndex, MovePathIndex},
/// Couples a dataflow analysis with optional graphviz dump destinations that
/// are consulted before and after propagation (see the
/// `pre_dataflow_instrumentation` / `post_dataflow_instrumentation` methods).
pub(crate) struct DataflowBuilder<'a, 'tcx, BD>
    BD: BitDenotation<'tcx>,
    // The analysis being driven.
    flow_state: DataflowAnalysis<'a, 'tcx, BD>,
    // If set, paths to render the dataflow graph to before/after the
    // fixed-point computation.
    print_preflow_to: Option<String>,
    print_postflow_to: Option<String>,
/// `DebugFormatted` encapsulates the "{:?}" rendering of some
/// arbitrary value. This way: you pay cost of allocating an extra
/// string (as well as that of rendering up-front); in exchange, you
/// don't have to hand over ownership of your value or deal with
/// borrowing it afterwards.
pub struct DebugFormatted(String);
/// Renders `input` once, eagerly, via its `Debug` impl and stores the
/// resulting string.
pub fn new(input: &dyn fmt::Debug) -> DebugFormatted {
    DebugFormatted(format!("{:?}", input))
// `Debug` for `DebugFormatted` just echoes the pre-rendered string; it does
// not (and cannot) re-format the original value.
impl fmt::Debug for DebugFormatted {
    fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(w, "{}", self.0)
/// Drives a dataflow computation to completion: set up the sets, then iterate
/// to a fixed point. `dataflow` optionally instruments the run with `p`.
pub trait Dataflow<'tcx, BD: BitDenotation<'tcx>> {
    /// Sets up and runs the dataflow problem, using `p` to render results if
    /// implementation so chooses.
    fn dataflow<P>(&mut self, p: P)
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
        let _ = p; // default implementation does not instrument process.

    /// Sets up the entry, gen, and kill sets for this instance of a dataflow problem.
    fn build_sets(&mut self);

    /// Finds a fixed-point solution to this instance of a dataflow problem.
    fn propagate(&mut self);
impl<'a, 'tcx, BD> Dataflow<'tcx, BD> for DataflowBuilder<'a, 'tcx, BD>
    BD: BitDenotation<'tcx>,
    fn dataflow<P>(&mut self, p: P)
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
        // Build the per-block transfer functions, dump the pre-propagation
        // graph (if requested), run to a fixed point, then dump the
        // post-propagation graph (if requested).
        self.flow_state.build_sets();
        self.pre_dataflow_instrumentation(|c, i| p(c, i)).unwrap();
        self.flow_state.propagate();
        self.post_dataflow_instrumentation(|c, i| p(c, i)).unwrap();

    // Both of these simply delegate to the underlying analysis.
    fn build_sets(&mut self) {
        self.flow_state.build_sets();

    fn propagate(&mut self) {
        self.flow_state.propagate();
/// Scans `attrs` for a `#[rustc_mir(...)]` attribute whose list contains a
/// meta-item named `name`, returning a clone of that meta-item if present.
pub(crate) fn has_rustc_mir_with(attrs: &[ast::Attribute], name: Symbol) -> Option<MetaItem> {
    if attr.check_name(sym::rustc_mir) {
        // `meta_item_list` yields `Option<Vec<_>>`; `flat_map` iterates the
        // inner list only when it exists.
        let items = attr.meta_item_list();
        for item in items.iter().flat_map(|l| l.iter()) {
            match item.meta_item() {
                Some(mi) if mi.check_name(name) => return Some(mi.clone()),
/// Bundles the move-path data of a MIR body together with the `ParamEnv`
/// that its analysis was performed under.
pub struct MoveDataParamEnv<'tcx> {
    pub(crate) move_data: MoveData<'tcx>,
    pub(crate) param_env: ty::ParamEnv<'tcx>,
/// Convenience entry point: builds a `DataflowAnalysis` for `body` and runs
/// it to a fixed point, honoring `#[rustc_mir]` graphviz-dump attributes.
/// `dead_unwinds` names blocks whose unwind edges should not be propagated
/// into; `p` renders individual bits for the (optional) graphviz output.
pub fn do_dataflow<'a, 'tcx, BD, P>(
    body: &'a Body<'tcx>,
    attributes: &[ast::Attribute],
    dead_unwinds: &BitSet<BasicBlock>,
) -> DataflowResults<'tcx, BD>
    BD: BitDenotation<'tcx>,
    P: Fn(&BD, BD::Idx) -> DebugFormatted,
    let flow_state = DataflowAnalysis::new(body, dead_unwinds, bd);
    flow_state.run(tcx, def_id, attributes, p)
impl<'a, 'tcx, BD> DataflowAnalysis<'a, 'tcx, BD>
    BD: BitDenotation<'tcx>,
    /// Runs the analysis to a fixed point via a `DataflowBuilder`, consulting
    /// `attributes` for `borrowck_graphviz_{pre,post}flow` dump destinations.
    pub(crate) fn run<P>(
        attributes: &[ast::Attribute],
    ) -> DataflowResults<'tcx, BD>
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
        // Helper: if `#[rustc_mir(<name> = "<path>")]` is present, return the
        // path string; report an error when the meta-item has no string value.
        let name_found = |sess: &Session, attrs: &[ast::Attribute], name| -> Option<String> {
            if let Some(item) = has_rustc_mir_with(attrs, name) {
                if let Some(s) = item.value_str() {
                    return Some(s.to_string());
                let path = pprust::path_to_string(&item.path);
                sess.span_err(item.span, &format!("{} attribute requires a path", path));

        // Optional graphviz dump destinations, before and after propagation.
        let print_preflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_preflow);
        let print_postflow_to = name_found(tcx.sess, attributes, sym::borrowck_graphviz_postflow);

        DataflowBuilder { def_id, print_preflow_to, print_postflow_to, flow_state: self };

        mbcx.flow_state.results()
/// Mutably borrows a `DataflowAnalysis` for the duration of the worklist
/// fixed-point loop (see `walk_cfg`).
struct PropagationContext<'b, 'a, 'tcx, O>
    O: BitDenotation<'tcx>,
    builder: &'b mut DataflowAnalysis<'a, 'tcx, O>,
impl<'a, 'tcx, BD> DataflowAnalysis<'a, 'tcx, BD>
    BD: BitDenotation<'tcx>,
    /// Iterates to a fixed point, threading `temp` through `walk_cfg` as the
    /// scratch in/out bitset.
    fn propagate(&mut self) {
        let mut temp = BitSet::new_empty(self.flow_state.sets.bits_per_block);
        let mut propcx = PropagationContext { builder: self };
        propcx.walk_cfg(&mut temp);

    /// Computes each block's transfer function (gen/kill sets) and the entry
    /// state of the start block. Does not propagate anything yet.
    fn build_sets(&mut self) {
        // Build the transfer function for each block.
        for (bb, data) in self.body.basic_blocks().iter_enumerated() {
            let &mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = data;

            let trans = self.flow_state.sets.trans_mut_for(bb.index());
            // Fold every statement's before/after effect into the block's
            // gen/kill sets, in program order.
            for j_stmt in 0..statements.len() {
                let location = Location { block: bb, statement_index: j_stmt };
                self.flow_state.operator.before_statement_effect(trans, location);
                self.flow_state.operator.statement_effect(trans, location);

            // The terminator is treated as a pseudo-statement at index
            // `statements.len()`.
            if terminator.is_some() {
                let location = Location { block: bb, statement_index: statements.len() };
                self.flow_state.operator.before_terminator_effect(trans, location);
                self.flow_state.operator.terminator_effect(trans, location);

        // Initialize the flow state at entry to the start block.
        let on_entry = self.flow_state.sets.entry_set_mut_for(mir::START_BLOCK.index());
        self.flow_state.operator.start_block_effect(on_entry);
impl<'b, 'a, 'tcx, BD> PropagationContext<'b, 'a, 'tcx, BD>
    BD: BitDenotation<'tcx>,
    /// Worklist loop: repeatedly pops a dirty block, recomputes its out-state
    /// from its entry state, and pushes the result into its successors
    /// (re-dirtying any successor whose entry set changed).
    fn walk_cfg(&mut self, in_out: &mut BitSet<BD::Idx>) {
        let body = self.builder.body;

        // Initialize the dirty queue in reverse post-order. This makes it more likely that the
        // entry state for each basic block will have the effects of its predecessors applied
        // before it is processed. In fact, for CFGs without back edges, this guarantees that
        // dataflow will converge in exactly `N` iterations, where `N` is the number of basic
        // blocks.
        let mut dirty_queue: WorkQueue<mir::BasicBlock> =
            WorkQueue::with_none(body.basic_blocks().len());
        for (bb, _) in traversal::reverse_postorder(body) {
            dirty_queue.insert(bb);

        // Add blocks which are not reachable from START_BLOCK to the work queue. These blocks will
        // be processed after the ones added above.
        // (Insertion is idempotent, so re-inserting already-queued blocks is fine.)
        for bb in body.basic_blocks().indices() {
            dirty_queue.insert(bb);

        while let Some(bb) = dirty_queue.pop() {
            let (on_entry, trans) = self.builder.flow_state.sets.get_mut(bb.index());
            debug_assert!(in_out.words().len() == on_entry.words().len());
            // Start from this block's current entry state.
            in_out.overwrite(on_entry);

            let bb_data = &body[bb];
            self.builder.propagate_bits_into_graph_successors_of(
/// Builds the output path for a graphviz dump by prefixing the file name of
/// `path` with `context` (an analysis name) and an underscore, leaving the
/// directory components untouched.
///
/// E.g. `dataflow_path("maybe_init", "out/graph.dot")` yields
/// `out/maybe_init_graph.dot`.
///
/// # Panics
///
/// Panics if `path` has no final file-name component or if that component is
/// not valid UTF-8 — acceptable here since the path comes from an attribute
/// string written by the test author.
fn dataflow_path(context: &str, path: &str) -> PathBuf {
    let mut path = PathBuf::from(path);
    let new_file_name = {
        // Scope the borrow of `path` so we can mutate it below.
        let orig_file_name = path.file_name().unwrap().to_str().unwrap();
        format!("{}_{}", context, orig_file_name)
    };
    path.set_file_name(new_file_name);
    path
}
impl<'a, 'tcx, BD> DataflowBuilder<'a, 'tcx, BD>
    BD: BitDenotation<'tcx>,
    /// If `print_preflow_to` is set, renders the current (pre-propagation)
    /// dataflow state as a graphviz file at that path, prefixed with the
    /// analysis name; otherwise a no-op.
    fn pre_dataflow_instrumentation<P>(&self, p: P) -> io::Result<()>
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
        if let Some(ref path_str) = self.print_preflow_to {
            let path = dataflow_path(BD::name(), path_str);
            graphviz::print_borrowck_graph_to(self, &path, p)

    /// Same as `pre_dataflow_instrumentation`, but gated on
    /// `print_postflow_to` for the post-propagation dump.
    fn post_dataflow_instrumentation<P>(&self, p: P) -> io::Result<()>
        P: Fn(&BD, BD::Idx) -> DebugFormatted,
        if let Some(ref path_str) = self.print_postflow_to {
            let path = dataflow_path(BD::name(), path_str);
            graphviz::print_borrowck_graph_to(self, &path, p)
/// DataflowResultsConsumer abstracts over walking the MIR with some
/// already constructed dataflow results.
///
/// It abstracts over the FlowState and also completely hides the
/// underlying flow analysis results, because it needs to handle cases
/// where we are combining the results of *multiple* flow analyses
/// (e.g., borrows + inits + uninits).
pub(crate) trait DataflowResultsConsumer<'a, 'tcx: 'a> {
    type FlowState: FlowsAtLocation;

    // Observation Hooks: override (at least one of) these to get analysis feedback.
    fn visit_block_entry(&mut self, _bb: BasicBlock, _flow_state: &Self::FlowState) {}

    fn visit_statement_entry(
        _stmt: &'a Statement<'tcx>,
        _flow_state: &Self::FlowState,

    fn visit_terminator_entry(
        _term: &'a Terminator<'tcx>,
        _flow_state: &Self::FlowState,

    // Main entry point: this drives the processing of results.

    /// Walks every block in reverse post-order, resetting `flow_uninit` to the
    /// block's entry state before handing it to `process_basic_block`.
    fn analyze_results(&mut self, flow_uninit: &mut Self::FlowState) {
        let flow = flow_uninit;
        for (bb, _) in traversal::reverse_postorder(self.body()) {
            flow.reset_to_entry_of(bb);
            self.process_basic_block(bb, flow);

    /// Visits one block: block-entry hook first, then each statement (effect
    /// reconstructed before the visit, applied after), then the terminator.
    fn process_basic_block(&mut self, bb: BasicBlock, flow_state: &mut Self::FlowState) {
        self.visit_block_entry(bb, flow_state);

        let BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = self.body()[bb];
        let mut location = Location { block: bb, statement_index: 0 };
        for stmt in statements.iter() {
            flow_state.reconstruct_statement_effect(location);
            self.visit_statement_entry(location, stmt, flow_state);
            flow_state.apply_local_effect(location);
            location.statement_index += 1;

        if let Some(ref term) = *terminator {
            flow_state.reconstruct_terminator_effect(location);
            self.visit_terminator_entry(location, term, flow_state);

            // We don't need to apply the effect of the terminator,
            // since we are only visiting dataflow state on control
            // flow entry to the various nodes. (But we still need to
            // reconstruct the effect, because the visit method might
            // inspect it.)

    // Delegated Hooks: Provide access to the MIR and process the flow state.

    fn body(&self) -> &'a Body<'tcx>;
/// Allows iterating dataflow results in a flexible and reasonably fast way.
pub struct DataflowResultsCursor<'mir, 'tcx, BD, DR = DataflowResults<'tcx, BD>>
    BD: BitDenotation<'tcx>,
    DR: Borrow<DataflowResults<'tcx, BD>>,
    // Dataflow state at the location last sought to (see `seek`).
    flow_state: FlowAtLocation<'tcx, BD, DR>,

    // The statement (or terminator) whose effect has been reconstructed in
    // `flow_state`; `None` before the first `seek`.
    curr_loc: Option<Location>,

    body: &'mir Body<'tcx>,

/// A cursor over *borrowed* dataflow results (rather than owned ones).
pub type DataflowResultsRefCursor<'mir, 'tcx, BD> =
    DataflowResultsCursor<'mir, 'tcx, BD, &'mir DataflowResults<'tcx, BD>>;
impl<'mir, 'tcx, BD, DR> DataflowResultsCursor<'mir, 'tcx, BD, DR>
    BD: BitDenotation<'tcx>,
    DR: Borrow<DataflowResults<'tcx, BD>>,
    /// Creates a cursor with no current location; the first `seek` computes
    /// state starting from the target block's entry set.
    pub fn new(result: DR, body: &'mir Body<'tcx>) -> Self {
        DataflowResultsCursor { flow_state: FlowAtLocation::new(result), curr_loc: None, body }

    /// Seek to the given location in MIR. This method is fast if you are
    /// traversing your MIR statements in order.
    ///
    /// After calling `seek`, the current state will reflect all effects up to
    /// and including the `before_statement_effect` of the statement at location
    /// `loc`. The `statement_effect` of the statement at `loc` will be
    /// available as the current effect (see e.g. `each_gen_bit`).
    ///
    /// If `loc.statement_index` equals the number of statements in the block,
    /// we will reconstruct the terminator effect in the same way as described
    /// above.
    pub fn seek(&mut self, loc: Location) {
        // Seeking to the current location is a no-op.
        if self.curr_loc.map(|cur| loc == cur).unwrap_or(false) {

        // We must restart from the block's entry when seeking to a different
        // block or *backwards* within the same block.
        let should_reset = match self.curr_loc {
            Some(cur) if loc.block != cur.block || loc.statement_index < cur.statement_index => {

        self.flow_state.reset_to_entry_of(loc.block);

        let curr_loc = self.curr_loc.unwrap();
        start_index = curr_loc.statement_index;
        // Apply the effect from the last seek to the current state.
        self.flow_state.apply_local_effect(curr_loc);

        // Fast-forward: replay statement effects up to (not including) `loc`.
        for stmt in start_index..loc.statement_index {
            let mut stmt_loc = loc;
            stmt_loc.statement_index = stmt;
            self.flow_state.reconstruct_statement_effect(stmt_loc);
            self.flow_state.apply_local_effect(stmt_loc);

        // Reconstruct (but do not apply) the effect at `loc` itself: the
        // terminator when `loc` is one-past-the-last statement, otherwise
        // the statement at `loc`.
        if loc.statement_index == self.body[loc.block].statements.len() {
            self.flow_state.reconstruct_terminator_effect(loc);
            self.flow_state.reconstruct_statement_effect(loc);
        self.curr_loc = Some(loc);

    /// Return whether the current state contains bit `x`.
    pub fn contains(&self, x: BD::Idx) -> bool {
        self.flow_state.contains(x)

    /// Iterate over each `gen` bit in the current effect (invoke `seek` first).
    pub fn each_gen_bit<F>(&self, f: F)
        self.flow_state.each_gen_bit(f)

    /// The current dataflow state, as a dense bitset.
    pub fn get(&self) -> &BitSet<BD::Idx> {
        self.flow_state.as_dense()
/// The analysis engine: couples the mutable analysis state with the MIR body
/// being analyzed and the set of blocks whose unwind edges are known dead.
pub struct DataflowAnalysis<'a, 'tcx, O>
    O: BitDenotation<'tcx>,
    flow_state: DataflowState<'tcx, O>,
    // Blocks whose unwind successor must not receive propagated bits.
    dead_unwinds: &'a BitSet<mir::BasicBlock>,
    body: &'a Body<'tcx>,
impl<'a, 'tcx, O> DataflowAnalysis<'a, 'tcx, O>
    O: BitDenotation<'tcx>,
    /// Consumes the analysis, yielding its final state.
    pub fn results(self) -> DataflowResults<'tcx, O> {
        DataflowResults(self.flow_state)

    /// The MIR body this analysis runs over.
    pub fn body(&self) -> &'a Body<'tcx> {
/// Newtype wrapper around the final `DataflowState` of a completed analysis.
pub struct DataflowResults<'tcx, O>(pub(crate) DataflowState<'tcx, O>)
    O: BitDenotation<'tcx>;
impl<'tcx, O: BitDenotation<'tcx>> DataflowResults<'tcx, O> {
    /// The per-block entry sets and transfer functions.
    pub fn sets(&self) -> &AllSets<O::Idx> {

    /// The operator (analysis implementation) that produced these results.
    pub fn operator(&self) -> &O {
/// State of a dataflow analysis; couples a collection of bit sets
/// with operator used to initialize and merge bits during analysis.
pub struct DataflowState<'tcx, O: BitDenotation<'tcx>> {
    /// All the sets for the analysis. (Factored into its
    /// own structure so that we can borrow it mutably
    /// on its own separate from other fields.)
    pub sets: AllSets<O::Idx>,

    /// operator used to initialize, combine, and interpret bits.
    pub(crate) operator: O,
impl<'tcx, O: BitDenotation<'tcx>> DataflowState<'tcx, O> {
    /// Renders each bit of `set` via `render_idx`, for debug/graphviz output.
    pub(crate) fn interpret_set<'c, P>(
        set: &BitSet<O::Idx>,
    ) -> Vec<DebugFormatted>
        P: Fn(&O, O::Idx) -> DebugFormatted,
        set.iter().map(|i| render_idx(o, i)).collect()

    /// Same as `interpret_set`, but for a hybrid (sparse-or-dense) bitset.
    pub(crate) fn interpret_hybrid_set<'c, P>(
        set: &HybridBitSet<O::Idx>,
    ) -> Vec<DebugFormatted>
        P: Fn(&O, O::Idx) -> DebugFormatted,
        set.iter().map(|i| render_idx(o, i)).collect()
/// A 2-tuple representing the "gen" and "kill" bitsets during
/// dataflow analysis.
///
/// It is best to ensure that the intersection of `gen_set` and
/// `kill_set` is empty; otherwise the results of the dataflow will
/// have a hidden dependency on what order the bits are generated and
/// killed during the iteration. (This is such a good idea that the
/// `fn gen` and `fn kill` methods that set their state enforce this
/// invariant.)
#[derive(Debug, Clone, Copy)]
pub struct GenKill<T> {
    pub(crate) gen_set: T,
    pub(crate) kill_set: T,
/// A gen/kill pair backed by hybrid bitsets, as used for block transfer functions.
pub type GenKillSet<T> = GenKill<HybridBitSet<T>>;
/// Creates a new tuple where `gen_set == kill_set == elem`.
pub(crate) fn from_elem(elem: T) -> Self
    GenKill { gen_set: elem.clone(), kill_set: elem }
impl<E: Idx> GenKillSet<E> {
    /// Empties both the gen and the kill set.
    pub fn clear(&mut self) {
        self.gen_set.clear();
        self.kill_set.clear();

    /// Marks `e` as generated. Removing it from the kill set keeps the two
    /// sets disjoint: the most recent gen/kill for a bit wins.
    pub fn gen(&mut self, e: E) {
        self.gen_set.insert(e);
        self.kill_set.remove(e);

    /// `gen`s every element yielded by `i`.
    pub fn gen_all(&mut self, i: impl IntoIterator<Item: Borrow<E>>) {
        self.gen(*j.borrow());

    /// Marks `e` as killed; the mirror image of `gen`.
    pub fn kill(&mut self, e: E) {
        self.gen_set.remove(e);
        self.kill_set.insert(e);

    /// `kill`s every element yielded by `i`.
    pub fn kill_all(&mut self, i: impl IntoIterator<Item: Borrow<E>>) {
        self.kill(*j.borrow());

    /// Computes `(set ∪ gen) - kill` and assigns the result to `set`.
    pub(crate) fn apply(&self, set: &mut BitSet<E>) {
        set.union(&self.gen_set);
        set.subtract(&self.kill_set);
/// All of the bitsets for one dataflow analysis: an entry set and a transfer
/// function per basic block, every set `bits_per_block` wide.
pub struct AllSets<E: Idx> {
    /// Analysis bitwidth for each block.
    bits_per_block: usize,

    /// For each block, bits valid on entry to the block.
    on_entry: Vec<BitSet<E>>,

    /// The transfer function of each block expressed as the set of bits
    /// generated and killed by executing the statements + terminator in the
    /// block -- with one caveat. In particular, for *call terminators*, the
    /// effect of storing the destination is not included, since that only takes
    /// effect on the **success** edge (and not the unwind edge).
    trans: Vec<GenKillSet<E>>,
impl<E: Idx> AllSets<E> {
    /// Bitset width shared by every per-block set.
    pub fn bits_per_block(&self) -> usize {

    /// Mutable access to a block's entry set and transfer function at once
    /// (split borrow; `block_idx` is a `BasicBlock::index()`).
    pub fn get_mut(&mut self, block_idx: usize) -> (&mut BitSet<E>, &mut GenKillSet<E>) {
        (&mut self.on_entry[block_idx], &mut self.trans[block_idx])

    /// The gen/kill transfer function of the given block.
    pub fn trans_for(&self, block_idx: usize) -> &GenKillSet<E> {
        &self.trans[block_idx]

    pub fn trans_mut_for(&mut self, block_idx: usize) -> &mut GenKillSet<E> {
        &mut self.trans[block_idx]

    /// Bits that hold on entry to the given block.
    pub fn entry_set_for(&self, block_idx: usize) -> &BitSet<E> {
        &self.on_entry[block_idx]

    pub fn entry_set_mut_for(&mut self, block_idx: usize) -> &mut BitSet<E> {
        &mut self.on_entry[block_idx]

    /// Just the gen half of a block's transfer function.
    pub fn gen_set_for(&self, block_idx: usize) -> &HybridBitSet<E> {
        &self.trans_for(block_idx).gen_set

    /// Just the kill half of a block's transfer function.
    pub fn kill_set_for(&self, block_idx: usize) -> &HybridBitSet<E> {
        &self.trans_for(block_idx).kill_set
656 /// Parameterization for the precise form of data flow that is used.
658 /// `BottomValue` determines whether the initial entry set for each basic block is empty or full.
659 /// This also determines the semantics of the lattice `join` operator used to merge dataflow
660 /// results, since dataflow works by starting at the bottom and moving monotonically to a fixed
663 /// This means, for propagation across the graph, that you either want to start at all-zeroes and
664 /// then use Union as your merge when propagating, or you start at all-ones and then use Intersect
665 /// as your merge when propagating.
666 pub trait BottomValue {
667 /// Specifies the initial value for each bit in the entry set for each basic block.
668 const BOTTOM_VALUE: bool;
670 /// Merges `in_set` into `inout_set`, returning `true` if `inout_set` changed.
672 fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
673 if Self::BOTTOM_VALUE == false {
674 inout_set.union(in_set)
676 inout_set.intersect(in_set)
/// A specific flavor of dataflow analysis.
///
/// To run a dataflow analysis, one sets up an initial state for the
/// `START_BLOCK` via `start_block_effect` and a transfer function (`trans`)
/// for each block individually. The entry set for all other basic blocks is
/// initialized to `Self::BOTTOM_VALUE`. The dataflow analysis then
/// iteratively modifies the various entry sets (but leaves the the transfer
/// function unchanged).
pub trait BitDenotation<'tcx>: BottomValue {
    /// Specifies what index type is used to access the bitvector.

    /// A name describing the dataflow analysis that this
    /// `BitDenotation` is supporting. The name should be something
    /// suitable for plugging in as part of a filename (i.e., avoid
    /// space-characters or other things that tend to look bad on a
    /// file system, like slashes or periods). It is also better for
    /// the name to be reasonably short, again because it will be
    /// plugged into a filename.
    fn name() -> &'static str;

    /// Size of each bitvector allocated for each block in the analysis.
    fn bits_per_block(&self) -> usize;

    /// Mutates the entry set according to the effects that
    /// have been established *prior* to entering the start
    /// block. This can't access the gen/kill sets, because
    /// these won't be accounted for correctly.
    ///
    /// (For example, establishing the call arguments.)
    fn start_block_effect(&self, entry_set: &mut BitSet<Self::Idx>);

    /// Similar to `statement_effect`, except it applies
    /// *just before* the statement rather than *just after* it.
    ///
    /// This matters for "dataflow at location" APIs, because the
    /// before-statement effect is visible while visiting the
    /// statement, while the after-statement effect only becomes
    /// visible at the next statement.
    ///
    /// Both the before-statement and after-statement effects are
    /// applied, in that order, before moving for the next
    /// statement.
    fn before_statement_effect(&self, _trans: &mut GenKillSet<Self::Idx>, _location: Location) {}

    /// Mutates the block-sets (the flow sets for the given
    /// basic block) according to the effects of evaluating statement.
    ///
    /// This is used, in particular, for building up the
    /// "transfer-function" representing the overall-effect of the
    /// block, represented via GEN and KILL sets.
    ///
    /// The statement is identified as `bb_data[idx_stmt]`, where
    /// `bb_data` is the sequence of statements identified by `bb` in
    /// the MIR.
    fn statement_effect(&self, trans: &mut GenKillSet<Self::Idx>, location: Location);

    /// Similar to `terminator_effect`, except it applies
    /// *just before* the terminator rather than *just after* it.
    ///
    /// This matters for "dataflow at location" APIs, because the
    /// before-terminator effect is visible while visiting the
    /// terminator, while the after-terminator effect only becomes
    /// visible at the terminator's successors.
    ///
    /// Both the before-terminator and after-terminator effects are
    /// applied, in that order, before moving for the next
    /// statement.
    fn before_terminator_effect(&self, _trans: &mut GenKillSet<Self::Idx>, _location: Location) {}

    /// Mutates the block-sets (the flow sets for the given
    /// basic block) according to the effects of evaluating
    /// the terminator.
    ///
    /// This is used, in particular, for building up the
    /// "transfer-function" representing the overall-effect of the
    /// block, represented via GEN and KILL sets.
    ///
    /// The effects applied here cannot depend on which branch the
    /// terminator took.
    fn terminator_effect(&self, trans: &mut GenKillSet<Self::Idx>, location: Location);

    /// Mutates the block-sets according to the (flow-dependent)
    /// effect of a successful return from a Call terminator.
    ///
    /// If basic-block BB_x ends with a call-instruction that, upon
    /// successful return, flows to BB_y, then this method will be
    /// called on the exit flow-state of BB_x in order to set up the
    /// entry flow-state of BB_y.
    ///
    /// This is used, in particular, as a special case during the
    /// "propagate" loop where all of the basic blocks are repeatedly
    /// visited. Since the effects of a Call terminator are
    /// flow-dependent, the current MIR cannot encode them via just
    /// GEN and KILL sets attached to the block, and so instead we add
    /// this extra machinery to represent the flow-dependent effect.
    //
    // FIXME: right now this is a bit of a wart in the API. It might
    // be better to represent this as an additional gen- and
    // kill-sets associated with each edge coming out of the basic
    // block.
    fn propagate_call_return(
        in_out: &mut BitSet<Self::Idx>,
        call_bb: mir::BasicBlock,
        dest_bb: mir::BasicBlock,
        dest_place: &mir::Place<'tcx>,
impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D>
    D: BitDenotation<'tcx>,
    body: &'a Body<'tcx>,
    dead_unwinds: &'a BitSet<mir::BasicBlock>,
    // Allocate one entry set per block, initialized to the analysis's bottom
    // value (all-ones or all-zeroes), plus one initially-empty (no-op)
    // transfer function per block.
    let bits_per_block = denotation.bits_per_block();
    let num_blocks = body.basic_blocks().len();

    let on_entry = if D::BOTTOM_VALUE == true {
        vec![BitSet::new_filled(bits_per_block); num_blocks]
        vec![BitSet::new_empty(bits_per_block); num_blocks]
    let nop = GenKill::from_elem(HybridBitSet::new_empty(bits_per_block));

    flow_state: DataflowState {
        sets: AllSets { bits_per_block, on_entry, trans: vec![nop; num_blocks] },
        operator: denotation,
impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D>
    D: BitDenotation<'tcx>,
    /// Propagates the bits of `in_out` into all the successors of `bb`,
    /// using bitwise operator denoted by `self.operator`.
    ///
    /// For most blocks, this is entirely uniform. However, for blocks
    /// that end with a call terminator, the effect of the call on the
    /// dataflow state may depend on whether the call returned
    /// successfully or unwound.
    ///
    /// To reflect this, the `propagate_call_return` method of the
    /// `BitDenotation` mutates `in_out` when propagating `in_out` via
    /// a call terminator; such mutation is performed *last*, to
    /// ensure its side-effects do not leak elsewhere (e.g., into
    /// the unwind edge).
    fn propagate_bits_into_graph_successors_of(
        in_out: &mut BitSet<D::Idx>,
        (bb, bb_data): (mir::BasicBlock, &mir::BasicBlockData<'tcx>),
        dirty_list: &mut WorkQueue<mir::BasicBlock>,
        match bb_data.terminator().kind {
            // Terminators with no successors: nothing to propagate.
            mir::TerminatorKind::Return
            | mir::TerminatorKind::Resume
            | mir::TerminatorKind::Abort
            | mir::TerminatorKind::GeneratorDrop
            | mir::TerminatorKind::Unreachable => {}
            // Exactly one live successor (no cleanup/unwind edge present).
            mir::TerminatorKind::Goto { target }
            | mir::TerminatorKind::Assert { target, cleanup: None, .. }
            | mir::TerminatorKind::Yield { resume: target, drop: None, .. }
            | mir::TerminatorKind::Drop { target, location: _, unwind: None }
            | mir::TerminatorKind::DropAndReplace { target, value: _, location: _, unwind: None } =>
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
            // Yield with a drop edge: both successors receive the bits.
            mir::TerminatorKind::Yield { resume: target, drop: Some(drop), .. } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
                self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list);
            // Terminators with an unwind edge: the unwind successor is skipped
            // when this block's unwind is recorded in `dead_unwinds`.
            mir::TerminatorKind::Assert { target, cleanup: Some(unwind), .. }
            | mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind) }
            | mir::TerminatorKind::DropAndReplace {
                unwind: Some(unwind),
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
                if !self.dead_unwinds.contains(bb) {
                    self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
            // All switch targets receive the same bits (effects here cannot be
            // branch-dependent; see `BitDenotation::terminator_effect`).
            mir::TerminatorKind::SwitchInt { ref targets, .. } => {
                for target in targets {
                    self.propagate_bits_into_entry_set_for(in_out, *target, dirty_list);
            mir::TerminatorKind::Call { cleanup, ref destination, .. } => {
                if let Some(unwind) = cleanup {
                    if !self.dead_unwinds.contains(bb) {
                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                if let Some((ref dest_place, dest_bb)) = *destination {
                    // N.B.: This must be done *last*, after all other
                    // propagation, as documented in comment above.
                    self.flow_state.operator.propagate_call_return(in_out, bb, dest_bb, dest_place);
                    self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
            mir::TerminatorKind::FalseEdges { real_target, imaginary_target } => {
                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
                self.propagate_bits_into_entry_set_for(in_out, imaginary_target, dirty_list);
            mir::TerminatorKind::FalseUnwind { real_target, unwind } => {
                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
                if let Some(unwind) = unwind {
                    if !self.dead_unwinds.contains(bb) {
                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
908 fn propagate_bits_into_entry_set_for(
910 in_out: &BitSet<D::Idx>,
912 dirty_queue: &mut WorkQueue<mir::BasicBlock>,
914 let entry_set = self.flow_state.sets.entry_set_mut_for(bb.index());
915 let set_changed = self.flow_state.operator.join(entry_set, &in_out);
917 dirty_queue.insert(bb);