1 //! A module for propagating forward dataflow information. The analysis
2 //! assumes that the items to be propagated can be represented as bits
3 //! and thus uses bitvectors. Your job is simply to specify the so-called
4 //! GEN and KILL bits for each expression.
7 use rustc::cfg::CFGIndex;
13 use rustc_data_structures::graph::implementation::OUTGOING;
15 use rustc::util::nodemap::FxHashMap;
17 use rustc::hir::intravisit;
18 use rustc::hir::print as pprust;
/// Which side of a CFG node a bit set is observed on: `Entry` reads the
/// stored `on_entry` words directly, while `Exit` applies the node's
/// gen/kill sets on top of a copy (see `each_bit_for_node`).
// NOTE(review): the variant list is elided in this view; `Entry` and
// `Exit` are the variants referenced elsewhere in the file.
#[derive(Copy, Clone, Debug)]
pub enum EntryOrExit {
/// State for one forward bitvector dataflow analysis over a CFG. All
/// bit sets are stored flat as `Vec<usize>` words; the words belonging
/// to one CFG node are located with `compute_id_range()`.
// NOTE(review): several fields (e.g. the operator, `bits_per_id`,
// `words_per_id`, `gens`, `on_entry`, and a `tcx` used by the PpAnn
// impl) have their declarations elided in this view; their doc
// comments below are retained.
pub struct DataFlowContext<'tcx, O> {
    /// a name for the analysis using this dataflow instance
    analysis_name: &'static str,

    /// the data flow operator

    /// number of bits to propagate per id

    /// number of words we will use to store bits_per_id.
    /// equal to bits_per_id/usize::BITS rounded up.

    // mapping from node to cfg node index
    // FIXME (#6298): Shouldn't this go with CFG?
    local_id_to_index: FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>,

    // Bit sets per cfg node. The following three fields (`gens`, `kills`,
    // and `on_entry`) all have the same structure. For each id in
    // `id_range`, there is a range of words equal to `words_per_id`.
    // So, to access the bits for any given id, you take a slice of
    // the full vector (see the method `compute_id_range()`).

    /// bits generated as we exit the cfg node. Updated by `add_gen()`.

    /// bits killed as we exit the cfg node, or non-locally jump over
    /// it. Updated by `add_kill(KillFrom::ScopeEnd)`.
    scope_kills: Vec<usize>,

    /// bits killed as we exit the cfg node directly; if it is jumped
    /// over, e.g., via `break`, the kills are not reflected in the
    /// jump's effects. Updated by `add_kill(KillFrom::Execution)`.
    action_kills: Vec<usize>,

    /// bits that are valid on entry to the cfg node. Updated by
/// Word-wise join used by `bitwise` when folding one bit set into
/// another (e.g. merging a predecessor's exit bits into a successor's
/// entry set in `propagate_bits_into_entry_set_for`).
pub trait BitwiseOperator {
    /// Joins two predecessor bits together, typically either `|` or `&`
    fn join(&self, succ: usize, pred: usize) -> usize;
/// Parameterization for the precise form of data flow that is used.
pub trait DataFlowOperator : BitwiseOperator {
    /// Specifies the initial value for each bit in the `on_entry` set
    /// (`true` means all-ones, `false` all-zeroes — see the `entry`
    /// computation in `DataFlowContext::new` and `PropagationContext::reset`).
    fn initial_value(&self) -> bool;
/// Mutable walk state threaded through the fixed-point iteration
/// performed by `propagate` / `walk_cfg`.
// NOTE(review): a `changed` field is also used by `propagate`; its
// declaration is elided in this view.
struct PropagationContext<'a, 'tcx, O> {
    dfcx: &'a mut DataFlowContext<'tcx, O>,
/// Looks up the CFG node indices recorded for `id`, returning an empty
/// slice (rather than panicking) when the id has no mapping.
fn get_cfg_indices(id: hir::ItemLocalId,
                   index: &FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>)
    // NOTE(review): the `-> &[CFGIndex]`-style return-type line is
    // elided in this view.
    index.get(&id).map_or(&[], |v| &v[..])
impl<'tcx, O: DataFlowOperator> DataFlowContext<'tcx, O> {
    /// Returns true if `n` has an associated bit set, i.e. appears as
    /// a key in `local_id_to_index`. Asserts that `n` is not the dummy
    /// sentinel id.
    fn has_bitset_for_local_id(&self, n: hir::ItemLocalId) -> bool {
        assert!(n != hir::DUMMY_ITEM_LOCAL_ID);
        self.local_id_to_index.contains_key(&n)
// Pretty-printer annotation hook: when dataflow results are printed
// (see `propagate`), each node that carries a bit set is annotated with
// its entry/gen/kill bits rendered via `bits_to_string`.
impl<'tcx, O: DataFlowOperator> pprust::PpAnn for DataFlowContext<'tcx, O> {
    fn nested(&self, state: &mut pprust::State<'_>, nested: pprust::Nested) {
        // Delegate nested-item printing to the HIR map's annotation impl.
        pprust::PpAnn::nested(self.tcx.hir(), state, nested)
        // NOTE(review): the header of the following method is elided in
        // this view; these are its remaining parameters.
        ps: &mut pprust::State<'_>,
        node: pprust::AnnNode<'_>) {
        // Map the printed node to its HIR-local id; names and items
        // carry no dataflow bits, so bail out for those.
        let id = match node {
            pprust::AnnNode::Name(_) => return,
            pprust::AnnNode::Expr(expr) => expr.hir_id.local_id,
            pprust::AnnNode::Block(blk) => blk.hir_id.local_id,
            pprust::AnnNode::Item(_) |
            pprust::AnnNode::SubItem(_) => return,
            pprust::AnnNode::Pat(pat) => pat.hir_id.local_id,
            pprust::AnnNode::Arm(arm) => arm.hir_id.local_id,
        // Ids that never received a bit set have nothing to show.
        if !self.has_bitset_for_local_id(id) {
        assert!(self.bits_per_id > 0);
        // An id may map to several CFG nodes; annotate each one.
        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            let (start, end) = self.compute_id_range(cfgidx);
            let on_entry = &self.on_entry[start.. end];
            let entry_str = bits_to_string(on_entry);
            // Only mention gen/kill sets that are non-empty.
            let gens = &self.gens[start.. end];
            let gens_str = if gens.iter().any(|&u| u != 0) {
                format!(" gen: {}", bits_to_string(gens))
            let action_kills = &self.action_kills[start .. end];
            let action_kills_str = if action_kills.iter().any(|&u| u != 0) {
                format!(" action_kill: {}", bits_to_string(action_kills))
            let scope_kills = &self.scope_kills[start .. end];
            let scope_kills_str = if scope_kills.iter().any(|&u| u != 0) {
                format!(" scope_kill: {}", bits_to_string(scope_kills))
            format!("id {}: {}{}{}{}", id.as_usize(), entry_str,
                    gens_str, action_kills_str, scope_kills_str));
/// Builds the map from each HIR local id appearing in the CFG to the
/// CFG node indices carrying that id. Formal parameters of `body`, if
/// present, are additionally mapped to the CFG entry node.
fn build_local_id_to_index(body: Option<&hir::Body>,
                           // NOTE(review): a CFG parameter line is
                           // elided here; `cfg` is used below.
                           -> FxHashMap<hir::ItemLocalId, Vec<CFGIndex>> {
    let mut index = FxHashMap::default();

    // FIXME(#15020) Would it be better to fold formals from decl
    // into cfg itself? i.e., introduce a fn-based flow-graph in
    // addition to the current block-based flow-graph, rather than
    // have to put traversals like this here?
    if let Some(body) = body {
        add_entries_from_fn_body(&mut index, body, cfg.entry);

    // Record every AST-backed CFG node under its local id; one id may
    // map to several nodes, hence the `Vec` values.
    cfg.graph.each_node(|node_idx, node| {
        if let cfg::CFGNodeData::AST(id) = node.data {
            index.entry(id).or_default().push(node_idx);
177 /// Adds mappings from the ast nodes for the formal bindings to
178 /// the entry-node in the graph.
179 fn add_entries_from_fn_body(index: &mut FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>,
182 use rustc::hir::intravisit::Visitor;
186 index: &'a mut FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>,
188 let mut formals = Formals { entry: entry, index: index };
189 for param in &body.params {
190 formals.visit_pat(¶m.pat);
192 impl<'a, 'v> Visitor<'v> for Formals<'a> {
193 fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'v> {
194 intravisit::NestedVisitorMap::None
197 fn visit_pat(&mut self, p: &hir::Pat) {
198 self.index.entry(p.hir_id.local_id).or_default().push(self.entry);
199 intravisit::walk_pat(self, p)
/// Flag used by `add_kill` to indicate whether the provided kill
/// takes effect only when control flows directly through the node in
/// question, or if the kill's effect is associated with any
/// control-flow directly through or indirectly over the node.
// NOTE(review): the `pub enum KillFrom {` header is elided in this
// view; `add_kill` routes `KillFrom::ScopeEnd` to `scope_kills` and
// `KillFrom::Execution` to `action_kills`.
#[derive(Copy, Clone, PartialEq, Debug)]
    /// A `ScopeEnd` kill is one that takes effect when any control
    /// flow goes over the node. A kill associated with the end of the
    /// scope of a variable declaration `let x;` is an example of a

    /// An `Execution` kill is one that takes effect only when control
    /// flow goes through the node to completion. A kill associated
    /// with an assignment statement `x = expr;` is an example of an
    /// `Execution` kill.
impl<'tcx, O: DataFlowOperator> DataFlowContext<'tcx, O> {
    // Constructor: sizes all per-node bit vectors from the CFG node
    // count and the requested bits-per-id.
    // NOTE(review): the `pub fn new(` header line and some parameter
    // lines are elided in this view.
        analysis_name: &'static str,
        body: Option<&hir::Body>,
    ) -> DataFlowContext<'tcx, O> {
        // Round the requested number of bits up to whole machine words.
        let usize_bits = mem::size_of::<usize>() * 8;
        let words_per_id = (bits_per_id + usize_bits - 1) / usize_bits;
        let num_nodes = cfg.graph.all_nodes().len();

        debug!("DataFlowContext::new(analysis_name: {}, \
                bits_per_id={}, words_per_id={}) \
               analysis_name, bits_per_id, words_per_id,

        // `on_entry` starts at the operator's initial value (all-ones
        // or all-zeroes); the gen/kill sets always start empty.
        let entry = if oper.initial_value() { usize::MAX } else {0};

        let zeroes = vec![0; num_nodes * words_per_id];
        let gens = zeroes.clone();
        let kills1 = zeroes.clone();
        let on_entry = vec![entry; num_nodes * words_per_id];

        let local_id_to_index = build_local_id_to_index(body, cfg);
            action_kills: kills1,

    pub fn add_gen(&mut self, id: hir::ItemLocalId, bit: usize) {
        //! Indicates that `id` generates `bit`
        debug!("{} add_gen(id={:?}, bit={})",
               self.analysis_name, id, bit);
        assert!(self.local_id_to_index.contains_key(&id));
        assert!(self.bits_per_id > 0);

        // Set the bit in the gen words of every CFG node carrying `id`.
        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            let (start, end) = self.compute_id_range(cfgidx);
            let gens = &mut self.gens[start.. end];

    pub fn add_kill(&mut self, kind: KillFrom, id: hir::ItemLocalId, bit: usize) {
        //! Indicates that `id` kills `bit`
        debug!("{} add_kill(id={:?}, bit={})",
               self.analysis_name, id, bit);
        assert!(self.local_id_to_index.contains_key(&id));
        assert!(self.bits_per_id > 0);

        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            let (start, end) = self.compute_id_range(cfgidx);
            // `kind` selects which kill set receives the bit (see
            // the `KillFrom` documentation).
            let kills = match kind {
                KillFrom::Execution => &mut self.action_kills[start.. end],
                KillFrom::ScopeEnd => &mut self.scope_kills[start.. end],

    fn apply_gen_kill(&self, cfgidx: CFGIndex, bits: &mut [usize]) {
        //! Applies the gen and kill sets for `cfgidx` to `bits`
        debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [before]",
               self.analysis_name, cfgidx, mut_bits_to_string(bits));
        assert!(self.bits_per_id > 0);

        // bits = (bits | gens) minus action_kills minus scope_kills.
        let (start, end) = self.compute_id_range(cfgidx);
        let gens = &self.gens[start.. end];
        bitwise(bits, gens, &Union);
        let kills = &self.action_kills[start.. end];
        bitwise(bits, kills, &Subtract);
        let kills = &self.scope_kills[start.. end];
        bitwise(bits, kills, &Subtract);

        debug!("{} apply_gen_kill(cfgidx={:?}, bits={}) [after]",
               self.analysis_name, cfgidx, mut_bits_to_string(bits));

    // Computes the half-open word range `[start, end)` in the flat bit
    // vectors that belongs to `cfgidx`.
    fn compute_id_range(&self, cfgidx: CFGIndex) -> (usize, usize) {
        let n = cfgidx.node_id();
        let start = n * self.words_per_id;
        let end = start + self.words_per_id;

        // All four bit vectors share the same layout, so bounds-checking
        // against `gens` covers them all.
        assert!(start < self.gens.len());
        assert!(end <= self.gens.len());
        assert!(self.gens.len() == self.action_kills.len());
        assert!(self.gens.len() == self.scope_kills.len());
        assert!(self.gens.len() == self.on_entry.len());

    pub fn each_bit_on_entry<F>(&self, id: hir::ItemLocalId, mut f: F) -> bool where
        F: FnMut(usize) -> bool,
        //! Iterates through each bit that is set on entry to `id`.
        //! Only useful after `propagate()` has been called.
        if !self.has_bitset_for_local_id(id) {
        // Visit entry bits of every CFG node carrying `id`, stopping
        // early if the callback returns false.
        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            if !self.each_bit_for_node(EntryOrExit::Entry, cfgidx, |i| f(i)) {

    pub fn each_bit_for_node<F>(&self, e: EntryOrExit, cfgidx: CFGIndex, f: F) -> bool where
        F: FnMut(usize) -> bool,
        //! Iterates through each bit that is set on entry/exit to `cfgidx`.
        //! Only useful after `propagate()` has been called.

        if self.bits_per_id == 0 {
            // Skip the surprisingly common degenerate case. (Note
            // compute_id_range requires self.words_per_id > 0.)

        let (start, end) = self.compute_id_range(cfgidx);
        let on_entry = &self.on_entry[start.. end];
        // Exit bits are derived on the fly: copy the entry bits and
        // apply this node's gen/kill sets to the copy.
        let slice = match e {
            EntryOrExit::Entry => on_entry,
            EntryOrExit::Exit => {
                let mut t = on_entry.to_vec();
                self.apply_gen_kill(cfgidx, &mut t);

        debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}",
               self.analysis_name, e, cfgidx, bits_to_string(slice));
        self.each_bit(slice, f)

    pub fn each_gen_bit<F>(&self, id: hir::ItemLocalId, mut f: F) -> bool where
        F: FnMut(usize) -> bool,
        //! Iterates through each bit in the gen set for `id`.
        if !self.has_bitset_for_local_id(id) {

        if self.bits_per_id == 0 {
            // Skip the surprisingly common degenerate case. (Note
            // compute_id_range requires self.words_per_id > 0.)

        let indices = get_cfg_indices(id, &self.local_id_to_index);
        for &cfgidx in indices {
            let (start, end) = self.compute_id_range(cfgidx);
            let gens = &self.gens[start.. end];
            debug!("{} each_gen_bit(id={:?}, gens={})",
                   self.analysis_name, id, bits_to_string(gens));
            if !self.each_bit(gens, |i| f(i)) {

    fn each_bit<F>(&self, words: &[usize], mut f: F) -> bool where
        F: FnMut(usize) -> bool,
        //! Helper for iterating over the bits in a bit set.
        //! Returns false on the first call to `f` that returns false;
        //! if all calls to `f` return true, then returns true.

        let usize_bits = mem::size_of::<usize>() * 8;
        for (word_index, &word) in words.iter().enumerate() {
            let base_index = word_index * usize_bits;
            // Test each bit of the word individually.
            for offset in 0..usize_bits {
                let bit = 1 << offset;
                if (word & bit) != 0 {
                    // N.B., we round up the total number of bits
                    // that we store in any given bit set so that
                    // it is an even multiple of usize::BITS. This
                    // means that there may be some stray bits at
                    // the end that do not correspond to any
                    // actual value. So before we callback, check
                    // whether the bit_index is greater than the
                    // actual value the user specified and stop
                    let bit_index = base_index + offset as usize;
                    if bit_index >= self.bits_per_id {
                    } else if !f(bit_index) {

    pub fn add_kills_from_flow_exits(&mut self, cfg: &cfg::CFG) {
        //! Whenever you have a `break` or `continue` statement, flow
        //! exits through any number of enclosing scopes on its way to
        //! the new destination. This function infers the kill bits of
        //! those control operators based on the kill bits associated
        //! with those scopes.

        //! This is usually called (if it is called at all), after
        //! all add_gen and add_kill calls, but before propagate.

        debug!("{} add_kills_from_flow_exits", self.analysis_name);
        if self.bits_per_id == 0 {
            // Skip the surprisingly common degenerate case. (Note
            // compute_id_range requires self.words_per_id > 0.)

        cfg.graph.each_edge(|_edge_index, edge| {
            let flow_exit = edge.source();
            let (start, end) = self.compute_id_range(flow_exit);
            // Accumulate into a scratch copy; written back below only
            // through the (elided) `changed` check.
            let mut orig_kills = self.scope_kills[start.. end].to_vec();

            let mut changed = false;
            // Union in the scope kills of every scope this edge exits.
            for &id in &edge.data.exiting_scopes {
                let opt_cfg_idx = self.local_id_to_index.get(&id);
                for &cfg_idx in indices {
                    let (start, end) = self.compute_id_range(cfg_idx);
                    let kills = &self.scope_kills[start.. end];
                    if bitwise(&mut orig_kills, kills, &Union) {
                        debug!("scope exits: scope id={:?} \
                                (node={:?} of {:?}) added killset: {}",
                               id, cfg_idx, indices,
                               bits_to_string(kills));
                debug!("{} add_kills_from_flow_exits flow_exit={:?} \
                        no cfg_idx for exiting_scope={:?}",
                       self.analysis_name, flow_exit, id);
            // Write the accumulated kills back to the edge's source node.
            let bits = &mut self.scope_kills[start.. end];
            debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]",
                   self.analysis_name, flow_exit, mut_bits_to_string(bits));
            bits.copy_from_slice(&orig_kills[..]);
            debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]",
                   self.analysis_name, flow_exit, mut_bits_to_string(bits));
// N.B. `Clone + 'static` only needed for pretty printing.
impl<'tcx, O: DataFlowOperator + Clone + 'static> DataFlowContext<'tcx, O> {
    pub fn propagate(&mut self, cfg: &cfg::CFG, body: &hir::Body) {
        //! Performs the data flow analysis.

        if self.bits_per_id == 0 {
            // Optimize the surprisingly common degenerate case.

        let words_per_id = self.words_per_id;
        let mut propcx = PropagationContext {
        // Iterate to a fixed point: walk the CFG (in reverse post-order,
        // see `walk_cfg`) until no entry set changes.
        let nodes_po = cfg.graph.nodes_in_postorder(OUTGOING, cfg.entry);
        let mut temp = vec![0; words_per_id];
        let mut num_passes = 0;
        while propcx.changed {
            propcx.changed = false;
            propcx.reset(&mut temp);
            propcx.walk_cfg(cfg, &nodes_po, &mut temp);
        debug!("finished in {} iterations", num_passes);

        debug!("Dataflow result for {}:", self.analysis_name);
        // Pretty-print the body using `self` as the annotator (the
        // `pprust::PpAnn` impl above supplies per-node bit strings).
        debug!("{}", pprust::to_string(self, |s| {
            s.cbox(pprust::INDENT_UNIT);
            s.print_expr(&body.value)
impl<O: DataFlowOperator> PropagationContext<'_, 'tcx, O> {
    // One pass over the CFG: for each node, load its entry bits,
    // apply gen/kill, and push the result into its successors.
    fn walk_cfg(&mut self,
                nodes_po: &[CFGIndex],
                in_out: &mut [usize]) {
        debug!("DataFlowContext::walk_cfg(in_out={}) {}",
               bits_to_string(in_out), self.dfcx.analysis_name);
        assert!(self.dfcx.bits_per_id > 0);

        // Iterate over nodes in reverse post-order.
        for &node_index in nodes_po.iter().rev() {
            let node = cfg.graph.node(node_index);
            debug!("DataFlowContext::walk_cfg idx={:?} id={:?} begin in_out={}",
                   node_index, node.data.id(), bits_to_string(in_out));

            let (start, end) = self.dfcx.compute_id_range(node_index);

            // Initialize local bitvector with state on-entry.
            in_out.copy_from_slice(&self.dfcx.on_entry[start.. end]);

            // Compute state on-exit by applying transfer function to
            self.dfcx.apply_gen_kill(node_index, in_out);

            // Propagate state on-exit from node into its successors.
            self.propagate_bits_into_graph_successors_of(in_out, cfg, node_index);

    // Re-fills a scratch bit vector with the operator's initial value.
    fn reset(&mut self, bits: &mut [usize]) {
        let e = if self.dfcx.oper.initial_value() {usize::MAX} else {0};

    // Feeds a node's exit bits into the entry set of each successor.
    fn propagate_bits_into_graph_successors_of(&mut self,
        for (_, edge) in cfg.graph.outgoing_edges(cfgidx) {
            self.propagate_bits_into_entry_set_for(pred_bits, edge);

    fn propagate_bits_into_entry_set_for(&mut self,
                                         edge: &cfg::CFGEdge) {
        let source = edge.source();
        let cfgidx = edge.target();
        debug!("{} propagate_bits_into_entry_set_for(pred_bits={}, {:?} to {:?})",
               self.dfcx.analysis_name, bits_to_string(pred_bits), source, cfgidx);
        assert!(self.dfcx.bits_per_id > 0);

        let (start, end) = self.dfcx.compute_id_range(cfgidx);
        // Join the predecessor's exit bits into the target's entry set
        // with the analysis operator; `bitwise` reports whether any
        // word changed, which drives the fixed-point loop.
        // (scoping mutable borrow of self.dfcx.on_entry)
        let on_entry = &mut self.dfcx.on_entry[start.. end];
        bitwise(on_entry, pred_bits, &self.dfcx.oper)
        debug!("{} changed entry set for {:?} to {}",
               self.dfcx.analysis_name, cfgidx,
               bits_to_string(&self.dfcx.on_entry[start.. end]));
/// Convenience wrapper around `bits_to_string` for call sites that
/// only have a `&mut [usize]` in hand.
fn mut_bits_to_string(words: &mut [usize]) -> String {
    bits_to_string(words)
/// Renders a bit set as a string of two-digit hex bytes.
fn bits_to_string(words: &[usize]) -> String {
    let mut result = String::new();

    // Note: this is a little endian printout of bytes.

    // NOTE(review): the per-word loop header and the line defining `v`
    // (the current word being shifted) are elided in this view.
    for _ in 0..mem::size_of::<usize>() {
        // Emit the low byte of the current word value.
        result.push_str(&format!("{:02x}", v & 0xFF));
/// Folds `in_vec` into `out_vec` word by word using `op.join`,
/// tracking whether any word of `out_vec` changes.
// NOTE(review): the write-back of `new_val` into `*out_elt` and the
// final `changed` return are elided in this view.
fn bitwise<Op: BitwiseOperator>(out_vec: &mut [usize],
    assert_eq!(out_vec.len(), in_vec.len());
    let mut changed = false;
    for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec) {
        let old_val = *out_elt;
        let new_val = op.join(old_val, *in_elt);
        changed |= old_val != new_val;
/// Sets `bit` (a flat bit index) in the word-packed set `words`.
// NOTE(review): the write-back of `newv` and the return value are
// elided in this view; presumably it reports whether the bit was newly
// set — confirm against the full source.
fn set_bit(words: &mut [usize], bit: usize) -> bool {
    debug!("set_bit: words={} bit={}",
           mut_bits_to_string(words), bit_str(bit));
    // Locate the containing word and the bit's position within it.
    let usize_bits = mem::size_of::<usize>() * 8;
    let word = bit / usize_bits;
    let bit_in_word = bit % usize_bits;
    let bit_mask = 1 << bit_in_word;
    debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, bit_mask);
    let oldv = words[word];
    let newv = oldv | bit_mask;
/// Formats a bit index for debug output as `[bit:byte-mask]`.
// NOTE(review): the line defining `byte` is elided in this view.
fn bit_str(bit: usize) -> String {
    // Mask selecting the bit's position within its byte (low 3 bits).
    let lobits = 1 << (bit & 0b111);
    format!("[{}:{}-{:02x}]", bit, byte, lobits)
// OR-join: used by `apply_gen_kill` to add gen bits and by
// `add_kills_from_flow_exits` to accumulate kill sets.
impl BitwiseOperator for Union {
    fn join(&self, a: usize, b: usize) -> usize { a | b }
// AND-NOT join: used by `apply_gen_kill` to clear kill bits.
impl BitwiseOperator for Subtract {
    fn join(&self, a: usize, b: usize) -> usize { a & !b }