1 //! A solver for dataflow problems.
3 use std::ffi::OsString;
5 use std::path::PathBuf;
7 use rustc::mir::{self, traversal, BasicBlock, Location};
8 use rustc::ty::{self, TyCtxt};
9 use rustc_data_structures::work_queue::WorkQueue;
10 use rustc_hir::def_id::DefId;
11 use rustc_index::bit_set::BitSet;
12 use rustc_index::vec::IndexVec;
13 use rustc_span::symbol::{sym, Symbol};
17 use super::{Analysis, GenKillAnalysis, GenKillSet, Results};
19 /// A solver for dataflow problems.
20 pub struct Engine<'a, 'tcx, A>
// NOTE(review): original lines 21-23, 25, 27, 30-31 and 34 are elided in this
// listing -- presumably the `where A: Analysis<'tcx>` clause, the remaining
// fields (`tcx`, `def_id`, `analysis`; they are destructured in
// `iterate_to_fixpoint` below) and the closing brace. Confirm against the
// full file.
// Domain size of every bit set used by this engine.
24 bits_per_block: usize,
26 body: &'a mir::Body<'tcx>,
// Blocks whose unwind edges should NOT be propagated along (see the
// `dead_unwinds` builder method). `None` means every unwind edge is live.
28 dead_unwinds: Option<&'a BitSet<BasicBlock>>,
// Per-block entry states; seeded with the analysis' bottom value and joined
// into by `propagate_bits_into_entry_set_for` until fixpoint is reached.
29 entry_sets: IndexVec<BasicBlock, BitSet<A::Idx>>,
32 /// Cached, cumulative transfer functions for each block.
// `None` when the CFG is acyclic (each block is visited once, so caching
// would not pay off) -- see `new_gen_kill`.
33 trans_for_block: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
// Constructor specialized for gen-kill problems, which admit a cheap
// per-block cumulative transfer function.
36 impl<A> Engine<'a, 'tcx, A>
38 A: GenKillAnalysis<'tcx>,
40 /// Creates a new `Engine` to solve a gen-kill dataflow problem.
// NOTE(review): most of this constructor's signature (original lines 41-42
// and 44-46) is elided in this listing; only the `body` parameter is
// visible. Presumably it also takes `tcx`, `def_id` and `analysis`, all of
// which are forwarded to `Self::new` below -- confirm against the full file.
43 body: &'a mir::Body<'tcx>,
47 // If there are no back-edges in the control-flow graph, we only ever need to apply the
48 // transfer function for each block exactly once (assuming that we process blocks in RPO).
50 // In this case, there's no need to compute the block transfer functions ahead of time.
51 if !body.is_cfg_cyclic() {
52 return Self::new(tcx, body, def_id, analysis, None);
55 // Otherwise, compute and store the cumulative transfer function for each block.
57 let bits_per_block = analysis.bits_per_block(body);
// Start from one identity (no-op) gen/kill set per basic block; each is
// filled in by the loop below.
58 let mut trans_for_block =
59 IndexVec::from_elem(GenKillSet::identity(bits_per_block), body.basic_blocks());
61 for (block, block_data) in body.basic_blocks().iter_enumerated() {
62 let trans = &mut trans_for_block[block];
// Fold every statement effect (including the "before" effect) into the
// block's cumulative transfer function, in program order.
64 for (i, statement) in block_data.statements.iter().enumerate() {
65 let loc = Location { block, statement_index: i };
66 analysis.before_statement_effect(trans, statement, loc);
67 analysis.statement_effect(trans, statement, loc);
// The terminator's location uses the index one past the last statement.
70 let terminator = block_data.terminator();
71 let loc = Location { block, statement_index: block_data.statements.len() };
72 analysis.before_terminator_effect(trans, terminator, loc);
73 analysis.terminator_effect(trans, terminator, loc);
76 Self::new(tcx, body, def_id, analysis, Some(trans_for_block))
80 impl<A> Engine<'a, 'tcx, A>
// NOTE(review): this impl's `where` clause (presumably `A: Analysis<'tcx>`,
// original lines 81-83) is elided in this listing.
84 /// Creates a new `Engine` to solve a dataflow problem with an arbitrary transfer
87 /// Gen-kill problems should use `new_gen_kill`, which will coalesce transfer functions for
88 /// better performance.
// NOTE(review): `new_generic`'s signature lines other than `body` (original
// lines 89-90, 92-94) are elided; it forwards to `Self::new` with no cached
// block transfer functions.
91 body: &'a mir::Body<'tcx>,
95 Self::new(tcx, body, def_id, analysis, None)
// Private constructor shared by `new_gen_kill` and `new_generic`. Its `fn`
// line and remaining parameters (original lines 97-99, 101-102, 104) are
// elided in this listing.
100 body: &'a mir::Body<'tcx>,
103 trans_for_block: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
105 let bits_per_block = analysis.bits_per_block(body);
// Seed every block's entry set with the analysis' bottom value: all ones
// when `BOTTOM_VALUE` is `true`, all zeros otherwise.
107 let bottom_value_set = if A::BOTTOM_VALUE {
108 BitSet::new_filled(bits_per_block)
110 BitSet::new_empty(bits_per_block)
113 let mut entry_sets = IndexVec::from_elem(bottom_value_set, body.basic_blocks());
// The start block's entry state may differ from bottom; let the analysis
// initialize it (e.g. for function arguments).
114 analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
128 /// Signals that we do not want dataflow state to propagate across unwind edges for these
131 /// You must take care that `dead_unwinds` does not contain a `BasicBlock` that *can* actually
132 /// unwind during execution. Otherwise, your dataflow results will not be correct.
// Builder-style setter: consumes `self` and returns it, so it can be chained
// before `iterate_to_fixpoint`.
133 pub fn dead_unwinds(mut self, dead_unwinds: &'a BitSet<BasicBlock>) -> Self {
134 self.dead_unwinds = Some(dead_unwinds);
138 /// Computes the fixpoint for this dataflow problem and returns it.
139 pub fn iterate_to_fixpoint(mut self) -> Results<'tcx, A> {
// Scratch bit set, reused for every block to avoid reallocating per block.
140 let mut temp_state = BitSet::new_empty(self.bits_per_block);
142 let mut dirty_queue: WorkQueue<BasicBlock> =
143 WorkQueue::with_none(self.body.basic_blocks().len());
// Seed the queue in reverse postorder: for an acyclic CFG this processes
// each block after all of its predecessors, so a single pass suffices.
145 for (bb, _) in traversal::reverse_postorder(self.body) {
146 dirty_queue.insert(bb);
149 // Add blocks that are not reachable from START_BLOCK to the work queue. These blocks will
150 // be processed after the ones added above.
// (presumably `WorkQueue::insert` is a no-op for already-enqueued blocks, so
// this only appends the unreachable ones -- confirm `WorkQueue` semantics)
151 for bb in self.body.basic_blocks().indices() {
152 dirty_queue.insert(bb);
// Standard worklist algorithm: apply the whole-block transfer function to a
// copy of the block's entry state, then join the result into each
// successor's entry set, re-enqueueing successors whose state changed.
155 while let Some(bb) = dirty_queue.pop() {
156 let bb_data = &self.body[bb];
157 let on_entry = &self.entry_sets[bb];
159 temp_state.overwrite(on_entry);
160 self.apply_whole_block_effect(&mut temp_state, bb, bb_data);
162 self.propagate_bits_into_graph_successors_of(
// NOTE(review): the arguments to the call above (original lines 163-167)
// are elided in this listing.
169 let Engine { tcx, body, def_id, trans_for_block, entry_sets, analysis, .. } = self;
170 let results = Results { analysis, entry_sets };
// Optionally dump the results as graphviz. A failed dump is not fatal --
// only a warning is emitted.
172 let res = write_graphviz_results(tcx, def_id, body, &results, trans_for_block);
173 if let Err(e) = res {
174 warn!("Failed to write graphviz dataflow results: {}", e);
180 /// Applies the cumulative effect of an entire block, excluding the call return effect if one
// (the continuation of this doc comment, original line 181, is elided)
182 fn apply_whole_block_effect(
// NOTE(review): the `&self` and `block` parameter lines (original 183, 185)
// and surrounding braces are elided in this listing.
184 state: &mut BitSet<A::Idx>,
186 block_data: &mir::BasicBlockData<'tcx>,
188 // Use the cached block transfer function if available.
189 if let Some(trans_for_block) = &self.trans_for_block {
190 trans_for_block[block].apply(state);
// (an early `return` presumably follows here; original line ~191 is elided)
194 // Otherwise apply effects one-by-one.
196 for (statement_index, statement) in block_data.statements.iter().enumerate() {
197 let location = Location { block, statement_index };
198 self.analysis.apply_before_statement_effect(state, statement, location);
199 self.analysis.apply_statement_effect(state, statement, location);
// Terminator effect comes last, mirroring the order used in `new_gen_kill`.
202 let terminator = block_data.terminator();
203 let location = Location { block, statement_index: block_data.statements.len() };
204 self.analysis.apply_before_terminator_effect(state, terminator, location);
205 self.analysis.apply_terminator_effect(state, terminator, location);
// Joins the post-block state `in_out` into the entry set of every successor
// of `bb`, dispatching on the terminator kind so unwind edges, enum
// discriminant switches, and call return effects get special handling.
208 fn propagate_bits_into_graph_successors_of(
210 in_out: &mut BitSet<A::Idx>,
211 (bb, bb_data): (BasicBlock, &'a mir::BasicBlockData<'tcx>),
212 dirty_list: &mut WorkQueue<BasicBlock>,
214 use mir::TerminatorKind::*;
216 match bb_data.terminator().kind {
// Terminators with no successors: nothing to propagate.
217 Return | Resume | Abort | GeneratorDrop | Unreachable => {}
// Single-successor edges. NOTE(review): the first variant(s) of this
// or-pattern (original lines 218-219, presumably `Goto { target }`) are
// elided in this listing.
220 | Assert { target, cleanup: None, .. }
221 | Yield { resume: target, drop: None, .. }
222 | Drop { target, location: _, unwind: None }
223 | DropAndReplace { target, value: _, location: _, unwind: None } => {
224 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list)
// A `Yield` with a drop block propagates to both successors.
227 Yield { resume: target, drop: Some(drop), .. } => {
228 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
229 self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list);
// Terminators with an unwind edge: propagate along the unwind edge only if
// this block was not marked as a dead unwind.
232 Assert { target, cleanup: Some(unwind), .. }
233 | Drop { target, location: _, unwind: Some(unwind) }
234 | DropAndReplace { target, value: _, location: _, unwind: Some(unwind) } => {
235 self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
236 if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
237 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
241 SwitchInt { ref targets, ref values, ref discr, .. } => {
242 // If this is a switch on an enum discriminant, a custom effect may be applied
243 // along each outgoing edge.
244 if let Some(place) = discr.place() {
245 let enum_def = switch_on_enum_discriminant(self.tcx, self.body, bb_data, place);
246 if let Some(enum_def) = enum_def {
247 self.propagate_bits_into_enum_discriminant_switch_successors(
248 in_out, bb, enum_def, place, dirty_list, &*values, &*targets,
// (an early `return` presumably follows the call above; original lines
// 249-253 are elided in this listing)
255 // Otherwise, it's just a normal `SwitchInt`, and every successor sees the same
257 for target in targets.iter().copied() {
258 self.propagate_bits_into_entry_set_for(&in_out, target, dirty_list);
262 Call { cleanup, ref destination, ref func, ref args, .. } => {
// Unwind edge first, gated on `dead_unwinds` as above.
263 if let Some(unwind) = cleanup {
264 if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
265 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
269 if let Some((ref dest_place, dest_bb)) = *destination {
270 // N.B.: This must be done *last*, otherwise the unwind path will see the call
// (continuation of the comment above, original line 271, is elided)
272 self.analysis.apply_call_return_effect(in_out, bb, func, args, dest_place);
273 self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
// Both the real and the imaginary target receive the state.
277 FalseEdges { real_target, imaginary_target } => {
278 self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
279 self.propagate_bits_into_entry_set_for(in_out, imaginary_target, dirty_list);
282 FalseUnwind { real_target, unwind } => {
283 self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
284 if let Some(unwind) = unwind {
285 if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
286 self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
// Joins `in_out` into `bb`'s entry set; when the join changes the entry set,
// `bb` is re-enqueued so the new state is eventually pushed to its
// successors as well.
293 fn propagate_bits_into_entry_set_for(
// NOTE(review): the `&mut self` and `bb: BasicBlock` parameter lines
// (original 294, 296) are elided in this listing.
295 in_out: &BitSet<A::Idx>,
297 dirty_queue: &mut WorkQueue<BasicBlock>,
299 let entry_set = &mut self.entry_sets[bb];
300 let set_changed = self.analysis.join(entry_set, &in_out);
// (a guard on `set_changed`, original line ~301, is elided in this listing)
302 dirty_queue.insert(bb);
// Propagates state along each edge of a `SwitchInt` over an enum
// discriminant, applying the analysis' per-variant effect on every edge
// before the state reaches the edge's target.
306 fn propagate_bits_into_enum_discriminant_switch_successors(
// NOTE(review): the `&mut self`, `bb`, `values` and enclosing-brace lines
// (original 307, 309, 313, 315) are elided in this listing.
308 in_out: &mut BitSet<A::Idx>,
310 enum_def: &'tcx ty::AdtDef,
311 enum_place: &mir::Place<'tcx>,
312 dirty_list: &mut WorkQueue<BasicBlock>,
314 targets: &[BasicBlock],
316 // MIR building adds discriminants to the `values` array in the same order as they
317 // are yielded by `AdtDef::discriminants`. We rely on this to match each
318 // discriminant in `values` to its corresponding variant in linear time.
319 let mut tmp = BitSet::new_empty(in_out.domain_size());
320 let mut discriminants = enum_def.discriminants(self.tcx);
321 for (value, target) in values.iter().zip(targets.iter().copied()) {
// Advance the shared discriminant iterator to the variant matching `value`;
// panics if MIR building violated the ordering invariant described above.
322 let (variant_idx, _) = discriminants.find(|&(_, discr)| discr.val == *value).expect(
323 "Order of `AdtDef::discriminants` differed from that of `SwitchInt::values`",
// Each edge starts from a fresh copy of the block's exit state, then gets
// the variant-specific effect applied before propagation.
326 tmp.overwrite(in_out);
327 self.analysis.apply_discriminant_switch_effect(
// (the arguments to the call above, original lines 328-333, are elided)
334 self.propagate_bits_into_entry_set_for(&tmp, target, dirty_list);
339 // Propagate dataflow state along the "otherwise" edge.
// `SwitchInt` has one more target than values; the last target is the
// fallthrough and receives the unmodified exit state.
340 let otherwise = targets.last().copied().unwrap();
341 self.propagate_bits_into_entry_set_for(&in_out, otherwise, dirty_list);
345 /// Look at the last statement of a block that ends with a `SwitchInt` to see if it is an
346 /// assignment of an enum discriminant to the local that determines the target of that `SwitchInt`, like so:
347 /// _42 = discriminant(..)
348 /// SwitchInt(_42, ..)
// Returns the switched-on enum's `AdtDef`, or `None` when the block does not
// match this pattern (or the discriminant belongs to a generator).
349 fn switch_on_enum_discriminant(
// NOTE(review): the `tcx: TyCtxt<'tcx>` parameter line (original 350) is
// elided in this listing; `tcx` is used in the `discriminated.ty` call.
351 body: &mir::Body<'tcx>,
352 block: &mir::BasicBlockData<'tcx>,
353 switch_on: &mir::Place<'tcx>,
354 ) -> Option<&'tcx ty::AdtDef> {
// Only fires when the last statement assigns `discriminant(..)` directly to
// the place being switched on.
355 match block.statements.last().map(|stmt| &stmt.kind) {
356 Some(mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated))))
357 if lhs == switch_on =>
359 match &discriminated.ty(body, tcx).ty.kind {
360 ty::Adt(def, _) => Some(def),
362 // `Rvalue::Discriminant` is also used to get the active yield point for a
363 // generator, but we do not need edge-specific effects in that case. This may
364 // change in the future.
365 ty::Generator(..) => None,
367 t => bug!("`discriminant` called on unexpected type {:?}", t),
377 /// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
378 /// `rustc_mir` attributes.
379 fn write_graphviz_results<A>(
// NOTE(review): the `tcx`/`def_id` parameter lines and the `where` clause
// (original lines 380-381, 386-388) are elided in this listing.
382 body: &mir::Body<'tcx>,
383 results: &Results<'tcx, A>,
384 block_transfer_functions: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
385 ) -> std::io::Result<()>
389 let attrs = match RustcMirAttrs::parse(tcx, def_id) {
392 // Invalid `rustc_mir` attrs will be reported using `span_err`.
393 Err(()) => return Ok(()),
// No output path means the user did not request a dump; succeed silently.
396 let path = match attrs.output_path(A::NAME) {
398 None => return Ok(()),
401 let bits_per_block = results.analysis.bits_per_block(body);
// Select the output style requested via `borrowck_graphviz_format`.
403 let mut formatter: Box<dyn graphviz::StateFormatter<'tcx, _>> = match attrs.formatter {
404 Some(sym::two_phase) => Box::new(graphviz::TwoPhaseDiff::new(bits_per_block)),
405 Some(sym::gen_kill) => {
// The `gen_kill` style needs the cached block transfer functions, which are
// only computed for cyclic CFGs (see `new_gen_kill`); fall back to the
// simple diff style when they are absent.
406 if let Some(trans_for_block) = block_transfer_functions {
407 Box::new(graphviz::BlockTransferFunc::new(body, trans_for_block))
409 Box::new(graphviz::SimpleDiff::new(bits_per_block))
413 // Default to the `SimpleDiff` output style.
414 _ => Box::new(graphviz::SimpleDiff::new(bits_per_block)),
417 debug!("printing dataflow results for {:?} to {}", def_id, path.display());
// Render to an in-memory buffer first, then write the file in one go.
418 let mut buf = Vec::new();
420 let graphviz = graphviz::Formatter::new(body, def_id, results, &mut *formatter);
421 dot::render_opts(&graphviz, &mut buf, &[dot::RenderOption::Monospace])?;
422 fs::write(&path, buf)?;
// Parsed values of the `#[rustc_mir(...)]` attributes that control dataflow
// graphviz output. Constructed via `RustcMirAttrs::default()` and filled in
// by `parse`.
427 struct RustcMirAttrs {
// Path from `borrowck_graphviz_postflow`; the analysis name gets spliced
// into its file name by `output_path`.
428 basename_and_suffix: Option<PathBuf>,
// Style from `borrowck_graphviz_format`: `gen_kill` or `two_phase`.
429 formatter: Option<Symbol>,
// NOTE(review): the enclosing `impl RustcMirAttrs` header (original lines
// ~430-432) is elided in this listing.
// Parses every `#[rustc_mir(...)]` attribute on `def_id`. Invalid values are
// reported via `span_err` and turn the overall result into `Err(())`, but
// parsing continues so all problems are reported.
433 fn parse(tcx: TyCtxt<'tcx>, def_id: DefId) -> Result<Self, ()> {
434 let attrs = tcx.get_attrs(def_id);
436 let mut result = Ok(());
437 let mut ret = RustcMirAttrs::default();
// Flatten each `#[rustc_mir(a, b, ...)]` into its nested meta items.
439 let rustc_mir_attrs = attrs
441 .filter(|attr| attr.check_name(sym::rustc_mir))
442 .flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));
444 for attr in rustc_mir_attrs {
445 let attr_result = if attr.check_name(sym::borrowck_graphviz_postflow) {
446 Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| {
447 let path = PathBuf::from(s.to_string());
// The path must end in a file name so `output_path` can later prepend the
// analysis name to it.
448 match path.file_name() {
451 tcx.sess.span_err(attr.span(), "path must end in a filename");
456 } else if attr.check_name(sym::borrowck_graphviz_format) {
457 Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s {
458 sym::gen_kill | sym::two_phase => Ok(s),
460 tcx.sess.span_err(attr.span(), "unknown formatter");
// Accumulate failures without aborting the loop.
468 result = result.and(attr_result);
// Helper: stores `mapper(value)` into `field`, erroring on duplicate
// attributes or a missing argument. NOTE(review): the `fn set_field`
// signature line and several body lines (original ~473-474, 476, 480-481,
// 483-486, 489-491) are elided in this listing -- presumably including a
// `field.is_some()` duplicate check guarding the error below.
475 field: &mut Option<T>,
477 attr: &ast::NestedMetaItem,
478 mapper: impl FnOnce(Symbol) -> Result<T, ()>,
479 ) -> Result<(), ()> {
482 .span_err(attr.span(), &format!("duplicate values for `{}`", attr.name_or_empty()));
487 if let Some(s) = attr.value_str() {
488 *field = Some(mapper(s)?);
492 .span_err(attr.span(), &format!("`{}` requires an argument", attr.name_or_empty()));
497 /// Returns the path where dataflow results should be written, or `None`
498 /// if `borrowck_graphviz_postflow` was not specified.
500 /// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
502 /// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
503 fn output_path(&self, analysis_name: &str) -> Option<PathBuf> {
504 let mut ret = self.basename_and_suffix.as_ref().cloned()?;
505 let suffix = ret.file_name().unwrap(); // Checked when parsing attrs
507 let mut file_name: OsString = analysis_name.into();
509 file_name.push(suffix);
510 ret.set_file_name(file_name);