//! A solver for dataflow problems.

use std::ffi::OsString;
use std::fs;
use std::path::PathBuf;

use rustc::mir::{self, traversal, BasicBlock, Location};
use rustc::ty::{self, TyCtxt};
use rustc_data_structures::work_queue::WorkQueue;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;
use rustc_span::symbol::{sym, Symbol};
use syntax::ast;

use super::graphviz;
use super::{Analysis, GenKillAnalysis, GenKillSet, Results};

/// A solver for dataflow problems.
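///
/// A hedged, illustrative sketch of how the engine might be driven (`MyAnalysis` is a
/// hypothetical stand-in for a concrete `GenKillAnalysis` impl, not part of this module):
///
/// ```ignore (illustrative)
/// // Build the engine for a gen-kill problem and run it to a fixpoint.
/// let analysis = MyAnalysis::new();
/// let results = Engine::new_gen_kill(tcx, body, def_id, analysis).iterate_to_fixpoint();
/// // `results` holds the entry state of every basic block at the fixpoint.
/// ```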
pub struct Engine<'a, 'tcx, A>
where
    A: Analysis<'tcx>,
{
    bits_per_block: usize,
    tcx: TyCtxt<'tcx>,
    body: &'a mir::Body<'tcx>,
    def_id: DefId,
    dead_unwinds: Option<&'a BitSet<BasicBlock>>,
    entry_sets: IndexVec<BasicBlock, BitSet<A::Idx>>,
    analysis: A,

    /// Cached, cumulative transfer functions for each block.
    trans_for_block: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
}

impl<A> Engine<'a, 'tcx, A>
where
    A: GenKillAnalysis<'tcx>,
{
    /// Creates a new `Engine` to solve a gen-kill dataflow problem.
    pub fn new_gen_kill(
        tcx: TyCtxt<'tcx>,
        body: &'a mir::Body<'tcx>,
        def_id: DefId,
        analysis: A,
    ) -> Self {
        // If there are no back-edges in the control-flow graph, we only ever need to apply the
        // transfer function for each block exactly once (assuming that we process blocks in RPO).
        //
        // In this case, there's no need to compute the block transfer functions ahead of time.
        if !body.is_cfg_cyclic() {
            return Self::new(tcx, body, def_id, analysis, None);
        }

        // Otherwise, compute and store the cumulative transfer function for each block.

        let bits_per_block = analysis.bits_per_block(body);
        let mut trans_for_block =
            IndexVec::from_elem(GenKillSet::identity(bits_per_block), body.basic_blocks());

        for (block, block_data) in body.basic_blocks().iter_enumerated() {
            let trans = &mut trans_for_block[block];

            for (i, statement) in block_data.statements.iter().enumerate() {
                let loc = Location { block, statement_index: i };
                analysis.before_statement_effect(trans, statement, loc);
                analysis.statement_effect(trans, statement, loc);
            }

            let terminator = block_data.terminator();
            let loc = Location { block, statement_index: block_data.statements.len() };
            analysis.before_terminator_effect(trans, terminator, loc);
            analysis.terminator_effect(trans, terminator, loc);
        }

        Self::new(tcx, body, def_id, analysis, Some(trans_for_block))
    }
}

impl<A> Engine<'a, 'tcx, A>
where
    A: Analysis<'tcx>,
{
    /// Creates a new `Engine` to solve a dataflow problem with an arbitrary transfer
    /// function.
    ///
    /// Gen-kill problems should use `new_gen_kill`, which will coalesce transfer functions for
    /// better performance.
    pub fn new_generic(
        tcx: TyCtxt<'tcx>,
        body: &'a mir::Body<'tcx>,
        def_id: DefId,
        analysis: A,
    ) -> Self {
        Self::new(tcx, body, def_id, analysis, None)
    }

    fn new(
        tcx: TyCtxt<'tcx>,
        body: &'a mir::Body<'tcx>,
        def_id: DefId,
        analysis: A,
        trans_for_block: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
    ) -> Self {
        let bits_per_block = analysis.bits_per_block(body);

        let bottom_value_set = if A::BOTTOM_VALUE {
            BitSet::new_filled(bits_per_block)
        } else {
            BitSet::new_empty(bits_per_block)
        };

        let mut entry_sets = IndexVec::from_elem(bottom_value_set, body.basic_blocks());
        analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);

        Engine {
            analysis,
            bits_per_block,
            tcx,
            body,
            def_id,
            dead_unwinds: None,
            entry_sets,
            trans_for_block,
        }
    }

    /// Signals that we do not want dataflow state to propagate across unwind edges for these
    /// `BasicBlock`s.
    ///
    /// You must take care that `dead_unwinds` does not contain a `BasicBlock` that *can* actually
    /// unwind during execution. Otherwise, your dataflow results will not be correct.
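    ///
    /// Illustrative sketch only (assumes a `dead_unwinds` bit set computed elsewhere):
    ///
    /// ```ignore (illustrative)
    /// let results = Engine::new_gen_kill(tcx, body, def_id, analysis)
    ///     .dead_unwinds(&dead_unwinds)
    ///     .iterate_to_fixpoint();
    /// ```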
    pub fn dead_unwinds(mut self, dead_unwinds: &'a BitSet<BasicBlock>) -> Self {
        self.dead_unwinds = Some(dead_unwinds);
        self
    }

    /// Computes the fixpoint for this dataflow problem and returns it.
    pub fn iterate_to_fixpoint(mut self) -> Results<'tcx, A> {
        let mut temp_state = BitSet::new_empty(self.bits_per_block);

        let mut dirty_queue: WorkQueue<BasicBlock> =
            WorkQueue::with_none(self.body.basic_blocks().len());

        for (bb, _) in traversal::reverse_postorder(self.body) {
            dirty_queue.insert(bb);
        }

        // Add blocks that are not reachable from START_BLOCK to the work queue. These blocks will
        // be processed after the ones added above.
        for bb in self.body.basic_blocks().indices() {
            dirty_queue.insert(bb);
        }

        while let Some(bb) = dirty_queue.pop() {
            let bb_data = &self.body[bb];
            let on_entry = &self.entry_sets[bb];

            temp_state.overwrite(on_entry);
            self.apply_whole_block_effect(&mut temp_state, bb, bb_data);

            self.propagate_bits_into_graph_successors_of(
                &mut temp_state,
                (bb, bb_data),
                &mut dirty_queue,
            );
        }

        let Engine { tcx, body, def_id, trans_for_block, entry_sets, analysis, .. } = self;
        let results = Results { analysis, entry_sets };

        let res = write_graphviz_results(tcx, def_id, body, &results, trans_for_block);
        if let Err(e) = res {
            warn!("Failed to write graphviz dataflow results: {}", e);
        }

        results
    }

    /// Applies the cumulative effect of an entire block, excluding the call return effect if one
    /// exists.
    fn apply_whole_block_effect(
        &self,
        state: &mut BitSet<A::Idx>,
        block: BasicBlock,
        block_data: &mir::BasicBlockData<'tcx>,
    ) {
        // Use the cached block transfer function if available.
        if let Some(trans_for_block) = &self.trans_for_block {
            trans_for_block[block].apply(state);
            return;
        }

        // Otherwise apply effects one-by-one.

        for (statement_index, statement) in block_data.statements.iter().enumerate() {
            let location = Location { block, statement_index };
            self.analysis.apply_before_statement_effect(state, statement, location);
            self.analysis.apply_statement_effect(state, statement, location);
        }

        let terminator = block_data.terminator();
        let location = Location { block, statement_index: block_data.statements.len() };
        self.analysis.apply_before_terminator_effect(state, terminator, location);
        self.analysis.apply_terminator_effect(state, terminator, location);
    }

    fn propagate_bits_into_graph_successors_of(
        &mut self,
        in_out: &mut BitSet<A::Idx>,
        (bb, bb_data): (BasicBlock, &'a mir::BasicBlockData<'tcx>),
        dirty_list: &mut WorkQueue<BasicBlock>,
    ) {
        use mir::TerminatorKind::*;

        match bb_data.terminator().kind {
            Return | Resume | Abort | GeneratorDrop | Unreachable => {}

            Goto { target }
            | Assert { target, cleanup: None, .. }
            | Yield { resume: target, drop: None, .. }
            | Drop { target, location: _, unwind: None }
            | DropAndReplace { target, value: _, location: _, unwind: None } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list)
            }

            Yield { resume: target, drop: Some(drop), .. } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
                self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list);
            }

            Assert { target, cleanup: Some(unwind), .. }
            | Drop { target, location: _, unwind: Some(unwind) }
            | DropAndReplace { target, value: _, location: _, unwind: Some(unwind) } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
                if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
                    self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                }
            }

            SwitchInt { ref targets, ref values, ref discr, .. } => {
                // If this is a switch on an enum discriminant, a custom effect may be applied
                // along each outgoing edge.
                if let Some(place) = discr.place() {
                    let enum_def = switch_on_enum_discriminant(self.tcx, self.body, bb_data, place);
                    if let Some(enum_def) = enum_def {
                        self.propagate_bits_into_enum_discriminant_switch_successors(
                            in_out, bb, enum_def, place, dirty_list, &*values, &*targets,
                        );

                        return;
                    }
                }

                // Otherwise, it's just a normal `SwitchInt`, and every successor sees the same
                // exit state.
                for target in targets.iter().copied() {
                    self.propagate_bits_into_entry_set_for(&in_out, target, dirty_list);
                }
            }

            Call { cleanup, ref destination, ref func, ref args, .. } => {
                if let Some(unwind) = cleanup {
                    if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                    }
                }

                if let Some((ref dest_place, dest_bb)) = *destination {
                    // N.B.: This must be done *last*, otherwise the unwind path will see the call
                    // return effect.
                    self.analysis.apply_call_return_effect(in_out, bb, func, args, dest_place);
                    self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
                }
            }

            FalseEdges { real_target, imaginary_target } => {
                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
                self.propagate_bits_into_entry_set_for(in_out, imaginary_target, dirty_list);
            }

            FalseUnwind { real_target, unwind } => {
                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
                if let Some(unwind) = unwind {
                    if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                    }
                }
            }
        }
    }

    fn propagate_bits_into_entry_set_for(
        &mut self,
        in_out: &BitSet<A::Idx>,
        bb: BasicBlock,
        dirty_queue: &mut WorkQueue<BasicBlock>,
    ) {
        let entry_set = &mut self.entry_sets[bb];
        let set_changed = self.analysis.join(entry_set, &in_out);
        if set_changed {
            dirty_queue.insert(bb);
        }
    }

    fn propagate_bits_into_enum_discriminant_switch_successors(
        &mut self,
        in_out: &mut BitSet<A::Idx>,
        bb: BasicBlock,
        enum_def: &'tcx ty::AdtDef,
        enum_place: &mir::Place<'tcx>,
        dirty_list: &mut WorkQueue<BasicBlock>,
        values: &[u128],
        targets: &[BasicBlock],
    ) {
        // MIR building adds discriminants to the `values` array in the same order as they
        // are yielded by `AdtDef::discriminants`. We rely on this to match each
        // discriminant in `values` to its corresponding variant in linear time.
        let mut tmp = BitSet::new_empty(in_out.domain_size());
        let mut discriminants = enum_def.discriminants(self.tcx);
        for (value, target) in values.iter().zip(targets.iter().copied()) {
            let (variant_idx, _) = discriminants.find(|&(_, discr)| discr.val == *value).expect(
                "Order of `AdtDef::discriminants` differed from that of `SwitchInt::values`",
            );

            tmp.overwrite(in_out);
            self.analysis.apply_discriminant_switch_effect(
                &mut tmp,
                bb,
                enum_place,
                enum_def,
                variant_idx,
            );
            self.propagate_bits_into_entry_set_for(&tmp, target, dirty_list);
        }

        std::mem::drop(tmp);

        // Propagate dataflow state along the "otherwise" edge.
        let otherwise = targets.last().copied().unwrap();
        self.propagate_bits_into_entry_set_for(&in_out, otherwise, dirty_list);
    }
}

/// Looks at the last statement of a block that ends with a `SwitchInt` to see if it is an
/// assignment of an enum discriminant to the local that determines the target of that
/// `SwitchInt`, like so:
///   _42 = discriminant(..)
///   SwitchInt(_42, ..)
fn switch_on_enum_discriminant(
    tcx: TyCtxt<'tcx>,
    body: &mir::Body<'tcx>,
    block: &mir::BasicBlockData<'tcx>,
    switch_on: &mir::Place<'tcx>,
) -> Option<&'tcx ty::AdtDef> {
    match block.statements.last().map(|stmt| &stmt.kind) {
        Some(mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated))))
            if lhs == switch_on =>
        {
            match &discriminated.ty(body, tcx).ty.kind {
                ty::Adt(def, _) => Some(def),

                // `Rvalue::Discriminant` is also used to get the active yield point for a
                // generator, but we do not need edge-specific effects in that case. This may
                // change in the future.
                ty::Generator(..) => None,

                t => bug!("`discriminant` called on unexpected type {:?}", t),
            }
        }

        _ => None,
    }
}

// Graphviz

/// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
/// `rustc_mir` attributes.
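///
/// Illustrative only: judging from the attribute parsing below, such a request would look roughly
/// like this (the path and format values are placeholders):
///
/// ```ignore (illustrative)
/// #[rustc_mir(borrowck_graphviz_postflow = "suffix.dot", borrowck_graphviz_format = "two_phase")]
/// fn function_to_analyze() { /* ... */ }
/// ```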
fn write_graphviz_results<A>(
    tcx: TyCtxt<'tcx>,
    def_id: DefId,
    body: &mir::Body<'tcx>,
    results: &Results<'tcx, A>,
    block_transfer_functions: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
) -> std::io::Result<()>
where
    A: Analysis<'tcx>,
{
    let attrs = match RustcMirAttrs::parse(tcx, def_id) {
        Ok(attrs) => attrs,

        // Invalid `rustc_mir` attrs will be reported using `span_err`.
        Err(()) => return Ok(()),
    };

    let path = match attrs.output_path(A::NAME) {
        Some(path) => path,
        None => return Ok(()),
    };

    let bits_per_block = results.analysis.bits_per_block(body);

    let mut formatter: Box<dyn graphviz::StateFormatter<'tcx, _>> = match attrs.formatter {
        Some(sym::two_phase) => Box::new(graphviz::TwoPhaseDiff::new(bits_per_block)),
        Some(sym::gen_kill) => {
            if let Some(trans_for_block) = block_transfer_functions {
                Box::new(graphviz::BlockTransferFunc::new(body, trans_for_block))
            } else {
                Box::new(graphviz::SimpleDiff::new(bits_per_block))
            }
        }

        // Default to the `SimpleDiff` output style.
        _ => Box::new(graphviz::SimpleDiff::new(bits_per_block)),
    };

    debug!("printing dataflow results for {:?} to {}", def_id, path.display());
    let mut buf = Vec::new();

    let graphviz = graphviz::Formatter::new(body, def_id, results, &mut *formatter);
    dot::render_opts(&graphviz, &mut buf, &[dot::RenderOption::Monospace])?;
    fs::write(&path, buf)?;
    Ok(())
}

#[derive(Default)]
struct RustcMirAttrs {
    basename_and_suffix: Option<PathBuf>,
    formatter: Option<Symbol>,
}

impl RustcMirAttrs {
    fn parse(tcx: TyCtxt<'tcx>, def_id: DefId) -> Result<Self, ()> {
        let attrs = tcx.get_attrs(def_id);

        let mut result = Ok(());
        let mut ret = RustcMirAttrs::default();

        let rustc_mir_attrs = attrs
            .into_iter()
            .filter(|attr| attr.check_name(sym::rustc_mir))
            .flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));

        for attr in rustc_mir_attrs {
            let attr_result = if attr.check_name(sym::borrowck_graphviz_postflow) {
                Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| {
                    let path = PathBuf::from(s.to_string());
                    match path.file_name() {
                        Some(_) => Ok(path),
                        None => {
                            tcx.sess.span_err(attr.span(), "path must end in a filename");
                            Err(())
                        }
                    }
                })
            } else if attr.check_name(sym::borrowck_graphviz_format) {
                Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s {
                    sym::gen_kill | sym::two_phase => Ok(s),
                    _ => {
                        tcx.sess.span_err(attr.span(), "unknown formatter");
                        Err(())
                    }
                })
            } else {
                Ok(())
            };

            result = result.and(attr_result);
        }

        result.map(|()| ret)
    }

    fn set_field<T>(
        field: &mut Option<T>,
        tcx: TyCtxt<'tcx>,
        attr: &ast::NestedMetaItem,
        mapper: impl FnOnce(Symbol) -> Result<T, ()>,
    ) -> Result<(), ()> {
        if field.is_some() {
            tcx.sess
                .span_err(attr.span(), &format!("duplicate values for `{}`", attr.name_or_empty()));

            return Err(());
        }

        if let Some(s) = attr.value_str() {
            *field = Some(mapper(s)?);
            Ok(())
        } else {
            tcx.sess
                .span_err(attr.span(), &format!("`{}` requires an argument", attr.name_or_empty()));
            Err(())
        }
    }

    /// Returns the path where dataflow results should be written, or `None` if
    /// `borrowck_graphviz_postflow` was not specified.
    ///
    /// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
    ///
    /// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
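    ///
    /// For example (values are illustrative only): an analysis named `maybe_init` with an
    /// attribute argument of `"out/flow.dot"` would be written to `"out/maybe_init_flow.dot"`.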
    fn output_path(&self, analysis_name: &str) -> Option<PathBuf> {
        let mut ret = self.basename_and_suffix.as_ref().cloned()?;
        let suffix = ret.file_name().unwrap(); // Checked when parsing attrs

        let mut file_name: OsString = analysis_name.into();
        file_name.push("_");
        file_name.push(suffix);
        ret.set_file_name(file_name);

        Some(ret)
    }
}