// src/librustc_codegen_ssa/mir/analyze.rs
//! An analysis to determine which locals require allocas and
//! which do not.

use super::FunctionCx;
use crate::traits::*;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::traversal;
use rustc_middle::mir::visit::{
    MutatingUseContext, NonMutatingUseContext, NonUseContext, PlaceContext, Visitor,
};
use rustc_middle::mir::{self, Location, TerminatorKind};
use rustc_middle::ty;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_target::abi::LayoutOf;

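/// Computes the set of MIR locals that cannot be kept as SSA values and
/// therefore require an `alloca`. A minimal usage sketch (assuming a
/// `FunctionCx` already set up by this crate's codegen driver):
///
/// ```ignore
/// let non_ssa = non_ssa_locals(&fx);
/// if non_ssa.contains(local) {
///     // codegen must allocate stack memory for `local`
/// }
/// ```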
pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
) -> BitSet<mir::Local> {
    let mir = fx.mir;
    let mut analyzer = LocalAnalyzer::new(fx);

    analyzer.visit_body(&mir);

    for (local, decl) in mir.local_decls.iter_enumerated() {
        let ty = fx.monomorphize(&decl.ty);
        debug!("local {:?} has type `{}`", local, ty);
        let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
        if fx.cx.is_backend_immediate(layout) {
            // These sorts of types are immediates that we can store
            // in a `Value` without an alloca.
        } else if fx.cx.is_backend_scalar_pair(layout) {
            // We allow pairs and uses of either of their two fields.
        } else {
            // These sorts of types require an alloca. Note that
            // `is_backend_immediate()` may *still* be true, particularly
            // for newtypes, but we currently force some types
            // (e.g., structs) into an alloca unconditionally, just so
            // that we don't have to deal with having two pathways
            // (gep vs extractvalue etc).
            analyzer.not_ssa(local);
        }
    }

    analyzer.non_ssa_locals
}

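/// Visits every use of every local, demoting a local to memory as soon as a
/// use is seen that an SSA value cannot support (multiple assignments, having
/// its address taken, non-operand projections, etc.).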
struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    dominators: Dominators<mir::BasicBlock>,
    non_ssa_locals: BitSet<mir::Local>,
    // The location of the first visited direct assignment to each
    // local, or an invalid location (out of bounds `block` index).
    first_assignment: IndexVec<mir::Local, Location>,
}

impl<Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
    fn new(fx: &'mir FunctionCx<'a, 'tcx, Bx>) -> Self {
        let invalid_location = mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
        let dominators = fx.mir.dominators();
        let mut analyzer = LocalAnalyzer {
            fx,
            dominators,
            non_ssa_locals: BitSet::new_empty(fx.mir.local_decls.len()),
            first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls),
        };

        // Arguments are assigned to by means of the function being called,
        // so treat them as assigned at the very start of the entry block.
        for arg in fx.mir.args_iter() {
            analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
        }

        analyzer
    }
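    /// Returns the location of the first visited assignment to `local`, or
    /// `None` if the sentinel (out-of-bounds) location is still in place.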
    fn first_assignment(&self, local: mir::Local) -> Option<Location> {
        let location = self.first_assignment[local];
        if location.block.index() < self.fx.mir.basic_blocks().len() {
            Some(location)
        } else {
            None
        }
    }

    fn not_ssa(&mut self, local: mir::Local) {
        debug!("marking {:?} as non-SSA", local);
        self.non_ssa_locals.insert(local);
    }

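    /// Records `location` as the first assignment to `local`. A second
    /// direct assignment means the local cannot be a single SSA value, so
    /// it is marked non-SSA instead.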
    fn assign(&mut self, local: mir::Local, location: Location) {
        if self.first_assignment(local).is_some() {
            self.not_ssa(local);
        } else {
            self.first_assignment[local] = location;
        }
    }

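    /// Peels the outermost projection element off `place_ref` and recurses on
    /// the shorter base, adjusting the `PlaceContext` along the way; the base
    /// local is ultimately visited with the resulting context.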
    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        let cx = self.fx.cx;

        if let [proj_base @ .., elem] = place_ref.projection {
            let mut base_context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = match context {
                PlaceContext::NonMutatingUse(
                    NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
                ) => true,
                _ => false,
            };
            if is_consume {
                let base_ty =
                    mir::Place::ty_from(place_ref.local, proj_base, self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(&base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty.projection_ty(cx.tcx(), elem).ty;
                let elem_ty = self.fx.monomorphize(&elem_ty);
                let span = self.fx.mir.local_decls[place_ref.local].source_info.span;
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        base_context = context;
                    }
                }
            }

            if let mir::ProjectionElem::Deref = elem {
                // Deref projections typically only read the pointer
                // (the exception being `VarDebugInfo` contexts, handled below).
                base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);

                // Indirect debuginfo requires going through memory that only
                // the debugger accesses, following our emitted DWARF pointer ops.
                //
                // FIXME(eddyb) Investigate the possibility of relaxing this, but
                // note that `llvm.dbg.declare` *must* be used for indirect places,
                // even if we start using `llvm.dbg.value` for all other cases,
                // as we don't necessarily know when the value changes, but only
                // where it lives in memory.
                //
                // It's possible `llvm.dbg.declare` could support starting from
                // a pointer that doesn't point to an `alloca`, but this would
                // only be useful if we know the pointer being `Deref`'d comes
                // from an immutable place; and if `llvm.dbg.declare` calls
                // must be at the very start of the function, then only function
                // arguments could contain such pointers.
                if context == PlaceContext::NonUse(NonUseContext::VarDebugInfo) {
                    // We use `NonUseContext::VarDebugInfo` for the base,
                    // which might not force the base local to memory,
                    // so we have to do it manually.
                    self.visit_local(&place_ref.local, context, location);
                }
            }

            // `NonUseContext::VarDebugInfo` needs to flow all the
            // way down to the base local (see `visit_local`).
            if context == PlaceContext::NonUse(NonUseContext::VarDebugInfo) {
                base_context = context;
            }

            self.process_place(
                &mir::PlaceRef { local: place_ref.local, projection: proj_base },
                base_context,
                location,
            );
            // HACK(eddyb) this emulates the old `visit_projection_elem`; this
            // entire `visit_place`-like `process_place` method should be rewritten
            // now that we have moved to the "slice of projections" representation.
            if let mir::ProjectionElem::Index(local) = elem {
                self.visit_local(
                    local,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location,
                );
            }
        } else {
            // FIXME: this duplicates `super_place`'s logic, to avoid cloning
            // the place or changing the `visit_place` API.
            let mut context = context;

            if !place_ref.projection.is_empty() {
                context = if context.is_mutating_use() {
                    PlaceContext::MutatingUse(MutatingUseContext::Projection)
                } else {
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
                };
            }

            self.visit_local(&place_ref.local, context, location);
            self.visit_projection(place_ref.local, place_ref.projection, context, location);
        }
    }
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx>
{
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let Some(index) = place.as_local() {
            self.assign(index, location);
            let decl_span = self.fx.mir.local_decls[index].source_info.span;
            if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                self.not_ssa(index);
            }
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_terminator_kind(&mut self, kind: &mir::TerminatorKind<'tcx>, location: Location) {
        let check = match *kind {
            mir::TerminatorKind::Call { func: mir::Operand::Constant(ref c), ref args, .. } => {
                match c.literal.ty.kind {
                    ty::FnDef(did, _) => Some((did, args)),
                    _ => None,
                }
            }
            _ => None,
        };
        if let Some((def_id, args)) = check {
            if Some(def_id) == self.fx.cx.tcx().lang_items().box_free_fn() {
                // `box_free(x)` shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of `x`, so `x` must always be in an alloca.
                if let mir::Operand::Move(ref place) = args[0] {
                    self.visit_place(
                        place,
                        PlaceContext::MutatingUse(MutatingUseContext::Drop),
                        location,
                    );
                }
            }
        }

        self.super_terminator_kind(kind, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

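    /// The heart of the analysis: classifies each use of `local`, either
    /// recording an assignment, ignoring the use, checking that a read is
    /// dominated by the first assignment, or demoting the local to memory.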
    fn visit_local(&mut self, &local: &mir::Local, context: PlaceContext, location: Location) {
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                self.assign(local, location);
            }

            PlaceContext::NonUse(_) | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
            ) => {
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
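                // An illustrative sketch (not real MIR syntax):
                //     bb0: switchInt(cond) -> [false: bb2, otherwise: bb1]
                //     bb1: _3 = ...; goto -> bb3
                //     bb2: goto -> bb3
                //     bb3: ... use of _3 ...
                // The read in bb3 is not dominated by the assignment in bb1,
                // so `_3` cannot stay in SSA form.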
                let ssa_read = match self.first_assignment(local) {
                    Some(assignment_location) => {
                        assignment_location.dominates(location, &self.dominators)
                    }
                    None => false,
                };
                if !ssa_read {
                    self.not_ssa(local);
                }
            }

            PlaceContext::MutatingUse(
                MutatingUseContext::Store
                | MutatingUseContext::AsmOutput
                | MutatingUseContext::Borrow
                | MutatingUseContext::AddressOf
                | MutatingUseContext::Projection,
            )
            | PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Inspect
                | NonMutatingUseContext::SharedBorrow
                | NonMutatingUseContext::UniqueBorrow
                | NonMutatingUseContext::ShallowBorrow
                | NonMutatingUseContext::AddressOf
                | NonMutatingUseContext::Projection,
            ) => {
                self.not_ssa(local);
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let ty = self.fx.mir.local_decls[local].ty;
                let ty = self.fx.monomorphize(&ty);

                // Only need the place if we're actually dropping it.
                if self.fx.cx.type_needs_drop(ty) {
                    self.not_ssa(local);
                }
            }
        }
    }
}
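/// Classifies each basic block's role in unwinding, for backends (e.g.
/// MSVC-style exception handling) that require cleanup code to be grouped
/// into "funclets" entered only through landing-pad edges.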
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    NotCleanup,
    Funclet,
    Internal { funclet: mir::BasicBlock },
}

impl CleanupKind {
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

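/// Computes the `CleanupKind` of every block in two passes: `discover_masters`
/// marks every block targeted by an unwind edge as a `Funclet`, and
/// `propagate` then flows funclet membership forward along successor edges,
/// promoting any block reachable from two different funclets into a funclet
/// of its own.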
pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
    fn discover_masters<'tcx>(
        result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. }
                | TerminatorKind::Resume
                | TerminatorKind::Abort
                | TerminatorKind::Return
                | TerminatorKind::GeneratorDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdges { .. }
                | TerminatorKind::FalseUnwind { .. } => { /* nothing to do */ }
                TerminatorKind::Call { cleanup: unwind, .. }
                | TerminatorKind::Assert { cleanup: unwind, .. }
                | TerminatorKind::DropAndReplace { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>, mir: &mir::Body<'tcx>) {
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for &succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has two different funclets going into it,
                            // so it must be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}
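
// An illustrative sketch of the two passes above (not real MIR syntax):
//     bb0: call f() -> [return: bb1, unwind: bb2]
//     bb2 (cleanup): drop(_1) -> [return: bb3, unwind: ...]
//     bb3 (cleanup): resume
// `discover_masters` marks bb2 as `Funclet` (it is an unwind target), and
// `propagate` then marks bb3 as `Internal { funclet: bb2 }`, since it is
// only reachable from within bb2's funclet.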