]> git.lizzy.rs Git - rust.git/blob - src/librustc_codegen_ssa/mir/analyze.rs
Rollup merge of #62324 - Centril:reduce-await-macro-reliance, r=cramertj
[rust.git] / src / librustc_codegen_ssa / mir / analyze.rs
1 //! An analysis to determine which locals require allocas and
2 //! which do not.
3
4 use rustc_data_structures::bit_set::BitSet;
5 use rustc_data_structures::graph::dominators::Dominators;
6 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
7 use rustc::mir::{self, Location, TerminatorKind};
8 use rustc::mir::visit::{Visitor, PlaceContext, MutatingUseContext, NonMutatingUseContext};
9 use rustc::mir::traversal;
10 use rustc::ty;
11 use rustc::ty::layout::{LayoutOf, HasTyCtxt};
12 use super::FunctionCx;
13 use crate::traits::*;
14
15 pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
16     fx: &FunctionCx<'a, 'tcx, Bx>,
17 ) -> BitSet<mir::Local> {
18     let mir = fx.mir;
19     let mut analyzer = LocalAnalyzer::new(fx);
20
21     analyzer.visit_body(mir);
22
23     for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
24         let ty = fx.monomorphize(&ty);
25         debug!("local {} has type {:?}", index, ty);
26         let layout = fx.cx.layout_of(ty);
27         if fx.cx.is_backend_immediate(layout) {
28             // These sorts of types are immediates that we can store
29             // in an Value without an alloca.
30         } else if fx.cx.is_backend_scalar_pair(layout) {
31             // We allow pairs and uses of any of their 2 fields.
32         } else {
33             // These sorts of types require an alloca. Note that
34             // is_llvm_immediate() may *still* be true, particularly
35             // for newtypes, but we currently force some types
36             // (e.g., structs) into an alloca unconditionally, just so
37             // that we don't have to deal with having two pathways
38             // (gep vs extractvalue etc).
39             analyzer.not_ssa(mir::Local::new(index));
40         }
41     }
42
43     analyzer.non_ssa_locals
44 }
45
/// MIR visitor that decides, per local, whether it can stay an SSA
/// value or must be spilled to an alloca (recorded in `non_ssa_locals`).
struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    /// Codegen context of the function being analyzed.
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    /// Dominator tree of the body, used to check that reads are
    /// dominated by the recorded first assignment.
    dominators: Dominators<mir::BasicBlock>,
    /// Locals determined to require an alloca.
    non_ssa_locals: BitSet<mir::Local>,
    // The location of the first visited direct assignment to each
    // local, or an invalid location (out of bounds `block` index).
    first_assignment: IndexVec<mir::Local, Location>,
}
54
55 impl<Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
56     fn new(fx: &'mir FunctionCx<'a, 'tcx, Bx>) -> Self {
57         let invalid_location =
58             mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
59         let mut analyzer = LocalAnalyzer {
60             fx,
61             dominators: fx.mir.dominators(),
62             non_ssa_locals: BitSet::new_empty(fx.mir.local_decls.len()),
63             first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls)
64         };
65
66         // Arguments get assigned to by means of the function being called
67         for arg in fx.mir.args_iter() {
68             analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
69         }
70
71         analyzer
72     }
73
74     fn first_assignment(&self, local: mir::Local) -> Option<Location> {
75         let location = self.first_assignment[local];
76         if location.block.index() < self.fx.mir.basic_blocks().len() {
77             Some(location)
78         } else {
79             None
80         }
81     }
82
83     fn not_ssa(&mut self, local: mir::Local) {
84         debug!("marking {:?} as non-SSA", local);
85         self.non_ssa_locals.insert(local);
86     }
87
88     fn assign(&mut self, local: mir::Local, location: Location) {
89         if self.first_assignment(local).is_some() {
90             self.not_ssa(local);
91         } else {
92             self.first_assignment[local] = location;
93         }
94     }
95 }
96
impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx>
{
    /// Direct assignments to a local are recorded via `assign`; the
    /// local is forced into memory when the rvalue cannot be produced
    /// as a backend operand. Assignments through projections are
    /// treated as stores into the place.
    fn visit_assign(&mut self,
                    place: &mir::Place<'tcx>,
                    rvalue: &mir::Rvalue<'tcx>,
                    location: Location) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let mir::Place::Base(mir::PlaceBase::Local(index)) = *place {
            self.assign(index, location);
            if !self.fx.rvalue_creates_operand(rvalue) {
                self.not_ssa(index);
            }
        } else {
            self.visit_place(
                place,
                PlaceContext::MutatingUse(MutatingUseContext::Store),
                location
            );
        }

        // The rvalue's own operands still need to be visited normally.
        self.visit_rvalue(rvalue, location);
    }

    /// Special-cases calls to the `box_free` lang item (see below);
    /// everything else defers to the default traversal.
    fn visit_terminator_kind(&mut self,
                             kind: &mir::TerminatorKind<'tcx>,
                             location: Location) {
        // Extract (callee DefId, args) for direct calls to a known fn.
        let check = match *kind {
            mir::TerminatorKind::Call {
                func: mir::Operand::Constant(ref c),
                ref args, ..
            } => match c.ty.sty {
                ty::FnDef(did, _) => Some((did, args)),
                _ => None,
            },
            _ => None,
        };
        if let Some((def_id, args)) = check {
            if Some(def_id) == self.fx.cx.tcx().lang_items().box_free_fn() {
                // box_free(x) shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of x, so x must always be in an alloca.
                if let mir::Operand::Move(ref place) = args[0] {
                    self.visit_place(
                        place,
                        PlaceContext::MutatingUse(MutatingUseContext::Drop),
                        location
                    );
                }
            }
        }

        self.super_terminator_kind(kind, location);
    }

    /// Decides whether a projection forces its base local into memory.
    /// Some consuming uses of projections (ZSTs, scalar fields, derefs)
    /// can be handled without an alloca, so they recurse with a context
    /// that does not mark the base as non-SSA.
    fn visit_place(&mut self,
                   place: &mir::Place<'tcx>,
                   context: PlaceContext,
                   location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        let cx = self.fx.cx;

        if let mir::Place::Projection(ref proj) = *place {
            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = match context {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => true,
                _ => false
            };
            if is_consume {
                let base_ty = proj.base.ty(self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(&base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty
                    .projection_ty(cx.tcx(), &proj.elem)
                    .ty;
                let elem_ty = self.fx.monomorphize(&elem_ty);
                if cx.layout_of(elem_ty).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = proj.elem {
                    let layout = cx.layout_of(base_ty.ty);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        self.visit_place(&proj.base, context, location);
                        return;
                    }
                }
            }

            // A deref projection only reads the pointer, never needs the place.
            if let mir::ProjectionElem::Deref = proj.elem {
                return self.visit_place(
                    &proj.base,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location
                );
            }
        }

        self.super_place(place, context, location);
    }

    /// Classifies each use of a local: call-return destinations count
    /// as assignments; reads must be dominated by the first assignment;
    /// address-taking and projection uses force an alloca.
    fn visit_local(&mut self,
                   &local: &mir::Local,
                   context: PlaceContext,
                   location: Location) {
        match context {
            // The local receives the return value of a call — an
            // assignment for SSA-tracking purposes.
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                self.assign(local, location);
            }

            // Non-uses and retags don't affect SSA-ness.
            PlaceContext::NonUse(_) |
            PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => {
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                let ssa_read = match self.first_assignment(local) {
                    Some(assignment_location) => {
                        assignment_location.dominates(location, &self.dominators)
                    }
                    None => false
                };
                if !ssa_read {
                    self.not_ssa(local);
                }
            }

            // Any use that inspects, stores through, borrows, or
            // projects out of the local requires it to have an address,
            // i.e. live in an alloca.
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect) |
            PlaceContext::MutatingUse(MutatingUseContext::Store) |
            PlaceContext::MutatingUse(MutatingUseContext::AsmOutput) |
            PlaceContext::MutatingUse(MutatingUseContext::Borrow) |
            PlaceContext::MutatingUse(MutatingUseContext::Projection) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => {
                self.not_ssa(local);
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let ty = self.fx.mir.local_decls[local].ty;
                let ty = self.fx.monomorphize(&ty);

                // Only need the place if we're actually dropping it.
                if self.fx.cx.type_needs_drop(ty) {
                    self.not_ssa(local);
                }
            }
        }
    }
}
258
/// Classification of a basic block with respect to cleanup (unwind)
/// codegen, as computed by `cleanup_kinds`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    /// Not part of any cleanup path.
    NotCleanup,
    /// Head of a cleanup region (a direct unwind target, or a block
    /// promoted because multiple funclets flow into it).
    Funclet,
    /// Interior block of the cleanup region headed by `funclet`.
    Internal { funclet: mir::BasicBlock }
}
265
266 impl CleanupKind {
267     pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
268         match self {
269             CleanupKind::NotCleanup => None,
270             CleanupKind::Funclet => Some(for_bb),
271             CleanupKind::Internal { funclet } => Some(funclet),
272         }
273     }
274 }
275
/// Computes, for every basic block, its `CleanupKind`: first the direct
/// unwind targets of `Call`/`Assert`/`Drop`/`DropAndReplace` are marked
/// as funclet heads, then membership is propagated forward through the
/// cleanup blocks' successors, promoting blocks reached from two
/// different funclets into funclet heads of their own.
pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
    // Phase 1: mark every block that is the unwind target of some
    // terminator as the head ("master") of a funclet.
    fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                              mir: &mir::Body<'tcx>) {
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. } |
                TerminatorKind::Resume |
                TerminatorKind::Abort |
                TerminatorKind::Return |
                TerminatorKind::GeneratorDrop |
                TerminatorKind::Unreachable |
                TerminatorKind::SwitchInt { .. } |
                TerminatorKind::Yield { .. } |
                TerminatorKind::FalseEdges { .. } |
                TerminatorKind::FalseUnwind { .. } => {
                    /* nothing to do */
                }
                TerminatorKind::Call { cleanup: unwind, .. } |
                TerminatorKind::Assert { cleanup: unwind, .. } |
                TerminatorKind::DropAndReplace { unwind, .. } |
                TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!("cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                               bb, data, unwind);
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

    // Phase 2: walk cleanup blocks in reverse postorder, tagging their
    // successors as `Internal` members of the current funclet. A block
    // reached from two distinct funclets is promoted to a funclet head
    // itself, and both predecessors record it as their successor.
    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                       mir: &mir::Body<'tcx>) {
        // Per-funclet unique successor funclet; a second, different
        // successor is a compiler bug (`span_bug!` below).
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| {
            match funclet_succs[funclet] {
                ref mut s @ None => {
                    debug!("set_successor: updating successor of {:?} to {:?}",
                           funclet, succ);
                    *s = Some(succ);
                },
                Some(s) => if s != succ {
                    span_bug!(mir.span, "funclet {:?} has 2 parents - {:?} and {:?}",
                              funclet, s, succ);
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            // Skip non-cleanup blocks; otherwise find which funclet
            // this block belongs to.
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!("cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                   bb, data, result[bb], funclet);

            for &succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}",
                       funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclet going into it, so it must
                            // be a funclet by itself.

                            debug!("promoting {:?} to a funclet and updating {:?}", succ,
                                   succ_funclet);
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}