// compiler/rustc_codegen_ssa/src/mir/analyze.rs (rust.git @ a5aa170deced9972cdcd739848c06e110cafb0b5)
//! An analysis to determine which locals require allocas and
//! which do not.

use super::FunctionCx;
use crate::traits::*;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::traversal;
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::{self, Location, TerminatorKind};
use rustc_middle::ty;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_target::abi::LayoutOf;

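/// Returns the set of locals that require an alloca, i.e. that cannot be
/// represented as SSA values by the backend.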
pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
) -> BitSet<mir::Local> {
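    // Two passes: the `LocalAnalyzer` visitor first marks locals non-SSA based
    // on how they are used; afterwards, any local whose type the backend cannot
    // hold as an immediate or scalar pair is forced into an alloca as well.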
    let mir = fx.mir;
    let mut analyzer = LocalAnalyzer::new(fx);

    for (bb, data) in mir.basic_blocks().iter_enumerated() {
        analyzer.visit_basic_block_data(bb, data);
    }

    for (local, decl) in mir.local_decls.iter_enumerated() {
        let ty = fx.monomorphize(decl.ty);
        debug!("local {:?} has type `{}`", local, ty);
        let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
        if fx.cx.is_backend_immediate(layout) {
            // These sorts of types are immediates that we can store
            // in a `Value` without an alloca.
        } else if fx.cx.is_backend_scalar_pair(layout) {
            // We allow pairs and uses of either of their two fields.
        } else {
            // These sorts of types require an alloca. Note that
            // `is_backend_immediate()` may *still* be true, particularly
            // for newtypes, but we currently force some types
            // (e.g., structs) into an alloca unconditionally, just so
            // that we don't have to deal with having two pathways
            // (gep vs extractvalue etc).
            analyzer.not_ssa(local);
        }
    }

    analyzer.non_ssa_locals
}

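/// MIR visitor that flags a local as non-SSA whenever it is used in a way the
/// backend cannot express on an SSA value (borrows, address-of, in-memory
/// projections, multiple or non-dominating assignments, and so on).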
struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    dominators: Dominators<mir::BasicBlock>,
    non_ssa_locals: BitSet<mir::Local>,
    // The location of the first visited direct assignment to each
    // local, or an invalid location (out of bounds `block` index).
    first_assignment: IndexVec<mir::Local, Location>,
}

impl<Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
    fn new(fx: &'mir FunctionCx<'a, 'tcx, Bx>) -> Self {
        let invalid_location = mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
        let dominators = fx.mir.dominators();
        let mut analyzer = LocalAnalyzer {
            fx,
            dominators,
            non_ssa_locals: BitSet::new_empty(fx.mir.local_decls.len()),
            first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls),
        };

        // Arguments get assigned to by means of the function being called
        for arg in fx.mir.args_iter() {
            analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
        }

        analyzer
    }

    fn first_assignment(&self, local: mir::Local) -> Option<Location> {
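        // The `first_assignment` map uses an out-of-bounds block index as the
        // "not yet assigned" sentinel, so an in-bounds block means a first
        // assignment has already been recorded for this local.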
        let location = self.first_assignment[local];
        if location.block.index() < self.fx.mir.basic_blocks().len() {
            Some(location)
        } else {
            None
        }
    }

    fn not_ssa(&mut self, local: mir::Local) {
        debug!("marking {:?} as non-SSA", local);
        self.non_ssa_locals.insert(local);
    }

    fn assign(&mut self, local: mir::Local, location: Location) {
        if self.first_assignment(local).is_some() {
            self.not_ssa(local);
        } else {
            self.first_assignment[local] = location;
        }
    }

    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
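        // Peel off the last projection element (if any) and recurse on the base
        // place with a context that reflects how the base is actually used.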
        let cx = self.fx.cx;

        if let Some((place_base, elem)) = place_ref.last_projection() {
            let mut base_context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = matches!(
                context,
                PlaceContext::NonMutatingUse(
                    NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
                )
            );
            if is_consume {
                let base_ty = place_base.ty(self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty.projection_ty(cx.tcx(), self.fx.monomorphize(elem)).ty;
                let span = self.fx.mir.local_decls[place_ref.local].source_info.span;
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        base_context = context;
                    }
                }
            }

            if let mir::ProjectionElem::Deref = elem {
                // Deref projections typically only read the pointer.
                base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);
            }

            self.process_place(&place_base, base_context, location);
            // HACK(eddyb) this emulates the old `visit_projection_elem`; this
            // entire `visit_place`-like `process_place` method should be rewritten
            // now that we have moved to the "slice of projections" representation.
            if let mir::ProjectionElem::Index(local) = elem {
                self.visit_local(
                    &local,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location,
                );
            }
        } else {
            // FIXME: this duplicates `super_place` code; it is repeated here to avoid
            // cloning the place or changing the `visit_place` API.
            let mut context = context;

            if !place_ref.projection.is_empty() {
                context = if context.is_mutating_use() {
                    PlaceContext::MutatingUse(MutatingUseContext::Projection)
                } else {
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
                };
            }

            self.visit_local(&place_ref.local, context, location);
            self.visit_projection(*place_ref, context, location);
        }
    }
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx>
{
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let Some(index) = place.as_local() {
            self.assign(index, location);
            let decl_span = self.fx.mir.local_decls[index].source_info.span;
            if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                self.not_ssa(index);
            }
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
        let check = match terminator.kind {
            mir::TerminatorKind::Call { func: mir::Operand::Constant(ref c), ref args, .. } => {
                match *c.ty().kind() {
                    ty::FnDef(did, _) => Some((did, args)),
                    _ => None,
                }
            }
            _ => None,
        };
        if let Some((def_id, args)) = check {
            if Some(def_id) == self.fx.cx.tcx().lang_items().box_free_fn() {
                // box_free(x) shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of x, so x must always be in an alloca.
                if let mir::Operand::Move(ref place) = args[0] {
                    self.visit_place(
                        place,
                        PlaceContext::MutatingUse(MutatingUseContext::Drop),
                        location,
                    );
                }
            }
        }

        self.super_terminator(terminator, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    fn visit_local(&mut self, &local: &mir::Local, context: PlaceContext, location: Location) {
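        // Classify this use of `local` by its `PlaceContext` and decide whether
        // the local can stay in SSA form.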
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call)
            | PlaceContext::MutatingUse(MutatingUseContext::Yield) => {
                self.assign(local, location);
            }

            PlaceContext::NonUse(_) | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
            ) => {
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                let ssa_read = match self.first_assignment(local) {
                    Some(assignment_location) => {
                        assignment_location.dominates(location, &self.dominators)
                    }
                    None => {
                        debug!("No first assignment found for {:?}", local);
                        // We have not seen any assignment to the local yet,
                        // but before marking not_ssa, check if it is a ZST,
                        // in which case we don't need to initialize the local.
                        let ty = self.fx.mir.local_decls[local].ty;
                        let ty = self.fx.monomorphize(ty);

                        let is_zst = self.fx.cx.layout_of(ty).is_zst();
                        debug!("is_zst: {}", is_zst);
                        is_zst
                    }
                };
                if !ssa_read {
                    self.not_ssa(local);
                }
            }

            PlaceContext::MutatingUse(
                MutatingUseContext::Store
                | MutatingUseContext::AsmOutput
                | MutatingUseContext::Borrow
                | MutatingUseContext::AddressOf
                | MutatingUseContext::Projection,
            )
            | PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Inspect
                | NonMutatingUseContext::SharedBorrow
                | NonMutatingUseContext::UniqueBorrow
                | NonMutatingUseContext::ShallowBorrow
                | NonMutatingUseContext::AddressOf
                | NonMutatingUseContext::Projection,
            ) => {
                self.not_ssa(local);
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let ty = self.fx.mir.local_decls[local].ty;
                let ty = self.fx.monomorphize(ty);

                // Only need the place if we're actually dropping it.
                if self.fx.cx.type_needs_drop(ty) {
                    self.not_ssa(local);
                }
            }
        }
    }
}

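/// The cleanup role of a basic block: not part of cleanup at all, the root
/// block of a cleanup funclet, or internal to the funclet rooted at `funclet`.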
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    NotCleanup,
    Funclet,
    Internal { funclet: mir::BasicBlock },
}

impl CleanupKind {
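    /// Returns the block whose funclet `for_bb` belongs to: `for_bb` itself if
    /// it is a funclet root, the root block if it is internal to one, and
    /// `None` if it is not part of any cleanup.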
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
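    // Two passes over the CFG: `discover_masters` marks the target of every
    // unwind edge as a funclet root, and `propagate` then assigns the blocks
    // reachable from each root to that root's funclet.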
    fn discover_masters<'tcx>(
        result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
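        // Pass 1: every block named as a `cleanup`/`unwind` target by some
        // terminator becomes a funclet root.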
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. }
                | TerminatorKind::Resume
                | TerminatorKind::Abort
                | TerminatorKind::Return
                | TerminatorKind::GeneratorDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. }
                | TerminatorKind::InlineAsm { .. } => { /* nothing to do */ }
                TerminatorKind::Call { cleanup: unwind, .. }
                | TerminatorKind::Assert { cleanup: unwind, .. }
                | TerminatorKind::DropAndReplace { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>, mir: &mir::Body<'tcx>) {
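        // Pass 2: walk the CFG in reverse postorder and mark every block
        // reachable from a funclet root as `Internal` to that root; a block
        // reachable from two different funclets is promoted to a funclet root
        // of its own.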
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for &succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}