]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_codegen_ssa/src/mir/analyze.rs
Auto merge of #107443 - cjgillot:generator-less-query, r=compiler-errors
[rust.git] / compiler / rustc_codegen_ssa / src / mir / analyze.rs
1 //! An analysis to determine which locals require allocas and
2 //! which do not.
3
4 use super::FunctionCx;
5 use crate::traits::*;
6 use rustc_data_structures::graph::dominators::Dominators;
7 use rustc_index::bit_set::BitSet;
8 use rustc_index::vec::IndexVec;
9 use rustc_middle::mir::traversal;
10 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
11 use rustc_middle::mir::{self, Location, TerminatorKind};
12 use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
13
14 pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
15     fx: &FunctionCx<'a, 'tcx, Bx>,
16 ) -> BitSet<mir::Local> {
17     let mir = fx.mir;
18     let dominators = mir.basic_blocks.dominators();
19     let locals = mir
20         .local_decls
21         .iter()
22         .map(|decl| {
23             let ty = fx.monomorphize(decl.ty);
24             let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
25             if layout.is_zst() {
26                 LocalKind::ZST
27             } else if fx.cx.is_backend_immediate(layout) || fx.cx.is_backend_scalar_pair(layout) {
28                 LocalKind::Unused
29             } else {
30                 LocalKind::Memory
31             }
32         })
33         .collect();
34
35     let mut analyzer = LocalAnalyzer { fx, dominators, locals };
36
37     // Arguments get assigned to by means of the function being called
38     for arg in mir.args_iter() {
39         analyzer.assign(arg, DefLocation::Argument);
40     }
41
42     // If there exists a local definition that dominates all uses of that local,
43     // the definition should be visited first. Traverse blocks in an order that
44     // is a topological sort of dominance partial order.
45     for (bb, data) in traversal::reverse_postorder(&mir) {
46         analyzer.visit_basic_block_data(bb, data);
47     }
48
49     let mut non_ssa_locals = BitSet::new_empty(analyzer.locals.len());
50     for (local, kind) in analyzer.locals.iter_enumerated() {
51         if matches!(kind, LocalKind::Memory) {
52             non_ssa_locals.insert(local);
53         }
54     }
55
56     non_ssa_locals
57 }
58
/// Classification of a MIR local during the SSA analysis.
#[derive(Copy, Clone, PartialEq, Eq)]
enum LocalKind {
    /// A zero-sized local; needs no storage and no tracking.
    ZST,
    /// A local that requires an alloca.
    Memory,
    /// A scalar or a scalar pair local that is neither defined nor used.
    Unused,
    /// A scalar or a scalar pair local with a single definition that dominates all uses.
    SSA(DefLocation),
}
69
/// Where a local's single definition occurs.
#[derive(Copy, Clone, PartialEq, Eq)]
enum DefLocation {
    /// Defined by the caller at function entry (function argument).
    Argument,
    /// Defined by a statement or terminator at this location in the body.
    Body(Location),
}
75
76 impl DefLocation {
77     fn dominates(self, location: Location, dominators: &Dominators<mir::BasicBlock>) -> bool {
78         match self {
79             DefLocation::Argument => true,
80             DefLocation::Body(def) => def.successor_within_block().dominates(location, dominators),
81         }
82     }
83 }
84
/// MIR visitor that refines the per-local [`LocalKind`] classification by
/// observing every definition and use in the body.
struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    // The function being analyzed.
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    // Dominator tree of the body's CFG, used to check that a definition
    // dominates all uses.
    dominators: Dominators<mir::BasicBlock>,
    // Current classification of every local; mutated as defs/uses are seen.
    locals: IndexVec<mir::Local, LocalKind>,
}
90
impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
    /// Records a definition of `local`.
    ///
    /// An SSA candidate (`Unused`) becomes `SSA` on its first definition; a
    /// second definition demotes it to `Memory`, since an SSA local must have
    /// exactly one definition. ZST and already-`Memory` locals are unaffected.
    fn assign(&mut self, local: mir::Local, location: DefLocation) {
        let kind = &mut self.locals[local];
        match *kind {
            LocalKind::ZST => {}
            LocalKind::Memory => {}
            LocalKind::Unused => *kind = LocalKind::SSA(location),
            LocalKind::SSA(_) => *kind = LocalKind::Memory,
        }
    }

    /// Attributes a use of `place_ref` to its base local, peeling one
    /// projection at a time and recursing with an adjusted `context` until the
    /// bare local is reached and handed to `visit_local`.
    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        let cx = self.fx.cx;

        if let Some((place_base, elem)) = place_ref.last_projection() {
            // By default, projecting out of a local counts as a `Projection`
            // use, which forces the local into memory (see `visit_local`).
            let mut base_context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = matches!(
                context,
                PlaceContext::NonMutatingUse(
                    NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
                )
            );
            if is_consume {
                let base_ty = place_base.ty(self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty.projection_ty(cx.tcx(), self.fx.monomorphize(elem)).ty;
                let span = self.fx.mir.local_decls[place_ref.local].source_info.span;
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        base_context = context;
                    }
                }
            }

            // NOTE: this deliberately overrides the `Field` special case above
            // when the last element is a `Deref`.
            if let mir::ProjectionElem::Deref = elem {
                // Deref projections typically only read the pointer.
                base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);
            }

            self.process_place(&place_base, base_context, location);
            // HACK(eddyb) this emulates the old `visit_projection_elem`, this
            // entire `visit_place`-like `process_place` method should be rewritten,
            // now that we have moved to the "slice of projections" representation.
            if let mir::ProjectionElem::Index(local) = elem {
                // The index local itself is read (copied) by the projection.
                self.visit_local(
                    local,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location,
                );
            }
        } else {
            // No projections: the use applies directly to the local.
            self.visit_local(place_ref.local, context, location);
        }
    }
}
167
impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx>
{
    /// Handles `place = rvalue`. A whole-local assignment is a definition of
    /// that local; any other destination is a (projected) mutating use.
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let Some(local) = place.as_local() {
            self.assign(local, DefLocation::Body(location));
            // Even a single-definition local must go to memory if the rvalue
            // cannot be built as an operand (e.g. needs a memory destination).
            if self.locals[local] != LocalKind::Memory {
                let decl_span = self.fx.mir.local_decls[local].source_info.span;
                if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                    self.locals[local] = LocalKind::Memory;
                }
            }
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    /// Classifies a direct use of `local` according to `context`.
    fn visit_local(&mut self, local: mir::Local, context: PlaceContext, location: Location) {
        match context {
            // Call/yield destinations are definitions of the local.
            PlaceContext::MutatingUse(MutatingUseContext::Call)
            | PlaceContext::MutatingUse(MutatingUseContext::Yield) => {
                self.assign(local, DefLocation::Body(location));
            }

            // Non-uses and retags don't affect the classification.
            PlaceContext::NonUse(_) | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            // Plain reads: fine for SSA as long as the definition dominates
            // this use.
            PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
            ) => match &mut self.locals[local] {
                LocalKind::ZST => {}
                LocalKind::Memory => {}
                LocalKind::SSA(def) if def.dominates(location, &self.dominators) => {}
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                kind @ (LocalKind::Unused | LocalKind::SSA(_)) => {
                    *kind = LocalKind::Memory;
                }
            },

            // Anything that needs the local's address, mutates it in place, or
            // projects through it forces the local into memory.
            PlaceContext::MutatingUse(
                MutatingUseContext::Store
                | MutatingUseContext::Deinit
                | MutatingUseContext::SetDiscriminant
                | MutatingUseContext::AsmOutput
                | MutatingUseContext::Borrow
                | MutatingUseContext::AddressOf
                | MutatingUseContext::Projection,
            )
            | PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Inspect
                | NonMutatingUseContext::SharedBorrow
                | NonMutatingUseContext::UniqueBorrow
                | NonMutatingUseContext::ShallowBorrow
                | NonMutatingUseContext::AddressOf
                | NonMutatingUseContext::Projection,
            ) => {
                self.locals[local] = LocalKind::Memory;
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let kind = &mut self.locals[local];
                if *kind != LocalKind::Memory {
                    let ty = self.fx.mir.local_decls[local].ty;
                    let ty = self.fx.monomorphize(ty);
                    if self.fx.cx.type_needs_drop(ty) {
                        // Only need the place if we're actually dropping it.
                        *kind = LocalKind::Memory;
                    }
                }
            }
        }
    }
}
257
/// Role of a basic block with respect to MSVC-style cleanup funclets.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    /// Not part of any cleanup (ordinary control flow).
    NotCleanup,
    /// Head of a funclet: the target of at least one unwind edge.
    Funclet,
    /// Interior block belonging to the funclet headed by `funclet`.
    Internal { funclet: mir::BasicBlock },
}
264
265 impl CleanupKind {
266     pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
267         match self {
268             CleanupKind::NotCleanup => None,
269             CleanupKind::Funclet => Some(for_bb),
270             CleanupKind::Internal { funclet } => Some(funclet),
271         }
272     }
273 }
274
/// MSVC requires unwinding code to be split to a tree of *funclets*, where each funclet can only
/// branch to itself or to its parent. Luckily, the code we generate matches this pattern.
/// Recover that structure in an analysis pass.
pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
    // Pass 1: every target of an unwind edge is the head of a funclet.
    fn discover_masters<'tcx>(
        result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        for (bb, data) in mir.basic_blocks.iter_enumerated() {
            match data.terminator().kind {
                // Terminators with no unwind edge: nothing to register.
                TerminatorKind::Goto { .. }
                | TerminatorKind::Resume
                | TerminatorKind::Abort
                | TerminatorKind::Return
                | TerminatorKind::GeneratorDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. } => { /* nothing to do */ }
                TerminatorKind::Call { cleanup: unwind, .. }
                | TerminatorKind::InlineAsm { cleanup: unwind, .. }
                | TerminatorKind::Assert { cleanup: unwind, .. }
                | TerminatorKind::DropAndReplace { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

    // Pass 2: mark blocks reachable from a funclet head as `Internal` to it;
    // blocks reachable from two different funclets are promoted to funclets
    // of their own (a funclet may only branch to itself or its parent).
    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>, mir: &mir::Body<'tcx>) {
        // For each funclet, the single successor funclet it branches to
        // (its "parent" in the funclet tree), once known.
        let mut funclet_succs = IndexVec::from_elem(None, &mir.basic_blocks);

        // Record `succ` as the successor of `funclet`; a funclet can have at
        // most one, so a conflicting second successor is a compiler bug.
        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        // First funclet to reach `succ` claims it.
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        // Branch into another funclet: that funclet becomes
                        // this one's successor (self-loops are fine).
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclet going into it, so it must
                            // be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, &mir.basic_blocks);

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}