//! An analysis to determine which locals require allocas and
//! which do not.

use super::FunctionCx;
use crate::traits::*;
use rustc::mir::traversal;
use rustc::mir::visit::{
    MutatingUseContext, NonMutatingUseContext, NonUseContext, PlaceContext, Visitor,
};
use rustc::mir::{self, Location, TerminatorKind};
use rustc::session::config::DebugInfo;
use rustc::ty;
use rustc::ty::layout::{HasTyCtxt, LayoutOf};
use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};

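/// Returns the set of locals that cannot be lowered as SSA values and must
/// instead live in an `alloca`. A minimal sketch of the distinction
/// (illustrative only, not a doctest):
///
/// ```ignore (illustrative)
/// fn f(c: bool) -> i32 {
///     let x: i32 = 1;        // scalar, assigned once, reads dominated: SSA
///     let mut y = [0i32; 4]; // aggregate: unconditionally non-SSA (alloca)
///     y[0] = x;
///     if c { y[1] } else { y[2] }
/// }
/// ```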
pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
) -> BitSet<mir::Local> {
    let mir = fx.mir;
    let mut analyzer = LocalAnalyzer::new(fx);

    analyzer.visit_body(mir);

    for (local, decl) in mir.local_decls.iter_enumerated() {
        // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
        // of putting everything in allocas just so we can use llvm.dbg.declare.
        if fx.cx.sess().opts.debuginfo == DebugInfo::Full {
            if fx.mir.local_kind(local) == mir::LocalKind::Arg {
                analyzer.not_ssa(local);
                continue;
            }
        }

        let ty = fx.monomorphize(&decl.ty);
        debug!("local {:?} has type `{}`", local, ty);
        let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
        if fx.cx.is_backend_immediate(layout) {
            // These sorts of types are immediates that we can store
            // in a `Value` without an alloca.
        } else if fx.cx.is_backend_scalar_pair(layout) {
            // We allow pairs and uses of either of their two fields.
        } else {
            // These sorts of types require an alloca. Note that
            // is_llvm_immediate() may *still* be true, particularly
            // for newtypes, but we currently force some types
            // (e.g., structs) into an alloca unconditionally, just so
            // that we don't have to deal with having two pathways
            // (gep vs extractvalue etc).
            analyzer.not_ssa(local);
        }
    }

    analyzer.non_ssa_locals
}

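/// The visitor that computes the non-SSA set: it walks the body once,
/// recording the first direct assignment to each local and marking a local
/// as non-SSA as soon as it sees a use that cannot be handled as an SSA
/// value (a second assignment, a borrow, a read not dominated by the first
/// assignment, etc.).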
struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    dominators: Dominators<mir::BasicBlock>,
    non_ssa_locals: BitSet<mir::Local>,
    // The location of the first visited direct assignment to each
    // local, or an invalid location (out of bounds `block` index).
    first_assignment: IndexVec<mir::Local, Location>,
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
    fn new(fx: &'mir FunctionCx<'a, 'tcx, Bx>) -> Self {
        let invalid_location = mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
        let dominators = fx.mir.dominators();
        let mut analyzer = LocalAnalyzer {
            fx,
            dominators,
            non_ssa_locals: BitSet::new_empty(fx.mir.local_decls.len()),
            first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls),
        };

        // Arguments get assigned to by means of the function being called
        for arg in fx.mir.args_iter() {
            analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
        }

        analyzer
    }

    fn first_assignment(&self, local: mir::Local) -> Option<Location> {
        let location = self.first_assignment[local];
        if location.block.index() < self.fx.mir.basic_blocks().len() {
            Some(location)
        } else {
            None
        }
    }

    fn not_ssa(&mut self, local: mir::Local) {
        debug!("marking {:?} as non-SSA", local);
        self.non_ssa_locals.insert(local);
    }

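    /// Records `location` as the first assignment to `local`; seeing a
    /// second assignment disqualifies the local from being an SSA value.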
    fn assign(&mut self, local: mir::Local, location: Location) {
        if self.first_assignment(local).is_some() {
            self.not_ssa(local);
        } else {
            self.first_assignment[local] = location;
        }
    }

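    /// Walks a place by peeling projections off the end one at a time,
    /// choosing which `PlaceContext` to apply to the remaining base, so
    /// that e.g. a direct consume of a scalar field does not force the
    /// whole base local into memory.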
    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'_, 'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        let cx = self.fx.cx;

        if let [proj_base @ .., elem] = place_ref.projection {
            let mut base_context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = match context {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy)
                | PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => true,
                _ => false,
            };
            if is_consume {
                let base_ty =
                    mir::Place::ty_from(place_ref.local, proj_base, *self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(&base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty.projection_ty(cx.tcx(), elem).ty;
                let elem_ty = self.fx.monomorphize(&elem_ty);
                let span = self.fx.mir.local_decls[*place_ref.local].source_info.span;
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        base_context = context;
                    }
                }
            }

            if let mir::ProjectionElem::Deref = elem {
                // Deref projections typically only read the pointer.
                // (the exception being `VarDebugInfo` contexts, handled below)
                base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);

                // Indirect debuginfo requires going through memory that only
                // the debugger accesses, following our emitted DWARF pointer ops.
                //
                // FIXME(eddyb) Investigate the possibility of relaxing this, but
                // note that `llvm.dbg.declare` *must* be used for indirect places,
                // even if we start using `llvm.dbg.value` for all other cases,
                // as we don't necessarily know when the value changes, but only
                // where it lives in memory.
                //
                // It's possible `llvm.dbg.declare` could support starting from
                // a pointer that doesn't point to an `alloca`, but this would
                // only be useful if we know the pointer being `Deref`'d comes
                // from an immutable place, and if `llvm.dbg.declare` calls
                // must be at the very start of the function, then only function
                // arguments could contain such pointers.
                if context == PlaceContext::NonUse(NonUseContext::VarDebugInfo) {
                    // We use `NonUseContext::VarDebugInfo` for the base,
                    // which might not force the base local to memory,
                    // so we have to do it manually.
                    self.visit_local(place_ref.local, context, location);
                }
            }

            // `NonUseContext::VarDebugInfo` needs to flow all the
            // way down to the base local (see `visit_local`).
            if context == PlaceContext::NonUse(NonUseContext::VarDebugInfo) {
                base_context = context;
            }

            self.process_place(
                &mir::PlaceRef { local: place_ref.local, projection: proj_base },
                base_context,
                location,
            );
            // HACK(eddyb) this emulates the old `visit_projection_elem`; this
            // entire `visit_place`-like `process_place` method should be rewritten
            // now that we have moved to the "slice of projections" representation.
            if let mir::ProjectionElem::Index(local) = elem {
                self.visit_local(
                    local,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location,
                );
            }
        } else {
            // FIXME: this duplicates `super_place`'s code, to avoid cloning the
            // place or changing the `visit_place` API.
            let mut context = context;

            if !place_ref.projection.is_empty() {
                context = if context.is_mutating_use() {
                    PlaceContext::MutatingUse(MutatingUseContext::Projection)
                } else {
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
                };
            }

            self.visit_place_base(place_ref.local, context, location);
            self.visit_projection(place_ref.local, place_ref.projection, context, location);
        }
    }
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx>
{
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let Some(index) = place.as_local() {
            self.assign(index, location);
            let decl_span = self.fx.mir.local_decls[index].source_info.span;
            if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                self.not_ssa(index);
            }
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_terminator_kind(&mut self, kind: &mir::TerminatorKind<'tcx>, location: Location) {
        let check = match *kind {
            mir::TerminatorKind::Call { func: mir::Operand::Constant(ref c), ref args, .. } => {
                match c.literal.ty.kind {
                    ty::FnDef(did, _) => Some((did, args)),
                    _ => None,
                }
            }
            _ => None,
        };
        if let Some((def_id, args)) = check {
            if Some(def_id) == self.fx.cx.tcx().lang_items().box_free_fn() {
                // `box_free(x)` shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of `x`, so `x` must always be in an alloca.
                if let mir::Operand::Move(ref place) = args[0] {
                    self.visit_place(
                        place,
                        PlaceContext::MutatingUse(MutatingUseContext::Drop),
                        location,
                    );
                }
            }
        }

        self.super_terminator_kind(kind, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    fn visit_local(&mut self, &local: &mir::Local, context: PlaceContext, location: Location) {
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                self.assign(local, location);
            }

            PlaceContext::NonUse(NonUseContext::VarDebugInfo) => {
                // We need to keep locals in `alloca`s for debuginfo.
                // FIXME(eddyb): We should figure out how to use `llvm.dbg.value` instead
                // of putting everything in allocas just so we can use `llvm.dbg.declare`.
                if self.fx.cx.sess().opts.debuginfo == DebugInfo::Full {
                    self.not_ssa(local);
                }
            }

            PlaceContext::NonUse(_) | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => {
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                let ssa_read = match self.first_assignment(local) {
                    Some(assignment_location) => {
                        assignment_location.dominates(location, &self.dominators)
                    }
                    None => false,
                };
                if !ssa_read {
                    self.not_ssa(local);
                }
            }

            PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect)
            | PlaceContext::MutatingUse(MutatingUseContext::Store)
            | PlaceContext::MutatingUse(MutatingUseContext::AsmOutput)
            | PlaceContext::MutatingUse(MutatingUseContext::Borrow)
            | PlaceContext::MutatingUse(MutatingUseContext::AddressOf)
            | PlaceContext::MutatingUse(MutatingUseContext::Projection)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => {
                self.not_ssa(local);
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let ty = self.fx.mir.local_decls[local].ty;
                let ty = self.fx.monomorphize(&ty);

                // Only need the place if we're actually dropping it.
                if self.fx.cx.type_needs_drop(ty) {
                    self.not_ssa(local);
                }
            }
        }
    }
}

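/// How a basic block relates to cleanup code, used when lowering to
/// funclet-based exception handling (e.g. MSVC SEH).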
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    /// The block is not part of any cleanup path.
    NotCleanup,
    /// The block is the entry of its own funclet.
    Funclet,
    /// The block belongs to the funclet whose entry block is `funclet`.
    Internal { funclet: mir::BasicBlock },
}

impl CleanupKind {
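    /// Returns the entry block of the funclet that `for_bb` belongs to, or
    /// `None` if it is not a cleanup block.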
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

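/// Computes a `CleanupKind` for every block in `mir`: unwind targets are
/// first registered as funclet entries, then the classification is
/// propagated to their successors, promoting a block to its own funclet
/// when it is reachable from two different funclets.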
pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
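    // Pass 1: every block that some terminator can unwind to becomes a
    // funclet entry.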
    fn discover_masters<'tcx>(
        result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. }
                | TerminatorKind::Resume
                | TerminatorKind::Abort
                | TerminatorKind::Return
                | TerminatorKind::GeneratorDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdges { .. }
                | TerminatorKind::FalseUnwind { .. } => { /* nothing to do */ }
                TerminatorKind::Call { cleanup: unwind, .. }
                | TerminatorKind::Assert { cleanup: unwind, .. }
                | TerminatorKind::DropAndReplace { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

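    // Pass 2: propagate funclet membership forward along CFG edges, in
    // reverse postorder so that (back edges aside) predecessors are
    // visited first.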
    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>, mir: &mir::Body<'tcx>) {
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for &succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has two different funclets going into it,
                            // so it must be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}