// src/librustc_codegen_ssa/mir/analyze.rs
//! An analysis to determine which locals require allocas and
//! which do not.
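//!
//! Roughly speaking (illustrative, not the precise rules implemented below):
//! a local that is assigned exactly once, read only at points dominated by
//! that assignment, and whose layout the backend can treat as an immediate or
//! scalar pair can stay as an SSA value; a local that is borrowed, assigned on
//! multiple paths, or has an aggregate layout is marked non-SSA and gets an
//! alloca.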

use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::mir::{self, Location, TerminatorKind};
use rustc::mir::visit::{Visitor, PlaceContext, MutatingUseContext, NonMutatingUseContext};
use rustc::mir::traversal;
use rustc::ty;
use rustc::ty::layout::{LayoutOf, HasTyCtxt};
use syntax_pos::DUMMY_SP;
use super::FunctionCx;
use crate::traits::*;

pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
) -> BitSet<mir::Local> {
    let mir = fx.mir;
    let mut analyzer = LocalAnalyzer::new(fx);

    analyzer.visit_body(mir);

    for (index, (ty, span)) in mir.local_decls.iter()
        .map(|l| (l.ty, l.source_info.span))
        .enumerate()
    {
        let ty = fx.monomorphize(&ty);
        debug!("local {} has type {:?}", index, ty);
        let layout = fx.cx.spanned_layout_of(ty, span);
        if fx.cx.is_backend_immediate(layout) {
            // These sorts of types are immediates that we can store
            // in a Value without an alloca.
        } else if fx.cx.is_backend_scalar_pair(layout) {
            // We allow pairs and uses of any of their two fields.
        } else {
            // These sorts of types require an alloca. Note that
            // is_llvm_immediate() may *still* be true, particularly
            // for newtypes, but we currently force some types
            // (e.g., structs) into an alloca unconditionally, just so
            // that we don't have to deal with having two pathways
            // (gep vs extractvalue etc).
            analyzer.not_ssa(mir::Local::new(index));
        }
    }

    analyzer.non_ssa_locals
}
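
// As a rough guide to the classification above (illustrative, not exhaustive):
// scalars such as `i32`, `bool`, and thin pointers are usually backend
// immediates; wide pointers like `&[T]` or `&dyn Trait` and other two-scalar
// layouts are usually scalar pairs; larger aggregates (most structs and
// arrays) fall through to the alloca case.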

struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    dominators: Dominators<mir::BasicBlock>,
    non_ssa_locals: BitSet<mir::Local>,
    // The location of the first visited direct assignment to each
    // local, or an invalid location (out of bounds `block` index).
    first_assignment: IndexVec<mir::Local, Location>,
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
    fn new(fx: &'mir FunctionCx<'a, 'tcx, Bx>) -> Self {
        let invalid_location =
            mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
        let mut analyzer = LocalAnalyzer {
            fx,
            dominators: fx.mir.dominators(),
            non_ssa_locals: BitSet::new_empty(fx.mir.local_decls.len()),
            first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls)
        };

        // Arguments are assigned by the act of the function being called, so
        // record their first assignment as the start of the entry block.
        for arg in fx.mir.args_iter() {
            analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
        }

        analyzer
    }

    fn first_assignment(&self, local: mir::Local) -> Option<Location> {
        let location = self.first_assignment[local];
        if location.block.index() < self.fx.mir.basic_blocks().len() {
            Some(location)
        } else {
            None
        }
    }

    fn not_ssa(&mut self, local: mir::Local) {
        debug!("marking {:?} as non-SSA", local);
        self.non_ssa_locals.insert(local);
    }

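    // A second direct assignment demotes the local: e.g. (illustrative) in
    // `let mut x; if c { x = 1 } else { x = 2 }` the second store seen marks
    // `x` as non-SSA. Arguments are pre-seeded with an assignment at
    // `START_BLOCK` (see `new` above), so any explicit write to an argument
    // demotes it as well.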
    fn assign(&mut self, local: mir::Local, location: Location) {
        if self.first_assignment(local).is_some() {
            self.not_ssa(local);
        } else {
            self.first_assignment[local] = location;
        }
    }

    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'_, 'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        let cx = self.fx.cx;

        if let [.., elem] = place_ref.projection {
            // FIXME(spastorino) include this in the pattern when stabilized
            let proj_base = &place_ref.projection[..place_ref.projection.len() - 1];

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = match context {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => true,
                _ => false
            };
            if is_consume {
                let base_ty =
                    mir::Place::ty_from(place_ref.base, proj_base, self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(&base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty
                    .projection_ty(cx.tcx(), elem)
                    .ty;
                let elem_ty = self.fx.monomorphize(&elem_ty);
                let span = if let mir::PlaceBase::Local(index) = place_ref.base {
                    self.fx.mir.local_decls[*index].source_info.span
                } else {
                    DUMMY_SP
                };
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        self.process_place(
                            &mir::PlaceRef {
                                base: place_ref.base,
                                projection: proj_base,
                            },
                            context,
                            location,
                        );
                        return;
                    }
                }
            }

            // A deref projection only reads the pointer; it never needs the place itself.
            if let mir::ProjectionElem::Deref = elem {
                self.process_place(
                    &mir::PlaceRef {
                        base: place_ref.base,
                        projection: proj_base,
                    },
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location
                );
                return;
            }
        }

        // FIXME: this duplicates `super_place` to avoid cloning the place or
        // changing the `visit_place` API.
        let mut context = context;

        if !place_ref.projection.is_empty() {
            context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };
        }

        self.visit_place_base(place_ref.base, context, location);
        self.visit_projection(place_ref.base, place_ref.projection, context, location);
    }

}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx>
{
    fn visit_assign(&mut self,
                    place: &mir::Place<'tcx>,
                    rvalue: &mir::Rvalue<'tcx>,
                    location: Location) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let mir::Place {
            base: mir::PlaceBase::Local(index),
            projection: box [],
        } = *place {
            self.assign(index, location);
            let decl_span = self.fx.mir.local_decls[index].source_info.span;
            if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                self.not_ssa(index);
            }
        } else {
            self.visit_place(
                place,
                PlaceContext::MutatingUse(MutatingUseContext::Store),
                location
            );
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_terminator_kind(&mut self,
                             kind: &mir::TerminatorKind<'tcx>,
                             location: Location) {
        let check = match *kind {
            mir::TerminatorKind::Call {
                func: mir::Operand::Constant(ref c),
                ref args, ..
            } => match c.literal.ty.sty {
                ty::FnDef(did, _) => Some((did, args)),
                _ => None,
            },
            _ => None,
        };
        if let Some((def_id, args)) = check {
            if Some(def_id) == self.fx.cx.tcx().lang_items().box_free_fn() {
                // `box_free(x)` shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of `x`, so `x` must always be in an alloca.
                if let mir::Operand::Move(ref place) = args[0] {
                    self.visit_place(
                        place,
                        PlaceContext::MutatingUse(MutatingUseContext::Drop),
                        location
                    );
                }
            }
        }

        self.super_terminator_kind(kind, location);
    }

    fn visit_place(&mut self,
                   place: &mir::Place<'tcx>,
                   context: PlaceContext,
                   location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    fn visit_local(&mut self,
                   &local: &mir::Local,
                   context: PlaceContext,
                   location: Location) {
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                self.assign(local, location);
            }

            PlaceContext::NonUse(_) |
            PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => {
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
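                // For example (illustrative): if the first assignment happens
                // in only one arm of an `if` and the local is read after the
                // arms join, the assigning block does not dominate the read,
                // so the read is not an SSA read and the local is demoted.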
                let ssa_read = match self.first_assignment(local) {
                    Some(assignment_location) => {
                        assignment_location.dominates(location, &self.dominators)
                    }
                    None => false
                };
                if !ssa_read {
                    self.not_ssa(local);
                }
            }

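            // The remaining direct uses (inspection, raw stores, asm outputs,
            // borrows of any kind, and appearing as the base of a projection)
            // are handled conservatively: all of them force the local into an
            // alloca.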
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect) |
            PlaceContext::MutatingUse(MutatingUseContext::Store) |
            PlaceContext::MutatingUse(MutatingUseContext::AsmOutput) |
            PlaceContext::MutatingUse(MutatingUseContext::Borrow) |
            PlaceContext::MutatingUse(MutatingUseContext::Projection) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => {
                self.not_ssa(local);
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let ty = self.fx.mir.local_decls[local].ty;
                let ty = self.fx.monomorphize(&ty);

                // Only need the place if we're actually dropping it.
                if self.fx.cx.type_needs_drop(ty) {
                    self.not_ssa(local);
                }
            }
        }
    }
}

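/// Classifies a basic block for funclet-based unwinding codegen. Roughly:
/// `NotCleanup` means the block is not on a cleanup (unwind) path, `Funclet`
/// means the cleanup block starts its own funclet, and `Internal { funclet }`
/// means the cleanup block is lowered as part of the funclet rooted at
/// `funclet`. See `cleanup_kinds` below for the exact rules.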
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    NotCleanup,
    Funclet,
    Internal { funclet: mir::BasicBlock }
}

impl CleanupKind {
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

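// Computes the `CleanupKind` of every block in two passes (a summary of the
// code below): `discover_masters` marks each target of an unwind edge (from
// `Call`, `Assert`, `DropAndReplace`, or `Drop`) as a `Funclet`; `propagate`
// then walks the CFG in reverse postorder, marking blocks reachable from a
// funclet as `Internal { funclet }` and promoting a block to its own funclet
// when two different funclets flow into it.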
pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
    fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                              mir: &mir::Body<'tcx>) {
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. } |
                TerminatorKind::Resume |
                TerminatorKind::Abort |
                TerminatorKind::Return |
                TerminatorKind::GeneratorDrop |
                TerminatorKind::Unreachable |
                TerminatorKind::SwitchInt { .. } |
                TerminatorKind::Yield { .. } |
                TerminatorKind::FalseEdges { .. } |
                TerminatorKind::FalseUnwind { .. } => {
                    /* nothing to do */
                }
                TerminatorKind::Call { cleanup: unwind, .. } |
                TerminatorKind::Assert { cleanup: unwind, .. } |
                TerminatorKind::DropAndReplace { unwind, .. } |
                TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!("cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                               bb, data, unwind);
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                       mir: &mir::Body<'tcx>) {
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| {
            match funclet_succs[funclet] {
                ref mut s @ None => {
                    debug!("set_successor: updating successor of {:?} to {:?}",
                           funclet, succ);
                    *s = Some(succ);
                },
                Some(s) => if s != succ {
                    span_bug!(mir.span, "funclet {:?} has 2 parents - {:?} and {:?}",
                              funclet, s, succ);
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!("cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                   bb, data, result[bb], funclet);

            for &succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}",
                       funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has two different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!("promoting {:?} to a funclet and updating {:?}", succ,
                                   succ_funclet);
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}