src/librustc_codegen_llvm/mir/analyze.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! An analysis to determine which locals require allocas and
//! which do not.

use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::mir::{self, Location, TerminatorKind};
use rustc::mir::visit::{Visitor, PlaceContext, MutatingUseContext, NonMutatingUseContext};
use rustc::mir::traversal;
use rustc::ty;
use rustc::ty::layout::LayoutOf;
use type_of::LayoutLlvmExt;
use super::FunctionCx;
use value::Value;
use interfaces::*;

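/// Returns the set of locals that cannot be kept in SSA form and therefore
/// need an alloca: everything the `LocalAnalyzer` flags while walking the MIR,
/// plus every local whose layout is neither an immediate nor a scalar pair.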
pub fn non_ssa_locals(fx: &FunctionCx<'a, 'll, 'tcx, &'ll Value>) -> BitSet<mir::Local> {
    let mir = fx.mir;
    let mut analyzer = LocalAnalyzer::new(fx);

    analyzer.visit_mir(mir);

    for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
        let ty = fx.monomorphize(&ty);
        debug!("local {} has type {:?}", index, ty);
        let layout = fx.cx.layout_of(ty);
        if layout.is_llvm_immediate() {
            // These sorts of types are immediates that we can store
            // in a `Value` without an alloca.
        } else if layout.is_llvm_scalar_pair() {
            // We allow pairs and uses of either of their two fields.
        } else {
            // These sorts of types require an alloca. Note that
            // is_llvm_immediate() may *still* be true, particularly
            // for newtypes, but we currently force some types
            // (e.g. structs) into an alloca unconditionally, just so
            // that we don't have to deal with having two pathways
            // (gep vs extractvalue etc).
            analyzer.not_ssa(mir::Local::new(index));
        }
    }

    analyzer.non_ssa_locals
}

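/// MIR visitor that records, for every local, the first direct assignment it
/// sees and whether any use of the local forces it out of SSA form.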
struct LocalAnalyzer<'mir, 'a: 'mir, 'll: 'a, 'tcx: 'll, V: 'll> {
    fx: &'mir FunctionCx<'a, 'll, 'tcx, V>,
    dominators: Dominators<mir::BasicBlock>,
    non_ssa_locals: BitSet<mir::Local>,
    // The location of the first visited direct assignment to each
    // local, or an invalid location (out of bounds `block` index).
    first_assignment: IndexVec<mir::Local, Location>
}

impl LocalAnalyzer<'mir, 'a, 'll, 'tcx, &'ll Value> {
    fn new(fx: &'mir FunctionCx<'a, 'll, 'tcx, &'ll Value>) -> Self {
        let invalid_location =
            mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
        let mut analyzer = LocalAnalyzer {
            fx,
            dominators: fx.mir.dominators(),
            non_ssa_locals: BitSet::new_empty(fx.mir.local_decls.len()),
            first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls)
        };

        // Arguments get assigned to by means of the function being called
        for arg in fx.mir.args_iter() {
            analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
        }

        analyzer
    }

    fn first_assignment(&self, local: mir::Local) -> Option<Location> {
        let location = self.first_assignment[local];
        if location.block.index() < self.fx.mir.basic_blocks().len() {
            Some(location)
        } else {
            None
        }
    }

    fn not_ssa(&mut self, local: mir::Local) {
        debug!("marking {:?} as non-SSA", local);
        self.non_ssa_locals.insert(local);
    }

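    // Record `location` as the first assignment to `local`; any further
    // assignment means the local cannot be a single SSA value.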
    fn assign(&mut self, local: mir::Local, location: Location) {
        if self.first_assignment(local).is_some() {
            self.not_ssa(local);
        } else {
            self.first_assignment[local] = location;
        }
    }
}

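// The visitor walks the MIR, recording assignments and flagging locals whose
// uses require them to live in memory.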
impl Visitor<'tcx> for LocalAnalyzer<'mir, 'a, 'll, 'tcx, &'ll Value> {
    fn visit_assign(&mut self,
                    block: mir::BasicBlock,
                    place: &mir::Place<'tcx>,
                    rvalue: &mir::Rvalue<'tcx>,
                    location: Location) {
        debug!("visit_assign(block={:?}, place={:?}, rvalue={:?})", block, place, rvalue);

        if let mir::Place::Local(index) = *place {
            self.assign(index, location);
            if !self.fx.rvalue_creates_operand(rvalue) {
                self.not_ssa(index);
            }
        } else {
            self.visit_place(
                place,
                PlaceContext::MutatingUse(MutatingUseContext::Store),
                location
            );
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_terminator_kind(&mut self,
                             block: mir::BasicBlock,
                             kind: &mir::TerminatorKind<'tcx>,
                             location: Location) {
        let check = match *kind {
            mir::TerminatorKind::Call {
                func: mir::Operand::Constant(ref c),
                ref args, ..
            } => match c.ty.sty {
                ty::FnDef(did, _) => Some((did, args)),
                _ => None,
            },
            _ => None,
        };
        if let Some((def_id, args)) = check {
            if Some(def_id) == self.fx.cx.tcx.lang_items().box_free_fn() {
                // box_free(x) shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of x, so x must always be in an alloca.
                if let mir::Operand::Move(ref place) = args[0] {
                    self.visit_place(
                        place,
                        PlaceContext::MutatingUse(MutatingUseContext::Drop),
                        location
                    );
                }
            }
        }

        self.super_terminator_kind(block, kind, location);
    }

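    // Consuming uses of projections can often be handled without spilling the
    // base local: reads of ZSTs need no memory at all, and field accesses into
    // immediate or scalar-pair layouts can be extracted from the SSA value.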
    fn visit_place(&mut self,
                   place: &mir::Place<'tcx>,
                   context: PlaceContext<'tcx>,
                   location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        let cx = self.fx.cx;

        if let mir::Place::Projection(ref proj) = *place {
            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = match context {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => true,
                _ => false
            };
            if is_consume {
                let base_ty = proj.base.ty(self.fx.mir, cx.tcx);
                let base_ty = self.fx.monomorphize(&base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty
                    .projection_ty(cx.tcx, &proj.elem)
                    .to_ty(cx.tcx);
                let elem_ty = self.fx.monomorphize(&elem_ty);
                if cx.layout_of(elem_ty).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = proj.elem {
                    let layout = cx.layout_of(base_ty.to_ty(cx.tcx));
                    if layout.is_llvm_immediate() || layout.is_llvm_scalar_pair() {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        self.visit_place(&proj.base, context, location);
                        return;
                    }
                }
            }

            // A deref projection only reads the pointer, never needs the place.
            if let mir::ProjectionElem::Deref = proj.elem {
                return self.visit_place(
                    &proj.base,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location
                );
            }
        }

        self.super_place(place, context, location);
    }

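    // Classify each use of a local: copies and moves dominated by the first
    // assignment keep the local in SSA form, while stores, borrows, inspects,
    // projections, and drops of types that need dropping force it into memory.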
    fn visit_local(&mut self,
                   &local: &mir::Local,
                   context: PlaceContext<'tcx>,
                   location: Location) {
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                self.assign(local, location);
            }

            PlaceContext::NonUse(_) |
            PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => {
                // Reads from uninitialized variables (e.g. in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // NB: there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                let ssa_read = match self.first_assignment(local) {
                    Some(assignment_location) => {
                        assignment_location.dominates(location, &self.dominators)
                    }
                    None => false
                };
                if !ssa_read {
                    self.not_ssa(local);
                }
            }

            PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect) |
            PlaceContext::MutatingUse(MutatingUseContext::Store) |
            PlaceContext::MutatingUse(MutatingUseContext::AsmOutput) |
            PlaceContext::MutatingUse(MutatingUseContext::Borrow(..)) |
            PlaceContext::MutatingUse(MutatingUseContext::Projection) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow(..)) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow(..)) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow(..)) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => {
                self.not_ssa(local);
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let ty = mir::Place::Local(local).ty(self.fx.mir, self.fx.cx.tcx);
                let ty = self.fx.monomorphize(&ty.to_ty(self.fx.cx.tcx));

                // Only need the place if we're actually dropping it.
                if self.fx.cx.type_needs_drop(ty) {
                    self.not_ssa(local);
                }
            }
        }
    }
}

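/// How a basic block participates in unwinding when funclets are used: it is
/// either not a cleanup block at all, the entry block of a funclet, or an
/// internal block belonging to the funclet rooted at `funclet`.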
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    NotCleanup,
    Funclet,
    Internal { funclet: mir::BasicBlock }
}

impl CleanupKind {
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

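/// Computes a `CleanupKind` for every basic block: the targets of unwind edges
/// become funclet entry points, and the cleanup blocks reachable from them are
/// assigned to the funclet they belong to.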
pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
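    // First pass: mark the immediate target of every unwind edge as the entry
    // block ("master") of a funclet.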
    fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                              mir: &mir::Mir<'tcx>) {
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. } |
                TerminatorKind::Resume |
                TerminatorKind::Abort |
                TerminatorKind::Return |
                TerminatorKind::GeneratorDrop |
                TerminatorKind::Unreachable |
                TerminatorKind::SwitchInt { .. } |
                TerminatorKind::Yield { .. } |
                TerminatorKind::FalseEdges { .. } |
                TerminatorKind::FalseUnwind { .. } => {
                    /* nothing to do */
                }
                TerminatorKind::Call { cleanup: unwind, .. } |
                TerminatorKind::Assert { cleanup: unwind, .. } |
                TerminatorKind::DropAndReplace { unwind, .. } |
                TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!("cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                               bb, data, unwind);
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

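    // Second pass: propagate funclet membership along control flow in reverse
    // postorder. Successors of a funclet block become `Internal` to it; a block
    // reached from two different funclets is promoted to a funclet of its own.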
    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                       mir: &mir::Mir<'tcx>) {
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| {
            match funclet_succs[funclet] {
                ref mut s @ None => {
                    debug!("set_successor: updating successor of {:?} to {:?}",
                           funclet, succ);
                    *s = Some(succ);
                },
                Some(s) => if s != succ {
                    span_bug!(mir.span, "funclet {:?} has 2 parents - {:?} and {:?}",
                              funclet, s, succ);
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!("cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                   bb, data, result[bb], funclet);

            for &succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}",
                       funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!("promoting {:?} to a funclet and updating {:?}", succ,
                                   succ_funclet);
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}