// src/librustc_codegen_ssa/mir/analyze.rs
//! An analysis to determine which locals require allocas and
//! which do not.

use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::mir::{self, Location, TerminatorKind};
use rustc::mir::visit::{Visitor, PlaceContext, MutatingUseContext, NonMutatingUseContext};
use rustc::mir::traversal;
use rustc::ty;
use rustc::ty::layout::{LayoutOf, HasTyCtxt};
use super::FunctionCx;
use crate::traits::*;

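/// Returns the set of locals that have to live in memory (behind an alloca):
/// every local flagged by the `LocalAnalyzer` visitor, plus every local whose
/// layout the backend cannot represent as an immediate or a scalar pair.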
pub fn non_ssa_locals<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>
) -> BitSet<mir::Local> {
    let mir = fx.mir;
    let mut analyzer = LocalAnalyzer::new(fx);

    analyzer.visit_body(mir);

    for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
        let ty = fx.monomorphize(&ty);
        debug!("local {} has type {:?}", index, ty);
        let layout = fx.cx.layout_of(ty);
        if fx.cx.is_backend_immediate(layout) {
            // These sorts of types are immediates that we can store
            // in a `Value` without an alloca.
        } else if fx.cx.is_backend_scalar_pair(layout) {
            // We allow pairs and uses of any of their 2 fields.
        } else {
            // These sorts of types require an alloca. Note that
            // `is_backend_immediate()` may *still* be true, particularly
            // for newtypes, but we currently force some types
            // (e.g., structs) into an alloca unconditionally, just so
            // that we don't have to deal with having two pathways
            // (gep vs extractvalue etc).
            analyzer.not_ssa(mir::Local::new(index));
        }
    }

    analyzer.non_ssa_locals
}

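/// A MIR visitor that collects the locals which cannot be kept as SSA values,
/// recording the first direct assignment to each local so that later reads
/// can be checked for dominance by that assignment.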
struct LocalAnalyzer<'mir, 'a: 'mir, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> {
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    dominators: Dominators<mir::BasicBlock>,
    non_ssa_locals: BitSet<mir::Local>,
    // The location of the first visited direct assignment to each
    // local, or an invalid location (out of bounds `block` index).
    first_assignment: IndexVec<mir::Local, Location>
}

impl<'mir, 'a: 'mir, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
    fn new(fx: &'mir FunctionCx<'a, 'tcx, Bx>) -> Self {
        let invalid_location =
            mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
        let mut analyzer = LocalAnalyzer {
            fx,
            dominators: fx.mir.dominators(),
            non_ssa_locals: BitSet::new_empty(fx.mir.local_decls.len()),
            first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls)
        };

        // Arguments get assigned to by means of the function being called
        for arg in fx.mir.args_iter() {
            analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
        }

        analyzer
    }

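    // The first recorded assignment to `local`, or `None` if only the
    // out-of-bounds sentinel location is stored for it.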
    fn first_assignment(&self, local: mir::Local) -> Option<Location> {
        let location = self.first_assignment[local];
        if location.block.index() < self.fx.mir.basic_blocks().len() {
            Some(location)
        } else {
            None
        }
    }

    fn not_ssa(&mut self, local: mir::Local) {
        debug!("marking {:?} as non-SSA", local);
        self.non_ssa_locals.insert(local);
    }

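    // Record the first direct assignment to `local`; seeing a second
    // assignment demotes the local to non-SSA.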
    fn assign(&mut self, local: mir::Local, location: Location) {
        if self.first_assignment(local).is_some() {
            self.not_ssa(local);
        } else {
            self.first_assignment[local] = location;
        }
    }
}

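// The visitor below drives the analysis: every assignment, terminator, place
// and local use is classified, and anything incompatible with SSA form is
// recorded via `not_ssa`.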
impl<'mir, 'a: 'mir, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
    fn visit_assign(&mut self,
                    place: &mir::Place<'tcx>,
                    rvalue: &mir::Rvalue<'tcx>,
                    location: Location) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let mir::Place::Base(mir::PlaceBase::Local(index)) = *place {
            self.assign(index, location);
            if !self.fx.rvalue_creates_operand(rvalue) {
                self.not_ssa(index);
            }
        } else {
            self.visit_place(
                place,
                PlaceContext::MutatingUse(MutatingUseContext::Store),
                location
            );
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_terminator_kind(&mut self,
                             kind: &mir::TerminatorKind<'tcx>,
                             location: Location) {
        let check = match *kind {
            mir::TerminatorKind::Call {
                func: mir::Operand::Constant(ref c),
                ref args, ..
            } => match c.ty.sty {
                ty::FnDef(did, _) => Some((did, args)),
                _ => None,
            },
            _ => None,
        };
        if let Some((def_id, args)) = check {
            if Some(def_id) == self.fx.cx.tcx().lang_items().box_free_fn() {
                // box_free(x) shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of x, so x must always be in an alloca.
                if let mir::Operand::Move(ref place) = args[0] {
                    self.visit_place(
                        place,
                        PlaceContext::MutatingUse(MutatingUseContext::Drop),
                        location
                    );
                }
            }
        }

        self.super_terminator_kind(kind, location);
    }

    fn visit_place(&mut self,
                   place: &mir::Place<'tcx>,
                   context: PlaceContext,
                   location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        let cx = self.fx.cx;

        if let mir::Place::Projection(ref proj) = *place {
            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = match context {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => true,
                _ => false
            };
            if is_consume {
                let base_ty = proj.base.ty(self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(&base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty
                    .projection_ty(cx.tcx(), &proj.elem)
                    .ty;
                let elem_ty = self.fx.monomorphize(&elem_ty);
                if cx.layout_of(elem_ty).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = proj.elem {
                    let layout = cx.layout_of(base_ty.ty);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        self.visit_place(&proj.base, context, location);
                        return;
                    }
                }
            }

            // A deref projection only reads the pointer, never needs the place.
            if let mir::ProjectionElem::Deref = proj.elem {
                return self.visit_place(
                    &proj.base,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location
                );
            }
        }

        self.super_place(place, context, location);
    }

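    // Classify each direct use of a local by the context it appears in, and
    // decide whether that use is compatible with keeping the local in SSA form.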
    fn visit_local(&mut self,
                   &local: &mir::Local,
                   context: PlaceContext,
                   location: Location) {
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                self.assign(local, location);
            }

            PlaceContext::NonUse(_) |
            PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => {
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                let ssa_read = match self.first_assignment(local) {
                    Some(assignment_location) => {
                        assignment_location.dominates(location, &self.dominators)
                    }
                    None => false
                };
                if !ssa_read {
                    self.not_ssa(local);
                }
            }

            // These contexts may need the place itself (e.g., to take its
            // address), so the local has to live in memory.
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect) |
            PlaceContext::MutatingUse(MutatingUseContext::Store) |
            PlaceContext::MutatingUse(MutatingUseContext::AsmOutput) |
            PlaceContext::MutatingUse(MutatingUseContext::Borrow) |
            PlaceContext::MutatingUse(MutatingUseContext::Projection) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow) |
            PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => {
                self.not_ssa(local);
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let ty = self.fx.mir.local_decls[local].ty;
                let ty = self.fx.monomorphize(&ty);

                // Only need the place if we're actually dropping it.
                if self.fx.cx.type_needs_drop(ty) {
                    self.not_ssa(local);
                }
            }
        }
    }
}

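/// Classification of a basic block for funclet-based unwinding: either not a
/// cleanup block at all, the head of a funclet, or an internal block belonging
/// to the funclet rooted at `funclet`.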
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    NotCleanup,
    Funclet,
    Internal { funclet: mir::BasicBlock }
}

impl CleanupKind {
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

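/// Computes the `CleanupKind` of every basic block in two passes: first each
/// block targeted by an unwind edge is marked as a funclet head, then funclet
/// membership is propagated to successors, promoting any block reachable from
/// two different funclets into a funclet head of its own.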
pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Body<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
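    // Pass 1: mark every block that some terminator unwinds to as the head
    // ("master") of a funclet.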
    fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                              mir: &mir::Body<'tcx>) {
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. } |
                TerminatorKind::Resume |
                TerminatorKind::Abort |
                TerminatorKind::Return |
                TerminatorKind::GeneratorDrop |
                TerminatorKind::Unreachable |
                TerminatorKind::SwitchInt { .. } |
                TerminatorKind::Yield { .. } |
                TerminatorKind::FalseEdges { .. } |
                TerminatorKind::FalseUnwind { .. } => {
                    /* nothing to do */
                }
                TerminatorKind::Call { cleanup: unwind, .. } |
                TerminatorKind::Assert { cleanup: unwind, .. } |
                TerminatorKind::DropAndReplace { unwind, .. } |
                TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!("cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                               bb, data, unwind);
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

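    // Pass 2: walk blocks in reverse postorder, pulling each cleanup block's
    // successors into its funclet; a block reached from two different funclets
    // is promoted to a funclet head of its own.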
    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                       mir: &mir::Body<'tcx>) {
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| {
            match funclet_succs[funclet] {
                ref mut s @ None => {
                    debug!("set_successor: updating successor of {:?} to {:?}",
                           funclet, succ);
                    *s = Some(succ);
                },
                Some(s) => if s != succ {
                    span_bug!(mir.span, "funclet {:?} has 2 parents - {:?} and {:?}",
                              funclet, s, succ);
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!("cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                   bb, data, result[bb], funclet);

            for &succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}",
                       funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has two different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!("promoting {:?} to a funclet and updating {:?}", succ,
                                   succ_funclet);
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}