1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 Managing the scope stack. The scopes are tied to lexical scopes, so as
13 we descend the HAIR, we push a scope on the stack, translate its
14 contents, and then pop it off. Every scope is named by a
19 When pushing a new scope, we record the current point in the graph (a
20 basic block); this marks the entry to the scope. We then generate more
21 stuff in the control-flow graph. Whenever the scope is exited, either
22 via a `break` or `return` or just by fallthrough, that marks an exit
23 from the scope. Each lexical scope thus corresponds to a single-entry,
24 multiple-exit (SEME) region in the control-flow graph.
26 For now, we keep a mapping from each `CodeExtent` to its
27 corresponding SEME region for later reference (see caveat in next
28 paragraph). This is because region scopes are tied to
29 them. Eventually, when we shift to non-lexical lifetimes, there should
30 be no need to remember this mapping.
32 There is one additional wrinkle, actually, that I wanted to hide from
33 you but duty compels me to mention. In the course of translating
34 matches, it sometimes happens that certain code (namely guards) gets
35 executed multiple times. This means that a single lexical scope may
36 in fact correspond to multiple, disjoint SEME regions. So in fact our
37 mapping is from one scope to a vector of SEME regions.
41 The primary purpose for scopes is to insert drops: while translating
42 the contents, we also accumulate lvalues that need to be dropped upon
43 exit from each scope. This is done by calling `schedule_drop`. Once a
44 drop is scheduled, whenever we branch out we will insert drops of all
45 those lvalues onto the outgoing edge. Note that we don't know the full
46 set of scheduled drops up front, and so whenever we exit from the
47 scope we only drop the values scheduled thus far. For example, consider
48 the scope S corresponding to this loop:
58 When processing the `let x`, we will add one drop to the scope for
59 `x`. The break will then insert a drop for `x`. When we process `let
60 y`, we will add another drop (in fact, to a subscope, but let's ignore
61 that for now); any later drops would also drop `y`.
65 There are numerous "normal" ways to early exit a scope: `break`,
66 `continue`, `return` (panics are handled separately). Whenever an
67 early exit occurs, the method `exit_scope` is called. It is given the
68 current point in execution where the early exit occurs, as well as the
69 scope you want to branch to (note that all early exits go to some
70 other enclosing scope). `exit_scope` will record this exit point and
73 Panics are handled in a similar fashion, except that a panic always
74 returns out to the `DIVERGE_BLOCK`. To trigger a panic, simply call
75 `panic(p)` with the current point `p`. Or else you can call
76 `diverge_cleanup`, which will produce a block that you can branch to
77 which does the appropriate cleanup and then diverges. `panic(p)`
78 simply calls `diverge_cleanup()` and adds an edge from `p` to the
83 In addition to the normal scope stack, we track a loop scope stack
84 that contains only loops. It tracks where a `break` and `continue`
89 use build::{BlockAnd, BlockAndExtension, Builder};
90 use rustc::middle::region::CodeExtent;
91 use rustc::middle::lang_items;
92 use rustc::middle::subst::Substs;
93 use rustc::middle::ty::{self, Ty};
94 use rustc::mir::repr::*;
95 use syntax::codemap::{Span, DUMMY_SP};
96 use syntax::parse::token::intern_and_get_ident;
98 pub struct Scope<'tcx> {
    // Lvalues scheduled to be dropped when this scope is exited, with the
    // kind of drop and the span responsible. `pop_scope` emits these in
    // push order on the fallthrough path; `diverge_cleanup` emits them in
    // reverse order on the unwind path.
100 drops: Vec<(DropKind, Span, Lvalue<'tcx>)>,
    // Cached cleanup block for the unwind path of this scope; invalidated
    // (set back to None) by `schedule_drop` whenever a new drop is added
    // to this or an enclosed scope, and rebuilt by `diverge_cleanup`.
101 cached_block: Option<BasicBlock>,
104 #[derive(Clone, Debug)]
// Tracks the branch targets that `break` and `continue` resolve to while
// a loop body is being translated (see `in_loop_scope`/`find_loop_scope`).
105 pub struct LoopScope {
106 /// Extent of the loop
107 pub extent: CodeExtent,
108 /// Where the body of the loop begins
109 pub continue_block: BasicBlock,
110 /// Block to branch into when the loop terminates (either by being `break`-en out from, or by
111 /// its condition becoming false)
112 pub break_block: BasicBlock, // where to go on a `break`
113 /// Indicates the reachability of the break_block for this loop
114 pub might_break: bool
117 impl<'a,'tcx> Builder<'a,'tcx> {
118 /// Start a loop scope, which tracks where `continue` and `break`
119 /// should branch to. See module comment for more details.
121 /// Returns the might_break attribute of the LoopScope used.
122 pub fn in_loop_scope<F>(&mut self,
123 loop_block: BasicBlock,
124 break_block: BasicBlock,
127 where F: FnOnce(&mut Builder<'a, 'tcx>)
// The loop scope is keyed to the innermost lexical scope current when
// the loop starts, so `break`/`continue` can later be matched to it.
129 let extent = self.extent_of_innermost_scope();
130 let loop_scope = LoopScope {
131 extent: extent.clone(),
132 continue_block: loop_block,
133 break_block: break_block,
136 self.loop_scopes.push(loop_scope);
// `f` builds the loop contents between the push above and the pop below.
138 let loop_scope = self.loop_scopes.pop().unwrap();
// The scope popped must be the one we pushed: loop scopes nest strictly.
139 assert!(loop_scope.extent == extent);
140 loop_scope.might_break
143 /// Convenience wrapper that pushes a scope and then executes `f`
144 /// to build its contents, popping the scope afterwards.
145 pub fn in_scope<F, R>(&mut self, extent: CodeExtent, mut block: BasicBlock, f: F) -> BlockAnd<R>
146 where F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>
148 debug!("in_scope(extent={:?}, block={:?})", extent, block);
149 self.push_scope(extent, block);
// `f` may move the insertion point to a different basic block; `unpack!`
// rebinds `block` to wherever construction ended up.
150 let rv = unpack!(block = f(self));
// Popping emits any drops scheduled in the scope onto the (possibly
// updated) fallthrough block.
151 self.pop_scope(extent, block);
152 debug!("in_scope: exiting extent={:?} block={:?}", extent, block);
156 /// Push a scope onto the stack. You can then build code in this
157 /// scope and call `pop_scope` afterwards. Note that these two
158 /// calls must be paired; using `in_scope` as a convenience
159 /// wrapper may be preferable.
160 pub fn push_scope(&mut self, extent: CodeExtent, block: BasicBlock) {
161 debug!("push_scope({:?}, {:?})", extent, block);
163 // push scope, execute `f`, then pop scope again
// Drops accumulate in the new scope later, via `schedule_drop`.
164 self.scopes.push(Scope {
165 extent: extent.clone(),
171 /// Pops a scope, which should have extent `extent`, adding any
172 /// drops onto the end of `block` that are needed. This must
173 /// match 1-to-1 with `push_scope`.
174 pub fn pop_scope(&mut self, extent: CodeExtent, block: BasicBlock) {
175 debug!("pop_scope({:?}, {:?})", extent, block);
176 let scope = self.scopes.pop().unwrap();
// Scopes pop in LIFO order; a mismatch here means push_scope/pop_scope
// calls were not correctly paired.
178 assert_eq!(scope.extent, extent);
180 // add in any drops needed on the fallthrough path (any other
181 // exiting paths, such as those that arise from `break`, will
182 // have drops already)
183 for (kind, span, lvalue) in scope.drops {
184 self.cfg.push_drop(block, span, kind, &lvalue);
189 /// Finds the loop scope for a given label. This is used for
190 /// resolving `break` and `continue`.
191 pub fn find_loop_scope(&mut self,
193 label: Option<CodeExtent>)
// Destructure `self` so the borrows of `loop_scopes` and `hir` can be
// held independently below.
195 let Builder { ref mut loop_scopes, ref mut hir, .. } = *self;
198 // no label? return the innermost loop scope
// (the innermost loop is the most recently pushed entry)
199 loop_scopes.iter_mut().rev().next()
202 // otherwise, find the loop-scope with the correct id
203 loop_scopes.iter_mut()
205 .filter(|loop_scope| loop_scope.extent == label)
// No enclosing loop means an earlier phase let a bad break/continue
// through — an ICE, not a user-facing error.
208 }.unwrap_or_else(|| hir.span_bug(span, "no enclosing loop scope found?"))
211 /// Branch out of `block` to `target`, exiting all scopes up to
212 /// and including `extent`. This will insert whatever drops are
213 /// needed, as well as tracking this exit for the SEME region. See
214 /// module comment for details.
215 pub fn exit_scope(&mut self,
219 target: BasicBlock) {
220 let Builder { ref mut scopes, ref mut cfg, ref mut hir, .. } = *self;
// Count how many scopes we are leaving: everything from the innermost
// scope up to and including the one whose extent matches.
222 let scope_count = 1 + scopes.iter().rev().position(|scope| scope.extent == extent)
224 hir.span_bug(span, &format!("extent {:?} does not enclose", extent))
// Emit the drops of each exited scope, innermost first. The scopes are
// NOT popped: other exits (or the fallthrough) may still leave them.
227 for scope in scopes.iter_mut().rev().take(scope_count) {
228 for &(kind, drop_span, ref lvalue) in &scope.drops {
229 cfg.push_drop(block, drop_span, kind, lvalue);
// After all drops, branch unconditionally to the requested target.
232 cfg.terminate(block, Terminator::Goto { target: target });
235 /// Creates a path that performs all required cleanup for unwinding.
237 /// This path terminates in Resume. Returns the start of the path.
238 /// See module comment for more details. None indicates there’s no
239 /// cleanup to do at this point.
240 pub fn diverge_cleanup(&mut self) -> Option<BasicBlock> {
241 if self.scopes.is_empty() {
// The chain ultimately resumes unwinding once all drops have run.
245 let mut terminator = Terminator::Resume;
246 // Given an array of scopes, we generate these from the outermost scope to the innermost
247 // one. Thus for array [S0, S1, S2] with corresponding cleanup blocks [B0, B1, B2], we will
248 // generate B0 <- B1 <- B2 in left-to-right order. The outermost scope (B0) will always
249 // terminate with a Resume terminator.
250 for scope in self.scopes.iter_mut().filter(|s| !s.drops.is_empty()) {
// A still-valid cached block means this scope's cleanup (and everything
// outside it) can be reused: just chain onto it.
251 if let Some(b) = scope.cached_block {
252 terminator = Terminator::Goto { target: b };
// Otherwise build a fresh cleanup block: it drops this scope's lvalues
// in reverse of scheduling order, then continues to the next-outer
// cleanup block (or Resume).
255 let new_block = self.cfg.start_new_block();
256 self.cfg.block_data_mut(new_block).is_cleanup = true;
257 self.cfg.terminate(new_block, terminator);
258 terminator = Terminator::Goto { target: new_block };
259 for &(kind, span, ref lvalue) in scope.drops.iter().rev() {
260 self.cfg.push_drop(new_block, span, kind, lvalue);
// Remember this block so subsequent calls can reuse it until a new drop
// is scheduled (which clears the cache, see `schedule_drop`).
262 scope.cached_block = Some(new_block);
265 // Return the innermost cached block, most likely the one we just generated.
266 // Note that if there are no cleanups in scope we return None.
267 self.scopes.iter().rev().flat_map(|b| b.cached_block).next()
270 /// Indicates that `lvalue` should be dropped on exit from
272 pub fn schedule_drop(&mut self,
276 lvalue: &Lvalue<'tcx>,
277 lvalue_ty: Ty<'tcx>) {
// Types that don't need dropping are never scheduled at all.
278 if self.hir.needs_drop(lvalue_ty) {
// Walk the scope stack from innermost outward, looking for `extent`.
279 for scope in self.scopes.iter_mut().rev() {
280 // We must invalidate all the cached_blocks leading up to the scope we’re looking
281 // for, because otherwise some/most of the blocks in the chain might become
282 // incorrect (i.e. they still are pointing at old cached_block).
283 scope.cached_block = None;
284 if scope.extent == extent {
285 scope.drops.push((kind, span, lvalue.clone()));
// Falling out of the loop means no scope matched `extent` — a compiler
// bug, as the caller should only name enclosing extents.
289 self.hir.span_bug(span,
290 &format!("extent {:?} not in scope to drop {:?}", extent, lvalue));
// Extent of the most recently pushed (innermost) scope.
// Panics if the scope stack is empty.
294 pub fn extent_of_innermost_scope(&self) -> CodeExtent {
295 self.scopes.last().map(|scope| scope.extent).unwrap()
// Extent of the first-pushed (outermost) scope.
// Panics if the scope stack is empty.
298 pub fn extent_of_outermost_scope(&self) -> CodeExtent {
299 self.scopes.first().map(|scope| scope.extent).unwrap()
// Terminates `block` with a diverging call to the panic_bounds_check
// lang item, passing the (filename, line) pair plus index and length.
302 pub fn panic_bounds_check(&mut self,
304 index: Operand<'tcx>,
307 // fn(&(filename: &'static str, line: u32), index: usize, length: usize) -> !
308 let func = self.lang_function(lang_items::PanicBoundsCheckFnLangItem);
// Extract the first argument type from the lang item's signature; it
// must be a reference to the (filename, line) tuple.
309 let args = func.ty.fn_args();
310 let ref_ty = args.skip_binder()[0];
311 let (region, tup_ty) = if let ty::TyRef(region, tyandmut) = ref_ty.sty {
312 (region, tyandmut.ty)
// Any other shape for the lang item's first argument is a compiler bug.
314 self.hir.span_bug(span, &format!("unexpected panic_bound_check type: {:?}", func.ty));
316 let (tuple, tuple_ref) = (self.temp(tup_ty), self.temp(ref_ty));
317 let (file, line) = self.span_to_fileline_args(span);
318 let elems = vec![Operand::Constant(file), Operand::Constant(line)];
319 // FIXME: We should have this as a constant, rather than a stack variable (to not pollute
320 // icache with cold branch code), however to achieve that we either have to rely on rvalue
321 // promotion or have some way, in MIR, to create constants.
322 self.cfg.push_assign(block, DUMMY_SP, &tuple, // tuple = (file_arg, line_arg);
323 Rvalue::Aggregate(AggregateKind::Tuple, elems));
324 // FIXME: is this region really correct here?
325 self.cfg.push_assign(block, DUMMY_SP, &tuple_ref, // tuple_ref = &tuple;
326 Rvalue::Ref(*region, BorrowKind::Unique, tuple));
// The call diverges; if any drops are in scope, its unwind edge is
// routed through the cleanup chain built by `diverge_cleanup`.
327 let cleanup = self.diverge_cleanup();
328 self.cfg.terminate(block, Terminator::Call {
329 func: Operand::Constant(func),
330 args: vec![Operand::Consume(tuple_ref), index, len],
331 kind: match cleanup {
332 None => CallKind::Diverging,
333 Some(c) => CallKind::DivergingCleanup(c)
338 /// Create diverge cleanup and branch to it from `block`.
339 pub fn panic(&mut self, block: BasicBlock, msg: &'static str, span: Span) {
340 // fn(&(msg: &'static str, filename: &'static str, line: u32)) -> !
341 let func = self.lang_function(lang_items::PanicFnLangItem);
// Extract the first argument type from the lang item's signature; it
// must be a reference to the (msg, filename, line) tuple.
342 let args = func.ty.fn_args();
343 let ref_ty = args.skip_binder()[0];
344 let (region, tup_ty) = if let ty::TyRef(region, tyandmut) = ref_ty.sty {
345 (region, tyandmut.ty)
// Any other shape for the lang item's first argument is a compiler bug.
347 self.hir.span_bug(span, &format!("unexpected panic type: {:?}", func.ty));
349 let (tuple, tuple_ref) = (self.temp(tup_ty), self.temp(ref_ty));
350 let (file, line) = self.span_to_fileline_args(span);
// Intern the static panic message and wrap it as a MIR constant.
351 let message = Constant {
353 ty: self.hir.tcx().mk_static_str(),
354 literal: self.hir.str_literal(intern_and_get_ident(msg))
356 let elems = vec![Operand::Constant(message),
357 Operand::Constant(file),
358 Operand::Constant(line)];
359 // FIXME: We should have this as a constant, rather than a stack variable (to not pollute
360 // icache with cold branch code), however to achieve that we either have to rely on rvalue
361 // promotion or have some way, in MIR, to create constants.
362 self.cfg.push_assign(block, DUMMY_SP, &tuple, // tuple = (message_arg, file_arg, line_arg);
363 Rvalue::Aggregate(AggregateKind::Tuple, elems));
364 // FIXME: is this region really correct here?
365 self.cfg.push_assign(block, DUMMY_SP, &tuple_ref, // tuple_ref = &tuple;
366 Rvalue::Ref(*region, BorrowKind::Unique, tuple));
// The call diverges; if any drops are in scope, its unwind edge is
// routed through the cleanup chain built by `diverge_cleanup`.
367 let cleanup = self.diverge_cleanup();
368 self.cfg.terminate(block, Terminator::Call {
369 func: Operand::Constant(func),
370 args: vec![Operand::Consume(tuple_ref)],
371 kind: match cleanup {
372 None => CallKind::Diverging,
373 Some(c) => CallKind::DivergingCleanup(c)
// Builds a MIR constant referring to the function for the given lang
// item, with empty substitutions.
378 fn lang_function(&mut self, lang_item: lang_items::LangItem) -> Constant<'tcx> {
// Resolve the lang item's DefId; a missing lang item aborts compilation
// with a fatal error.
379 let funcdid = match self.hir.tcx().lang_items.require(lang_item) {
382 self.hir.tcx().sess.fatal(&*m)
387 ty: self.hir.tcx().lookup_item_type(funcdid).ty,
388 literal: Literal::Item {
390 kind: ItemKind::Function,
391 substs: self.hir.tcx().mk_substs(Substs::empty())
396 fn span_to_fileline_args(&mut self, span: Span) -> (Constant<'tcx>, Constant<'tcx>) {
397 let span_lines = self.hir.tcx().sess.codemap().lookup_char_pos(span.lo);
400 ty: self.hir.tcx().mk_static_str(),
401 literal: self.hir.str_literal(intern_and_get_ident(&span_lines.file.name))
404 ty: self.hir.tcx().types.u32,
405 literal: self.hir.usize_literal(span_lines.line)