1 //! This file builds up the `ScopeTree`, which describes
2 //! the parent links in the region hierarchy.
4 //! For more information about how MIR-based region-checking works,
5 //! see the [rustc guide].
7 //! [rustc guide]: https://rust-lang.github.io/rustc-guide/mir/borrowck.html
11 use crate::hir::def_id::DefId;
12 use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
13 use crate::hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local};
14 use crate::ich::{StableHashingContext, NodeIdHashingMode};
15 use crate::util::nodemap::{FxHashMap, FxHashSet};
16 use crate::ty::{self, DefIdTree, TyCtxt};
17 use crate::ty::query::Providers;
19 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
20 use rustc_index::vec::Idx;
21 use rustc_macros::HashStable;
22 use syntax::source_map;
23 use syntax_pos::{Span, DUMMY_SP};
28 /// Represents a statically-describable scope that can be used to
29 /// bound the lifetime/region for values.
31 /// `Node(node_id)`: Any AST node that has any scope at all has the
32 /// `Node(node_id)` scope. Other variants represent special cases not
33 /// immediately derivable from the abstract syntax tree structure.
35 /// `DestructionScope(node_id)` represents the scope of destructors
36 /// implicitly-attached to `node_id` that run immediately after the
37 /// expression for `node_id` itself. Not every AST node carries a
38 /// `DestructionScope`, but those that are `terminating_scopes` do;
39 /// see discussion with `ScopeTree`.
41 /// `Remainder { block, statement_index }` represents
42 /// the scope of user code running immediately after the initializer
43 /// expression for the indexed statement, until the end of the block.
45 /// So: the following code can be broken down into the scopes beneath:
48 /// let a = f().g( 'b: { let x = d(); let y = d(); x.h(y) } ) ;
52 /// +---------+ (R10.)
54 /// +----------+ (M8.)
55 /// +----------------------+ (R7.)
57 /// +----------+ (M5.)
58 /// +-----------------------------------+ (M4.)
59 /// +--------------------------------------------------+ (M3.)
61 /// +-----------------------------------------------------------+ (M1.)
63 /// (M1.): Node scope of the whole `let a = ...;` statement.
64 /// (M2.): Node scope of the `f()` expression.
65 /// (M3.): Node scope of the `f().g(..)` expression.
66 /// (M4.): Node scope of the block labeled `'b:`.
67 /// (M5.): Node scope of the `let x = d();` statement
68 /// (D6.): DestructionScope for temporaries created during M5.
69 /// (R7.): Remainder scope for block `'b:`, stmt 0 (let x = ...).
70 /// (M8.): Node scope of the `let y = d();` statement.
71 /// (D9.): DestructionScope for temporaries created during M8.
72 /// (R10.): Remainder scope for block `'b:`, stmt 1 (let y = ...).
73 /// (D11.): DestructionScope for temporaries and bindings from block `'b:`.
74 /// (D12.): DestructionScope for temporaries created during M1 (e.g., f()).
77 /// Note that while the above picture shows the destruction scopes
78 /// as following their corresponding node scopes, in the internal
79 /// data structures of the compiler the destruction scopes are
80 /// represented as enclosing parents. This is sound because we use the
81 /// enclosing parent relationship just to ensure that referenced
82 /// values live long enough; phrased another way, the starting point
83 /// of each range is not really the important thing in the above
84 /// picture, but rather the ending point.
86 // FIXME(pnkfelix): this currently derives `PartialOrd` and `Ord` to
87 // placate the same deriving in `ty::FreeRegion`, but we may want to
88 // actually attach a more meaningful ordering to scopes than the one
89 // generated via deriving here.
90 #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Copy,
91 RustcEncodable, RustcDecodable, HashStable)]
// The item-local ID of the HIR node this scope is attached to.
// NOTE(review): `Scope` also carries a `data: ScopeData` field (used as
// `self.data` / `child.data` elsewhere in this file) that is not visible
// in this excerpt.
93 pub id: hir::ItemLocalId,
// Debug form: the `ScopeData` variant name plus the scope's item-local id,
// e.g. `Node(ItemLocalId(3))`.
97 impl fmt::Debug for Scope {
98 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
100 ScopeData::Node => write!(fmt, "Node({:?})", self.id),
101 ScopeData::CallSite => write!(fmt, "CallSite({:?})", self.id),
102 ScopeData::Arguments => write!(fmt, "Arguments({:?})", self.id),
103 ScopeData::Destruction => write!(fmt, "Destruction({:?})", self.id),
104 ScopeData::Remainder(fsi) => write!(
// The `Remainder` arm also prints the scope's block id and the index of the
// first statement covered by the remainder.
106 "Remainder {{ block: {:?}, first_statement_index: {}}}",
// The kind of a `Scope`; see the `Scope` doc comment for the full picture of
// how these variants nest.
114 #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy,
115 RustcEncodable, RustcDecodable, HashStable)]
119 /// Scope of the call-site for a function or closure
120 /// (outlives the arguments as well as the body).
123 /// Scope of arguments passed to a function or closure
124 /// (they outlive its body).
127 /// Scope of destructors for temporaries of node-id.
130 /// Scope following a `let id = expr;` binding in a block.
131 Remainder(FirstStatementIndex)
134 rustc_index::newtype_index! {
135 /// Represents a subscope of `block` for a binding that is introduced
136 /// by `block.stmts[first_statement_index]`. Such subscopes represent
137 /// a suffix of the block. Note that each subscope does not include
138 /// the initializer expression, if any, for the statement indexed by
139 /// `first_statement_index`.
141 /// For example, given `{ let (a, b) = EXPR_1; let c = EXPR_2; ... }`:
143 /// * The subscope with `first_statement_index == 0` is scope of both
144 /// `a` and `b`; it does not include EXPR_1, but does include
145 /// everything after that first `let`. (If you want a scope that
146 /// includes EXPR_1 as well, then do not use `Scope::Remainder`,
147 /// but instead another `Scope` that encompasses the whole block,
148 /// e.g., `Scope::Node`.)
150 /// * The subscope with `first_statement_index == 1` is scope of `c`,
151 /// and thus does not include EXPR_2, but covers the `...`.
152 pub struct FirstStatementIndex {
// Keep `ScopeData` (variant tag + `FirstStatementIndex` payload) at exactly
// four bytes; `Scope` values are stored in large maps, so size matters.
157 // compilation error if size of `ScopeData` is not the same as a `u32`
158 static_assert_size!(ScopeData, 4);
161 /// Returns an item-local ID associated with this scope.
163 /// N.B., likely to be replaced as API is refined; e.g., pnkfelix
164 /// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
165 pub fn item_local_id(&self) -> hir::ItemLocalId {
// Reconstructs the full `HirId` for this scope by pairing the root body's
// owner with our item-local id. Returns `DUMMY_HIR_ID` when the tree has no
// root body recorded.
169 pub fn hir_id(&self, scope_tree: &ScopeTree) -> hir::HirId {
170 match scope_tree.root_body {
174 local_id: self.item_local_id()
177 None => hir::DUMMY_HIR_ID
181 /// Returns the span of this `Scope`. Note that in general the
182 /// returned span may not correspond to the span of any `NodeId` in
184 pub fn span(&self, tcx: TyCtxt<'_>, scope_tree: &ScopeTree) -> Span {
185 let hir_id = self.hir_id(scope_tree);
186 if hir_id == hir::DUMMY_HIR_ID {
189 let span = tcx.hir().span(hir_id);
190 if let ScopeData::Remainder(first_statement_index) = self.data {
191 if let Node::Block(ref blk) = tcx.hir().get(hir_id) {
192 // Want span for scope starting after the
193 // indexed statement and ending at end of
194 // `blk`; reuse span of `blk` and shift `lo`
195 // forward to end of indexed statement.
197 // (This is the special case alluded to in the
198 // doc-comment for this method)
200 let stmt_span = blk.stmts[first_statement_index.index()].span;
202 // To avoid issues with macro-generated spans, the span
203 // of the statement must be nested in that of the block.
204 if span.lo() <= stmt_span.lo() && stmt_span.lo() <= span.hi() {
205 return Span::new(stmt_span.lo(), span.hi(), span.ctxt());
/// Depth of a scope within the `parent_map` hierarchy (the root is the
/// shallowest); stored alongside each parent link so that
/// `nearest_common_ancestor` can equalize depths without walking the tree.
213 pub type ScopeDepth = u32;
215 /// The region scope tree encodes information about region relationships.
216 #[derive(Default, Debug)]
217 pub struct ScopeTree {
218 /// If not empty, this body is the root of this region hierarchy.
219 root_body: Option<hir::HirId>,
221 /// The parent of the root body owner, if the latter is an
222 /// an associated const or method, as impls/traits can also
223 /// have lifetime parameters free in this body.
224 root_parent: Option<hir::HirId>,
226 /// Maps from a scope ID to the enclosing scope id;
227 /// this is usually corresponding to the lexical nesting, though
228 /// in the case of closures the parent scope is the innermost
229 /// conditional expression or repeating block. (Note that the
230 /// enclosing scope ID for the block associated with a closure is
231 /// the closure itself.)
232 parent_map: FxHashMap<Scope, (Scope, ScopeDepth)>,
234 /// Maps from a variable or binding ID to the block in which that
235 /// variable is declared.
236 var_map: FxHashMap<hir::ItemLocalId, Scope>,
238 /// Maps from a `NodeId` to the associated destruction scope (if any).
239 destruction_scopes: FxHashMap<hir::ItemLocalId, Scope>,
241 /// `rvalue_scopes` includes entries for those expressions whose
242 /// cleanup scope is larger than the default. The map goes from the
243 /// expression ID to the cleanup scope id. For rvalues not present in
244 /// this table, the appropriate cleanup scope is the innermost
245 /// enclosing statement, conditional expression, or repeating
246 /// block (see `terminating_scopes`).
247 /// In constants, None is used to indicate that certain expressions
248 /// escape into 'static and should have no local cleanup scope.
249 rvalue_scopes: FxHashMap<hir::ItemLocalId, Option<Scope>>,
251 /// Encodes the hierarchy of fn bodies. Every fn body (including
252 /// closures) forms its own distinct region hierarchy, rooted in
253 /// the block that is the fn body. This map points from the ID of
254 /// that root block to the ID of the root block for the enclosing
255 /// fn, if any. Thus the map structures the fn bodies into a
256 /// hierarchy based on their lexical mapping. This is used to
257 /// handle the relationships between regions in a fn and in a
258 /// closure defined by that fn. See the "Modeling closures"
259 /// section of the README in infer::region_constraints for
261 closure_tree: FxHashMap<hir::ItemLocalId, hir::ItemLocalId>,
263 /// If there are any `yield` nested within a scope, this map
264 /// stores the `Span` of the last one and its index in the
265 /// postorder of the Visitor traversal on the HIR.
267 /// HIR Visitor postorder indexes might seem like a peculiar
268 /// thing to care about. but it turns out that HIR bindings
269 /// and the temporary results of HIR expressions are never
270 /// storage-live at the end of HIR nodes with postorder indexes
271 /// lower than theirs, and therefore don't need to be suspended
272 /// at yield-points at these indexes.
274 /// For an example, suppose we have some code such as:
275 /// ```rust,ignore (example)
276 /// foo(f(), yield y, bar(g()))
279 /// With the HIR tree (calls numbered for expository purposes)
281 /// Call#0(foo, [Call#1(f), Yield(y), Call#2(bar, Call#3(g))])
284 /// Obviously, the result of `f()` was created before the yield
285 /// (and therefore needs to be kept valid over the yield) while
286 /// the result of `g()` occurs after the yield (and therefore
287 /// doesn't). If we want to infer that, we can look at the
288 /// postorder traversal:
290 /// `foo` `f` Call#1 `y` Yield `bar` `g` Call#3 Call#2 Call#0
293 /// In which we can easily see that `Call#1` occurs before the yield,
294 /// and `Call#3` after it.
296 /// To see that this method works, consider:
298 /// Let `D` be our binding/temporary and `U` be our other HIR node, with
299 /// `HIR-postorder(U) < HIR-postorder(D)` (in our example, U would be
300 /// the yield and D would be one of the calls). Let's show that
301 /// `D` is storage-dead at `U`.
303 /// Remember that storage-live/storage-dead refers to the state of
304 /// the *storage*, and does not consider moves/drop flags.
307 /// 1. From the ordering guarantee of HIR visitors (see
308 /// `rustc::hir::intravisit`), `D` does not dominate `U`.
309 /// 2. Therefore, `D` is *potentially* storage-dead at `U` (because
310 /// we might visit `U` without ever getting to `D`).
311 /// 3. However, we guarantee that at each HIR point, each
312 /// binding/temporary is always either always storage-live
313 /// or always storage-dead. This is what is being guaranteed
314 /// by `terminating_scopes` including all blocks where the
315 /// count of executions is not guaranteed.
316 /// 4. By `2.` and `3.`, `D` is *statically* storage-dead at `U`,
319 /// This property ought not to rely on (3) in an essential way -- it
320 /// is probably still correct even if we have "unrestricted" terminating
321 /// scopes. However, why use the complicated proof when a simple one
324 /// A subtle thing: `box` expressions, such as `box (&x, yield 2, &y)`. It
325 /// might seem that a `box` expression creates a `Box<T>` temporary
326 /// when it *starts* executing, at `HIR-preorder(BOX-EXPR)`. That might
327 /// be true in the MIR desugaring, but it is not important in the semantics.
329 /// The reason is that semantically, until the `box` expression returns,
330 /// the values are still owned by their containing expressions. So
331 /// we'll see that `&x`.
332 yield_in_scope: FxHashMap<Scope, YieldData>,
334 /// The number of visit_expr and visit_pat calls done in the body.
335 /// Used to sanity check visit_expr/visit_pat call count when
336 /// calculating generator interiors.
337 body_expr_count: FxHashMap<hir::BodyId, usize>,
// Per-scope record of the last `yield` seen inside that scope; the value
// type of `ScopeTree::yield_in_scope`.
340 #[derive(Debug, Copy, Clone, RustcEncodable, RustcDecodable, HashStable)]
341 pub struct YieldData {
342 /// The `Span` of the yield.
344 /// The number of expressions and patterns appearing before the `yield` in the body plus one.
345 pub expr_and_pat_count: usize,
/// The kind of yield (see `hir::YieldSource`).
346 pub source: hir::YieldSource,
// Cursor state carried by the visitor (`visitor.cx`): which region tree we
// are in, and the current parent scopes for variables and expressions.
349 #[derive(Debug, Copy, Clone)]
351 /// The root of the current region tree. This is typically the id
352 /// of the innermost fn body. Each fn forms its own disjoint tree
353 /// in the region hierarchy. These fn bodies are themselves
354 /// arranged into a tree. See the "Modeling closures" section of
355 /// the README in `infer::region_constraints` for more
357 root_id: Option<hir::ItemLocalId>,
359 /// The scope that contains any new variables declared, plus its depth in
361 var_parent: Option<(Scope, ScopeDepth)>,
363 /// Region parent of expressions, etc., plus its depth in the scope tree.
364 parent: Option<(Scope, ScopeDepth)>,
// The HIR visitor that walks a body and populates `scope_tree`.
367 struct RegionResolutionVisitor<'tcx> {
370 // The number of expressions and patterns visited in the current body.
371 expr_and_pat_count: usize,
372 // When this is `true`, we record the `Scopes` we encounter
373 // when processing a Yield expression. This allows us to fix
375 pessimistic_yield: bool,
376 // Stores scopes when `pessimistic_yield` is `true`.
377 fixup_scopes: Vec<Scope>,
378 // The generated scope tree.
379 scope_tree: ScopeTree,
383 /// `terminating_scopes` is a set containing the ids of each
384 /// statement, or conditional/repeating expression. These scopes
385 /// are called "terminating scopes" because, when attempting to
386 /// find the scope of a temporary, by default we search up the
387 /// enclosing scopes until we encounter the terminating scope. A
388 /// conditional/repeating expression is one which is not
389 /// guaranteed to execute exactly once upon entering the parent
390 /// scope. This could be because the expression only executes
391 /// conditionally, such as the expression `b` in `a && b`, or
392 /// because the expression may execute many times, such as a loop
393 /// body. The reason that we distinguish such expressions is that,
394 /// upon exiting the parent scope, we cannot statically know how
395 /// many times the expression executed, and thus if the expression
396 /// creates temporaries we cannot know statically how many such
397 /// temporaries we would have to cleanup. Therefore, we ensure that
398 /// the temporaries never outlast the conditional/repeating
399 /// expression, preventing the need for dynamic checks and/or
400 /// arbitrary amounts of stack space. Terminating scopes end
401 /// up being contained in a DestructionScope that contains the
402 /// destructor's execution.
403 terminating_scopes: FxHashSet<hir::ItemLocalId>,
// Helper visitor: finds the postorder index (expression/pattern count) of a
// particular HIR node within a body; used by `yield_in_scope_for_expr`.
406 struct ExprLocatorVisitor {
// Postorder index of the target node, once found.
408 result: Option<usize>,
// Running count of expressions and patterns visited so far.
409 expr_and_pat_count: usize,
412 // This visitor has to have the same `visit_expr` calls as `RegionResolutionVisitor`
413 // since `expr_count` is compared against the results there.
414 impl<'tcx> Visitor<'tcx> for ExprLocatorVisitor {
415 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
416 NestedVisitorMap::None
419 fn visit_pat(&mut self, pat: &'tcx Pat) {
// Walk children first, then increment: the count is a *postorder* index.
420 intravisit::walk_pat(self, pat);
422 self.expr_and_pat_count += 1;
424 if pat.hir_id == self.hir_id {
425 self.result = Some(self.expr_and_pat_count);
429 fn visit_expr(&mut self, expr: &'tcx Expr) {
430 debug!("ExprLocatorVisitor - pre-increment {} expr = {:?}",
431 self.expr_and_pat_count,
// As in `visit_pat`: children first, then increment (postorder numbering,
// mirroring `resolve_expr` in `RegionResolutionVisitor`).
434 intravisit::walk_expr(self, expr);
436 self.expr_and_pat_count += 1;
438 debug!("ExprLocatorVisitor - post-increment {} expr = {:?}",
439 self.expr_and_pat_count,
442 if expr.hir_id == self.hir_id {
443 self.result = Some(self.expr_and_pat_count);
448 impl<'tcx> ScopeTree {
// Records `parent` as the enclosing scope of `child`; each child may be
// recorded at most once (asserted below).
449 pub fn record_scope_parent(&mut self, child: Scope, parent: Option<(Scope, ScopeDepth)>) {
450 debug!("{:?}.parent = {:?}", child, parent);
452 if let Some(p) = parent {
453 let prev = self.parent_map.insert(child, p);
454 assert!(prev.is_none());
457 // Record the destruction scopes for later so we can query them.
458 if let ScopeData::Destruction = child.data {
459 self.destruction_scopes.insert(child.item_local_id(), child);
// Invokes `e(child, parent)` for every parent link in the tree
// (iteration order is that of the hash map, i.e. unspecified).
463 pub fn each_encl_scope<E>(&self, mut e: E) where E: FnMut(Scope, Scope) {
464 for (&child, &parent) in &self.parent_map {
// Invokes `e(var, scope)` for every recorded variable scope.
469 pub fn each_var_scope<E>(&self, mut e: E) where E: FnMut(&hir::ItemLocalId, Scope) {
470 for (child, &parent) in self.var_map.iter() {
// Looks up the destruction scope recorded for node `n`, if any.
475 pub fn opt_destruction_scope(&self, n: hir::ItemLocalId) -> Option<Scope> {
476 self.destruction_scopes.get(&n).cloned()
479 /// Records that `sub_closure` is defined within `sup_closure`. These IDs
480 /// should be the ID of the block that is the fn body, which is
481 /// also the root of the region hierarchy for that fn.
482 fn record_closure_parent(&mut self,
483 sub_closure: hir::ItemLocalId,
484 sup_closure: hir::ItemLocalId) {
485 debug!("record_closure_parent(sub_closure={:?}, sup_closure={:?})",
486 sub_closure, sup_closure);
487 assert!(sub_closure != sup_closure);
488 let previous = self.closure_tree.insert(sub_closure, sup_closure);
489 assert!(previous.is_none());
// Records the scope in which variable `var` is declared.
492 fn record_var_scope(&mut self, var: hir::ItemLocalId, lifetime: Scope) {
493 debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime);
494 assert!(var != lifetime.item_local_id());
495 self.var_map.insert(var, lifetime);
// Records an extended (or, for constants, `None` = 'static) cleanup scope
// for the rvalue temporary produced by expression `var`.
498 fn record_rvalue_scope(&mut self, var: hir::ItemLocalId, lifetime: Option<Scope>) {
499 debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime);
500 if let Some(lifetime) = lifetime {
501 assert!(var != lifetime.item_local_id());
503 self.rvalue_scopes.insert(var, lifetime);
506 /// Returns the narrowest scope that encloses `id`, if any.
507 pub fn opt_encl_scope(&self, id: Scope) -> Option<Scope> {
508 self.parent_map.get(&id).cloned().map(|(p, _)| p)
511 /// Returns the narrowest scope that encloses `id`, if any.
512 #[allow(dead_code)] // used in cfg
513 pub fn encl_scope(&self, id: Scope) -> Scope {
514 self.opt_encl_scope(id).unwrap()
517 /// Returns the lifetime of the local variable `var_id`
518 pub fn var_scope(&self, var_id: hir::ItemLocalId) -> Scope {
519 self.var_map.get(&var_id).cloned().unwrap_or_else(||
520 bug!("no enclosing scope for id {:?}", var_id))
523 /// Returns the scope when the temp created by `expr_id` will be cleaned up.
524 pub fn temporary_scope(&self, expr_id: hir::ItemLocalId) -> Option<Scope> {
525 // Check for a designated rvalue scope.
526 if let Some(&s) = self.rvalue_scopes.get(&expr_id) {
527 debug!("temporary_scope({:?}) = {:?} [custom]", expr_id, s);
531 // Otherwise, locate the innermost terminating scope
532 // if there's one. Static items, for instance, won't
533 // have an enclosing scope, hence no scope will be
535 let mut id = Scope { id: expr_id, data: ScopeData::Node };
// Walk up the parent chain until the first enclosing destruction scope.
537 while let Some(&(p, _)) = self.parent_map.get(&id) {
539 ScopeData::Destruction => {
540 debug!("temporary_scope({:?}) = {:?} [enclosing]",
548 debug!("temporary_scope({:?}) = None", expr_id);
552 /// Returns the lifetime of the variable `id`.
553 pub fn var_region(&self, id: hir::ItemLocalId) -> ty::RegionKind {
554 let scope = ty::ReScope(self.var_scope(id));
555 debug!("var_region({:?}) = {:?}", id, scope);
// Two scopes intersect iff one is nested within the other.
559 pub fn scopes_intersect(&self, scope1: Scope, scope2: Scope) -> bool {
560 self.is_subscope_of(scope1, scope2) ||
561 self.is_subscope_of(scope2, scope1)
564 /// Returns `true` if `subscope` is equal to or is lexically nested inside `superscope`, and
565 /// `false` otherwise.
566 pub fn is_subscope_of(&self,
570 let mut s = subscope;
571 debug!("is_subscope_of({:?}, {:?})", subscope, superscope);
// Walk up from `subscope`; reaching the root without meeting `superscope`
// means it is not nested inside.
572 while superscope != s {
573 match self.opt_encl_scope(s) {
575 debug!("is_subscope_of({:?}, {:?}, s={:?})=false",
576 subscope, superscope, s);
579 Some(scope) => s = scope
583 debug!("is_subscope_of({:?}, {:?})=true", subscope, superscope);
588 /// Returns the ID of the innermost containing body.
589 pub fn containing_body(&self, mut scope: Scope) -> Option<hir::ItemLocalId> {
// A body is delimited by its `CallSite` scope, so walk up until one is hit.
591 if let ScopeData::CallSite = scope.data {
592 return Some(scope.item_local_id());
595 scope = self.opt_encl_scope(scope)?;
599 /// Finds the nearest common ancestor of two scopes. That is, finds the
600 /// smallest scope which is greater than or equal to both `scope_a` and
602 pub fn nearest_common_ancestor(&self, scope_a: Scope, scope_b: Scope) -> Scope {
603 if scope_a == scope_b { return scope_a; }
608 // Get the depth of each scope's parent. If either scope has no parent,
609 // it must be the root, which means we can stop immediately because the
610 // root must be the nearest common ancestor. (In practice, this is
611 // moderately common.)
612 let (parent_a, parent_a_depth) = match self.parent_map.get(&a) {
616 let (parent_b, parent_b_depth) = match self.parent_map.get(&b) {
621 if parent_a_depth > parent_b_depth {
622 // `a` is lower than `b`. Move `a` up until it's at the same depth
623 // as `b`. The first move up is trivial because we already found
624 // `parent_a` above; the loop does the remaining N-1 moves.
626 for _ in 0..(parent_a_depth - parent_b_depth - 1) {
627 a = self.parent_map.get(&a).unwrap().0;
629 } else if parent_b_depth > parent_a_depth {
630 // `b` is lower than `a`.
632 for _ in 0..(parent_b_depth - parent_a_depth - 1) {
633 b = self.parent_map.get(&b).unwrap().0;
636 // Both scopes are at the same depth, and we know they're not equal
637 // because that case was tested for at the top of this function. So
638 // we can trivially move them both up one level now.
639 assert!(parent_a_depth != 0);
644 // Now both scopes are at the same level. We move upwards in lockstep
645 // until they match. In practice, this loop is almost always executed
646 // zero times because `a` is almost always a direct ancestor of `b` or
649 a = self.parent_map.get(&a).unwrap().0;
650 b = self.parent_map.get(&b).unwrap().0;
656 /// Assuming that the provided region was defined within this `ScopeTree`,
657 /// returns the outermost `Scope` that the region outlives.
658 pub fn early_free_scope(&self, tcx: TyCtxt<'tcx>, br: &ty::EarlyBoundRegion) -> Scope {
659 let param_owner = tcx.parent(br.def_id).unwrap();
661 let param_owner_id = tcx.hir().as_local_hir_id(param_owner).unwrap();
662 let scope = tcx.hir().maybe_body_owned_by(param_owner_id).map(|body_id| {
663 tcx.hir().body(body_id).value.hir_id.local_id
664 }).unwrap_or_else(|| {
665 // The lifetime was defined on node that doesn't own a body,
666 // which in practice can only mean a trait or an impl, that
667 // is the parent of a method, and that is enforced below.
668 if Some(param_owner_id) != self.root_parent {
669 tcx.sess.delay_span_bug(
671 &format!("free_scope: {:?} not recognized by the \
672 region scope tree for {:?} / {:?}",
674 self.root_parent.map(|id| tcx.hir().local_def_id(id)),
675 self.root_body.map(|hir_id| DefId::local(hir_id.owner))));
678 // The trait/impl lifetime is in scope for the method's body.
679 self.root_body.unwrap().local_id
682 Scope { id: scope, data: ScopeData::CallSite }
685 /// Assuming that the provided region was defined within this `ScopeTree`,
686 /// returns the outermost `Scope` that the region outlives.
687 pub fn free_scope(&self, tcx: TyCtxt<'tcx>, fr: &ty::FreeRegion) -> Scope {
688 let param_owner = match fr.bound_region {
689 ty::BoundRegion::BrNamed(def_id, _) => {
690 tcx.parent(def_id).unwrap()
695 // Ensure that the named late-bound lifetimes were defined
696 // on the same function that they ended up being freed in.
697 assert_eq!(param_owner, fr.scope);
699 let param_owner_id = tcx.hir().as_local_hir_id(param_owner).unwrap();
700 let body_id = tcx.hir().body_owned_by(param_owner_id);
701 Scope { id: tcx.hir().body(body_id).value.hir_id.local_id, data: ScopeData::CallSite }
704 /// Checks whether the given scope contains a `yield`. If so,
705 /// returns `Some((span, expr_count))` with the span of a yield we found and
706 /// the number of expressions and patterns appearing before the `yield` in the body + 1.
707 /// If there are multiple yields in a scope, the one with the highest number is returned.
708 pub fn yield_in_scope(&self, scope: Scope) -> Option<YieldData> {
709 self.yield_in_scope.get(&scope).cloned()
712 /// Checks whether the given scope contains a `yield` and if that yield could execute
713 /// after `expr`. If so, it returns the span of that `yield`.
714 /// `scope` must be inside the body.
715 pub fn yield_in_scope_for_expr(&self,
717 expr_hir_id: hir::HirId,
718 body: &'tcx hir::Body) -> Option<Span> {
719 self.yield_in_scope(scope).and_then(|YieldData { span, expr_and_pat_count, .. }| {
// Locate the postorder index of `expr_hir_id` and compare it against the
// yield's index: the yield can execute after `expr` iff its index is >=.
720 let mut visitor = ExprLocatorVisitor {
723 expr_and_pat_count: 0,
725 visitor.visit_body(body);
726 if expr_and_pat_count >= visitor.result.unwrap() {
734 /// Gives the number of expressions visited in a body.
735 /// Used to sanity check visit_expr call count when
736 /// calculating generator interiors.
737 pub fn body_expr_count(&self, body_id: hir::BodyId) -> Option<usize> {
738 self.body_expr_count.get(&body_id).map(|r| *r)
742 /// Records the lifetime of a local variable as `cx.var_parent`
743 fn record_var_lifetime(
744 visitor: &mut RegionResolutionVisitor<'_>,
745 var_id: hir::ItemLocalId,
// `var_parent` may be `None` for bodies without a block, e.g. bare extern
// fn declarations — in that case there is nothing to record.
748 match visitor.cx.var_parent {
750 // this can happen in extern fn declarations like
752 // extern fn isalnum(c: c_int) -> c_int
754 Some((parent_scope, _)) =>
755 visitor.scope_tree.record_var_scope(var_id, parent_scope),
759 fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx hir::Block) {
760 debug!("resolve_block(blk.hir_id={:?})", blk.hir_id);
// Save the context so it can be restored after walking the block's contents.
762 let prev_cx = visitor.cx;
764 // We treat the tail expression in the block (if any) somewhat
765 // differently from the statements. The issue has to do with
766 // temporary lifetimes. Consider the following:
769 // let inner = ... (&bar()) ...;
771 // (... (&foo()) ...) // (the tail expression)
772 // }, other_argument());
774 // Each of the statements within the block is a terminating
775 // scope, and thus a temporary (e.g., the result of calling
776 // `bar()` in the initializer expression for `let inner = ...;`)
777 // will be cleaned up immediately after its corresponding
778 // statement (i.e., `let inner = ...;`) executes.
780 // On the other hand, temporaries associated with evaluating the
781 // tail expression for the block are assigned lifetimes so that
782 // they will be cleaned up as part of the terminating scope
783 // *surrounding* the block expression. Here, the terminating
784 // scope for the block expression is the `quux(..)` call; so
785 // those temporaries will only be cleaned up *after* both
786 // `other_argument()` has run and also the call to `quux(..)`
787 // itself has returned.
789 visitor.enter_node_scope_with_dtor(blk.hir_id.local_id);
790 visitor.cx.var_parent = visitor.cx.parent;
793 // This block should be kept approximately in sync with
794 // `intravisit::walk_block`. (We manually walk the block, rather
795 // than call `walk_block`, in order to maintain precise
796 // index information.)
798 for (i, statement) in blk.stmts.iter().enumerate() {
799 match statement.kind {
800 hir::StmtKind::Local(..) |
801 hir::StmtKind::Item(..) => {
802 // Each declaration introduces a subscope for bindings
803 // introduced by the declaration; this subscope covers a
804 // suffix of the block. Each subscope in a block has the
805 // previous subscope in the block as a parent, except for
806 // the first such subscope, which has the block itself as a
810 id: blk.hir_id.local_id,
811 data: ScopeData::Remainder(FirstStatementIndex::new(i))
814 visitor.cx.var_parent = visitor.cx.parent;
816 hir::StmtKind::Expr(..) |
817 hir::StmtKind::Semi(..) => {}
819 visitor.visit_stmt(statement)
821 walk_list!(visitor, visit_expr, &blk.expr);
// Restore the context saved on entry.
824 visitor.cx = prev_cx;
// Resolves scopes for a single match arm: the arm gets its own node scope,
// and both the arm body and any guard expression are terminating scopes.
827 fn resolve_arm<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, arm: &'tcx hir::Arm) {
828 let prev_cx = visitor.cx;
832 id: arm.hir_id.local_id,
833 data: ScopeData::Node,
836 visitor.cx.var_parent = visitor.cx.parent;
// The arm body only runs when this arm matches, so it is terminating.
838 visitor.terminating_scopes.insert(arm.body.hir_id.local_id);
// Likewise a guard expression executes conditionally.
840 if let Some(hir::Guard::If(ref expr)) = arm.guard {
841 visitor.terminating_scopes.insert(expr.hir_id.local_id);
844 intravisit::walk_arm(visitor, arm);
846 visitor.cx = prev_cx;
// Resolves scopes for a pattern; increments the postorder expression/pattern
// counter *after* walking children (must mirror `ExprLocatorVisitor`).
849 fn resolve_pat<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, pat: &'tcx hir::Pat) {
850 visitor.record_child_scope(Scope { id: pat.hir_id.local_id, data: ScopeData::Node });
852 // If this is a binding then record the lifetime of that binding.
853 if let PatKind::Binding(..) = pat.kind {
854 record_var_lifetime(visitor, pat.hir_id.local_id, pat.span);
857 debug!("resolve_pat - pre-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
859 intravisit::walk_pat(visitor, pat);
861 visitor.expr_and_pat_count += 1;
863 debug!("resolve_pat - post-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
866 fn resolve_stmt<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, stmt: &'tcx hir::Stmt) {
867 let stmt_id = stmt.hir_id.local_id;
868 debug!("resolve_stmt(stmt.id={:?})", stmt_id);
870 // Every statement will clean up the temporaries created during
871 // execution of that statement. Therefore each statement has an
872 // associated destruction scope that represents the scope of the
873 // statement plus its destructors, and thus the scope for which
874 // regions referenced by the destructors need to survive.
875 visitor.terminating_scopes.insert(stmt_id);
877 let prev_parent = visitor.cx.parent;
878 visitor.enter_node_scope_with_dtor(stmt_id);
880 intravisit::walk_stmt(visitor, stmt);
// Restore the expression parent saved on entry.
882 visitor.cx.parent = prev_parent;
// Builds the scope hierarchy for one expression: enters the expression's
// own (possibly destruction-wrapped) scope, marks conditionally-executed
// subexpressions as terminating scopes, increments the pre-order
// `expr_and_pat_count` used by generator/yield analysis, and records
// `yield_in_scope` data for `yield` expressions.
// NOTE(review): blank/brace-only lines are elided in this paste; in
// particular the `match expr.kind {` header before content line 901 and
// several closing braces are missing — numbering jumps mark each gap.
885 fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx hir::Expr) {
886 debug!("resolve_expr - pre-increment {} expr = {:?}", visitor.expr_and_pat_count, expr);
// Save the context so sibling expressions are not parented under this one.
888 let prev_cx = visitor.cx;
889 visitor.enter_node_scope_with_dtor(expr.hir_id.local_id);
// Small helper: mark a subexpression's scope as terminating (temporaries
// inside it must not outlive it). Borrows the set once so the closure can
// be called from multiple match arms below.
892 let terminating_scopes = &mut visitor.terminating_scopes;
893 let mut terminating = |id: hir::ItemLocalId| {
894 terminating_scopes.insert(id);
897 // Conditional or repeating scopes are always terminating
898 // scopes, meaning that temporaries cannot outlive them.
899 // This ensures fixed size stacks.
901 hir::ExprKind::Binary(
902 source_map::Spanned { node: hir::BinOpKind::And, .. }, _, ref r) |
903 hir::ExprKind::Binary(
904 source_map::Spanned { node: hir::BinOpKind::Or, .. }, _, ref r) => {
905 // For shortcircuiting operators, mark the RHS as a terminating
906 // scope since it only executes conditionally.
907 terminating(r.hir_id.local_id);
910 hir::ExprKind::Loop(ref body, _, _) => {
911 terminating(body.hir_id.local_id);
914 hir::ExprKind::DropTemps(ref expr) => {
915 // `DropTemps(expr)` does not denote a conditional scope.
916 // Rather, we want to achieve the same behavior as `{ let _t = expr; _t }`.
917 terminating(expr.hir_id.local_id);
920 hir::ExprKind::AssignOp(..) | hir::ExprKind::Index(..) |
921 hir::ExprKind::Unary(..) | hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) => {
922 // FIXME(https://github.com/rust-lang/rfcs/issues/811) Nested method calls
// NOTE(review): the body of this FIXME comment (content lines 923-945) is
// partially elided in this paste; only fragments remain below.
924 // The lifetimes for a call or method call look as follows:
932 // The idea is that call.callee_id represents *the time when
933 // the invoked function is actually running* and call.id
934 // represents *the time to prepare the arguments and make the
935 // call*. See the section "Borrows in Calls" borrowck/README.md
936 // for an extended explanation of why this distinction is
939 // record_superlifetime(new_cx, expr.callee_id);
// Remember the current pessimistic-yield flag so it can be restored after
// the special AssignOp handling below.
946 let prev_pessimistic = visitor.pessimistic_yield;
948 // Ordinarily, we can rely on the visit order of HIR intravisit
949 // to correspond to the actual execution order of statements.
950 // However, there's a weird corner case with compund assignment
951 // operators (e.g. `a += b`). The evaluation order depends on whether
952 // or not the operator is overloaded (e.g. whether or not a trait
953 // like AddAssign is implemented).
955 // For primitive types (which, despite having a trait impl, don't actually
956 // end up calling it), the evluation order is right-to-left. For example,
957 // the following code snippet:
960 // *{println!("LHS!"); y} += {println!("RHS!"); 1};
967 // However, if the operator is used on a non-primitive type,
968 // the evaluation order will be left-to-right, since the operator
969 // actually get desugared to a method call. For example, this
970 // nearly identical code snippet:
972 // let y = &mut String::new();
973 // *{println!("LHS String"); y} += {println!("RHS String"); "hi"};
979 // To determine the actual execution order, we need to perform
980 // trait resolution. Unfortunately, we need to be able to compute
981 // yield_in_scope before type checking is even done, as it gets
982 // used by AST borrowcheck.
984 // Fortunately, we don't need to know the actual execution order.
985 // It suffices to know the 'worst case' order with respect to yields.
986 // Specifically, we need to know the highest 'expr_and_pat_count'
987 // that we could assign to the yield expression. To do this,
988 // we pick the greater of the two values from the left-hand
989 // and right-hand expressions. This makes us overly conservative
990 // about what types could possibly live across yield points,
991 // but we will never fail to detect that a type does actually
992 // live across a yield point. The latter part is critical -
993 // we're already overly conservative about what types will live
994 // across yield points, as the generated MIR will determine
995 // when things are actually live. However, for typecheck to work
996 // properly, we can't miss any types.
1000 // Manually recurse over closures, because they are the only
1001 // case of nested bodies that share the parent environment.
1002 hir::ExprKind::Closure(.., body, _, _) => {
1003 let body = visitor.tcx.hir().body(body);
1004 visitor.visit_body(body);
1006 hir::ExprKind::AssignOp(_, ref left_expr, ref right_expr) => {
1007 debug!("resolve_expr - enabling pessimistic_yield, was previously {}",
// Visit the RHS first (the "pessimistic" right-to-left order), recording
// which scopes may need their yield counts fixed up afterwards.
1010 let start_point = visitor.fixup_scopes.len();
1011 visitor.pessimistic_yield = true;
1013 // If the actual execution order turns out to be right-to-left,
1014 // then we're fine. However, if the actual execution order is left-to-right,
1015 // then we'll assign too low a count to any `yield` expressions
1016 // we encounter in 'right_expression' - they should really occur after all of the
1017 // expressions in 'left_expression'.
1018 visitor.visit_expr(&right_expr);
1019 visitor.pessimistic_yield = prev_pessimistic;
1021 debug!("resolve_expr - restoring pessimistic_yield to {}", prev_pessimistic);
1022 visitor.visit_expr(&left_expr);
1023 debug!("resolve_expr - fixing up counts to {}", visitor.expr_and_pat_count);
1025 // Remove and process any scopes pushed by the visitor
1026 let target_scopes = visitor.fixup_scopes.drain(start_point..);
// Raise each recorded yield count to the post-LHS count, so yields in the
// RHS are treated as if they occurred after the whole LHS was evaluated.
1028 for scope in target_scopes {
1029 let mut yield_data = visitor.scope_tree.yield_in_scope.get_mut(&scope).unwrap();
1030 let count = yield_data.expr_and_pat_count;
1031 let span = yield_data.span;
1033 // expr_and_pat_count never decreases. Since we recorded counts in yield_in_scope
1034 // before walking the left-hand side, it should be impossible for the recorded
1035 // count to be greater than the left-hand side count.
1036 if count > visitor.expr_and_pat_count {
1037 bug!("Encountered greater count {} at span {:?} - expected no greater than {}",
1038 count, span, visitor.expr_and_pat_count);
1040 let new_count = visitor.expr_and_pat_count;
1041 debug!("resolve_expr - increasing count for scope {:?} from {} to {} at span {:?}",
1042 scope, count, new_count, span);
1044 yield_data.expr_and_pat_count = new_count;
// Default case: plain pre-order walk into subexpressions.
1049 _ => intravisit::walk_expr(visitor, expr)
// Post-order increment: this expression's position in the traversal order,
// consumed by the yield analysis above and by `resolve_pat`.
1052 visitor.expr_and_pat_count += 1;
1054 debug!("resolve_expr post-increment {}, expr = {:?}", visitor.expr_and_pat_count, expr);
1056 if let hir::ExprKind::Yield(_, source) = &expr.kind {
1057 // Mark this expr's scope and all parent scopes as containing `yield`.
1058 let mut scope = Scope { id: expr.hir_id.local_id, data: ScopeData::Node };
// NOTE(review): the enclosing `loop {` and part of the `YieldData`
// initializer (content lines 1059-1064, including the `span`/`source`
// fields) are elided in this paste.
1060 let data = YieldData {
1062 expr_and_pat_count: visitor.expr_and_pat_count,
1065 visitor.scope_tree.yield_in_scope.insert(scope, data);
1066 if visitor.pessimistic_yield {
1067 debug!("resolve_expr in pessimistic_yield - marking scope {:?} for fixup", scope);
1068 visitor.fixup_scopes.push(scope);
1071 // Keep traversing up while we can.
1072 match visitor.scope_tree.parent_map.get(&scope) {
1073 // Don't cross from closure bodies to their parent.
1074 Some(&(superscope, _)) => match superscope.data {
1075 ScopeData::CallSite => break,
1076 _ => scope = superscope
// Restore the context saved at entry so siblings get the right parent.
1083 visitor.cx = prev_cx;
// Resolves a `let` (or the implicit tail of a constant body): applies the
// *extended temporary lifetime* rules, then visits initializer and pattern
// in execution order so `expr_and_pat_count` stays consistent.
// NOTE(review): the `) {` closing the signature (content line 1090) and
// several closing braces near the end are elided in this paste.
1086 fn resolve_local<'tcx>(
1087 visitor: &mut RegionResolutionVisitor<'tcx>,
1088 pat: Option<&'tcx hir::Pat>,
1089 init: Option<&'tcx hir::Expr>,
1091 debug!("resolve_local(pat={:?}, init={:?})", pat, init);
// The scope to which extended temporaries are promoted: the enclosing
// block's var-parent scope (None e.g. in constant initializers).
1093 let blk_scope = visitor.cx.var_parent.map(|(p, _)| p);
1095 // As an exception to the normal rules governing temporary
1096 // lifetimes, initializers in a let have a temporary lifetime
1097 // of the enclosing block. This means that e.g., a program
1098 // like the following is legal:
1100 // let ref x = HashMap::new();
1102 // Because the hash map will be freed in the enclosing block.
1104 // We express the rules more formally based on 3 grammars (defined
1105 // fully in the helpers below that implement them):
1107 // 1. `E&`, which matches expressions like `&<rvalue>` that
1108 // own a pointer into the stack.
1110 // 2. `P&`, which matches patterns like `ref x` or `(ref x, ref
1111 // y)` that produce ref bindings into the value they are
1112 // matched against or something (at least partially) owned by
1113 // the value they are matched against. (By partially owned,
1114 // I mean that creating a binding into a ref-counted or managed value
1115 // would still count.)
1117 // 3. `ET`, which matches both rvalues like `foo()` as well as places
1118 // based on rvalues like `foo().x[2].y`.
1120 // A subexpression `<rvalue>` that appears in a let initializer
1121 // `let pat [: ty] = expr` has an extended temporary lifetime if
1122 // any of the following conditions are met:
1124 // A. `pat` matches `P&` and `expr` matches `ET`
1125 // (covers cases where `pat` creates ref bindings into an rvalue
1126 // produced by `expr`)
1127 // B. `ty` is a borrowed pointer and `expr` matches `ET`
1128 // (covers cases where coercion creates a borrow)
1129 // C. `expr` matches `E&`
1130 // (covers cases `expr` borrows an rvalue that is then assigned
1131 // to memory (at least partially) owned by the binding)
1133 // Here are some examples hopefully giving an intuition where each
1134 // rule comes into play and why:
1136 // Rule A. `let (ref x, ref y) = (foo().x, 44)`. The rvalue `(22, 44)`
1137 // would have an extended lifetime, but not `foo()`.
1139 // Rule B. `let x = &foo().x`. The rvalue `foo()` would have extended
1142 // In some cases, multiple rules may apply (though not to the same
1143 // rvalue). For example:
1145 // let ref x = [&a(), &b()];
1147 // Here, the expression `[...]` has an extended lifetime due to rule
1148 // A, but the inner rvalues `a()` and `b()` have an extended lifetime
// Rule C (`E&` borrows) is applied unconditionally; rule A only when the
// pattern actually creates ref bindings (`is_binding_pat`).
1151 if let Some(expr) = init {
1152 record_rvalue_scope_if_borrow_expr(visitor, &expr, blk_scope);
1154 if let Some(pat) = pat {
1155 if is_binding_pat(pat) {
1156 record_rvalue_scope(visitor, &expr, blk_scope);
1161 // Make sure we visit the initializer first, so expr_and_pat_count remains correct
1162 if let Some(expr) = init {
1163 visitor.visit_expr(expr);
1165 if let Some(pat) = pat {
1166 visitor.visit_pat(pat);
1169 /// Returns `true` if `pat` match the `P&` non-terminal.
// NOTE(review): the grammar lines for `ref x` / `box P&` (content lines
// 1170-1171 and 1176) are elided in this paste.
1172 /// | StructName { ..., P&, ... }
1173 /// | VariantName(..., P&, ...)
1174 /// | [ ..., P&, ... ]
1175 /// | ( ..., P&, ... )
// Recursively checks whether any part of the pattern creates a by-ref
// binding into the matched value, which triggers lifetime-extension rule A
// in `resolve_local`.
// NOTE(review): the `match pat.kind {` header (content line ~1201), the
// wildcard arm for the remaining PatKind variants, and the closing braces
// are elided here.
1177 fn is_binding_pat(pat: &hir::Pat) -> bool {
1178 // Note that the code below looks for *explicit* refs only, that is, it won't
1179 // know about *implicit* refs as introduced in #42640.
1181 // This is not a problem. For example, consider
1183 // let (ref x, ref y) = (Foo { .. }, Bar { .. });
1185 // Due to the explicit refs on the left hand side, the below code would signal
1186 // that the temporary value on the right hand side should live until the end of
1187 // the enclosing block (as opposed to being dropped after the let is complete).
1189 // To create an implicit ref, however, you must have a borrowed value on the RHS
1190 // already, as in this example (which won't compile before #42640):
1192 // let Foo { x, .. } = &Foo { x: ..., ... };
1196 // let Foo { ref x, .. } = Foo { ... };
1198 // In the former case (the implicit ref version), the temporary is created by the
1199 // & expression, and its lifetime would be extended to the end of the block (due
1200 // to a different rule, not the below code).
// Explicit `ref` / `ref mut` bindings are the base case.
1202 PatKind::Binding(hir::BindingAnnotation::Ref, ..) |
1203 PatKind::Binding(hir::BindingAnnotation::RefMut, ..) => true,
// Aggregate patterns: a ref binding anywhere inside counts.
1205 PatKind::Struct(_, ref field_pats, _) => {
1206 field_pats.iter().any(|fp| is_binding_pat(&fp.pat))
// Slice patterns have three sections: before, middle (rest), and after.
1209 PatKind::Slice(ref pats1, ref pats2, ref pats3) => {
1210 pats1.iter().any(|p| is_binding_pat(&p)) ||
1211 pats2.iter().any(|p| is_binding_pat(&p)) ||
1212 pats3.iter().any(|p| is_binding_pat(&p))
1215 PatKind::TupleStruct(_, ref subpats, _) |
1216 PatKind::Tuple(ref subpats, _) => {
1217 subpats.iter().any(|p| is_binding_pat(&p))
1220 PatKind::Box(ref subpat) => {
1221 is_binding_pat(&subpat)
1228 /// If `expr` matches the `E&` grammar, then records an extended rvalue scope as appropriate:
// NOTE(review): the `E& = & ET` production line and the tail of this doc
// comment (content lines 1229-1230, 1234-1237) are elided in this paste.
1231 /// | StructName { ..., f: E&, ... }
1232 /// | [ ..., E&, ... ]
1233 /// | ( ..., E&, ... )
// Walks the expression looking for `&<rvalue>` borrows (possibly nested in
// aggregates, casts, or block tails); each borrowed rvalue gets its
// temporary scope extended to `blk_id` via `record_rvalue_scope`.
// NOTE(review): the `expr: &'tcx hir::Expr,` parameter line, `) {`, the
// `match expr.kind {` header, the wildcard arm, and closing braces are
// elided here (content lines 1240, 1242-1243, 1268-1272).
1238 fn record_rvalue_scope_if_borrow_expr<'tcx>(
1239 visitor: &mut RegionResolutionVisitor<'tcx>,
1241 blk_id: Option<Scope>,
// `&subexpr`: extend the borrowed rvalue (ET grammar) and keep scanning
// inside it for further borrows.
1244 hir::ExprKind::AddrOf(_, ref subexpr) => {
1245 record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id);
1246 record_rvalue_scope(visitor, &subexpr, blk_id);
1248 hir::ExprKind::Struct(_, ref fields, _) => {
1249 for field in fields {
1250 record_rvalue_scope_if_borrow_expr(
1251 visitor, &field.expr, blk_id);
1254 hir::ExprKind::Array(ref subexprs) |
1255 hir::ExprKind::Tup(ref subexprs) => {
1256 for subexpr in subexprs {
1257 record_rvalue_scope_if_borrow_expr(
1258 visitor, &subexpr, blk_id);
1261 hir::ExprKind::Cast(ref subexpr, _) => {
1262 record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id)
// A block's tail expression propagates the borrow outward.
1264 hir::ExprKind::Block(ref block, _) => {
1265 if let Some(ref subexpr) = block.expr {
1266 record_rvalue_scope_if_borrow_expr(
1267 visitor, &subexpr, blk_id);
1274 /// Applied to an expression `expr` if `expr` -- or something owned or partially owned by
1275 /// `expr` -- is going to be indirectly referenced by a variable in a let statement. In that
1276 /// case, the "temporary lifetime" or `expr` is extended to be the block enclosing the `let`
// NOTE(review): the ET grammar productions of this doc comment (content
// lines 1277-1278, 1281-1287) are elided in this paste.
1279 /// More formally, if `expr` matches the grammar `ET`, record the rvalue scope of the matching
1280 /// `<rvalue>` as `blk_id`:
1288 /// Note: ET is intended to match "rvalues or places based on rvalues".
// NOTE(review): the `expr: &'tcx hir::Expr,` parameter line, `) {`, the
// `loop { match expr.kind {` wrapper, and the loop tail that descends into
// `subexpr` / returns on other kinds (content lines ~1291, 1293, 1295,
// 1302-1316) are elided here.
1289 fn record_rvalue_scope<'tcx>(
1290 visitor: &mut RegionResolutionVisitor<'tcx>,
1292 blk_scope: Option<Scope>,
1294 let mut expr = expr;
1296 // Note: give all the expressions matching `ET` with the
1297 // extended temporary lifetime, not just the innermost rvalue,
1298 // because in codegen if we must compile e.g., `*rvalue()`
1299 // into a temporary, we request the temporary scope of the
1300 // outer expression.
1301 visitor.scope_tree.record_rvalue_scope(expr.hir_id.local_id, blk_scope);
// Place projections based on rvalues (`&e`, `*e`, `e.f`, `e[i]`) keep
// descending so the underlying rvalue is extended too.
1304 hir::ExprKind::AddrOf(_, ref subexpr) |
1305 hir::ExprKind::Unary(hir::UnDeref, ref subexpr) |
1306 hir::ExprKind::Field(ref subexpr, _) |
1307 hir::ExprKind::Index(ref subexpr, _) => {
// Inherent helpers for maintaining the parent-scope context while walking
// the HIR.
1318 impl<'tcx> RegionResolutionVisitor<'tcx> {
1319 /// Records the current parent (if any) as the parent of `child_scope`.
1320 /// Returns the depth of `child_scope`.
1321 fn record_child_scope(&mut self, child_scope: Scope) -> ScopeDepth {
1322 let parent = self.cx.parent;
1323 self.scope_tree.record_scope_parent(child_scope, parent);
1324 // If `child_scope` has no parent, it must be the root node, and so has
1325 // a depth of 1. Otherwise, its depth is one more than its parent's.
1326 parent.map_or(1, |(_p, d)| d + 1)
1329 /// Records the current parent (if any) as the parent of `child_scope`,
1330 /// and sets `child_scope` as the new current parent.
1331 fn enter_scope(&mut self, child_scope: Scope) {
1332 let child_depth = self.record_child_scope(child_scope);
1333 self.cx.parent = Some((child_scope, child_depth));
// Enters a Node scope for `id`, preceded by a Destruction scope when `id`
// was marked as terminating by an earlier visit.
1336 fn enter_node_scope_with_dtor(&mut self, id: hir::ItemLocalId) {
1337 // If node was previously marked as a terminating scope during the
1338 // recursive visit of its parent node in the AST, then we need to
1339 // account for the destruction scope representing the scope of
1340 // the destructors that run immediately after it completes.
1341 if self.terminating_scopes.contains(&id) {
1342 self.enter_scope(Scope { id, data: ScopeData::Destruction });
// NOTE(review): the `}` closing the `if` above (content line 1343) is
// elided in this paste — the Node scope below is entered unconditionally,
// nested under the optional Destruction scope.
1344 self.enter_scope(Scope { id, data: ScopeData::Node });
// The intravisit driver: dispatches each HIR node kind to its resolve_*
// helper above.
1348 impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> {
// No automatic nested visiting: closure bodies are recursed into manually
// by `resolve_expr` (the Closure arm), so they share this environment.
1349 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
1350 NestedVisitorMap::None
1353 fn visit_block(&mut self, b: &'tcx Block) {
1354 resolve_block(self, b);
// Visits a function/closure/constant body: saves the visitor state,
// builds the CallSite/Arguments scope roots, walks params and body, then
// restores the saved state. Called re-entrantly for nested closures.
1357 fn visit_body(&mut self, body: &'tcx hir::Body) {
1358 let body_id = body.id();
1359 let owner_id = self.tcx.hir().body_owner(body_id);
// NOTE(review): parts of this debug! call and the surrounding lines
// (content lines 1360, 1362, 1364-1366) are elided in this paste.
1361 debug!("visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})",
1363 self.tcx.sess.source_map().span_to_string(body.value.span),
// Save outer state so a nested closure body doesn't clobber its parent's
// counters/scopes; restored at the end of this fn.
1367 let outer_ec = mem::replace(&mut self.expr_and_pat_count, 0);
1368 let outer_cx = self.cx;
1369 let outer_ts = mem::take(&mut self.terminating_scopes);
1370 self.terminating_scopes.insert(body.value.hir_id.local_id);
// Link nested closure bodies to their enclosing body's root scope.
1372 if let Some(root_id) = self.cx.root_id {
1373 self.scope_tree.record_closure_parent(body.value.hir_id.local_id, root_id);
1375 self.cx.root_id = Some(body.value.hir_id.local_id);
1377 self.enter_scope(Scope { id: body.value.hir_id.local_id, data: ScopeData::CallSite });
1378 self.enter_scope(Scope { id: body.value.hir_id.local_id, data: ScopeData::Arguments });
1380 // The arguments and `self` are parented to the fn.
1381 self.cx.var_parent = self.cx.parent.take();
1382 for param in &body.params {
1383 self.visit_pat(&param.pat);
1386 // The body of the every fn is a root scope.
1387 self.cx.parent = self.cx.var_parent;
1388 if self.tcx.hir().body_owner_kind(owner_id).is_fn_or_closure() {
1389 self.visit_expr(&body.value)
// NOTE(review): the `} else {` between the fn case above and the constant
// case below (content line 1390) is elided in this paste.
1391 // Only functions have an outer terminating (drop) scope, while
1392 // temporaries in constant initializers may be 'static, but only
1393 // according to rvalue lifetime semantics, using the same
1394 // syntactical rules used for let initializers.
1396 // e.g., in `let x = &f();`, the temporary holding the result from
1397 // the `f()` call lives for the entirety of the surrounding block.
1399 // Similarly, `const X: ... = &f();` would have the result of `f()`
1400 // live for `'static`, implying (if Drop restrictions on constants
1401 // ever get lifted) that the value *could* have a destructor, but
1402 // it'd get leaked instead of the destructor running during the
1403 // evaluation of `X` (if at all allowed by CTFE).
1405 // However, `const Y: ... = g(&f());`, like `let y = g(&f());`,
1406 // would *not* let the `f()` temporary escape into an outer scope
1407 // (i.e., `'static`), which means that after `g` returns, it drops,
1408 // and all the associated destruction scope rules apply.
1409 self.cx.var_parent = None;
1410 resolve_local(self, None, Some(&body.value));
// Generators need the total expr/pat count to size their yield analysis.
1413 if body.generator_kind.is_some() {
1414 self.scope_tree.body_expr_count.insert(body_id, self.expr_and_pat_count);
1417 // Restore context we had at the start.
1418 self.expr_and_pat_count = outer_ec;
// NOTE(review): the `self.cx = outer_cx;` restore (content line 1419) is
// elided in this paste; `outer_cx` is otherwise unused here.
1420 self.terminating_scopes = outer_ts;
1423 fn visit_arm(&mut self, a: &'tcx Arm) {
1424 resolve_arm(self, a);
1426 fn visit_pat(&mut self, p: &'tcx Pat) {
1427 resolve_pat(self, p);
1429 fn visit_stmt(&mut self, s: &'tcx Stmt) {
1430 resolve_stmt(self, s);
1432 fn visit_expr(&mut self, ex: &'tcx Expr) {
1433 resolve_expr(self, ex);
1435 fn visit_local(&mut self, l: &'tcx Local) {
1436 resolve_local(self, Some(&l.pat), l.init.as_ref().map(|e| &**e));
// Query provider: computes the `ScopeTree` for a body-owning item.
// Closures delegate to their enclosing function's tree; items without a
// body get an empty default tree.
1440 fn region_scope_tree(tcx: TyCtxt<'_>, def_id: DefId) -> &ScopeTree {
// Closures share the scope tree of the function they are defined in.
1441 let closure_base_def_id = tcx.closure_base_def_id(def_id);
1442 if closure_base_def_id != def_id {
1443 return tcx.region_scope_tree(closure_base_def_id);
1446 let id = tcx.hir().as_local_hir_id(def_id).unwrap();
1447 let scope_tree = if let Some(body_id) = tcx.hir().maybe_body_owned_by(id) {
// NOTE(review): several fields of this struct literal (content lines
// 1449, 1452-1456 — e.g. `tcx` and the initial `cx: Context`) are elided
// in this paste.
1448 let mut visitor = RegionResolutionVisitor {
1450 scope_tree: ScopeTree::default(),
1451 expr_and_pat_count: 0,
1457 terminating_scopes: Default::default(),
1458 pessimistic_yield: false,
1459 fixup_scopes: vec![],
1462 let body = tcx.hir().body(body_id);
1463 visitor.scope_tree.root_body = Some(body.value.hir_id);
1465 // If the item is an associated const or a method,
1466 // record its impl/trait parent, as it can also have
1467 // lifetime parameters free in this body.
// NOTE(review): the `Node::ImplItem(_)` arm and the wildcard arm of this
// match (content lines 1469, 1472-1475) are elided in this paste.
1468 match tcx.hir().get(id) {
1470 Node::TraitItem(_) => {
1471 visitor.scope_tree.root_parent = Some(tcx.hir().get_parent_item(id));
1476 visitor.visit_body(body);
// NOTE(review): the `visitor.scope_tree` result expression and the `else`
// branch wrapper (content lines 1477-1479, 1481-1482) are elided here;
// items without a body fall back to the empty default tree below.
1480 ScopeTree::default()
1483 tcx.arena.alloc(scope_tree)
// Registers this module's queries with the global providers table.
// NOTE(review): the `region_scope_tree,` field and the `..*providers };`
// tail of this struct-update expression (content lines 1488-1492) are
// elided in this paste.
1486 pub fn provide(providers: &mut Providers<'_>) {
1487 *providers = Providers {
1493 impl<'a> HashStable<StableHashingContext<'a>> for ScopeTree {
1494 fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
1498 ref body_expr_count,
1501 ref destruction_scopes,
1507 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
1508 root_body.hash_stable(hcx, hasher);
1509 root_parent.hash_stable(hcx, hasher);
1512 body_expr_count.hash_stable(hcx, hasher);
1513 parent_map.hash_stable(hcx, hasher);
1514 var_map.hash_stable(hcx, hasher);
1515 destruction_scopes.hash_stable(hcx, hasher);
1516 rvalue_scopes.hash_stable(hcx, hasher);
1517 closure_tree.hash_stable(hcx, hasher);
1518 yield_in_scope.hash_stable(hcx, hasher);