1 //! This file builds up the `ScopeTree`, which describes
2 //! the parent links in the region hierarchy.
4 //! For more information about how MIR-based region-checking works,
5 //! see the [rustc dev guide].
7 //! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/borrow_check.html
9 use rustc_ast::walk_list;
10 use rustc_data_structures::fx::FxHashSet;
12 use rustc_hir::def_id::DefId;
13 use rustc_hir::intravisit::{self, Visitor};
14 use rustc_hir::{Arm, Block, Expr, Local, Pat, PatKind, Stmt};
15 use rustc_index::vec::Idx;
16 use rustc_middle::middle::region::*;
17 use rustc_middle::ty::TyCtxt;
18 use rustc_span::source_map;
23 #[derive(Debug, Copy, Clone)]
// Tracks the "current parent scope" state threaded through the visitor.
// NOTE(review): the `struct Context {` header line and closing brace are
// missing from this extraction — the two fields below belong to that struct
// (it is constructed as `Context { parent: None, var_parent: None }` later).
25 /// The scope that contains any new variables declared, plus its depth in
// (doc comment appears truncated by the extraction — presumably "...in the scope tree")
27 var_parent: Option<(Scope, ScopeDepth)>,
29 /// Region parent of expressions, etc., plus its depth in the scope tree.
30 parent: Option<(Scope, ScopeDepth)>,
33 struct RegionResolutionVisitor<'tcx> {
// Walks one body's HIR and records scope parent links / yield data into
// `scope_tree`. NOTE(review): some fields used by the functions below
// (at least `tcx` and `cx`, per the construction site in `region_scope_tree`)
// are elided from this extraction.
36 // The number of expressions and patterns visited in the current body.
37 expr_and_pat_count: usize,
38 // When this is `true`, we record the `Scopes` we encounter
39 // when processing a Yield expression. This allows us to fix
// up the recorded yield counts after visiting an `AssignOp`'s operands
// out of source order (see the `AssignOp` arm of `resolve_expr`).
41 pessimistic_yield: bool,
42 // Stores scopes when `pessimistic_yield` is `true`.
43 fixup_scopes: Vec<Scope>,
44 // The generated scope tree.
45 scope_tree: ScopeTree,
49 /// `terminating_scopes` is a set containing the ids of each
50 /// statement, or conditional/repeating expression. These scopes
51 /// are called "terminating scopes" because, when attempting to
52 /// find the scope of a temporary, by default we search up the
53 /// enclosing scopes until we encounter the terminating scope. A
54 /// conditional/repeating expression is one which is not
55 /// guaranteed to execute exactly once upon entering the parent
56 /// scope. This could be because the expression only executes
57 /// conditionally, such as the expression `b` in `a && b`, or
58 /// because the expression may execute many times, such as a loop
59 /// body. The reason that we distinguish such expressions is that,
60 /// upon exiting the parent scope, we cannot statically know how
61 /// many times the expression executed, and thus if the expression
62 /// creates temporaries we cannot know statically how many such
63 /// temporaries we would have to cleanup. Therefore, we ensure that
64 /// the temporaries never outlast the conditional/repeating
65 /// expression, preventing the need for dynamic checks and/or
66 /// arbitrary amounts of stack space. Terminating scopes end
67 /// up being contained in a DestructionScope that contains the
68 /// destructor's execution.
69 terminating_scopes: FxHashSet<hir::ItemLocalId>,
72 /// Records the lifetime of a local variable as `cx.var_parent`
73 fn record_var_lifetime(
74 visitor: &mut RegionResolutionVisitor<'_>,
75 var_id: hir::ItemLocalId,
// NOTE(review): call sites pass a third argument (`pat.span`), so a `Span`
// parameter appears to be elided from this extraction, along with the
// `None => {}` match arm (a var with no parent scope is simply not recorded).
78 match visitor.cx.var_parent {
80 // this can happen in extern fn declarations like
82 // extern fn isalnum(c: c_int) -> c_int
84 Some((parent_scope, _)) => visitor.scope_tree.record_var_scope(var_id, parent_scope),
// Establishes the scope hierarchy for a block: the block itself becomes a
// node (plus destruction) scope, each `let`/item statement opens a
// `Remainder` subscope covering the rest of the block, and let-else gets
// special handling so its `else` block does not see the extended scopes.
// The previous context is restored on exit.
88 fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx hir::Block<'tcx>) {
89 debug!("resolve_block(blk.hir_id={:?})", blk.hir_id);
91 let prev_cx = visitor.cx;
93 // We treat the tail expression in the block (if any) somewhat
94 // differently from the statements. The issue has to do with
95 // temporary lifetimes. Consider the following:
98 // let inner = ... (&bar()) ...;
100 // (... (&foo()) ...) // (the tail expression)
101 // }, other_argument());
103 // Each of the statements within the block is a terminating
104 // scope, and thus a temporary (e.g., the result of calling
105 // `bar()` in the initializer expression for `let inner = ...;`)
106 // will be cleaned up immediately after its corresponding
107 // statement (i.e., `let inner = ...;`) executes.
109 // On the other hand, temporaries associated with evaluating the
110 // tail expression for the block are assigned lifetimes so that
111 // they will be cleaned up as part of the terminating scope
112 // *surrounding* the block expression. Here, the terminating
113 // scope for the block expression is the `quux(..)` call; so
114 // those temporaries will only be cleaned up *after* both
115 // `other_argument()` has run and also the call to `quux(..)`
116 // itself has returned.
118 visitor.enter_node_scope_with_dtor(blk.hir_id.local_id);
119 visitor.cx.var_parent = visitor.cx.parent;
122 // This block should be kept approximately in sync with
123 // `intravisit::walk_block`. (We manually walk the block, rather
124 // than call `walk_block`, in order to maintain precise
125 // index information.)
127 for (i, statement) in blk.stmts.iter().enumerate() {
128 match statement.kind {
129 hir::StmtKind::Local(hir::Local { els: Some(els), .. }) => {
130 // Let-else has a special lexical structure for variables.
131 // First we take a checkpoint of the current scope context here.
132 let mut prev_cx = visitor.cx;
// NOTE(review): the closing of this `enter_scope` call (likely
// `});`, containing the span/data tail) is elided from this extraction.
134 visitor.enter_scope(Scope {
135 id: blk.hir_id.local_id,
136 data: ScopeData::Remainder(FirstStatementIndex::new(i)),
138 visitor.cx.var_parent = visitor.cx.parent;
139 visitor.visit_stmt(statement);
140 // We need to back out temporarily to the last enclosing scope
141 // for the `else` block, so that even the temporaries receiving
142 // extended lifetime will be dropped inside this block.
143 // We are visiting the `else` block in this order so that
144 // the sequence of visits agree with the order in the default
145 // `hir::intravisit` visitor.
146 mem::swap(&mut prev_cx, &mut visitor.cx);
147 visitor.terminating_scopes.insert(els.hir_id.local_id);
148 visitor.visit_block(els);
149 // From now on, we continue normally.
150 visitor.cx = prev_cx;
152 hir::StmtKind::Local(..) | hir::StmtKind::Item(..) => {
153 // Each declaration introduces a subscope for bindings
154 // introduced by the declaration; this subscope covers a
155 // suffix of the block. Each subscope in a block has the
156 // previous subscope in the block as a parent, except for
157 // the first such subscope, which has the block itself as a
// parent.
159 visitor.enter_scope(Scope {
160 id: blk.hir_id.local_id,
161 data: ScopeData::Remainder(FirstStatementIndex::new(i)),
163 visitor.cx.var_parent = visitor.cx.parent;
164 visitor.visit_stmt(statement)
166 hir::StmtKind::Expr(..) | hir::StmtKind::Semi(..) => visitor.visit_stmt(statement),
// Visit the optional tail expression under the scopes set up above.
169 walk_list!(visitor, visit_expr, &blk.expr);
// Restore the context saved on entry.
172 visitor.cx = prev_cx;
// A match arm gets its own `Node` scope for its bindings; the arm body
// (and any `if` guard expression) are terminating scopes, so temporaries
// created there do not outlive the arm.
175 fn resolve_arm<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, arm: &'tcx hir::Arm<'tcx>) {
176 let prev_cx = visitor.cx;
178 visitor.enter_scope(Scope { id: arm.hir_id.local_id, data: ScopeData::Node });
179 visitor.cx.var_parent = visitor.cx.parent;
181 visitor.terminating_scopes.insert(arm.body.hir_id.local_id);
183 if let Some(hir::Guard::If(ref expr)) = arm.guard {
184 visitor.terminating_scopes.insert(expr.hir_id.local_id);
187 intravisit::walk_arm(visitor, arm);
189 visitor.cx = prev_cx;
// Records a child `Node` scope for the pattern, records variable lifetimes
// for bindings, and bumps `expr_and_pat_count` (the counter yield-tracking
// relies on) after recursing into subpatterns.
192 fn resolve_pat<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, pat: &'tcx hir::Pat<'tcx>) {
193 visitor.record_child_scope(Scope { id: pat.hir_id.local_id, data: ScopeData::Node });
195 // If this is a binding then record the lifetime of that binding.
196 if let PatKind::Binding(..) = pat.kind {
197 record_var_lifetime(visitor, pat.hir_id.local_id, pat.span);
200 debug!("resolve_pat - pre-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
202 intravisit::walk_pat(visitor, pat);
// Count this pattern *after* its subpatterns, matching visit order.
204 visitor.expr_and_pat_count += 1;
206 debug!("resolve_pat - post-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
// Each statement is a terminating scope with its own destruction scope;
// the statement's contents are parented to it, and the previous parent is
// restored afterwards (statements are siblings, not nested).
209 fn resolve_stmt<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, stmt: &'tcx hir::Stmt<'tcx>) {
210 let stmt_id = stmt.hir_id.local_id;
211 debug!("resolve_stmt(stmt.id={:?})", stmt_id);
213 // Every statement will clean up the temporaries created during
214 // execution of that statement. Therefore each statement has an
215 // associated destruction scope that represents the scope of the
216 // statement plus its destructors, and thus the scope for which
217 // regions referenced by the destructors need to survive.
218 visitor.terminating_scopes.insert(stmt_id);
220 let prev_parent = visitor.cx.parent;
221 visitor.enter_node_scope_with_dtor(stmt_id);
223 intravisit::walk_stmt(visitor, stmt);
225 visitor.cx.parent = prev_parent;
// The core of the pass: enters a node (+ destruction) scope for the
// expression, marks conditional/repeating subexpressions as terminating
// scopes, handles the special visit orders (`AssignOp`, `If`, closures /
// const blocks), bumps `expr_and_pat_count`, and records yield data for
// generator interiors. Restores the saved context on exit.
228 fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx hir::Expr<'tcx>) {
229 debug!("resolve_expr - pre-increment {} expr = {:?}", visitor.expr_and_pat_count, expr);
231 let prev_cx = visitor.cx;
232 visitor.enter_node_scope_with_dtor(expr.hir_id.local_id);
235 let terminating_scopes = &mut visitor.terminating_scopes;
236 let mut terminating = |id: hir::ItemLocalId| {
237 terminating_scopes.insert(id);
// NOTE(review): the closure's closing `};` and the opening of a
// `match expr.kind {` (whose arms follow) are elided from this extraction.
240 // Conditional or repeating scopes are always terminating
241 // scopes, meaning that temporaries cannot outlive them.
242 // This ensures fixed size stacks.
243 hir::ExprKind::Binary(
244 source_map::Spanned { node: hir::BinOpKind::And | hir::BinOpKind::Or, .. },
// NOTE(review): the operand bindings of this arm (presumably `ref l,
// ref r`, used below) are elided from this extraction.
248 // expr is a short circuiting operator (|| or &&). As its
249 // functionality can't be overridden by traits, it always
250 // processes bool sub-expressions. bools are Copy and thus we
251 // can drop any temporaries in evaluation (read) order
252 // (with the exception of potentially failing let expressions).
253 // We achieve this by enclosing the operands in a terminating
254 // scope, both the LHS and the RHS.
256 // We optimize this a little in the presence of chains.
257 // Chains like a && b && c get lowered to AND(AND(a, b), c).
258 // In here, b and c are RHS, while a is the only LHS operand in
259 // that chain. This holds true for longer chains as well: the
260 // leading operand is always the only LHS operand that is not a
261 // binop itself. Putting a binop like AND(a, b) into a
262 // terminating scope is not useful, thus we only put the LHS
263 // into a terminating scope if it is not a binop.
265 let terminate_lhs = match l.kind {
266 // let expressions can create temporaries that live on
267 hir::ExprKind::Let(_) => false,
268 // binops already drop their temporaries, so there is no
269 // need to put them into a terminating scope.
270 // This is purely an optimization to reduce the number of
271 // terminating scopes.
272 hir::ExprKind::Binary(
273 source_map::Spanned {
274 node: hir::BinOpKind::And | hir::BinOpKind::Or, ..
278 // otherwise: mark it as terminating
// NOTE(review): the `_ => true` arm / closing braces and an
// `if terminate_lhs {` guard appear elided before this call.
282 terminating(l.hir_id.local_id);
285 // `Let` expressions (in a let-chain) shouldn't be terminating, as their temporaries
286 // should live beyond the immediate expression
287 if !matches!(r.kind, hir::ExprKind::Let(_)) {
288 terminating(r.hir_id.local_id);
291 hir::ExprKind::If(_, ref then, Some(ref otherwise)) => {
292 terminating(then.hir_id.local_id);
293 terminating(otherwise.hir_id.local_id);
296 hir::ExprKind::If(_, ref then, None) => {
297 terminating(then.hir_id.local_id);
300 hir::ExprKind::Loop(ref body, _, _, _) => {
301 terminating(body.hir_id.local_id);
304 hir::ExprKind::DropTemps(ref expr) => {
305 // `DropTemps(expr)` does not denote a conditional scope.
306 // Rather, we want to achieve the same behavior as `{ let _t = expr; _t }`.
307 terminating(expr.hir_id.local_id);
310 hir::ExprKind::AssignOp(..)
311 | hir::ExprKind::Index(..)
312 | hir::ExprKind::Unary(..)
313 | hir::ExprKind::Call(..)
314 | hir::ExprKind::MethodCall(..) => {
315 // FIXME(https://github.com/rust-lang/rfcs/issues/811) Nested method calls
317 // The lifetimes for a call or method call look as follows:
// (diagram elided from this extraction)
325 // The idea is that call.callee_id represents *the time when
326 // the invoked function is actually running* and call.id
327 // represents *the time to prepare the arguments and make the
328 // call*. See the section "Borrows in Calls" borrowck/README.md
329 // for an extended explanation of why this distinction is
// important.
332 // record_superlifetime(new_cx, expr.callee_id);
// NOTE(review): a second `match expr.kind {` controlling visit order
// appears to start around here; its opener is elided.
339 let prev_pessimistic = visitor.pessimistic_yield;
341 // Ordinarily, we can rely on the visit order of HIR intravisit
342 // to correspond to the actual execution order of statements.
343 // However, there's a weird corner case with compound assignment
344 // operators (e.g. `a += b`). The evaluation order depends on whether
345 // or not the operator is overloaded (e.g. whether or not a trait
346 // like AddAssign is implemented).
348 // For primitive types (which, despite having a trait impl, don't actually
349 // end up calling it), the evaluation order is right-to-left. For example,
350 // the following code snippet:
353 // *{println!("LHS!"); y} += {println!("RHS!"); 1};
360 // However, if the operator is used on a non-primitive type,
361 // the evaluation order will be left-to-right, since the operator
362 // actually get desugared to a method call. For example, this
363 // nearly identical code snippet:
365 // let y = &mut String::new();
366 // *{println!("LHS String"); y} += {println!("RHS String"); "hi"};
372 // To determine the actual execution order, we need to perform
373 // trait resolution. Unfortunately, we need to be able to compute
374 // yield_in_scope before type checking is even done, as it gets
375 // used by AST borrowcheck.
377 // Fortunately, we don't need to know the actual execution order.
378 // It suffices to know the 'worst case' order with respect to yields.
379 // Specifically, we need to know the highest 'expr_and_pat_count'
380 // that we could assign to the yield expression. To do this,
381 // we pick the greater of the two values from the left-hand
382 // and right-hand expressions. This makes us overly conservative
383 // about what types could possibly live across yield points,
384 // but we will never fail to detect that a type does actually
385 // live across a yield point. The latter part is critical -
386 // we're already overly conservative about what types will live
387 // across yield points, as the generated MIR will determine
388 // when things are actually live. However, for typecheck to work
389 // properly, we can't miss any types.
392 // Manually recurse over closures and inline consts, because they are the only
393 // case of nested bodies that share the parent environment.
394 hir::ExprKind::Closure(&hir::Closure { body, .. })
395 | hir::ExprKind::ConstBlock(hir::AnonConst { body, .. }) => {
396 let body = visitor.tcx.hir().body(body);
397 visitor.visit_body(body);
399 hir::ExprKind::AssignOp(_, ref left_expr, ref right_expr) => {
// (debug! call; its opener/closer lines are elided here)
401 "resolve_expr - enabling pessimistic_yield, was previously {}",
405 let start_point = visitor.fixup_scopes.len();
406 visitor.pessimistic_yield = true;
408 // If the actual execution order turns out to be right-to-left,
409 // then we're fine. However, if the actual execution order is left-to-right,
410 // then we'll assign too low a count to any `yield` expressions
411 // we encounter in 'right_expression' - they should really occur after all of the
412 // expressions in 'left_expression'.
413 visitor.visit_expr(&right_expr);
414 visitor.pessimistic_yield = prev_pessimistic;
416 debug!("resolve_expr - restoring pessimistic_yield to {}", prev_pessimistic);
417 visitor.visit_expr(&left_expr);
418 debug!("resolve_expr - fixing up counts to {}", visitor.expr_and_pat_count);
420 // Remove and process any scopes pushed by the visitor
421 let target_scopes = visitor.fixup_scopes.drain(start_point..);
423 for scope in target_scopes {
// NOTE(review): the `let yield_data =` binding line introducing this
// expression is elided from this extraction.
425 visitor.scope_tree.yield_in_scope.get_mut(&scope).unwrap().last_mut().unwrap();
426 let count = yield_data.expr_and_pat_count;
427 let span = yield_data.span;
429 // expr_and_pat_count never decreases. Since we recorded counts in yield_in_scope
430 // before walking the left-hand side, it should be impossible for the recorded
431 // count to be greater than the left-hand side count.
432 if count > visitor.expr_and_pat_count {
// (bug! / panic message — its surrounding lines are elided here)
434 "Encountered greater count {} at span {:?} - expected no greater than {}",
437 visitor.expr_and_pat_count
440 let new_count = visitor.expr_and_pat_count;
442 "resolve_expr - increasing count for scope {:?} from {} to {} at span {:?}",
443 scope, count, new_count, span
446 yield_data.expr_and_pat_count = new_count;
450 hir::ExprKind::If(ref cond, ref then, Some(ref otherwise)) => {
// An if-then gets its own `IfThen` scope so that bindings from
// `let` chains in `cond` are scoped to the `then` branch only.
451 let expr_cx = visitor.cx;
452 visitor.enter_scope(Scope { id: then.hir_id.local_id, data: ScopeData::IfThen });
453 visitor.cx.var_parent = visitor.cx.parent;
454 visitor.visit_expr(cond);
455 visitor.visit_expr(then);
456 visitor.cx = expr_cx;
457 visitor.visit_expr(otherwise);
460 hir::ExprKind::If(ref cond, ref then, None) => {
461 let expr_cx = visitor.cx;
462 visitor.enter_scope(Scope { id: then.hir_id.local_id, data: ScopeData::IfThen });
463 visitor.cx.var_parent = visitor.cx.parent;
464 visitor.visit_expr(cond);
465 visitor.visit_expr(then);
466 visitor.cx = expr_cx;
469 _ => intravisit::walk_expr(visitor, expr),
// Count this expression *after* its children, matching visit order.
472 visitor.expr_and_pat_count += 1;
474 debug!("resolve_expr post-increment {}, expr = {:?}", visitor.expr_and_pat_count, expr);
476 if let hir::ExprKind::Yield(_, source) = &expr.kind {
477 // Mark this expr's scope and all parent scopes as containing `yield`.
478 let mut scope = Scope { id: expr.hir_id.local_id, data: ScopeData::Node };
// NOTE(review): a `loop {` wrapping the upward walk below appears to be
// elided from this extraction (the `break` at the CallSite arm implies it).
480 let span = match expr.kind {
481 hir::ExprKind::Yield(expr, hir::YieldSource::Await { .. }) => {
482 expr.span.shrink_to_hi().to(expr.span)
// (fallback arm `_ => expr.span` elided here)
487 YieldData { span, expr_and_pat_count: visitor.expr_and_pat_count, source: *source };
488 match visitor.scope_tree.yield_in_scope.get_mut(&scope) {
489 Some(yields) => yields.push(data),
491 visitor.scope_tree.yield_in_scope.insert(scope, vec![data]);
495 if visitor.pessimistic_yield {
496 debug!("resolve_expr in pessimistic_yield - marking scope {:?} for fixup", scope);
497 visitor.fixup_scopes.push(scope);
500 // Keep traversing up while we can.
501 match visitor.scope_tree.parent_map.get(&scope) {
502 // Don't cross from closure bodies to their parent.
503 Some(&(superscope, _)) => match superscope.data {
504 ScopeData::CallSite => break,
505 _ => scope = superscope,
// (None => break arm elided here)
512 visitor.cx = prev_cx;
// Resolves a `let` (or the implicit trailing-value "let" of a constant
// body): applies the rvalue-lifetime-extension rules (A/B/C below) to the
// initializer, then visits initializer and pattern in the order the rest of
// the compiler (generator_interior, drop_ranges, intravisit) expects.
515 fn resolve_local<'tcx>(
516 visitor: &mut RegionResolutionVisitor<'tcx>,
517 pat: Option<&'tcx hir::Pat<'tcx>>,
518 init: Option<&'tcx hir::Expr<'tcx>>,
520 debug!("resolve_local(pat={:?}, init={:?})", pat, init);
522 let blk_scope = visitor.cx.var_parent.map(|(p, _)| p);
524 // As an exception to the normal rules governing temporary
525 // lifetimes, initializers in a let have a temporary lifetime
526 // of the enclosing block. This means that e.g., a program
527 // like the following is legal:
529 // let ref x = HashMap::new();
531 // Because the hash map will be freed in the enclosing block.
533 // We express the rules more formally based on 3 grammars (defined
534 // fully in the helpers below that implement them):
536 // 1. `E&`, which matches expressions like `&<rvalue>` that
537 // own a pointer into the stack.
539 // 2. `P&`, which matches patterns like `ref x` or `(ref x, ref
540 // y)` that produce ref bindings into the value they are
541 // matched against or something (at least partially) owned by
542 // the value they are matched against. (By partially owned,
543 // I mean that creating a binding into a ref-counted or managed value
544 // would still count.)
546 // 3. `ET`, which matches both rvalues like `foo()` as well as places
547 // based on rvalues like `foo().x[2].y`.
549 // A subexpression `<rvalue>` that appears in a let initializer
550 // `let pat [: ty] = expr` has an extended temporary lifetime if
551 // any of the following conditions are met:
553 // A. `pat` matches `P&` and `expr` matches `ET`
554 // (covers cases where `pat` creates ref bindings into an rvalue
555 // produced by `expr`)
556 // B. `ty` is a borrowed pointer and `expr` matches `ET`
557 // (covers cases where coercion creates a borrow)
558 // C. `expr` matches `E&`
559 // (covers cases `expr` borrows an rvalue that is then assigned
560 // to memory (at least partially) owned by the binding)
562 // Here are some examples hopefully giving an intuition where each
563 // rule comes into play and why:
565 // Rule A. `let (ref x, ref y) = (foo().x, 44)`. The rvalue `(22, 44)`
566 // would have an extended lifetime, but not `foo()`.
568 // Rule B. `let x = &foo().x`. The rvalue `foo()` would have extended
// lifetime.
571 // In some cases, multiple rules may apply (though not to the same
572 // rvalue). For example:
574 // let ref x = [&a(), &b()];
576 // Here, the expression `[...]` has an extended lifetime due to rule
577 // A, but the inner rvalues `a()` and `b()` have an extended lifetime
// due to rule C.
580 if let Some(expr) = init {
581 record_rvalue_scope_if_borrow_expr(visitor, &expr, blk_scope);
583 if let Some(pat) = pat {
584 if is_binding_pat(pat) {
585 visitor.scope_tree.record_rvalue_candidate(
// NOTE(review): the first argument to `record_rvalue_candidate`
// (presumably `expr.hir_id`) is elided from this extraction.
587 RvalueCandidateType::Pattern {
588 target: expr.hir_id.local_id,
// (`lifetime: blk_scope` field and closing braces elided here)
596 // Make sure we visit the initializer first, so expr_and_pat_count remains correct.
597 // The correct order, as shared between generator_interior, drop_ranges and intravisitor,
598 // is to walk initializer, followed by pattern bindings, finally followed by the `else` block.
599 if let Some(expr) = init {
600 visitor.visit_expr(expr);
602 if let Some(pat) = pat {
603 visitor.visit_pat(pat);
606 /// Returns `true` if `pat` match the `P&` non-terminal.
// (grammar's first production — `P& = ref X` — appears elided here)
610 /// | StructName { ..., P&, ... }
611 /// | VariantName(..., P&, ...)
612 /// | [ ..., P&, ... ]
613 /// | ( ..., P&, ... )
614 /// | ... "|" P& "|" ...
// /// | box P&
617 fn is_binding_pat(pat: &hir::Pat<'_>) -> bool {
618 // Note that the code below looks for *explicit* refs only, that is, it won't
619 // know about *implicit* refs as introduced in #42640.
621 // This is not a problem. For example, consider
623 // let (ref x, ref y) = (Foo { .. }, Bar { .. });
625 // Due to the explicit refs on the left hand side, the below code would signal
626 // that the temporary value on the right hand side should live until the end of
627 // the enclosing block (as opposed to being dropped after the let is complete).
629 // To create an implicit ref, however, you must have a borrowed value on the RHS
630 // already, as in this example (which won't compile before #42640):
632 // let Foo { x, .. } = &Foo { x: ..., ... };
// (contrast example elided here:)
636 // let Foo { ref x, .. } = Foo { ... };
638 // In the former case (the implicit ref version), the temporary is created by the
639 // & expression, and its lifetime would be extended to the end of the block (due
640 // to a different rule, not the below code).
// NOTE(review): the `match pat.kind {` opener is elided from this extraction.
642 PatKind::Binding(hir::BindingAnnotation(hir::ByRef::Yes, _), ..) => true,
644 PatKind::Struct(_, ref field_pats, _) => {
645 field_pats.iter().any(|fp| is_binding_pat(&fp.pat))
648 PatKind::Slice(ref pats1, ref pats2, ref pats3) => {
649 pats1.iter().any(|p| is_binding_pat(&p))
650 || pats2.iter().any(|p| is_binding_pat(&p))
651 || pats3.iter().any(|p| is_binding_pat(&p))
654 PatKind::Or(ref subpats)
655 | PatKind::TupleStruct(_, ref subpats, _)
656 | PatKind::Tuple(ref subpats, _) => subpats.iter().any(|p| is_binding_pat(&p)),
658 PatKind::Box(ref subpat) => is_binding_pat(&subpat),
// (start of the catch-all false arm — some variants elided here)
661 | PatKind::Binding(hir::BindingAnnotation(hir::ByRef::No, _), ..)
665 | PatKind::Range(_, _, _) => false,
669 /// If `expr` matches the `E&` grammar, then records an extended rvalue scope as appropriate:
// (grammar's first production — `E& = & ET` — appears elided here)
673 /// | StructName { ..., f: E&, ... }
674 /// | [ ..., E&, ... ]
675 /// | ( ..., E&, ... )
// /// | {...; E&}
// /// | E& as ...
681 fn record_rvalue_scope_if_borrow_expr<'tcx>(
682 visitor: &mut RegionResolutionVisitor<'tcx>,
683 expr: &hir::Expr<'_>,
684 blk_id: Option<Scope>,
// NOTE(review): the `match expr.kind {` opener is elided from this extraction.
687 hir::ExprKind::AddrOf(_, _, subexpr) => {
688 record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id);
689 visitor.scope_tree.record_rvalue_candidate(
// NOTE(review): the first argument (presumably `subexpr.hir_id`) is
// elided from this extraction.
691 RvalueCandidateType::Borrow {
692 target: subexpr.hir_id.local_id,
// (`lifetime: blk_id` field and closing braces elided here)
697 hir::ExprKind::Struct(_, fields, _) => {
698 for field in fields {
699 record_rvalue_scope_if_borrow_expr(visitor, &field.expr, blk_id);
702 hir::ExprKind::Array(subexprs) | hir::ExprKind::Tup(subexprs) => {
703 for subexpr in subexprs {
704 record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id);
707 hir::ExprKind::Cast(ref subexpr, _) => {
708 record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id)
710 hir::ExprKind::Block(ref block, _) => {
711 if let Some(ref subexpr) = block.expr {
712 record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id);
715 hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) => {
716 // FIXME(@dingxiangfei2009): choose call arguments here
717 // for candidacy for extended parameter rule application
719 hir::ExprKind::Index(..) => {
720 // FIXME(@dingxiangfei2009): select the indices
721 // as candidate for rvalue scope rules
// (catch-all `_ => {}` arm elided here — other expressions do not
// extend rvalue lifetimes)
// Scope-bookkeeping helpers shared by the resolve_* functions above.
728 impl<'tcx> RegionResolutionVisitor<'tcx> {
729 /// Records the current parent (if any) as the parent of `child_scope`.
730 /// Returns the depth of `child_scope`.
731 fn record_child_scope(&mut self, child_scope: Scope) -> ScopeDepth {
732 let parent = self.cx.parent;
733 self.scope_tree.record_scope_parent(child_scope, parent);
734 // If `child_scope` has no parent, it must be the root node, and so has
735 // a depth of 1. Otherwise, its depth is one more than its parent's.
736 parent.map_or(1, |(_p, d)| d + 1)
739 /// Records the current parent (if any) as the parent of `child_scope`,
740 /// and sets `child_scope` as the new current parent.
741 fn enter_scope(&mut self, child_scope: Scope) {
742 let child_depth = self.record_child_scope(child_scope);
743 self.cx.parent = Some((child_scope, child_depth));
// Enters a `Node` scope for `id`, first wrapped in a `Destruction` scope if
// `id` was previously marked terminating (so the node is parented inside
// its own destruction scope).
746 fn enter_node_scope_with_dtor(&mut self, id: hir::ItemLocalId) {
747 // If node was previously marked as a terminating scope during the
748 // recursive visit of its parent node in the AST, then we need to
749 // account for the destruction scope representing the scope of
750 // the destructors that run immediately after it completes.
751 if self.terminating_scopes.contains(&id) {
752 self.enter_scope(Scope { id, data: ScopeData::Destruction });
// (closing brace of the `if` elided here)
754 self.enter_scope(Scope { id, data: ScopeData::Node });
// Dispatches each HIR node kind to the corresponding resolve_* function;
// `visit_body` additionally saves/restores all per-body visitor state so
// nested bodies (closures, const blocks) are resolved independently.
758 impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> {
759 fn visit_block(&mut self, b: &'tcx Block<'tcx>) {
760 resolve_block(self, b);
763 fn visit_body(&mut self, body: &'tcx hir::Body<'tcx>) {
764 let body_id = body.id();
765 let owner_id = self.tcx.hir().body_owner_def_id(body_id);
// (debug! call; surrounding lines elided from this extraction)
768 "visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})",
770 self.tcx.sess.source_map().span_to_diagnostic_string(body.value.span),
775 // Save all state that is specific to the outer function
776 // body. These will be restored once down below, once we've
// visited the body.
778 let outer_ec = mem::replace(&mut self.expr_and_pat_count, 0);
779 let outer_cx = self.cx;
780 let outer_ts = mem::take(&mut self.terminating_scopes);
781 // The 'pessimistic yield' flag is set to true when we are
782 // processing a `+=` statement and have to make pessimistic
783 // control flow assumptions. This doesn't apply to nested
784 // bodies within the `+=` statements. See #69307.
785 let outer_pessimistic_yield = mem::replace(&mut self.pessimistic_yield, false);
786 self.terminating_scopes.insert(body.value.hir_id.local_id);
788 self.enter_scope(Scope { id: body.value.hir_id.local_id, data: ScopeData::CallSite });
789 self.enter_scope(Scope { id: body.value.hir_id.local_id, data: ScopeData::Arguments });
791 // The arguments and `self` are parented to the fn.
792 self.cx.var_parent = self.cx.parent.take();
793 for param in body.params {
794 self.visit_pat(&param.pat);
797 // The body of every fn is a root scope.
798 self.cx.parent = self.cx.var_parent;
799 if self.tcx.hir().body_owner_kind(owner_id).is_fn_or_closure() {
800 self.visit_expr(&body.value)
// (else branch: constant/static initializers take the rvalue path below)
802 // Only functions have an outer terminating (drop) scope, while
803 // temporaries in constant initializers may be 'static, but only
804 // according to rvalue lifetime semantics, using the same
805 // syntactical rules used for let initializers.
807 // e.g., in `let x = &f();`, the temporary holding the result from
808 // the `f()` call lives for the entirety of the surrounding block.
810 // Similarly, `const X: ... = &f();` would have the result of `f()`
811 // live for `'static`, implying (if Drop restrictions on constants
812 // ever get lifted) that the value *could* have a destructor, but
813 // it'd get leaked instead of the destructor running during the
814 // evaluation of `X` (if at all allowed by CTFE).
816 // However, `const Y: ... = g(&f());`, like `let y = g(&f());`,
817 // would *not* let the `f()` temporary escape into an outer scope
818 // (i.e., `'static`), which means that after `g` returns, it drops,
819 // and all the associated destruction scope rules apply.
820 self.cx.var_parent = None;
821 resolve_local(self, None, Some(&body.value));
// Record the final expr/pat count for generator bodies; it is used to
// size per-scope yield bookkeeping.
824 if body.generator_kind.is_some() {
825 self.scope_tree.body_expr_count.insert(body_id, self.expr_and_pat_count);
828 // Restore context we had at the start.
829 self.expr_and_pat_count = outer_ec;
// NOTE(review): the restore of `self.cx = outer_cx` appears to be elided
// from this extraction (`outer_cx` is saved above and otherwise unused).
831 self.terminating_scopes = outer_ts;
832 self.pessimistic_yield = outer_pessimistic_yield;
835 fn visit_arm(&mut self, a: &'tcx Arm<'tcx>) {
836 resolve_arm(self, a);
838 fn visit_pat(&mut self, p: &'tcx Pat<'tcx>) {
839 resolve_pat(self, p);
841 fn visit_stmt(&mut self, s: &'tcx Stmt<'tcx>) {
842 resolve_stmt(self, s);
844 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
845 resolve_expr(self, ex);
847 fn visit_local(&mut self, l: &'tcx Local<'tcx>) {
848 resolve_local(self, Some(&l.pat), l.init)
852 /// Per-body `region::ScopeTree`. The `DefId` should be the owner `DefId` for the body;
853 /// in the case of closures, this will be redirected to the enclosing function.
855 /// Performance: This is a query rather than a simple function to enable
856 /// re-use in incremental scenarios. We may sometimes need to rerun the
857 /// type checker even when the HIR hasn't changed, and in those cases
858 /// we can avoid reconstructing the region scope tree.
859 pub fn region_scope_tree(tcx: TyCtxt<'_>, def_id: DefId) -> &ScopeTree {
860 let typeck_root_def_id = tcx.typeck_root_def_id(def_id);
861 if typeck_root_def_id != def_id {
// Closures share their enclosing function's scope tree.
862 return tcx.region_scope_tree(typeck_root_def_id);
// Only owners with a body (fns, consts, etc.) get a populated tree;
// bodyless owners presumably fall back to a default ScopeTree (the else
// branch is elided from this extraction).
865 let scope_tree = if let Some(body_id) = tcx.hir().maybe_body_owned_by(def_id.expect_local()) {
866 let mut visitor = RegionResolutionVisitor {
// (a `tcx` field initializer appears to be elided here)
868 scope_tree: ScopeTree::default(),
869 expr_and_pat_count: 0,
870 cx: Context { parent: None, var_parent: None },
871 terminating_scopes: Default::default(),
872 pessimistic_yield: false,
873 fixup_scopes: vec![],
// (closing `};` of the struct literal elided here)
876 let body = tcx.hir().body(body_id);
877 visitor.scope_tree.root_body = Some(body.value.hir_id);
878 visitor.visit_body(body);
// (return of `visitor.scope_tree` and the bodyless else branch elided)
// Arena-allocate so the query can hand out a `&'tcx ScopeTree`.
884 tcx.arena.alloc(scope_tree)