1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A different sort of visitor for walking fn bodies. Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
15 pub use self::LoanCause::*;
16 pub use self::ConsumeMode::*;
17 pub use self::MoveReason::*;
18 pub use self::MatchMode::*;
19 use self::TrackMatchMode::*;
20 use self::OverloadedCallType::*;
23 use hir::def_id::{DefId};
25 use middle::mem_categorization as mc;
27 use ty::{self, TyCtxt, adjustment};
29 use hir::{self, PatKind};
35 ///////////////////////////////////////////////////////////////////////////
38 /// This trait defines the callbacks you can expect to receive when
39 /// employing the ExprUseVisitor.
40 pub trait Delegate<'tcx> {
// Callback: the value found at `cmt` is consumed (copied or moved —
// which one is reported via `ConsumeMode`).
41 // The value found at `cmt` is either copied or moved, depending
44 consume_id: ast::NodeId,
49 // The value found at `cmt` has been determined to match the
50 // pattern binding `matched_pat`, and its subparts are being
51 // copied or moved depending on `mode`. Note that `matched_pat`
52 // is called on all variant/structs in the pattern (i.e., the
53 // interior nodes of the pattern's tree structure) while
54 // consume_pat is called on the binding identifiers in the pattern
55 // (which are leaves of the pattern's tree structure).
57 // Note that variants/structs and identifiers are disjoint; thus
58 // `matched_pat` and `consume_pat` are never both called on the
59 // same input pattern structure (though `consume_pat` can be
60 // called on a subpart of an input passed to `matched_pat`).
61 fn matched_pat(&mut self,
62 matched_pat: &hir::Pat,
66 // The value found at `cmt` is either copied or moved via the
67 // pattern binding `consume_pat`, depending on mode.
68 fn consume_pat(&mut self,
69 consume_pat: &hir::Pat,
73 // The value found at `borrow` is being borrowed at the point
74 // `borrow_id` for the region `loan_region` with kind `bk`.
76 borrow_id: ast::NodeId,
79 loan_region: ty::Region<'tcx>,
81 loan_cause: LoanCause);
83 // The local variable `id` is declared but not initialized.
// (e.g. `let x;` — the later initializing write is reported via
// `mutate` with an init mode, see `walk_pat`.)
84 fn decl_without_init(&mut self,
88 // The path at `cmt` is being assigned to.
90 assignment_id: ast::NodeId,
91 assignment_span: Span,
92 assignee_cmt: mc::cmt<'tcx>,
96 #[derive(Copy, Clone, PartialEq, Debug)]
109 #[derive(Copy, Clone, PartialEq, Debug)]
// How a consumed value is used: by copy (its type is `Copy`) or by
// move. Decided by `copy_or_move` below via `type_moves_by_default`.
110 pub enum ConsumeMode {
111 Copy, // reference to x where x has a type that copies
112 Move(MoveReason), // reference to x where x has a type that moves
115 #[derive(Copy, Clone, PartialEq, Debug)]
// Why a value was moved; variants such as `DirectRefMove` and
// `PatBindingMove` are passed to `copy_or_move` at the use sites below.
116 pub enum MoveReason {
122 #[derive(Copy, Clone, PartialEq, Debug)]
130 #[derive(Copy, Clone, PartialEq, Debug)]
// Mutable accumulator used while inferring a whole pattern's
// `MatchMode`; see the lattice description on `lub` in the impl below.
131 enum TrackMatchMode {
137 impl TrackMatchMode {
138 // Builds up the whole match mode for a pattern from its constituent
139 // parts. The lattice looks like this:
155 // * `(_, some_int)` pattern is Copying, since
156 // NonBinding + Copying => Copying
158 // * `(some_int, some_box)` pattern is Moving, since
159 // Copying + Moving => Moving
161 // * `(ref x, some_box)` pattern is Conflicting, since
162 // Borrowing + Moving => Conflicting
164 // Note that the `Unknown` and `Conflicting` states are
165 // represented separately from the other more interesting
166 // `Definite` states, which simplifies logic here somewhat.
// `lub` = least upper bound: fold one binding's mode into the running
// state for the pattern as a whole.
167 fn lub(&mut self, mode: MatchMode) {
168 *self = match (*self, mode) {
169 // Note that clause order below is very significant.
// The first observation fixes a definite mode; repeats are idempotent.
170 (Unknown, new) => Definite(new),
171 (Definite(old), new) if old == new => Definite(old),
// NonBindingMatch is the bottom of the definite modes: it never
// overrides an existing mode and is overridden by anything else.
173 (Definite(old), NonBindingMatch) => Definite(old),
174 (Definite(NonBindingMatch), new) => Definite(new),
// CopyingMatch likewise yields to any stronger mode.
176 (Definite(old), CopyingMatch) => Definite(old),
177 (Definite(CopyingMatch), new) => Definite(new),
// Two distinct "strong" modes (e.g. borrow + move) conflict.
179 (Definite(_), _) => Conflicting,
// Conflicting is the top of the lattice: it absorbs everything.
180 (Conflicting, _) => *self,
// Collapse the tracked state into the final `MatchMode` for the
// pattern; see the conservative note below for the Conflicting case.
184 fn match_mode(&self) -> MatchMode {
186 Unknown => NonBindingMatch,
187 Definite(mode) => mode,
189 // Conservatively return MovingMatch to let the
190 // compiler continue to make progress.
197 #[derive(Copy, Clone, PartialEq, Debug)]
// How an lvalue is written. Distinguishes initialization
// (`MutateMode::Init`, see `walk_pat`), a plain overwrite
// (`JustWrite`, e.g. `x = y`) and a compound read-modify-write.
198 pub enum MutateMode {
201 WriteAndRead, // x += y
204 #[derive(Copy, Clone)]
// Which of the function-call lang traits (Fn/FnMut/FnOnce) an
// overloaded call dispatches through; this decides in `walk_callee`
// whether the callee is borrowed (shared/mut) or consumed.
205 enum OverloadedCallType {
208 FnOnceOverloadedCall,
211 impl OverloadedCallType {
// Classify a trait `DefId` as one of the three function-call lang
// traits by comparing it against the registered lang items.
212 fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType {
213 for &(maybe_function_trait, overloaded_call_type) in &[
214 (tcx.lang_items().fn_once_trait(), FnOnceOverloadedCall),
215 (tcx.lang_items().fn_mut_trait(), FnMutOverloadedCall),
216 (tcx.lang_items().fn_trait(), FnOverloadedCall)
// A lang item is an `Option` and may be absent; only a present,
// matching trait classifies the call.
218 match maybe_function_trait {
219 Some(function_trait) if function_trait == trait_id => {
220 return overloaded_call_type
// Overloaded calls can only dispatch through Fn/FnMut/FnOnce, so
// falling through here is a compiler bug.
226 bug!("overloaded call didn't map to known function trait")
// Classify the method used for an overloaded call by looking up the
// trait (container) that declares it.
229 fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType {
230 let method = tcx.associated_item(method_id);
231 OverloadedCallType::from_trait_id(tcx, method.container.id())
235 ///////////////////////////////////////////////////////////////////////////
236 // The ExprUseVisitor type
238 // This is the code that actually walks the tree.
239 pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Performs mem-categorization of expressions (`cat_expr` & friends).
240 mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
// Client callbacks, invoked as consumes/borrows/mutations are found.
241 delegate: &'a mut Delegate<'tcx>,
// Needed to decide whether a type is `Copy` (see `copy_or_move`).
242 param_env: ty::ParamEnv<'tcx>,
245 // If the MC results in an error, it's because the type check
246 // failed (or will fail, when the error is uncovered and reported
247 // during writeback). In this case, we just ignore this part of the
250 // Note that this macro appears similar to try!(), but, unlike try!(),
251 // it does not propagate the error: a mem-categorization error means
// type checking has failed (or will fail) anyway, so on `Err` we just
// log and return from the enclosing function.
252 macro_rules! return_if_err {
257 debug!("mc reported err");
264 impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx, 'tcx> {
/// Constructs a visitor for use outside type inference, where the
/// global and local type contexts coincide (`'gcx == 'tcx`); compare
/// `with_infer`, which takes an `InferCtxt` instead of a `TyCtxt`.
265 pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
266 tcx: TyCtxt<'a, 'tcx, 'tcx>,
267 param_env: ty::ParamEnv<'tcx>,
268 region_scope_tree: &'a region::ScopeTree,
269 tables: &'a ty::TypeckTables<'tcx>)
273 mc: mc::MemCategorizationContext::new(tcx, region_scope_tree, tables),
280 impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
/// Constructs a visitor usable during type inference (typeck),
/// building the categorization context from an `InferCtxt`.
281 pub fn with_infer(delegate: &'a mut (Delegate<'tcx>+'a),
282 infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
283 param_env: ty::ParamEnv<'tcx>,
284 region_scope_tree: &'a region::ScopeTree,
285 tables: &'a ty::TypeckTables<'tcx>)
289 mc: mc::MemCategorizationContext::with_infer(infcx, region_scope_tree, tables),
/// Entry point: walk an entire fn body — first the argument patterns
/// (treated as irrefutable bindings of rvalues scoped to the body),
/// then the body expression itself.
295 pub fn consume_body(&mut self, body: &hir::Body) {
296 debug!("consume_body(body={:?})", body);
298 for arg in &body.arguments {
299 let arg_ty = return_if_err!(self.mc.node_ty(arg.pat.hir_id));
301 let fn_body_scope_r =
302 self.tcx().mk_region(ty::ReScope(region::Scope::Node(body.value.hir_id.local_id)));
303 let arg_cmt = self.mc.cat_rvalue(
306 fn_body_scope_r, // Args live only as long as the fn body.
309 self.walk_irrefutable_pat(arg_cmt, &arg.pat);
312 self.consume_expr(&body.value);
// Convenience accessor for the type context held by `self.mc`.
315 fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
// Report a consume of `cmt` to the delegate, first deciding whether
// it is a copy or a move based on the type.
319 fn delegate_consume(&mut self,
320 consume_id: ast::NodeId,
322 cmt: mc::cmt<'tcx>) {
323 debug!("delegate_consume(consume_id={}, cmt={:?})",
326 let mode = copy_or_move(&self.mc, self.param_env, &cmt, DirectRefMove);
327 self.delegate.consume(consume_id, consume_span, cmt, mode);
// Consume each expression in a slice (call args, tuple/array elements).
330 fn consume_exprs(&mut self, exprs: &[hir::Expr]) {
332 self.consume_expr(&expr);
/// Reports `expr` as consumed (copied/moved), then walks it.
336 pub fn consume_expr(&mut self, expr: &hir::Expr) {
337 debug!("consume_expr(expr={:?})", expr);
339 let cmt = return_if_err!(self.mc.cat_expr(expr));
340 self.delegate_consume(expr.id, expr.span, cmt);
341 self.walk_expr(expr);
// Reports `expr` as mutated (attributed to `assignment_expr`'s
// id/span), then walks it.
344 fn mutate_expr(&mut self,
345 assignment_expr: &hir::Expr,
348 let cmt = return_if_err!(self.mc.cat_expr(expr));
349 self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
350 self.walk_expr(expr);
// Reports `expr` as borrowed with region `r` and kind `bk`, then
// walks it.
353 fn borrow_expr(&mut self,
358 debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
361 let cmt = return_if_err!(self.mc.cat_expr(expr));
362 self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
// Used for base expressions of projections (`*e`, `e.f`, `e[i]`):
// the base is read through, not consumed as a whole.
367 fn select_from_expr(&mut self, expr: &hir::Expr) {
/// Dispatch on the expression kind, reporting the uses each form
/// implies. Adjustments (autoderef/autoref/unsize) are handled first.
371 pub fn walk_expr(&mut self, expr: &hir::Expr) {
372 debug!("walk_expr(expr={:?})", expr);
374 self.walk_adjustment(expr);
377 hir::ExprPath(_) => { }
379 hir::ExprType(ref subexpr, _) => {
380 self.walk_expr(&subexpr)
383 hir::ExprUnary(hir::UnDeref, ref base) => { // *base
384 self.select_from_expr(&base);
387 hir::ExprField(ref base, _) => { // base.f
388 self.select_from_expr(&base);
391 hir::ExprTupField(ref base, _) => { // base.<n>
392 self.select_from_expr(&base);
395 hir::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
396 self.select_from_expr(&lhs);
397 self.consume_expr(&rhs);
400 hir::ExprCall(ref callee, ref args) => { // callee(args)
401 self.walk_callee(expr, &callee);
402 self.consume_exprs(args);
405 hir::ExprMethodCall(.., ref args) => { // callee.m(args)
406 self.consume_exprs(args);
409 hir::ExprStruct(_, ref fields, ref opt_with) => {
410 self.walk_struct_expr(fields, opt_with);
413 hir::ExprTup(ref exprs) => {
414 self.consume_exprs(exprs);
417 hir::ExprIf(ref cond_expr, ref then_expr, ref opt_else_expr) => {
418 self.consume_expr(&cond_expr);
419 self.walk_expr(&then_expr);
420 if let Some(ref else_expr) = *opt_else_expr {
421 self.consume_expr(&else_expr);
425 hir::ExprMatch(ref discr, ref arms, _) => {
426 let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
427 let r = self.tcx().types.re_empty;
428 self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant);
430 // treatment of the discriminant is handled while walking the arms.
// Two passes per arm: first compute the arm's overall move mode,
// then walk it with that mode established.
432 let mode = self.arm_move_mode(discr_cmt.clone(), arm);
433 let mode = mode.match_mode();
434 self.walk_arm(discr_cmt.clone(), arm, mode);
438 hir::ExprArray(ref exprs) => {
439 self.consume_exprs(exprs);
442 hir::ExprAddrOf(m, ref base) => { // &base
443 // make sure that the thing we are pointing at stays valid
444 // for the lifetime `scope_r` of the resulting ptr:
445 let expr_ty = return_if_err!(self.mc.expr_ty(expr));
446 if let ty::TyRef(r, _) = expr_ty.sty {
447 let bk = ty::BorrowKind::from_mutbl(m);
448 self.borrow_expr(&base, r, bk, AddrOf);
452 hir::ExprInlineAsm(ref ia, ref outputs, ref inputs) => {
453 for (o, output) in ia.outputs.iter().zip(outputs) {
455 self.consume_expr(output);
// Read-write outputs are WriteAndRead; write-only are JustWrite.
457 self.mutate_expr(expr, output,
459 MutateMode::WriteAndRead
461 MutateMode::JustWrite
465 self.consume_exprs(inputs);
469 hir::ExprLit(..) => {}
471 hir::ExprLoop(ref blk, _, _) => {
472 self.walk_block(&blk);
475 hir::ExprWhile(ref cond_expr, ref blk, _) => {
476 self.consume_expr(&cond_expr);
477 self.walk_block(&blk);
480 hir::ExprUnary(_, ref lhs) => {
481 self.consume_expr(&lhs);
484 hir::ExprBinary(_, ref lhs, ref rhs) => {
485 self.consume_expr(&lhs);
486 self.consume_expr(&rhs);
489 hir::ExprBlock(ref blk) => {
490 self.walk_block(&blk);
493 hir::ExprBreak(_, ref opt_expr) | hir::ExprRet(ref opt_expr) => {
494 if let Some(ref expr) = *opt_expr {
495 self.consume_expr(&expr);
499 hir::ExprAssign(ref lhs, ref rhs) => {
500 self.mutate_expr(expr, &lhs, MutateMode::JustWrite);
501 self.consume_expr(&rhs);
504 hir::ExprCast(ref base, _) => {
505 self.consume_expr(&base);
508 hir::ExprAssignOp(_, ref lhs, ref rhs) => {
// An overloaded `+=` etc. is a method call consuming the LHS;
// a built-in one is a read-modify-write of the LHS place.
509 if self.mc.tables.is_method_call(expr) {
510 self.consume_expr(lhs);
512 self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
514 self.consume_expr(&rhs);
517 hir::ExprRepeat(ref base, _) => {
518 self.consume_expr(&base);
521 hir::ExprClosure(.., fn_decl_span, _) => {
522 self.walk_captures(expr, fn_decl_span)
525 hir::ExprBox(ref base) => {
526 self.consume_expr(&base);
529 hir::ExprYield(ref value) => {
530 self.consume_expr(&value);
// Reports the use of the callee: plain fn items/pointers are consumed;
// overloaded calls borrow or consume the callee per its Fn* trait.
535 fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
536 let callee_ty = return_if_err!(self.mc.expr_ty_adjusted(callee));
537 debug!("walk_callee: callee={:?} callee_ty={:?}",
539 match callee_ty.sty {
540 ty::TyFnDef(..) | ty::TyFnPtr(_) => {
541 self.consume_expr(callee);
545 let def_id = self.mc.tables.type_dependent_defs()[call.hir_id].def_id();
546 let call_scope = region::Scope::Node(call.hir_id.local_id);
547 match OverloadedCallType::from_method_id(self.tcx(), def_id) {
548 FnMutOverloadedCall => {
549 let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope));
550 self.borrow_expr(callee,
555 FnOverloadedCall => {
556 let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope));
557 self.borrow_expr(callee,
562 FnOnceOverloadedCall => self.consume_expr(callee),
// Walks one statement: locals are walked, nested items are skipped,
// expression statements are consumed.
568 fn walk_stmt(&mut self, stmt: &hir::Stmt) {
570 hir::StmtDecl(ref decl, _) => {
572 hir::DeclLocal(ref local) => {
573 self.walk_local(&local);
576 hir::DeclItem(_) => {
577 // we don't visit nested items in this visitor,
578 // only the fn body we were given.
583 hir::StmtExpr(ref expr, _) |
584 hir::StmtSemi(ref expr, _) => {
585 self.consume_expr(&expr);
// Walks a `let`: without an initializer each binding is reported via
// `decl_without_init`; with one, the initializer is walked and the
// pattern treated as an irrefutable destructuring of it.
590 fn walk_local(&mut self, local: &hir::Local) {
593 let delegate = &mut self.delegate;
594 local.pat.each_binding(|_, id, span, _| {
595 delegate.decl_without_init(id, span);
600 // Variable declarations with
601 // initializers are considered
602 // "assigns", which is handled by
604 self.walk_expr(&expr);
605 let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
606 self.walk_irrefutable_pat(init_cmt, &local.pat);
611 /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
612 /// depending on its type.
613 fn walk_block(&mut self, blk: &hir::Block) {
614 debug!("walk_block(blk.id={})", blk.id);
616 for stmt in &blk.stmts {
617 self.walk_stmt(stmt);
620 if let Some(ref tail_expr) = blk.expr {
621 self.consume_expr(&tail_expr);
// Walks a struct literal, including the functional-update (`..base`)
// form: only the fields NOT overridden are consumed out of the base.
625 fn walk_struct_expr(&mut self,
626 fields: &[hir::Field],
627 opt_with: &Option<P<hir::Expr>>) {
628 // Consume the expressions supplying values for each field.
629 for field in fields {
630 self.consume_expr(&field.expr);
633 let with_expr = match *opt_with {
638 let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));
640 // Select just those fields of the `with`
641 // expression that will actually be used
642 match with_cmt.ty.sty {
643 ty::TyAdt(adt, substs) if adt.is_struct() => {
644 // Consume those fields of the with expression that are needed.
645 for with_field in &adt.struct_variant().fields {
646 if !contains_field_named(with_field, fields) {
647 let cmt_field = self.mc.cat_field(
651 with_field.ty(self.tcx(), substs)
653 self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
658 // the base expression should always evaluate to a
659 // struct; however, when EUV is run during typeck, it
660 // may not. This will generate an error earlier in typeck,
661 // so we can just ignore it.
662 if !self.tcx().sess.has_errors() {
665 "with expression doesn't evaluate to a struct");
670 // walk the with expression so that complex expressions
671 // are properly handled.
672 self.walk_expr(with_expr);
// Helper: is a field of this name explicitly given in the literal?
674 fn contains_field_named(field: &ty::FieldDef,
675 fields: &[hir::Field])
679 |f| f.name.node == field.name)
683 // Invoke the appropriate delegate calls for anything that gets
684 // consumed or borrowed as part of the automatic adjustment
// machinery; each adjustment step re-categorizes the expression.
686 fn walk_adjustment(&mut self, expr: &hir::Expr) {
687 let adjustments = self.mc.tables.expr_adjustments(expr);
688 let mut cmt = return_if_err!(self.mc.cat_expr_unadjusted(expr));
689 for adjustment in adjustments {
690 debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
691 match adjustment.kind {
692 adjustment::Adjust::NeverToAny |
693 adjustment::Adjust::ReifyFnPointer |
694 adjustment::Adjust::UnsafeFnPointer |
695 adjustment::Adjust::ClosureFnPointer |
696 adjustment::Adjust::MutToConstPointer |
697 adjustment::Adjust::Unsize => {
698 // Creating a closure/fn-pointer or unsizing consumes
699 // the input and stores it into the resulting rvalue.
700 self.delegate_consume(expr.id, expr.span, cmt.clone());
703 adjustment::Adjust::Deref(None) => {}
705 // Autoderefs for overloaded Deref calls in fact reference
706 // their receiver. That is, if we have `(*x)` where `x`
707 // is of type `Rc<T>`, then this in fact is equivalent to
708 // `x.deref()`. Since `deref()` is declared with `&self`,
709 // this is an autoref of `x`.
710 adjustment::Adjust::Deref(Some(ref deref)) => {
711 let bk = ty::BorrowKind::from_mutbl(deref.mutbl);
712 self.delegate.borrow(expr.id, expr.span, cmt.clone(),
713 deref.region, bk, AutoRef);
716 adjustment::Adjust::Borrow(ref autoref) => {
717 self.walk_autoref(expr, cmt.clone(), autoref);
// Re-categorize so the next adjustment sees the adjusted cmt.
720 cmt = return_if_err!(self.mc.cat_expr_adjusted(expr, cmt, &adjustment));
724 /// Walks the autoref `autoref` applied to the autoderef'd
725 /// `expr`. `cmt_base` is the mem-categorized form of `expr`
726 /// after all relevant autoderefs have occurred.
727 fn walk_autoref(&mut self,
729 cmt_base: mc::cmt<'tcx>,
730 autoref: &adjustment::AutoBorrow<'tcx>) {
731 debug!("walk_autoref(expr.id={} cmt_base={:?} autoref={:?})",
737 adjustment::AutoBorrow::Ref(r, m) => {
738 self.delegate.borrow(expr.id,
742 ty::BorrowKind::from_mutbl(m),
746 adjustment::AutoBorrow::RawPtr(m) => {
747 debug!("walk_autoref: expr.id={} cmt_base={:?}",
751 // Converting from a &T to *T (or &mut T to *mut T) is
752 // treated as borrowing it for the enclosing temporary
754 let r = self.tcx().mk_region(ty::ReScope(
755 region::Scope::Node(expr.hir_id.local_id)));
757 self.delegate.borrow(expr.id,
761 ty::BorrowKind::from_mutbl(m),
// Folds the move modes of all patterns in one arm into a single
// `TrackMatchMode` (see `TrackMatchMode::lub`).
767 fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
768 let mut mode = Unknown;
769 for pat in &arm.pats {
770 self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
// Walks one arm with its already-computed mode: patterns, then the
// optional guard, then the arm body (both consumed).
775 fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
776 for pat in &arm.pats {
777 self.walk_pat(discr_cmt.clone(), &pat, mode);
780 if let Some(ref guard) = arm.guard {
781 self.consume_expr(&guard);
784 self.consume_expr(&arm.body);
787 /// Walks a pat that occurs in isolation (i.e. top-level of fn
788 /// arg or let binding. *Not* a match arm or nested pat.)
789 fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
790 let mut mode = Unknown;
791 self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
792 let mode = mode.match_mode();
793 self.walk_pat(cmt_discr, pat, mode);
796 /// Identifies any bindings within `pat` and accumulates within
797 /// `mode` whether the overall pattern/match structure is a move,
799 fn determine_pat_move_mode(&mut self,
800 cmt_discr: mc::cmt<'tcx>,
802 mode: &mut TrackMatchMode) {
803 debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
805 return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |cmt_pat, pat| {
806 if let PatKind::Binding(..) = pat.node {
// Binding modes come from typeck's tables (match ergonomics may
// have inserted by-ref modes not written in the source).
807 let bm = *self.mc.tables.pat_binding_modes().get(pat.hir_id)
808 .expect("missing binding mode");
810 ty::BindByReference(..) =>
811 mode.lub(BorrowingMatch),
812 ty::BindByValue(..) => {
813 match copy_or_move(&self.mc, self.param_env, &cmt_pat, PatBindingMove) {
814 Copy => mode.lub(CopyingMatch),
815 Move(..) => mode.lub(MovingMatch),
823 /// The core driver for walking a pattern; `match_mode` must be
824 /// established up front, e.g. via `determine_pat_move_mode` (see
825 /// also `walk_irrefutable_pat` for patterns that stand alone).
826 fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
827 debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);
// Destructure self so the closures below can borrow `mc` and
// `delegate` independently.
829 let ExprUseVisitor { ref mc, ref mut delegate, param_env } = *self;
830 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |cmt_pat, pat| {
831 if let PatKind::Binding(_, def_id, ..) = pat.node {
832 debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}", cmt_pat, pat, match_mode);
833 let bm = *mc.tables.pat_binding_modes().get(pat.hir_id)
834 .expect("missing binding mode");
836 // pat_ty: the type of the binding being produced.
837 let pat_ty = return_if_err!(mc.node_ty(pat.hir_id));
839 // Each match binding is effectively an assignment to the
840 // binding being produced.
841 let def = Def::Local(def_id);
842 if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty, def) {
843 delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
846 // It is also a borrow or copy/move of the value being matched.
848 ty::BindByReference(m) => {
849 if let ty::TyRef(r, _) = pat_ty.sty {
850 let bk = ty::BorrowKind::from_mutbl(m);
851 delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
854 ty::BindByValue(..) => {
855 let mode = copy_or_move(mc, param_env, &cmt_pat, PatBindingMove);
856 debug!("walk_pat binding consuming pat");
857 delegate.consume_pat(pat, cmt_pat, mode);
863 // Do a second pass over the pattern, calling `matched_pat` on
864 // the interior nodes (enum variants and structs), as opposed
865 // to the above loop's visit of the bindings that form
866 // the leaves of the pattern tree structure.
867 return_if_err!(mc.cat_pattern(cmt_discr, pat, |cmt_pat, pat| {
868 let qpath = match pat.node {
869 PatKind::Path(ref qpath) |
870 PatKind::TupleStruct(ref qpath, ..) |
871 PatKind::Struct(ref qpath, ..) => qpath,
874 let def = mc.tables.qpath_def(qpath, pat.hir_id);
876 Def::Variant(variant_did) |
877 Def::VariantCtor(variant_did, ..) => {
// Enum patterns: report against the downcast-to-variant cmt.
878 let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did);
880 debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
881 delegate.matched_pat(pat, downcast_cmt, match_mode);
883 Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) |
884 Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => {
885 debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
886 delegate.matched_pat(pat, cmt_pat, match_mode);
// Reports each free variable captured by a closure as either a
// consume (by-value capture) or a borrow (by-ref capture), per the
// capture kinds recorded in the typeck tables.
893 fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
894 debug!("walk_captures({:?})", closure_expr);
896 self.tcx().with_freevars(closure_expr.id, |freevars| {
897 for freevar in freevars {
898 let var_def_id = freevar.def.def_id();
899 debug_assert!(var_def_id.is_local());
900 let closure_def_id = self.tcx().hir.local_def_id(closure_expr.id);
901 let upvar_id = ty::UpvarId {
902 var_id: var_def_id.index,
903 closure_expr_id: closure_def_id.index
905 let upvar_capture = self.mc.tables.upvar_capture(upvar_id);
906 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
909 match upvar_capture {
910 ty::UpvarCapture::ByValue => {
911 let mode = copy_or_move(&self.mc,
915 self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
917 ty::UpvarCapture::ByRef(upvar_borrow) => {
918 self.delegate.borrow(closure_expr.id,
923 ClosureCapture(freevar.span));
// Categorizes a captured variable as seen from outside the closure.
930 fn cat_captured_var(&mut self,
931 closure_id: ast::NodeId,
934 -> mc::McResult<mc::cmt<'tcx>> {
935 // Create the cmt for the variable being borrowed, from the
936 // caller's perspective
937 let var_node_id = self.tcx().hir.as_local_node_id(upvar_def.def_id()).unwrap();
938 let var_hir_id = self.tcx().hir.node_to_hir_id(var_node_id);
939 let var_ty = self.mc.node_ty(var_hir_id)?;
940 self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
944 fn copy_or_move<'a, 'gcx, 'tcx>(mc: &mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
945 param_env: ty::ParamEnv<'tcx>,
947 move_reason: MoveReason)
950 if mc.type_moves_by_default(param_env, cmt.ty, cmt.span) {