1 //! A different sort of visitor for walking fn bodies. Unlike the
2 //! normal visitor, which just walks the entire body in one shot, the
3 //! `ExprUseVisitor` determines how expressions are being used.
5 pub use self::LoanCause::*;
6 pub use self::ConsumeMode::*;
7 pub use self::MoveReason::*;
8 pub use self::MatchMode::*;
9 use self::TrackMatchMode::*;
10 use self::OverloadedCallType::*;
12 use crate::hir::def::{CtorOf, Res, DefKind};
13 use crate::hir::def_id::DefId;
14 use crate::infer::InferCtxt;
15 use crate::middle::mem_categorization as mc;
16 use crate::middle::region;
17 use crate::ty::{self, DefIdTree, TyCtxt, adjustment};
19 use crate::hir::{self, PatKind};
23 use crate::util::nodemap::ItemLocalSet;
25 ///////////////////////////////////////////////////////////////////////////
28 /// This trait defines the callbacks you can expect to receive when
29 /// employing the ExprUseVisitor.
30 pub trait Delegate<'tcx> {
31 // The value found at `cmt` is either copied or moved, depending
// on `mode` (NOTE(review): the `cmt`/`mode` parameters and the rest of
// `consume`'s signature are elided from this view).
34     consume_id: hir::HirId,
39 // The value found at `cmt` has been determined to match the
40 // pattern binding `matched_pat`, and its subparts are being
41 // copied or moved depending on `mode`. Note that `matched_pat`
42 // is called on all variant/structs in the pattern (i.e., the
43 // interior nodes of the pattern's tree structure) while
44 // consume_pat is called on the binding identifiers in the pattern
45 // (which are leaves of the pattern's tree structure).
47 // Note that variants/structs and identifiers are disjoint; thus
48 // `matched_pat` and `consume_pat` are never both called on the
49 // same input pattern structure (though of course `consume_pat` can be
50 // called on a subpart of an input passed to `matched_pat`).
51 fn matched_pat(&mut self,
52 matched_pat: &hir::Pat,
56 // The value found at `cmt` is either copied or moved via the
57 // pattern binding `consume_pat`, depending on mode.
58 fn consume_pat(&mut self,
59 consume_pat: &hir::Pat,
63 // The value found at `borrow` is being borrowed at the point
64 // `borrow_id` for the region `loan_region` with kind `bk`.
66 borrow_id: hir::HirId,
69 loan_region: ty::Region<'tcx>,
71 loan_cause: LoanCause);
73 // The local variable `id` is declared but not initialized.
74 fn decl_without_init(&mut self,
78 // The path at `cmt` is being assigned to.
80 assignment_id: hir::HirId,
81 assignment_span: Span,
82 assignee_cmt: &mc::cmt_<'tcx>,
85 // A nested closure or generator - only one layer deep.
// Default is a no-op so existing delegates need not implement it.
86 fn nested_body(&mut self, _body_id: hir::BodyId) {}
// NOTE(review): this derive attaches to an enum whose declaration is not
// visible in this view (original line 90; presumably `LoanCause` given the
// re-export at the top of the file — confirm against the full source).
89 #[derive(Copy, Clone, PartialEq, Debug)]
102 #[derive(Copy, Clone, PartialEq, Debug)]
// Whether a use of a value copies it or moves it out of its location.
103 pub enum ConsumeMode {
104 Copy, // reference to x where x has a type that copies
105 Move(MoveReason), // reference to x where x has a type that moves
108 #[derive(Copy, Clone, PartialEq, Debug)]
// Why a value was moved; variants are elided from this view
// (`DirectRefMove` and `PatBindingMove` are used later in this file).
109 pub enum MoveReason {
// NOTE(review): derive for the following (elided) enum declaration —
// `MatchMode` per the uses of `NonBindingMatch`/`CopyingMatch`/etc. below.
115 #[derive(Copy, Clone, PartialEq, Debug)]
123 #[derive(Copy, Clone, PartialEq, Debug)]
// Accumulator used while scanning a pattern's bindings to decide the
// overall `MatchMode`. Variant list elided here; `Unknown`, `Definite(_)`
// and `Conflicting` are matched on below.
124 enum TrackMatchMode {
130 impl TrackMatchMode {
131 // Builds up the whole match mode for a pattern from its constituent
132 // parts. The lattice looks like this:
148 // * `(_, some_int)` pattern is Copying, since
149 // NonBinding + Copying => Copying
151 // * `(some_int, some_box)` pattern is Moving, since
152 // Copying + Moving => Moving
154 // * `(ref x, some_box)` pattern is Conflicting, since
155 // Borrowing + Moving => Conflicting
157 // Note that the `Unknown` and `Conflicting` states are
158 // represented separately from the other more interesting
159 // `Definite` states, which simplifies logic here somewhat.
// `lub` = least upper bound: folds one more binding's mode into the
// accumulated state according to the lattice described above.
160 fn lub(&mut self, mode: MatchMode) {
161 *self = match (*self, mode) {
162 // Note that clause order below is very significant.
163 (Unknown, new) => Definite(new),
164 (Definite(old), new) if old == new => Definite(old),
166 (Definite(old), NonBindingMatch) => Definite(old),
167 (Definite(NonBindingMatch), new) => Definite(new),
169 (Definite(old), CopyingMatch) => Definite(old),
170 (Definite(CopyingMatch), new) => Definite(new),
172 (Definite(_), _) => Conflicting,
173 (Conflicting, _) => *self,
// Collapses the accumulated state into a final `MatchMode`.
177 fn match_mode(&self) -> MatchMode {
179 Unknown => NonBindingMatch,
180 Definite(mode) => mode,
182 // Conservatively return MovingMatch to let the
183 // compiler continue to make progress.
190 #[derive(Copy, Clone, PartialEq, Debug)]
// How an assignment interacts with its target's previous value.
// (`Init` and `JustWrite` variants are elided here but used below.)
191 pub enum MutateMode {
194 WriteAndRead, // x += y
197 #[derive(Copy, Clone)]
// Which closure-call trait an overloaded call goes through
// (`Fn`/`FnMut` variants elided from this view; see `from_trait_id`).
198 enum OverloadedCallType {
201 FnOnceOverloadedCall,
204 impl OverloadedCallType {
// Maps the `DefId` of one of the `Fn`/`FnMut`/`FnOnce` lang-item traits
// to the corresponding call type; ICEs (`bug!`) for any other trait.
205 fn from_trait_id(tcx: TyCtxt<'_, '_, '_>, trait_id: DefId) -> OverloadedCallType {
206 for &(maybe_function_trait, overloaded_call_type) in &[
207 (tcx.lang_items().fn_once_trait(), FnOnceOverloadedCall),
208 (tcx.lang_items().fn_mut_trait(), FnMutOverloadedCall),
209 (tcx.lang_items().fn_trait(), FnOverloadedCall)
211 match maybe_function_trait {
212 Some(function_trait) if function_trait == trait_id => {
213 return overloaded_call_type
219 bug!("overloaded call didn't map to known function trait")
// Classifies an overloaded call via the trait that contains `method_id`
// (i.e., the container of the associated `call`/`call_mut`/`call_once`).
222 fn from_method_id(tcx: TyCtxt<'_, '_, '_>, method_id: DefId) -> OverloadedCallType {
223 let method = tcx.associated_item(method_id);
224 OverloadedCallType::from_trait_id(tcx, method.container.id())
228 ///////////////////////////////////////////////////////////////////////////
229 // The ExprUseVisitor type
231 // This is the code that actually walks the tree.
232 pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Memory-categorization context: computes the `cmt` for each expression.
233 mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
// Receiver of the consume/borrow/mutate callbacks as the body is walked.
234 delegate: &'a mut dyn Delegate<'tcx>,
// Used for trait lookups, notably `Copy` checks in `copy_or_move`.
235 param_env: ty::ParamEnv<'tcx>,
238 // If the MC results in an error, it's because the type check
239 // failed (or will fail, when the error is uncovered and reported
240 // during writeback). In this case, we just ignore this part of the
243 // Note that this macro appears similar to try!(), but, unlike try!(),
244 // it does not propagate the error.
// Unwraps a `McResult`; on `Err` it logs and returns `()` from the
// *enclosing function* rather than propagating the error (per the
// comment above: unlike `try!`, the error is swallowed because a
// categorization failure means typeck has/will report an error).
245 macro_rules! return_if_err {
250 debug!("mc reported err");
257 impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx, 'tcx> {
258 /// Creates the ExprUseVisitor, configuring it with the various options provided:
260 /// - `delegate` -- who receives the callbacks
261 /// - `param_env` --- parameter environment for trait lookups (esp. pertaining to `Copy`)
262 /// - `region_scope_tree` --- region scope tree for the code being analyzed
263 /// - `tables` --- typeck results for the code being analyzed
264 /// - `rvalue_promotable_map` --- if you care about rvalue promotion, then provide
265 /// the map here (it can be computed with `tcx.rvalue_promotable_map(def_id)`).
266 /// `None` means that rvalues will be given more conservative lifetimes.
268 /// See also `with_infer`, which is used *during* typeck.
269 pub fn new(delegate: &'a mut (dyn Delegate<'tcx>+'a),
270 tcx: TyCtxt<'a, 'tcx, 'tcx>,
271 param_env: ty::ParamEnv<'tcx>,
272 region_scope_tree: &'a region::ScopeTree,
273 tables: &'a ty::TypeckTables<'tcx>,
274 rvalue_promotable_map: Option<&'tcx ItemLocalSet>)
// Builds the mem-categorization context from the global tcx (post-typeck).
278 mc: mc::MemCategorizationContext::new(tcx,
281 rvalue_promotable_map),
288 impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
// Like `new`, but for use *during* typeck: categorization goes through
// the inference context instead of final typeck results.
289 pub fn with_infer(delegate: &'a mut (dyn Delegate<'tcx>+'a),
290 infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
291 param_env: ty::ParamEnv<'tcx>,
292 region_scope_tree: &'a region::ScopeTree,
293 tables: &'a ty::TypeckTables<'tcx>)
297 mc: mc::MemCategorizationContext::with_infer(infcx, region_scope_tree, tables),
// Entry point: walks an entire fn body. Each argument is treated as an
// rvalue scoped to the body, bound via its (irrefutable) pattern; then
// the body's tail expression is consumed.
303 pub fn consume_body(&mut self, body: &hir::Body) {
304 debug!("consume_body(body={:?})", body);
306 for arg in &body.arguments {
307 let arg_ty = return_if_err!(self.mc.pat_ty_adjusted(&arg.pat));
308 debug!("consume_body: arg_ty = {:?}", arg_ty);
310 let fn_body_scope_r =
311 self.tcx().mk_region(ty::ReScope(
313 id: body.value.hir_id.local_id,
314 data: region::ScopeData::Node
316 let arg_cmt = Rc::new(self.mc.cat_rvalue(
319 fn_body_scope_r, // Args live only as long as the fn body.
322 self.walk_irrefutable_pat(arg_cmt, &arg.pat);
325 self.consume_expr(&body.value);
328 fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
// Decides copy-vs-move for `cmt` and reports a `consume` to the delegate.
332 fn delegate_consume(&mut self,
333 consume_id: hir::HirId,
335 cmt: &mc::cmt_<'tcx>) {
336 debug!("delegate_consume(consume_id={}, cmt={:?})",
339 let mode = copy_or_move(&self.mc, self.param_env, cmt, DirectRefMove);
340 self.delegate.consume(consume_id, consume_span, cmt, mode);
// Convenience: consume each expression in a slice.
343 fn consume_exprs(&mut self, exprs: &[hir::Expr]) {
345 self.consume_expr(&expr);
// Reports the expression's value as consumed, then walks its subparts.
349 pub fn consume_expr(&mut self, expr: &hir::Expr) {
350 debug!("consume_expr(expr={:?})", expr);
352 let cmt = return_if_err!(self.mc.cat_expr(expr));
353 self.delegate_consume(expr.hir_id, expr.span, &cmt);
354 self.walk_expr(expr);
// Reports `expr` as mutated (attributed to `assignment_expr`), then walks it.
357 fn mutate_expr(&mut self,
359 assignment_expr: &hir::Expr,
362 let cmt = return_if_err!(self.mc.cat_expr(expr));
363 self.delegate.mutate(assignment_expr.hir_id, span, &cmt, mode);
364 self.walk_expr(expr);
// Reports `expr` as borrowed with region `r` and kind `bk`, then walks it.
367 fn borrow_expr(&mut self,
372 debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
375 let cmt = return_if_err!(self.mc.cat_expr(expr));
376 self.delegate.borrow(expr.hir_id, expr.span, &cmt, r, bk, cause);
// Walks an expression used as the base of a projection (deref/field/index).
381 fn select_from_expr(&mut self, expr: &hir::Expr) {
// Walks `expr` without consuming its own value: first fires any
// consume/borrow callbacks implied by auto-adjustments, then dispatches
// on the expression kind to visit subexpressions with the appropriate
// use (consume, select, borrow, mutate...).
385 pub fn walk_expr(&mut self, expr: &hir::Expr) {
386 debug!("walk_expr(expr={:?})", expr);
388 self.walk_adjustment(expr);
391 hir::ExprKind::Path(_) => { }
393 hir::ExprKind::Type(ref subexpr, _) => {
394 self.walk_expr(&subexpr)
397 hir::ExprKind::Unary(hir::UnDeref, ref base) => { // *base
398 self.select_from_expr(&base);
401 hir::ExprKind::Field(ref base, _) => { // base.f
402 self.select_from_expr(&base);
405 hir::ExprKind::Index(ref lhs, ref rhs) => { // lhs[rhs]
406 self.select_from_expr(&lhs);
407 self.consume_expr(&rhs);
410 hir::ExprKind::Call(ref callee, ref args) => { // callee(args)
411 self.walk_callee(expr, &callee);
412 self.consume_exprs(args);
415 hir::ExprKind::MethodCall(.., ref args) => { // callee.m(args)
416 self.consume_exprs(args);
419 hir::ExprKind::Struct(_, ref fields, ref opt_with) => {
420 self.walk_struct_expr(fields, opt_with);
423 hir::ExprKind::Tup(ref exprs) => {
424 self.consume_exprs(exprs);
427 hir::ExprKind::If(ref cond_expr, ref then_expr, ref opt_else_expr) => {
428 self.consume_expr(&cond_expr);
429 self.walk_expr(&then_expr);
430 if let Some(ref else_expr) = *opt_else_expr {
431 self.consume_expr(&else_expr);
435 hir::ExprKind::Match(ref discr, ref arms, _) => {
436 let discr_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&discr)));
437 let r = self.tcx().lifetimes.re_empty;
// The discriminant is only *inspected* here (immutable borrow); full
438 self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant);
440 // treatment of the discriminant is handled while walking the arms.
// Two passes per arm: first compute the arm's overall match mode,
// then walk its patterns/guard/body with that mode.
442 let mode = self.arm_move_mode(discr_cmt.clone(), arm);
443 let mode = mode.match_mode();
444 self.walk_arm(discr_cmt.clone(), arm, mode);
448 hir::ExprKind::Array(ref exprs) => {
449 self.consume_exprs(exprs);
452 hir::ExprKind::AddrOf(m, ref base) => { // &base
453 // make sure that the thing we are pointing out stays valid
454 // for the lifetime `scope_r` of the resulting ptr:
455 let expr_ty = return_if_err!(self.mc.expr_ty(expr));
456 if let ty::Ref(r, _, _) = expr_ty.sty {
457 let bk = ty::BorrowKind::from_mutbl(m);
458 self.borrow_expr(&base, r, bk, AddrOf);
462 hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => {
463 for (o, output) in ia.outputs.iter().zip(outputs) {
465 self.consume_expr(output);
// Read-write asm outputs both read and write the operand; plain
// outputs only write it.
472 MutateMode::WriteAndRead
474 MutateMode::JustWrite
479 self.consume_exprs(inputs);
482 hir::ExprKind::Continue(..) |
483 hir::ExprKind::Lit(..) |
484 hir::ExprKind::Err => {}
486 hir::ExprKind::Loop(ref blk, _, _) => {
487 self.walk_block(&blk);
490 hir::ExprKind::While(ref cond_expr, ref blk, _) => {
491 self.consume_expr(&cond_expr);
492 self.walk_block(&blk);
495 hir::ExprKind::Unary(_, ref lhs) => {
496 self.consume_expr(&lhs);
499 hir::ExprKind::Binary(_, ref lhs, ref rhs) => {
500 self.consume_expr(&lhs);
501 self.consume_expr(&rhs);
504 hir::ExprKind::Block(ref blk, _) => {
505 self.walk_block(&blk);
508 hir::ExprKind::Break(_, ref opt_expr) | hir::ExprKind::Ret(ref opt_expr) => {
509 if let Some(ref expr) = *opt_expr {
510 self.consume_expr(&expr);
514 hir::ExprKind::Assign(ref lhs, ref rhs) => {
515 self.mutate_expr(expr.span, expr, &lhs, MutateMode::JustWrite);
516 self.consume_expr(&rhs);
519 hir::ExprKind::Cast(ref base, _) => {
520 self.consume_expr(&base);
523 hir::ExprKind::DropTemps(ref expr) => {
524 self.consume_expr(&expr);
527 hir::ExprKind::AssignOp(_, ref lhs, ref rhs) => {
// Overloaded `+=` etc. desugars to a method call, which consumes
// (auto-borrows) the LHS rather than writing it directly.
528 if self.mc.tables.is_method_call(expr) {
529 self.consume_expr(lhs);
531 self.mutate_expr(expr.span, expr, &lhs, MutateMode::WriteAndRead);
533 self.consume_expr(&rhs);
536 hir::ExprKind::Repeat(ref base, _) => {
537 self.consume_expr(&base);
540 hir::ExprKind::Closure(_, _, body_id, fn_decl_span, _) => {
541 self.delegate.nested_body(body_id);
542 self.walk_captures(expr, fn_decl_span);
545 hir::ExprKind::Box(ref base) => {
546 self.consume_expr(&base);
549 hir::ExprKind::Yield(ref value) => {
550 self.consume_expr(&value);
// Walks the callee of a call expression. Plain fn items/pointers are
// simply consumed; overloaded calls (closures etc.) borrow or consume
// the callee according to which of Fn/FnMut/FnOnce is being invoked.
555 fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
556 let callee_ty = return_if_err!(self.mc.expr_ty_adjusted(callee));
557 debug!("walk_callee: callee={:?} callee_ty={:?}",
559 match callee_ty.sty {
560 ty::FnDef(..) | ty::FnPtr(_) => {
561 self.consume_expr(callee);
565 if let Some(def_id) = self.mc.tables.type_dependent_def_id(call.hir_id) {
// The borrow of the callee lasts for the call expression itself.
566 let call_scope = region::Scope {
567 id: call.hir_id.local_id,
568 data: region::ScopeData::Node
570 match OverloadedCallType::from_method_id(self.tcx(), def_id) {
571 FnMutOverloadedCall => {
572 let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope));
573 self.borrow_expr(callee,
578 FnOverloadedCall => {
579 let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope));
580 self.borrow_expr(callee,
585 FnOnceOverloadedCall => self.consume_expr(callee),
// Missing type-dependent def: typeck will report the real error,
// so only delay a bug rather than ICE immediately.
588 self.tcx().sess.delay_span_bug(call.span,
589 "no type-dependent def for overloaded call");
// Dispatches on statement kind: locals, nested items (skipped), and
// expression/semi statements (consumed).
595 fn walk_stmt(&mut self, stmt: &hir::Stmt) {
597 hir::StmtKind::Local(ref local) => {
598 self.walk_local(&local);
601 hir::StmtKind::Item(_) => {
602 // we don't visit nested items in this visitor,
603 // only the fn body we were given.
606 hir::StmtKind::Expr(ref expr) |
607 hir::StmtKind::Semi(ref expr) => {
608 self.consume_expr(&expr);
// Walks a `let`: without an initializer, each binding is reported as
// declared-uninitialized; with one, the initializer is walked and then
// bound through the (irrefutable) pattern.
613 fn walk_local(&mut self, local: &hir::Local) {
616 local.pat.each_binding(|_, hir_id, span, _| {
617 self.delegate.decl_without_init(hir_id, span);
622 // Variable declarations with
623 // initializers are considered
624 // "assigns", which is handled by
// `walk_pat` reporting a `mutate` with `MutateMode::Init` per binding.
626 self.walk_expr(&expr);
627 let init_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&expr)));
628 self.walk_irrefutable_pat(init_cmt, &local.pat);
633 /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
634 /// depending on its type.
635 fn walk_block(&mut self, blk: &hir::Block) {
636 debug!("walk_block(blk.hir_id={})", blk.hir_id);
638 for stmt in &blk.stmts {
639 self.walk_stmt(stmt);
// The tail expression (if any) is the block's value, hence consumed.
642 if let Some(ref tail_expr) = blk.expr {
643 self.consume_expr(&tail_expr);
// Walks a struct literal: consumes each explicit field, and for a
// functional-update base (`..base`) consumes only the fields of `base`
// that the literal does not mention.
647 fn walk_struct_expr(&mut self,
648 fields: &[hir::Field],
649 opt_with: &Option<P<hir::Expr>>) {
650 // Consume the expressions supplying values for each field.
651 for field in fields {
652 self.consume_expr(&field.expr);
655 let with_expr = match *opt_with {
660 let with_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&with_expr)));
662 // Select just those fields of the `with`
663 // expression that will actually be used
664 match with_cmt.ty.sty {
665 ty::Adt(adt, substs) if adt.is_struct() => {
666 // Consume those fields of the with expression that are needed.
667 for (f_index, with_field) in adt.non_enum_variant().fields.iter().enumerate() {
668 let is_mentioned = fields.iter().any(|f| {
669 self.tcx().field_index(f.hir_id, self.mc.tables) == f_index
672 let cmt_field = self.mc.cat_field(
677 with_field.ty(self.tcx(), substs)
679 self.delegate_consume(with_expr.hir_id, with_expr.span, &cmt_field);
684 // the base expression should always evaluate to a
685 // struct; however, when EUV is run during typeck, it
686 // may not. This will generate an error earlier in typeck,
687 // so we can just ignore it.
688 if !self.tcx().sess.has_errors() {
691 "with expression doesn't evaluate to a struct");
696 // walk the with expression so that complex expressions
697 // are properly handled.
698 self.walk_expr(with_expr);
701 // Invoke the appropriate delegate calls for anything that gets
702 // consumed or borrowed as part of the automatic adjustment
// chain recorded in the typeck tables for `expr`.
704 fn walk_adjustment(&mut self, expr: &hir::Expr) {
705 let adjustments = self.mc.tables.expr_adjustments(expr);
706 let mut cmt = return_if_err!(self.mc.cat_expr_unadjusted(expr));
707 for adjustment in adjustments {
708 debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
709 match adjustment.kind {
710 adjustment::Adjust::NeverToAny |
711 adjustment::Adjust::Pointer(_) => {
712 // Creating a closure/fn-pointer or unsizing consumes
713 // the input and stores it into the resulting rvalue.
714 self.delegate_consume(expr.hir_id, expr.span, &cmt);
// Built-in derefs read through the pointer without any callback.
717 adjustment::Adjust::Deref(None) => {}
719 // Autoderefs for overloaded Deref calls in fact reference
720 // their receiver. That is, if we have `(*x)` where `x`
721 // is of type `Rc<T>`, then this in fact is equivalent to
722 // `x.deref()`. Since `deref()` is declared with `&self`,
723 // this is an autoref of `x`.
724 adjustment::Adjust::Deref(Some(ref deref)) => {
725 let bk = ty::BorrowKind::from_mutbl(deref.mutbl);
726 self.delegate.borrow(expr.hir_id, expr.span, &cmt, deref.region, bk, AutoRef);
729 adjustment::Adjust::Borrow(ref autoref) => {
730 self.walk_autoref(expr, &cmt, autoref);
// Re-categorize for the next step so each adjustment sees the
// result of the previous one.
733 cmt = return_if_err!(self.mc.cat_expr_adjusted(expr, cmt, &adjustment));
737 /// Walks the autoref `autoref` applied to the autoderef'd
738 /// `expr`. `cmt_base` is the mem-categorized form of `expr`
739 /// after all relevant autoderefs have occurred.
740 fn walk_autoref(&mut self,
742 cmt_base: &mc::cmt_<'tcx>,
743 autoref: &adjustment::AutoBorrow<'tcx>) {
744 debug!("walk_autoref(expr.hir_id={} cmt_base={:?} autoref={:?})",
750 adjustment::AutoBorrow::Ref(r, m) => {
751 self.delegate.borrow(expr.hir_id,
755 ty::BorrowKind::from_mutbl(m.into()),
759 adjustment::AutoBorrow::RawPtr(m) => {
760 debug!("walk_autoref: expr.hir_id={} cmt_base={:?}",
764 // Converting from a &T to *T (or &mut T to *mut T) is
765 // treated as borrowing it for the enclosing temporary
// scope, i.e. the node scope of `expr` itself.
767 let r = self.tcx().mk_region(ty::ReScope(
769 id: expr.hir_id.local_id,
770 data: region::ScopeData::Node
773 self.delegate.borrow(expr.hir_id,
777 ty::BorrowKind::from_mutbl(m),
// Folds the move/copy/borrow modes of all patterns of one match arm
// into a single `TrackMatchMode` (see the lattice on `lub`).
783 fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
784 let mut mode = Unknown;
785 for pat in &arm.pats {
786 self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
// Second pass over an arm: walks each pattern with the precomputed
// `mode`, then the guard (if any) and the arm body.
791 fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
792 for pat in &arm.pats {
793 self.walk_pat(discr_cmt.clone(), &pat, mode);
796 if let Some(hir::Guard::If(ref e)) = arm.guard {
800 self.consume_expr(&arm.body);
803 /// Walks a pat that occurs in isolation (i.e., top-level of fn argument or
804 /// let binding, and *not* a match arm or nested pat.)
805 fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
806 let mut mode = Unknown;
807 self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
808 let mode = mode.match_mode();
809 self.walk_pat(cmt_discr, pat, mode);
812 /// Identifies any bindings within `pat` and accumulates within
813 /// `mode` whether the overall pattern/match structure is a move,
// copy, or borrow (no callbacks are fired in this pass).
815 fn determine_pat_move_mode(&mut self,
816 cmt_discr: mc::cmt<'tcx>,
818 mode: &mut TrackMatchMode) {
819 debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr, pat);
821 return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |cmt_pat, pat| {
822 if let PatKind::Binding(..) = pat.node {
823 let bm = *self.mc.tables.pat_binding_modes()
825 .expect("missing binding mode");
827 ty::BindByReference(..) =>
828 mode.lub(BorrowingMatch),
829 ty::BindByValue(..) => {
830 match copy_or_move(&self.mc, self.param_env, &cmt_pat, PatBindingMove) {
831 Copy => mode.lub(CopyingMatch),
832 Move(..) => mode.lub(MovingMatch),
840 /// The core driver for walking a pattern; `match_mode` must be
841 /// established up front, e.g., via `determine_pat_move_mode` (see
842 /// also `walk_irrefutable_pat` for patterns that stand alone).
843 fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
844 debug!("walk_pat(cmt_discr={:?}, pat={:?})", cmt_discr, pat);
846 let tcx = self.tcx();
// Destructure `self` so `mc` (shared) and `delegate` (mutable) can be
// borrowed simultaneously inside the `cat_pattern` closures below.
847 let ExprUseVisitor { ref mc, ref mut delegate, param_env } = *self;
848 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |cmt_pat, pat| {
849 if let PatKind::Binding(_, canonical_id, ..) = pat.node {
851 "walk_pat: binding cmt_pat={:?} pat={:?} match_mode={:?}",
856 if let Some(&bm) = mc.tables.pat_binding_modes().get(pat.hir_id) {
857 debug!("walk_pat: pat.hir_id={:?} bm={:?}", pat.hir_id, bm);
859 // pat_ty: the type of the binding being produced.
860 let pat_ty = return_if_err!(mc.node_ty(pat.hir_id));
861 debug!("walk_pat: pat_ty={:?}", pat_ty);
863 // Each match binding is effectively an assignment to the
864 // binding being produced.
865 let def = Res::Local(canonical_id);
866 if let Ok(ref binding_cmt) = mc.cat_res(pat.hir_id, pat.span, pat_ty, def) {
867 delegate.mutate(pat.hir_id, pat.span, binding_cmt, MutateMode::Init);
870 // It is also a borrow or copy/move of the value being matched.
872 ty::BindByReference(m) => {
873 if let ty::Ref(r, _, _) = pat_ty.sty {
874 let bk = ty::BorrowKind::from_mutbl(m);
875 delegate.borrow(pat.hir_id, pat.span, &cmt_pat, r, bk, RefBinding);
878 ty::BindByValue(..) => {
879 let mode = copy_or_move(mc, param_env, &cmt_pat, PatBindingMove);
880 debug!("walk_pat binding consuming pat");
881 delegate.consume_pat(pat, &cmt_pat, mode);
// Missing binding mode: typeck will report an error, so just delay.
885 tcx.sess.delay_span_bug(pat.span, "missing binding mode");
890 // Do a second pass over the pattern, calling `matched_pat` on
891 // the interior nodes (enum variants and structs), as opposed
892 // to the first pass's visit of the bindings that form
893 // the leaves of the pattern tree structure.
894 return_if_err!(mc.cat_pattern(cmt_discr, pat, |cmt_pat, pat| {
895 let qpath = match pat.node {
896 PatKind::Path(ref qpath) |
897 PatKind::TupleStruct(ref qpath, ..) |
898 PatKind::Struct(ref qpath, ..) => qpath,
901 let res = mc.tables.qpath_res(qpath, pat.hir_id);
903 Res::Def(DefKind::Ctor(CtorOf::Variant, ..), variant_ctor_did) => {
// A variant *constructor*: step up to the enclosing variant for
// the downcast.
904 let variant_did = mc.tcx.parent(variant_ctor_did).unwrap();
905 let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did);
907 debug!("variantctor downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
908 delegate.matched_pat(pat, &downcast_cmt, match_mode);
910 Res::Def(DefKind::Variant, variant_did) => {
911 let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did);
913 debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
914 delegate.matched_pat(pat, &downcast_cmt, match_mode);
916 Res::Def(DefKind::Struct, _)
917 | Res::Def(DefKind::Ctor(..), _)
918 | Res::Def(DefKind::Union, _)
919 | Res::Def(DefKind::TyAlias, _)
920 | Res::Def(DefKind::AssociatedTy, _)
921 | Res::SelfTy(..) => {
// Structs (and struct-like paths) need no downcast.
922 debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
923 delegate.matched_pat(pat, &cmt_pat, match_mode);
// Reports the consume/borrow implied by each upvar captured by a
// closure/generator expression: by-value captures are consumed
// (copy or move), by-ref captures are borrowed.
930 fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
931 debug!("walk_captures({:?})", closure_expr);
933 let closure_def_id = self.tcx().hir().local_def_id_from_hir_id(closure_expr.hir_id);
934 if let Some(upvars) = self.tcx().upvars(closure_def_id) {
935 for upvar in upvars.iter() {
936 let var_hir_id = upvar.var_id();
937 let upvar_id = ty::UpvarId {
938 var_path: ty::UpvarPath { hir_id: var_hir_id },
939 closure_expr_id: closure_def_id.to_local(),
941 let upvar_capture = self.mc.tables.upvar_capture(upvar_id);
942 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.hir_id,
945 match upvar_capture {
946 ty::UpvarCapture::ByValue => {
947 let mode = copy_or_move(&self.mc,
951 self.delegate.consume(closure_expr.hir_id, upvar.span, &cmt_var, mode);
953 ty::UpvarCapture::ByRef(upvar_borrow) => {
954 self.delegate.borrow(closure_expr.hir_id,
959 ClosureCapture(upvar.span));
// Categorizes a captured variable as seen from *outside* the closure.
966 fn cat_captured_var(&mut self,
967 closure_hir_id: hir::HirId,
970 -> mc::McResult<mc::cmt_<'tcx>> {
971 // Create the cmt for the variable being borrowed, from the
972 // caller's perspective
973 let var_hir_id = upvar.var_id();
974 let var_ty = self.mc.node_ty(var_hir_id)?;
975 self.mc.cat_res(closure_hir_id, closure_span, var_ty, upvar.res)
979 fn copy_or_move<'a, 'gcx, 'tcx>(mc: &mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
980 param_env: ty::ParamEnv<'tcx>,
981 cmt: &mc::cmt_<'tcx>,
982 move_reason: MoveReason)
985 if !mc.type_is_copy_modulo_regions(param_env, cmt.ty, cmt.span) {