]> git.lizzy.rs Git - rust.git/blob - src/librustc/middle/expr_use_visitor.rs
82e7d972c579a0cb5ff9607ce3cdff359cb617dc
[rust.git] / src / librustc / middle / expr_use_visitor.rs
1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! A different sort of visitor for walking fn bodies.  Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
14
15 pub use self::LoanCause::*;
16 pub use self::ConsumeMode::*;
17 pub use self::MoveReason::*;
18 pub use self::MatchMode::*;
19 use self::TrackMatchMode::*;
20 use self::OverloadedCallType::*;
21
22 use hir::def::Def;
23 use hir::def_id::{DefId};
24 use infer::InferCtxt;
25 use middle::mem_categorization as mc;
26 use ty::{self, TyCtxt, adjustment};
27
28 use hir::{self, PatKind};
29
30 use syntax::ast;
31 use syntax::ptr::P;
32 use syntax_pos::Span;
33
34 ///////////////////////////////////////////////////////////////////////////
35 // The Delegate trait
36
37 /// This trait defines the callbacks you can expect to receive when
38 /// employing the ExprUseVisitor.
pub trait Delegate<'tcx> {
    /// The value found at `cmt` is either copied or moved, depending
    /// on `mode`.
    fn consume(&mut self,
               consume_id: ast::NodeId,
               consume_span: Span,
               cmt: mc::cmt<'tcx>,
               mode: ConsumeMode);

    /// The value found at `cmt` has been determined to match the
    /// pattern binding `matched_pat`, and its subparts are being
    /// copied or moved depending on `mode`.  Note that `matched_pat`
    /// is called on all variant/structs in the pattern (i.e., the
    /// interior nodes of the pattern's tree structure) while
    /// `consume_pat` is called on the binding identifiers in the pattern
    /// (which are leaves of the pattern's tree structure).
    ///
    /// Note that variants/structs and identifiers are disjoint; thus
    /// `matched_pat` and `consume_pat` are never both called on the
    /// same input pattern structure (though `consume_pat` can be
    /// called on a subpart of an input passed to `matched_pat`).
    fn matched_pat(&mut self,
                   matched_pat: &hir::Pat,
                   cmt: mc::cmt<'tcx>,
                   mode: MatchMode);

    /// The value found at `cmt` is either copied or moved via the
    /// pattern binding `consume_pat`, depending on `mode`.
    fn consume_pat(&mut self,
                   consume_pat: &hir::Pat,
                   cmt: mc::cmt<'tcx>,
                   mode: ConsumeMode);

    /// The value found at `cmt` is being borrowed at the point
    /// `borrow_id` for the region `loan_region` with kind `bk`.
    fn borrow(&mut self,
              borrow_id: ast::NodeId,
              borrow_span: Span,
              cmt: mc::cmt<'tcx>,
              loan_region: &'tcx ty::Region,
              bk: ty::BorrowKind,
              loan_cause: LoanCause);

    /// The local variable `id` is declared but not initialized.
    fn decl_without_init(&mut self,
                         id: ast::NodeId,
                         span: Span);

    /// The path at `assignee_cmt` is being assigned to (see `MutateMode`
    /// for whether the old value is also read).
    fn mutate(&mut self,
              assignment_id: ast::NodeId,
              assignment_span: Span,
              assignee_cmt: mc::cmt<'tcx>,
              mode: MutateMode);
}
94
/// Why a value ended up being borrowed; passed through to
/// `Delegate::borrow` for diagnostics.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum LoanCause {
    /// Captured by-reference into a closure (span of the capture).
    ClosureCapture(Span),
    /// Explicit `&`/`&mut` expression.
    AddrOf,
    /// Implicit reference introduced by an autoref adjustment or an
    /// overloaded `Deref` call.
    AutoRef,
    /// `&T -> *const T` (or `&mut T -> *mut T`) coercion to a raw pointer.
    AutoUnsafe,
    /// `ref` binding in a pattern.
    RefBinding,
    /// Operand of an overloaded operator that takes its argument by reference.
    OverloadedOperator,
    /// Receiver borrowed to invoke a `Fn`/`FnMut` closure.
    ClosureInvocation,
    /// Borrow introduced by `for`-loop desugaring.
    ForLoop,
    /// Implicit borrow of a `match` discriminant.
    MatchDiscriminant
}
107
/// Whether "consuming" a value copies it or moves it.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ConsumeMode {
    Copy,                // reference to x where x has a type that copies
    Move(MoveReason),    // reference to x where x has a type that moves
}
113
/// The syntactic context that caused a move (used in `ConsumeMode::Move`).
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveReason {
    /// The value was moved by directly referencing it in an expression.
    DirectRefMove,
    /// The value was moved through a pattern binding.
    PatBindingMove,
    /// The value was moved into a closure by a by-value capture.
    CaptureMove,
}
120
/// The overall effect a pattern match has on the value it matches
/// against, computed as the least upper bound over all bindings in
/// the pattern (see `TrackMatchMode`).
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MatchMode {
    NonBindingMatch,
    BorrowingMatch,
    CopyingMatch,
    MovingMatch,
}
128
/// Accumulator used while walking a pattern to infer its overall
/// `MatchMode`; see the lattice diagram on `TrackMatchMode::lub`.
#[derive(Copy, Clone, PartialEq, Debug)]
enum TrackMatchMode {
    /// No binding has been seen yet.
    Unknown,
    /// All bindings seen so far agree on this mode.
    Definite(MatchMode),
    /// Incompatible modes were combined (e.g. borrowing + moving).
    Conflicting,
}
135
impl TrackMatchMode {
    // Builds up the whole match mode for a pattern from its constituent
    // parts.  The lattice looks like this:
    //
    //          Conflicting
    //            /     \
    //           /       \
    //      Borrowing   Moving
    //           \       /
    //            \     /
    //            Copying
    //               |
    //          NonBinding
    //               |
    //            Unknown
    //
    // examples:
    //
    // * `(_, some_int)` pattern is Copying, since
    //   NonBinding + Copying => Copying
    //
    // * `(some_int, some_box)` pattern is Moving, since
    //   Copying + Moving => Moving
    //
    // * `(ref x, some_box)` pattern is Conflicting, since
    //   Borrowing + Moving => Conflicting
    //
    // Note that the `Unknown` and `Conflicting` states are
    // represented separately from the other more interesting
    // `Definite` states, which simplifies logic here somewhat.
    /// Folds `mode` into the accumulated mode: `*self = lub(*self, mode)`
    /// in the lattice drawn above.
    fn lub(&mut self, mode: MatchMode) {
        *self = match (*self, mode) {
            // Note that clause order below is very significant: each arm
            // relies on the earlier, more specific arms having not matched.
            (Unknown, new) => Definite(new),
            (Definite(old), new) if old == new => Definite(old),

            (Definite(old), NonBindingMatch) => Definite(old),
            (Definite(NonBindingMatch), new) => Definite(new),

            (Definite(old), CopyingMatch) => Definite(old),
            (Definite(CopyingMatch), new) => Definite(new),

            // Two distinct modes above Copying (Borrowing vs Moving).
            (Definite(_), _) => Conflicting,
            // Conflicting is the top element: absorbs everything.
            (Conflicting, _) => *self,
        };
    }

    /// Collapses the accumulator to a final `MatchMode`.
    fn match_mode(&self) -> MatchMode {
        match *self {
            Unknown => NonBindingMatch,
            Definite(mode) => mode,
            Conflicting => {
                // Conservatively return MovingMatch to let the
                // compiler continue to make progress.
                MovingMatch
            }
        }
    }
}
195
/// Whether an assignment initializes, overwrites, or read-modifies its target.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MutateMode {
    Init,
    JustWrite,    // x = y
    WriteAndRead, // x += y
}
202
/// Which function-call trait (`Fn`, `FnMut`, `FnOnce`) an overloaded
/// call expression dispatches through.
#[derive(Copy, Clone)]
enum OverloadedCallType {
    FnOverloadedCall,
    FnMutOverloadedCall,
    FnOnceOverloadedCall,
}
209
210 impl OverloadedCallType {
211     fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType {
212         for &(maybe_function_trait, overloaded_call_type) in &[
213             (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
214             (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
215             (tcx.lang_items.fn_trait(), FnOverloadedCall)
216         ] {
217             match maybe_function_trait {
218                 Some(function_trait) if function_trait == trait_id => {
219                     return overloaded_call_type
220                 }
221                 _ => continue,
222             }
223         }
224
225         bug!("overloaded call didn't map to known function trait")
226     }
227
228     fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType {
229         let method = tcx.associated_item(method_id);
230         OverloadedCallType::from_trait_id(tcx, method.container.id())
231     }
232 }
233
234 ///////////////////////////////////////////////////////////////////////////
235 // The ExprUseVisitor type
236 //
237 // This is the code that actually walks the tree. Like
238 // mem_categorization, it requires a TYPER, which is a type that
239 // supplies types from the tree. After type checking is complete, you
240 // can just use the tcx as the typer.
pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    /// Categorizes expressions into places (lvalues/rvalues) — supplies
    /// the `cmt`s passed to the delegate.
    mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
    /// Receives the callbacks for every consume/borrow/mutate the walk finds.
    delegate: &'a mut Delegate<'tcx>,
}
245
// If the TYPER results in an error, it's because the type check
// failed (or will fail, when the error is uncovered and reported
// during writeback). In this case, we just ignore this part of the
// code.
//
// Note that this macro appears similar to try!(), but, unlike try!(),
// it does not propagate the error.
macro_rules! return_if_err {
    ($inp: expr) => (
        match $inp {
            Ok(v) => v,
            // Early-return from the *enclosing function* (not just the
            // macro), silently abandoning the rest of the walk.
            Err(()) => {
                debug!("mc reported err");
                return
            }
        }
    )
}
264
/// Whether the elements of an overloaded operation are passed by value or by reference
enum PassArgs {
    /// Arguments are consumed (moved/copied) into the operator method.
    ByValue,
    /// Arguments are implicitly borrowed by the operator method.
    ByRef,
}
270
271 impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
    /// Creates a visitor with default mem-categorization options;
    /// see `with_options` for the customizable form.
    pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
               infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
               -> Self
    {
        ExprUseVisitor::with_options(delegate, infcx, mc::MemCategorizationOptions::default())
    }
278
    /// Creates a visitor whose mem-categorization behavior is tuned by
    /// `options`; every use the walk discovers is reported to `delegate`.
    pub fn with_options(delegate: &'a mut (Delegate<'tcx>+'a),
                        infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
                        options: mc::MemCategorizationOptions)
               -> Self
    {
        ExprUseVisitor {
            mc: mc::MemCategorizationContext::with_options(infcx, options),
            delegate: delegate
        }
    }
289
290     pub fn consume_body(&mut self, body: &hir::Body) {
291         debug!("consume_body(body={:?})", body);
292
293         for arg in &body.arguments {
294             let arg_ty = return_if_err!(self.mc.infcx.node_ty(arg.pat.id));
295
296             let fn_body_scope_r = self.tcx().node_scope_region(body.value.id);
297             let arg_cmt = self.mc.cat_rvalue(
298                 arg.id,
299                 arg.pat.span,
300                 fn_body_scope_r, // Args live only as long as the fn body.
301                 fn_body_scope_r,
302                 arg_ty);
303
304             self.walk_irrefutable_pat(arg_cmt, &arg.pat);
305         }
306
307         self.consume_expr(&body.value);
308     }
309
    /// Convenience accessor for the type context behind the inference context.
    fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
        self.mc.infcx.tcx
    }
313
    /// Reports a consume of `cmt` to the delegate, first deciding
    /// whether the consume is a copy or a move based on the value's type.
    fn delegate_consume(&mut self,
                        consume_id: ast::NodeId,
                        consume_span: Span,
                        cmt: mc::cmt<'tcx>) {
        debug!("delegate_consume(consume_id={}, cmt={:?})",
               consume_id, cmt);

        let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove);
        self.delegate.consume(consume_id, consume_span, cmt, mode);
    }
324
325     fn consume_exprs(&mut self, exprs: &[hir::Expr]) {
326         for expr in exprs {
327             self.consume_expr(&expr);
328         }
329     }
330
    /// Reports that `expr`'s value is consumed (copied or moved), then
    /// walks its subexpressions.
    pub fn consume_expr(&mut self, expr: &hir::Expr) {
        debug!("consume_expr(expr={:?})", expr);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate_consume(expr.id, expr.span, cmt);
        self.walk_expr(expr);
    }
338
    /// Reports that `expr` is mutated as part of the assignment
    /// `assignment_expr` (whose id/span are attributed to the mutation),
    /// then walks `expr`'s subexpressions.
    fn mutate_expr(&mut self,
                   assignment_expr: &hir::Expr,
                   expr: &hir::Expr,
                   mode: MutateMode) {
        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
        self.walk_expr(expr);
    }
347
    /// Reports that `expr` is borrowed with kind `bk` for region `r`,
    /// then walks its subexpressions.
    fn borrow_expr(&mut self,
                   expr: &hir::Expr,
                   r: &'tcx ty::Region,
                   bk: ty::BorrowKind,
                   cause: LoanCause) {
        debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
               expr, r, bk);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);

        self.walk_expr(expr)
    }
361
    /// A projection base (e.g. `base` in `base.f` or `*base`) is neither
    /// consumed nor borrowed by itself; just walk into it.
    fn select_from_expr(&mut self, expr: &hir::Expr) {
        self.walk_expr(expr)
    }
365
    /// Walks the interior of `expr`: processes its adjustments, then
    /// dispatches on the expression kind to report how each subexpression
    /// is used.  Note that this does *not* consume `expr`'s own value —
    /// callers that need that use `consume_expr` instead.
    pub fn walk_expr(&mut self, expr: &hir::Expr) {
        debug!("walk_expr(expr={:?})", expr);

        self.walk_adjustment(expr);

        match expr.node {
            // A bare path has no subexpressions; any use of the value it
            // names is reported by the caller (consume/borrow/mutate_expr).
            hir::ExprPath(_) => { }

            hir::ExprType(ref subexpr, _) => {
                self.walk_expr(&subexpr)
            }

            hir::ExprUnary(hir::UnDeref, ref base) => {      // *base
                // An overloaded deref is really a method call that borrows
                // the receiver; a builtin deref just selects through it.
                if !self.walk_overloaded_operator(expr, &base, Vec::new(), PassArgs::ByRef) {
                    self.select_from_expr(&base);
                }
            }

            hir::ExprField(ref base, _) => {         // base.f
                self.select_from_expr(&base);
            }

            hir::ExprTupField(ref base, _) => {         // base.<n>
                self.select_from_expr(&base);
            }

            hir::ExprIndex(ref lhs, ref rhs) => {       // lhs[rhs]
                if !self.walk_overloaded_operator(expr,
                                                  &lhs,
                                                  vec![&rhs],
                                                  PassArgs::ByValue) {
                    self.select_from_expr(&lhs);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprCall(ref callee, ref args) => {    // callee(args)
                self.walk_callee(expr, &callee);
                self.consume_exprs(args);
            }

            hir::ExprMethodCall(.., ref args) => { // callee.m(args)
                self.consume_exprs(args);
            }

            hir::ExprStruct(_, ref fields, ref opt_with) => {
                self.walk_struct_expr(fields, opt_with);
            }

            hir::ExprTup(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprIf(ref cond_expr, ref then_expr, ref opt_else_expr) => {
                self.consume_expr(&cond_expr);
                self.walk_expr(&then_expr);
                if let Some(ref else_expr) = *opt_else_expr {
                    self.consume_expr(&else_expr);
                }
            }

            hir::ExprMatch(ref discr, ref arms, _) => {
                let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
                // The discriminant itself is only immutably borrowed here;
                // the full treatment of the discriminant (copies/moves into
                // bindings) is handled while walking the arms below.
                let r = self.tcx().types.re_empty;
                self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant);

                for arm in arms {
                    // First pass infers each arm's overall match mode,
                    // second pass walks the arm with that mode.
                    let mode = self.arm_move_mode(discr_cmt.clone(), arm);
                    let mode = mode.match_mode();
                    self.walk_arm(discr_cmt.clone(), arm, mode);
                }
            }

            hir::ExprArray(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprAddrOf(m, ref base) => {   // &base
                // make sure that the thing we are pointing out stays valid
                // for the lifetime `scope_r` of the resulting ptr:
                let expr_ty = return_if_err!(self.mc.infcx.node_ty(expr.id));
                // NOTE(review): if `expr_ty` is not a reference type here,
                // presumably typeck has/will report an error, so the borrow
                // is silently skipped.
                if let ty::TyRef(r, _) = expr_ty.sty {
                    let bk = ty::BorrowKind::from_mutbl(m);
                    self.borrow_expr(&base, r, bk, AddrOf);
                }
            }

            hir::ExprInlineAsm(ref ia, ref outputs, ref inputs) => {
                for (o, output) in ia.outputs.iter().zip(outputs) {
                    if o.is_indirect {
                        // Indirect outputs are reads of a pointer value.
                        self.consume_expr(output);
                    } else {
                        self.mutate_expr(expr, output,
                                         if o.is_rw {
                                             MutateMode::WriteAndRead
                                         } else {
                                             MutateMode::JustWrite
                                         });
                    }
                }
                self.consume_exprs(inputs);
            }

            // No subexpressions to walk.
            hir::ExprAgain(..) |
            hir::ExprLit(..) => {}

            hir::ExprLoop(ref blk, _, _) => {
                self.walk_block(&blk);
            }

            hir::ExprWhile(ref cond_expr, ref blk, _) => {
                self.consume_expr(&cond_expr);
                self.walk_block(&blk);
            }

            hir::ExprUnary(op, ref lhs) => {
                let pass_args = if op.is_by_value() {
                    PassArgs::ByValue
                } else {
                    PassArgs::ByRef
                };

                if !self.walk_overloaded_operator(expr, &lhs, Vec::new(), pass_args) {
                    self.consume_expr(&lhs);
                }
            }

            hir::ExprBinary(op, ref lhs, ref rhs) => {
                let pass_args = if op.node.is_by_value() {
                    PassArgs::ByValue
                } else {
                    PassArgs::ByRef
                };

                if !self.walk_overloaded_operator(expr, &lhs, vec![&rhs], pass_args) {
                    self.consume_expr(&lhs);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprBlock(ref blk) => {
                self.walk_block(&blk);
            }

            hir::ExprBreak(_, ref opt_expr) | hir::ExprRet(ref opt_expr) => {
                if let Some(ref expr) = *opt_expr {
                    self.consume_expr(&expr);
                }
            }

            hir::ExprAssign(ref lhs, ref rhs) => {
                self.mutate_expr(expr, &lhs, MutateMode::JustWrite);
                self.consume_expr(&rhs);
            }

            hir::ExprCast(ref base, _) => {
                self.consume_expr(&base);
            }

            hir::ExprAssignOp(op, ref lhs, ref rhs) => {
                // NB All our assignment operations take the RHS by value
                assert!(op.node.is_by_value());

                if !self.walk_overloaded_operator(expr, lhs, vec![rhs], PassArgs::ByValue) {
                    self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprRepeat(ref base, _) => {
                self.consume_expr(&base);
            }

            hir::ExprClosure(.., fn_decl_span) => {
                self.walk_captures(expr, fn_decl_span)
            }

            hir::ExprBox(ref base) => {
                self.consume_expr(&base);
            }
        }
    }
549
    /// Walks the callee of a call expression.  Plain fn items/pointers are
    /// simply consumed; closures are borrowed or consumed according to
    /// which `Fn*` trait the call dispatches through.
    fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
        let callee_ty = return_if_err!(self.mc.infcx.expr_ty_adjusted(callee));
        debug!("walk_callee: callee={:?} callee_ty={:?}",
               callee, callee_ty);
        match callee_ty.sty {
            ty::TyFnDef(..) | ty::TyFnPtr(_) => {
                self.consume_expr(callee);
            }
            // Type error already reported; nothing meaningful to walk.
            ty::TyError => { }
            _ => {
                // Anything else must be an overloaded call (Fn/FnMut/FnOnce),
                // which typeck records as a method call on the call expr.
                let overloaded_call_type =
                    match self.mc.infcx.node_method_id(ty::MethodCall::expr(call.id)) {
                        Some(method_id) => {
                            OverloadedCallType::from_method_id(self.tcx(), method_id)
                        }
                        None => {
                            span_bug!(
                                callee.span,
                                "unexpected callee type {}",
                                callee_ty)
                        }
                    };
                match overloaded_call_type {
                    // `FnMut::call_mut` takes `&mut self`: mutable borrow of
                    // the callee for the duration of the call.
                    FnMutOverloadedCall => {
                        let call_scope_r = self.tcx().node_scope_region(call.id);
                        self.borrow_expr(callee,
                                         call_scope_r,
                                         ty::MutBorrow,
                                         ClosureInvocation);
                    }
                    // `Fn::call` takes `&self`: immutable borrow.
                    FnOverloadedCall => {
                        let call_scope_r = self.tcx().node_scope_region(call.id);
                        self.borrow_expr(callee,
                                         call_scope_r,
                                         ty::ImmBorrow,
                                         ClosureInvocation);
                    }
                    // `FnOnce::call_once` takes `self`: the callee is consumed.
                    FnOnceOverloadedCall => self.consume_expr(callee),
                }
            }
        }
    }
592
593     fn walk_stmt(&mut self, stmt: &hir::Stmt) {
594         match stmt.node {
595             hir::StmtDecl(ref decl, _) => {
596                 match decl.node {
597                     hir::DeclLocal(ref local) => {
598                         self.walk_local(&local);
599                     }
600
601                     hir::DeclItem(_) => {
602                         // we don't visit nested items in this visitor,
603                         // only the fn body we were given.
604                     }
605                 }
606             }
607
608             hir::StmtExpr(ref expr, _) |
609             hir::StmtSemi(ref expr, _) => {
610                 self.consume_expr(&expr);
611             }
612         }
613     }
614
    /// Walks a `let` declaration.  With no initializer, each binding is
    /// reported as declared-but-uninitialized; with one, the initializer
    /// is walked and the pattern consumes/borrows it as appropriate.
    fn walk_local(&mut self, local: &hir::Local) {
        match local.init {
            None => {
                let delegate = &mut self.delegate;
                local.pat.each_binding(|_, id, span, _| {
                    delegate.decl_without_init(id, span);
                })
            }

            Some(ref expr) => {
                // Variable declarations with
                // initializers are considered
                // "assigns", which is handled by
                // `walk_pat`:
                self.walk_expr(&expr);
                let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
                self.walk_irrefutable_pat(init_cmt, &local.pat);
            }
        }
    }
635
    /// Walks every statement of `blk` and, if the block has a tail
    /// expression, consumes it (the block's value is copied or moved
    /// depending on its type).
    fn walk_block(&mut self, blk: &hir::Block) {
        debug!("walk_block(blk.id={})", blk.id);

        for stmt in &blk.stmts {
            self.walk_stmt(stmt);
        }

        if let Some(ref tail_expr) = blk.expr {
            self.consume_expr(&tail_expr);
        }
    }
649
    /// Walks a struct literal `S { f: e, .., ..base }`: consumes each
    /// field initializer, and for a functional-update base, consumes only
    /// those fields of the base that the literal does not override.
    fn walk_struct_expr(&mut self,
                        fields: &[hir::Field],
                        opt_with: &Option<P<hir::Expr>>) {
        // Consume the expressions supplying values for each field.
        for field in fields {
            self.consume_expr(&field.expr);
        }

        let with_expr = match *opt_with {
            Some(ref w) => &**w,
            None => { return; }
        };

        let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));

        // Select just those fields of the `with`
        // expression that will actually be used
        match with_cmt.ty.sty {
            ty::TyAdt(adt, substs) if adt.is_struct() => {
                // Consume those fields of the with expression that are needed.
                for with_field in &adt.struct_variant().fields {
                    if !contains_field_named(with_field, fields) {
                        let cmt_field = self.mc.cat_field(
                            &*with_expr,
                            with_cmt.clone(),
                            with_field.name,
                            with_field.ty(self.tcx(), substs)
                        );
                        self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
                    }
                }
            }
            _ => {
                // the base expression should always evaluate to a
                // struct; however, when EUV is run during typeck, it
                // may not. This will generate an error earlier in typeck,
                // so we can just ignore it.
                if !self.tcx().sess.has_errors() {
                    span_bug!(
                        with_expr.span,
                        "with expression doesn't evaluate to a struct");
                }
            }
        }

        // walk the with expression so that complex expressions
        // are properly handled.
        self.walk_expr(with_expr);

        // Whether the literal explicitly initializes a field with
        // `field.name`, in which case the base's copy is not consumed.
        fn contains_field_named(field: &ty::FieldDef,
                                fields: &[hir::Field])
                                -> bool
        {
            fields.iter().any(
                |f| f.name.node == field.name)
        }
    }
707
    /// Invokes the appropriate delegate calls for anything that gets
    /// consumed or borrowed as part of the automatic adjustment
    /// process (coercions, autoderef/autoref, unsizing).
    fn walk_adjustment(&mut self, expr: &hir::Expr) {
        let infcx = self.mc.infcx;
        //NOTE(@jroesch): mixed RefCell borrow causes crash
        // (so the adjustment is cloned out before the borrow is dropped)
        let adj = infcx.tables.borrow().adjustments.get(&expr.id).map(|x| x.clone());
        if let Some(adjustment) = adj {
            match adjustment.kind {
                adjustment::Adjust::NeverToAny |
                adjustment::Adjust::ReifyFnPointer |
                adjustment::Adjust::UnsafeFnPointer |
                adjustment::Adjust::ClosureFnPointer |
                adjustment::Adjust::MutToConstPointer => {
                    // Creating a closure/fn-pointer or unsizing consumes
                    // the input and stores it into the resulting rvalue.
                    debug!("walk_adjustment: trivial adjustment");
                    let cmt_unadjusted =
                        return_if_err!(self.mc.cat_expr_unadjusted(expr));
                    self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
                }
                adjustment::Adjust::DerefRef { autoderefs, autoref, unsize } => {
                    debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);

                    self.walk_autoderefs(expr, autoderefs);

                    let cmt_derefd =
                        return_if_err!(self.mc.cat_expr_autoderefd(expr, autoderefs));

                    let cmt_refd =
                        self.walk_autoref(expr, cmt_derefd, autoref);

                    if unsize {
                        // Unsizing consumes the thin pointer and produces a fat one.
                        self.delegate_consume(expr.id, expr.span, cmt_refd);
                    }
                }
            }
        }
    }
748
    /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
    /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
    /// `deref()` is declared with `&self`, this is an autoref of `x`.
    ///
    /// Builtin derefs, by contrast, have no recorded method and report
    /// nothing to the delegate here.
    fn walk_autoderefs(&mut self,
                       expr: &hir::Expr,
                       autoderefs: usize) {
        debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);

        for i in 0..autoderefs {
            let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
            // Only overloaded derefs have a method recorded for this step.
            if let Some(method_ty) = self.mc.infcx.node_method_ty(deref_id) {
                let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));

                // the method call infrastructure should have
                // replaced all late-bound regions with variables:
                let self_ty = method_ty.fn_sig().input(0);
                let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();

                // `deref`/`deref_mut` take `&self`/`&mut self`; extract the
                // borrow's mutability and region from that receiver type.
                let (m, r) = match self_ty.sty {
                    ty::TyRef(r, ref m) => (m.mutbl, r),
                    _ => span_bug!(expr.span,
                                   "bad overloaded deref type {:?}",
                                   method_ty)
                };
                let bk = ty::BorrowKind::from_mutbl(m);
                self.delegate.borrow(expr.id, expr.span, cmt,
                                     r, bk, AutoRef);
            }
        }
    }
779
    /// Walks the autoref `opt_autoref` applied to the autoderef'd
    /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
    /// after all relevant autoderefs have occurred. Because AutoRefs
    /// can be recursive, this function is recursive: it first walks
    /// deeply all the way down the autoref chain, and then processes
    /// the autorefs on the way out. At each point, it returns the
    /// `cmt` for the rvalue that will be produced by introduced an
    /// autoref.
    fn walk_autoref(&mut self,
                    expr: &hir::Expr,
                    cmt_base: mc::cmt<'tcx>,
                    opt_autoref: Option<adjustment::AutoBorrow<'tcx>>)
                    -> mc::cmt<'tcx>
    {
        debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
               expr.id,
               cmt_base,
               opt_autoref);

        // Captured before `cmt_base` is moved into the delegate calls below.
        let cmt_base_ty = cmt_base.ty;

        let autoref = match opt_autoref {
            Some(ref autoref) => autoref,
            None => {
                // No AutoRef.
                return cmt_base;
            }
        };

        match *autoref {
            adjustment::AutoBorrow::Ref(r, m) => {
                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoRef);
            }

            adjustment::AutoBorrow::RawPtr(m) => {
                debug!("walk_autoref: expr.id={} cmt_base={:?}",
                       expr.id,
                       cmt_base);

                // Converting from a &T to *T (or &mut T to *mut T) is
                // treated as borrowing it for the enclosing temporary
                // scope.
                let r = self.tcx().node_scope_region(expr.id);

                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoUnsafe);
            }
        }

        // Construct the categorization for the result of the autoref.
        // This is always an rvalue, since we are producing a new
        // (temporary) indirection.

        let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);

        self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
    }
846
847
848     // When this returns true, it means that the expression *is* a
849     // method-call (i.e. via the operator-overload).  This true result
850     // also implies that walk_overloaded_operator already took care of
851     // recursively processing the input arguments, and thus the caller
852     // should not do so.
853     fn walk_overloaded_operator(&mut self,
854                                 expr: &hir::Expr,
855                                 receiver: &hir::Expr,
856                                 rhs: Vec<&hir::Expr>,
857                                 pass_args: PassArgs)
858                                 -> bool
859     {
860         if !self.mc.infcx.is_method_call(expr.id) {
861             return false;
862         }
863
864         match pass_args {
865             PassArgs::ByValue => {
866                 self.consume_expr(receiver);
867                 for &arg in &rhs {
868                     self.consume_expr(arg);
869                 }
870
871                 return true;
872             },
873             PassArgs::ByRef => {},
874         }
875
876         self.walk_expr(receiver);
877
878         // Arguments (but not receivers) to overloaded operator
879         // methods are implicitly autoref'd which sadly does not use
880         // adjustments, so we must hardcode the borrow here.
881
882         let r = self.tcx().node_scope_region(expr.id);
883         let bk = ty::ImmBorrow;
884
885         for &arg in &rhs {
886             self.borrow_expr(arg, r, bk, OverloadedOperator);
887         }
888         return true;
889     }
890
891     fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
892         let mut mode = Unknown;
893         for pat in &arm.pats {
894             self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
895         }
896         mode
897     }
898
899     fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
900         for pat in &arm.pats {
901             self.walk_pat(discr_cmt.clone(), &pat, mode);
902         }
903
904         if let Some(ref guard) = arm.guard {
905             self.consume_expr(&guard);
906         }
907
908         self.consume_expr(&arm.body);
909     }
910
911     /// Walks a pat that occurs in isolation (i.e. top-level of fn
912     /// arg or let binding.  *Not* a match arm or nested pat.)
913     fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
914         let mut mode = Unknown;
915         self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
916         let mode = mode.match_mode();
917         self.walk_pat(cmt_discr, pat, mode);
918     }
919
920     /// Identifies any bindings within `pat` and accumulates within
921     /// `mode` whether the overall pattern/match structure is a move,
922     /// copy, or borrow.
923     fn determine_pat_move_mode(&mut self,
924                                cmt_discr: mc::cmt<'tcx>,
925                                pat: &hir::Pat,
926                                mode: &mut TrackMatchMode) {
927         debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
928                pat);
929         return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
930             match pat.node {
931                 PatKind::Binding(hir::BindByRef(..), ..) =>
932                     mode.lub(BorrowingMatch),
933                 PatKind::Binding(hir::BindByValue(..), ..) => {
934                     match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) {
935                         Copy => mode.lub(CopyingMatch),
936                         Move(..) => mode.lub(MovingMatch),
937                     }
938                 }
939                 _ => {}
940             }
941         }));
942     }
943
    /// The core driver for walking a pattern; `match_mode` must be
    /// established up front, e.g. via `determine_pat_move_mode` (see
    /// also `walk_irrefutable_pat` for patterns that stand alone).
    ///
    /// Two passes are made over the pattern: the first reports each
    /// binding (a leaf of the pattern tree) to the delegate; the second
    /// reports the interior enum-variant/struct nodes via `matched_pat`.
    fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
        debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);

        // Borrow the fields of `self` individually so that the
        // `cat_pattern` closures below can read `mc`/`infcx`/`tcx`
        // while simultaneously mutating `delegate`.
        let tcx = &self.tcx();
        let mc = &self.mc;
        let infcx = self.mc.infcx;
        let delegate = &mut self.delegate;
        return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
            if let PatKind::Binding(bmode, def_id, ..) = pat.node {
                debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}", cmt_pat, pat, match_mode);

                // pat_ty: the type of the binding being produced.
                let pat_ty = return_if_err!(infcx.node_ty(pat.id));

                // Each match binding is effectively an assignment to the
                // binding being produced.
                let def = Def::Local(def_id);
                if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty, def) {
                    delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
                }

                // It is also a borrow or copy/move of the value being matched.
                match bmode {
                    hir::BindByRef(m) => {
                        // A `ref`/`ref mut` binding: report a borrow of the
                        // matched value, using the region taken from the
                        // binding's own `&`/`&mut` type.
                        if let ty::TyRef(r, _) = pat_ty.sty {
                            let bk = ty::BorrowKind::from_mutbl(m);
                            delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
                        }
                    }
                    hir::BindByValue(..) => {
                        // A by-value binding: copy or move depending on
                        // whether the type moves by default.
                        let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
                        debug!("walk_pat binding consuming pat");
                        delegate.consume_pat(pat, cmt_pat, mode);
                    }
                }
            }
        }));

        // Do a second pass over the pattern, calling `matched_pat` on
        // the interior nodes (enum variants and structs), as opposed
        // to the bindings that form the leaves of the pattern tree
        // structure, which were visited by the pass above.
        return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
            let qpath = match pat.node {
                PatKind::Path(ref qpath) |
                PatKind::TupleStruct(ref qpath, ..) |
                PatKind::Struct(ref qpath, ..) => qpath,
                _ => return
            };
            let def = infcx.tables.borrow().qpath_def(qpath, pat.id);
            match def {
                Def::Variant(variant_did) |
                Def::VariantCtor(variant_did, ..) => {
                    // A variant of a multi-variant enum needs an explicit
                    // downcast categorization; a univariant enum's cmt is
                    // used as-is.
                    let enum_did = tcx.parent_def_id(variant_did).unwrap();
                    let downcast_cmt = if tcx.adt_def(enum_did).is_univariant() {
                        cmt_pat
                    } else {
                        let cmt_pat_ty = cmt_pat.ty;
                        mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
                    };

                    debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
                    delegate.matched_pat(pat, downcast_cmt, match_mode);
                }
                Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) |
                Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => {
                    debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
                    delegate.matched_pat(pat, cmt_pat, match_mode);
                }
                _ => {}
            }
        }));
    }
1020
    /// Reports each free variable captured by the closure
    /// `closure_expr` to the delegate: by-value captures are consumed
    /// (copy or move depending on the type), by-ref captures are
    /// borrowed with the region and kind recorded for the upvar.
    fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
        debug!("walk_captures({:?})", closure_expr);

        self.tcx().with_freevars(closure_expr.id, |freevars| {
            for freevar in freevars {
                let def_id = freevar.def.def_id();
                // Captured variables are locals of the enclosing body, so
                // the local-node-id lookup is expected to succeed.
                let id_var = self.tcx().hir.as_local_node_id(def_id).unwrap();
                let upvar_id = ty::UpvarId { var_id: id_var,
                                             closure_expr_id: closure_expr.id };
                // The capture mode for this (variable, closure) pair is
                // looked up from the inference context's tables.
                let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap();
                // Categorize the variable from the caller's perspective.
                let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
                                                                   fn_decl_span,
                                                                   freevar.def));
                match upvar_capture {
                    ty::UpvarCapture::ByValue => {
                        let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove);
                        self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
                    }
                    ty::UpvarCapture::ByRef(upvar_borrow) => {
                        self.delegate.borrow(closure_expr.id,
                                             fn_decl_span,
                                             cmt_var,
                                             upvar_borrow.region,
                                             upvar_borrow.kind,
                                             ClosureCapture(freevar.span));
                    }
                }
            }
        });
    }
1051
1052     fn cat_captured_var(&mut self,
1053                         closure_id: ast::NodeId,
1054                         closure_span: Span,
1055                         upvar_def: Def)
1056                         -> mc::McResult<mc::cmt<'tcx>> {
1057         // Create the cmt for the variable being borrowed, from the
1058         // caller's perspective
1059         let var_id = self.tcx().hir.as_local_node_id(upvar_def.def_id()).unwrap();
1060         let var_ty = self.mc.infcx.node_ty(var_id)?;
1061         self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
1062     }
1063 }
1064
1065 fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
1066                                 cmt: &mc::cmt<'tcx>,
1067                                 move_reason: MoveReason)
1068                                 -> ConsumeMode
1069 {
1070     if infcx.type_moves_by_default(cmt.ty, cmt.span) {
1071         Move(move_reason)
1072     } else {
1073         Copy
1074     }
1075 }