// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A different sort of visitor for walking fn bodies.  Unlike the
//! normal visitor, which just walks the entire body in one shot, the
//! `ExprUseVisitor` determines how expressions are being used.

pub use self::LoanCause::*;
pub use self::ConsumeMode::*;
pub use self::MoveReason::*;
pub use self::MatchMode::*;
use self::TrackMatchMode::*;
use self::OverloadedCallType::*;

use hir::def::Def;
use hir::def_id::{DefId};
use infer::InferCtxt;
use middle::mem_categorization as mc;
use middle::region::RegionMaps;
use ty::{self, TyCtxt, adjustment};

use hir::{self, PatKind};

use syntax::ast;
use syntax::ptr::P;
use syntax_pos::Span;

///////////////////////////////////////////////////////////////////////////
// The Delegate trait

/// This trait defines the callbacks you can expect to receive when
/// employing the `ExprUseVisitor`.
pub trait Delegate<'tcx> {
    // The value found at `cmt` is either copied or moved, depending
    // on mode.
    fn consume(&mut self,
               consume_id: ast::NodeId,
               consume_span: Span,
               cmt: mc::cmt<'tcx>,
               mode: ConsumeMode);

    // The value found at `cmt` has been determined to match the
    // pattern binding `matched_pat`, and its subparts are being
    // copied or moved depending on `mode`.  Note that `matched_pat`
    // is called on all variants/structs in the pattern (i.e., the
    // interior nodes of the pattern's tree structure) while
    // `consume_pat` is called on the binding identifiers in the pattern
    // (which are leaves of the pattern's tree structure).
    //
    // Note that variants/structs and identifiers are disjoint; thus
    // `matched_pat` and `consume_pat` are never both called on the
    // same input pattern structure (though `consume_pat` can be
    // called on a subpart of an input passed to `matched_pat`).
    fn matched_pat(&mut self,
                   matched_pat: &hir::Pat,
                   cmt: mc::cmt<'tcx>,
                   mode: MatchMode);

    // The value found at `cmt` is either copied or moved via the
    // pattern binding `consume_pat`, depending on mode.
    fn consume_pat(&mut self,
                   consume_pat: &hir::Pat,
                   cmt: mc::cmt<'tcx>,
                   mode: ConsumeMode);

    // The value found at `cmt` is being borrowed at the point
    // `borrow_id` for the region `loan_region` with kind `bk`.
    fn borrow(&mut self,
              borrow_id: ast::NodeId,
              borrow_span: Span,
              cmt: mc::cmt<'tcx>,
              loan_region: ty::Region<'tcx>,
              bk: ty::BorrowKind,
              loan_cause: LoanCause);

    // The local variable `id` is declared but not initialized.
    fn decl_without_init(&mut self,
                         id: ast::NodeId,
                         span: Span);

    // The path at `cmt` is being assigned to.
    fn mutate(&mut self,
              assignment_id: ast::NodeId,
              assignment_span: Span,
              assignee_cmt: mc::cmt<'tcx>,
              mode: MutateMode);
}
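
// Illustrative sketch (not part of the original module): a minimal `Delegate`
// implementation that only records which values are moved and ignores every
// other callback. The `MoveCollector` name is hypothetical.
//
//     struct MoveCollector { moved: Vec<ast::NodeId> }
//
//     impl<'tcx> Delegate<'tcx> for MoveCollector {
//         fn consume(&mut self, id: ast::NodeId, _: Span,
//                    _: mc::cmt<'tcx>, mode: ConsumeMode) {
//             if let Move(_) = mode { self.moved.push(id); }
//         }
//         fn matched_pat(&mut self, _: &hir::Pat, _: mc::cmt<'tcx>, _: MatchMode) {}
//         fn consume_pat(&mut self, _: &hir::Pat, _: mc::cmt<'tcx>, _: ConsumeMode) {}
//         fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt<'tcx>,
//                   _: ty::Region<'tcx>, _: ty::BorrowKind, _: LoanCause) {}
//         fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
//         fn mutate(&mut self, _: ast::NodeId, _: Span, _: mc::cmt<'tcx>, _: MutateMode) {}
//     }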

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum LoanCause {
    ClosureCapture(Span),
    AddrOf,
    AutoRef,
    AutoUnsafe,
    RefBinding,
    OverloadedOperator,
    ClosureInvocation,
    ForLoop,
    MatchDiscriminant
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ConsumeMode {
    Copy,                // reference to x where x has a type that copies
    Move(MoveReason),    // reference to x where x has a type that moves
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveReason {
    DirectRefMove,
    PatBindingMove,
    CaptureMove,
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MatchMode {
    NonBindingMatch,
    BorrowingMatch,
    CopyingMatch,
    MovingMatch,
}

#[derive(Copy, Clone, PartialEq, Debug)]
enum TrackMatchMode {
    Unknown,
    Definite(MatchMode),
    Conflicting,
}

impl TrackMatchMode {
    // Builds up the whole match mode for a pattern from its constituent
    // parts.  The lattice looks like this:
    //
    //          Conflicting
    //            /     \
    //           /       \
    //      Borrowing   Moving
    //           \       /
    //            \     /
    //            Copying
    //               |
    //          NonBinding
    //               |
    //            Unknown
    //
    // examples:
    //
    // * `(_, some_int)` pattern is Copying, since
    //   NonBinding + Copying => Copying
    //
    // * `(some_int, some_box)` pattern is Moving, since
    //   Copying + Moving => Moving
    //
    // * `(ref x, some_box)` pattern is Conflicting, since
    //   Borrowing + Moving => Conflicting
    //
    // Note that the `Unknown` and `Conflicting` states are
    // represented separately from the other more interesting
    // `Definite` states, which simplifies logic here somewhat.
    fn lub(&mut self, mode: MatchMode) {
        *self = match (*self, mode) {
            // Note that clause order below is very significant.
            (Unknown, new) => Definite(new),
            (Definite(old), new) if old == new => Definite(old),

            (Definite(old), NonBindingMatch) => Definite(old),
            (Definite(NonBindingMatch), new) => Definite(new),

            (Definite(old), CopyingMatch) => Definite(old),
            (Definite(CopyingMatch), new) => Definite(new),

            (Definite(_), _) => Conflicting,
            (Conflicting, _) => *self,
        };
    }

    fn match_mode(&self) -> MatchMode {
        match *self {
            Unknown => NonBindingMatch,
            Definite(mode) => mode,
            Conflicting => {
                // Conservatively return MovingMatch to let the
                // compiler continue to make progress.
                MovingMatch
            }
        }
    }
}
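
// For example (illustrative only), walking the bindings of the pattern
// `(ref x, some_box)` feeds the tracker one binding at a time and ends up
// in the `Conflicting` state, which `match_mode` reports as `MovingMatch`:
//
//     let mut mode = TrackMatchMode::Unknown;
//     mode.lub(BorrowingMatch);                    // from `ref x`
//     mode.lub(MovingMatch);                       // from `some_box` (a moving binding)
//     assert_eq!(mode.match_mode(), MovingMatch);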

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MutateMode {
    Init,
    JustWrite,    // x = y
    WriteAndRead, // x += y
}

#[derive(Copy, Clone)]
enum OverloadedCallType {
    FnOverloadedCall,
    FnMutOverloadedCall,
    FnOnceOverloadedCall,
}

impl OverloadedCallType {
    fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType {
        for &(maybe_function_trait, overloaded_call_type) in &[
            (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
            (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
            (tcx.lang_items.fn_trait(), FnOverloadedCall)
        ] {
            match maybe_function_trait {
                Some(function_trait) if function_trait == trait_id => {
                    return overloaded_call_type
                }
                _ => continue,
            }
        }

        bug!("overloaded call didn't map to known function trait")
    }

    fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType {
        let method = tcx.associated_item(method_id);
        OverloadedCallType::from_trait_id(tcx, method.container.id())
    }
}
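
// For example (illustrative only): an overloaded call dispatches through one
// of the function traits, and the `call*` method's containing trait decides
// the variant returned here.
//
//     let mut count = 0;
//     let mut bump = || count += 1;   // mutates a capture, so calling it
//     bump();                         // resolves through `FnMut::call_mut`
//                                     //   => FnMutOverloadedCall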

///////////////////////////////////////////////////////////////////////////
// The ExprUseVisitor type
//
// This is the code that actually walks the tree. Like
// mem_categorization, it requires a TYPER, which is a type that
// supplies types from the tree. After type checking is complete, you
// can just use the tcx as the typer.
pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
    delegate: &'a mut Delegate<'tcx>,
}

// If the TYPER results in an error, it's because the type check
// failed (or will fail, when the error is uncovered and reported
// during writeback). In this case, we just ignore this part of the
// code.
//
// Note that this macro appears similar to try!(), but, unlike try!(),
// it does not propagate the error.
macro_rules! return_if_err {
    ($inp: expr) => (
        match $inp {
            Ok(v) => v,
            Err(()) => {
                debug!("mc reported err");
                return
            }
        }
    )
}

impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
    pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
               region_maps: &'a RegionMaps,
               infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
               -> Self
    {
        ExprUseVisitor::with_options(delegate,
                                     infcx,
                                     region_maps,
                                     mc::MemCategorizationOptions::default())
    }

    pub fn with_options(delegate: &'a mut (Delegate<'tcx>+'a),
                        infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
                        region_maps: &'a RegionMaps,
                        options: mc::MemCategorizationOptions)
                        -> Self
    {
        ExprUseVisitor {
            mc: mc::MemCategorizationContext::with_options(infcx, region_maps, options),
            delegate: delegate
        }
    }

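    // Typical driver (illustrative sketch; `delegate`, `region_maps`, `infcx`
    // and `body` are assumed to already be in scope, e.g. inside a borrowck
    // pass):
    //
    //     let mut euv = ExprUseVisitor::new(&mut delegate, region_maps, infcx);
    //     euv.consume_body(body);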
    pub fn consume_body(&mut self, body: &hir::Body) {
        debug!("consume_body(body={:?})", body);

        for arg in &body.arguments {
            let arg_ty = return_if_err!(self.mc.infcx.node_ty(arg.pat.id));

            let fn_body_scope_r = self.tcx().node_scope_region(body.value.id);
            let arg_cmt = self.mc.cat_rvalue(
                arg.id,
                arg.pat.span,
                fn_body_scope_r, // Args live only as long as the fn body.
                fn_body_scope_r,
                arg_ty);

            self.walk_irrefutable_pat(arg_cmt, &arg.pat);
        }

        self.consume_expr(&body.value);
    }

    fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
        self.mc.infcx.tcx
    }

    fn delegate_consume(&mut self,
                        consume_id: ast::NodeId,
                        consume_span: Span,
                        cmt: mc::cmt<'tcx>) {
        debug!("delegate_consume(consume_id={}, cmt={:?})",
               consume_id, cmt);

        let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove);
        self.delegate.consume(consume_id, consume_span, cmt, mode);
    }

    fn consume_exprs(&mut self, exprs: &[hir::Expr]) {
        for expr in exprs {
            self.consume_expr(&expr);
        }
    }

    pub fn consume_expr(&mut self, expr: &hir::Expr) {
        debug!("consume_expr(expr={:?})", expr);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate_consume(expr.id, expr.span, cmt);
        self.walk_expr(expr);
    }

    fn mutate_expr(&mut self,
                   assignment_expr: &hir::Expr,
                   expr: &hir::Expr,
                   mode: MutateMode) {
        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
        self.walk_expr(expr);
    }

    fn borrow_expr(&mut self,
                   expr: &hir::Expr,
                   r: ty::Region<'tcx>,
                   bk: ty::BorrowKind,
                   cause: LoanCause) {
        debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
               expr, r, bk);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);

        self.walk_expr(expr)
    }

    fn select_from_expr(&mut self, expr: &hir::Expr) {
        self.walk_expr(expr)
    }

    pub fn walk_expr(&mut self, expr: &hir::Expr) {
        debug!("walk_expr(expr={:?})", expr);

        self.walk_adjustment(expr);

        match expr.node {
            hir::ExprPath(_) => { }

            hir::ExprType(ref subexpr, _) => {
                self.walk_expr(&subexpr)
            }

            hir::ExprUnary(hir::UnDeref, ref base) => {      // *base
                self.select_from_expr(&base);
            }

            hir::ExprField(ref base, _) => {         // base.f
                self.select_from_expr(&base);
            }

            hir::ExprTupField(ref base, _) => {         // base.<n>
                self.select_from_expr(&base);
            }

            hir::ExprIndex(ref lhs, ref rhs) => {       // lhs[rhs]
                self.select_from_expr(&lhs);
                self.consume_expr(&rhs);
            }

            hir::ExprCall(ref callee, ref args) => {    // callee(args)
                self.walk_callee(expr, &callee);
                self.consume_exprs(args);
            }

            hir::ExprMethodCall(.., ref args) => { // callee.m(args)
                self.consume_exprs(args);
            }

            hir::ExprStruct(_, ref fields, ref opt_with) => {
                self.walk_struct_expr(fields, opt_with);
            }

            hir::ExprTup(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprIf(ref cond_expr, ref then_expr, ref opt_else_expr) => {
                self.consume_expr(&cond_expr);
                self.walk_expr(&then_expr);
                if let Some(ref else_expr) = *opt_else_expr {
                    self.consume_expr(&else_expr);
                }
            }

            hir::ExprMatch(ref discr, ref arms, _) => {
                let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
                let r = self.tcx().types.re_empty;
                self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant);

                // treatment of the discriminant is handled while walking the arms.
                for arm in arms {
                    let mode = self.arm_move_mode(discr_cmt.clone(), arm);
                    let mode = mode.match_mode();
                    self.walk_arm(discr_cmt.clone(), arm, mode);
                }
            }

            hir::ExprArray(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprAddrOf(m, ref base) => {   // &base
                // make sure that the thing we are pointing at stays valid
                // for the lifetime `r` of the resulting ptr:
                let expr_ty = return_if_err!(self.mc.infcx.node_ty(expr.id));
                if let ty::TyRef(r, _) = expr_ty.sty {
                    let bk = ty::BorrowKind::from_mutbl(m);
                    self.borrow_expr(&base, r, bk, AddrOf);
                }
            }

            hir::ExprInlineAsm(ref ia, ref outputs, ref inputs) => {
                for (o, output) in ia.outputs.iter().zip(outputs) {
                    if o.is_indirect {
                        self.consume_expr(output);
                    } else {
                        self.mutate_expr(expr, output,
                                         if o.is_rw {
                                             MutateMode::WriteAndRead
                                         } else {
                                             MutateMode::JustWrite
                                         });
                    }
                }
                self.consume_exprs(inputs);
            }

            hir::ExprAgain(..) |
            hir::ExprLit(..) => {}

            hir::ExprLoop(ref blk, _, _) => {
                self.walk_block(&blk);
            }

            hir::ExprWhile(ref cond_expr, ref blk, _) => {
                self.consume_expr(&cond_expr);
                self.walk_block(&blk);
            }

            hir::ExprUnary(_, ref lhs) => {
                self.consume_expr(&lhs);
            }

            hir::ExprBinary(_, ref lhs, ref rhs) => {
                self.consume_expr(&lhs);
                self.consume_expr(&rhs);
            }

            hir::ExprBlock(ref blk) => {
                self.walk_block(&blk);
            }

            hir::ExprBreak(_, ref opt_expr) | hir::ExprRet(ref opt_expr) => {
                if let Some(ref expr) = *opt_expr {
                    self.consume_expr(&expr);
                }
            }

            hir::ExprAssign(ref lhs, ref rhs) => {
                self.mutate_expr(expr, &lhs, MutateMode::JustWrite);
                self.consume_expr(&rhs);
            }

            hir::ExprCast(ref base, _) => {
                self.consume_expr(&base);
            }

            hir::ExprAssignOp(_, ref lhs, ref rhs) => {
                if self.mc.infcx.tables.borrow().is_method_call(expr) {
                    self.consume_expr(lhs);
                } else {
                    self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
                }
                self.consume_expr(&rhs);
            }

            hir::ExprRepeat(ref base, _) => {
                self.consume_expr(&base);
            }

            hir::ExprClosure(.., fn_decl_span) => {
                self.walk_captures(expr, fn_decl_span)
            }

            hir::ExprBox(ref base) => {
                self.consume_expr(&base);
            }
        }
    }

    fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
        let callee_ty = return_if_err!(self.mc.infcx.expr_ty_adjusted(callee));
        debug!("walk_callee: callee={:?} callee_ty={:?}",
               callee, callee_ty);
        match callee_ty.sty {
            ty::TyFnDef(..) | ty::TyFnPtr(_) => {
                self.consume_expr(callee);
            }
            ty::TyError => { }
            _ => {
                let def_id = self.mc.infcx.tables.borrow().type_dependent_defs[&call.id].def_id();
                match OverloadedCallType::from_method_id(self.tcx(), def_id) {
                    FnMutOverloadedCall => {
                        let call_scope_r = self.tcx().node_scope_region(call.id);
                        self.borrow_expr(callee,
                                         call_scope_r,
                                         ty::MutBorrow,
                                         ClosureInvocation);
                    }
                    FnOverloadedCall => {
                        let call_scope_r = self.tcx().node_scope_region(call.id);
                        self.borrow_expr(callee,
                                         call_scope_r,
                                         ty::ImmBorrow,
                                         ClosureInvocation);
                    }
                    FnOnceOverloadedCall => self.consume_expr(callee),
                }
            }
        }
    }

    fn walk_stmt(&mut self, stmt: &hir::Stmt) {
        match stmt.node {
            hir::StmtDecl(ref decl, _) => {
                match decl.node {
                    hir::DeclLocal(ref local) => {
                        self.walk_local(&local);
                    }

                    hir::DeclItem(_) => {
                        // we don't visit nested items in this visitor,
                        // only the fn body we were given.
                    }
                }
            }

            hir::StmtExpr(ref expr, _) |
            hir::StmtSemi(ref expr, _) => {
                self.consume_expr(&expr);
            }
        }
    }

    fn walk_local(&mut self, local: &hir::Local) {
        match local.init {
            None => {
                let delegate = &mut self.delegate;
                local.pat.each_binding(|_, id, span, _| {
                    delegate.decl_without_init(id, span);
                })
            }

            Some(ref expr) => {
                // Variable declarations with initializers are considered
                // "assigns", which is handled by `walk_pat`:
                self.walk_expr(&expr);
                let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
                self.walk_irrefutable_pat(init_cmt, &local.pat);
            }
        }
    }

    /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
    /// depending on its type.
    fn walk_block(&mut self, blk: &hir::Block) {
        debug!("walk_block(blk.id={})", blk.id);

        for stmt in &blk.stmts {
            self.walk_stmt(stmt);
        }

        if let Some(ref tail_expr) = blk.expr {
            self.consume_expr(&tail_expr);
        }
    }

    fn walk_struct_expr(&mut self,
                        fields: &[hir::Field],
                        opt_with: &Option<P<hir::Expr>>) {
        // Consume the expressions supplying values for each field.
        for field in fields {
            self.consume_expr(&field.expr);
        }

        let with_expr = match *opt_with {
            Some(ref w) => &**w,
            None => { return; }
        };

        let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));

        // Select just those fields of the `with`
        // expression that will actually be used
        match with_cmt.ty.sty {
            ty::TyAdt(adt, substs) if adt.is_struct() => {
                // Consume those fields of the with expression that are needed.
                for with_field in &adt.struct_variant().fields {
                    if !contains_field_named(with_field, fields) {
                        let cmt_field = self.mc.cat_field(
                            &*with_expr,
                            with_cmt.clone(),
                            with_field.name,
                            with_field.ty(self.tcx(), substs)
                        );
                        self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
                    }
                }
            }
            _ => {
                // the base expression should always evaluate to a
                // struct; however, when EUV is run during typeck, it
                // may not. This will generate an error earlier in typeck,
                // so we can just ignore it.
                if !self.tcx().sess.has_errors() {
                    span_bug!(
                        with_expr.span,
                        "with expression doesn't evaluate to a struct");
                }
            }
        }

        // walk the with expression so that complex expressions
        // are properly handled.
        self.walk_expr(with_expr);

        fn contains_field_named(field: &ty::FieldDef,
                                fields: &[hir::Field])
                                -> bool
        {
            fields.iter().any(
                |f| f.name.node == field.name)
        }
    }
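
    // For example (illustrative only): given
    //
    //     struct Point { x: i32, y: String }
    //     let q = Point { x: 1, ..p };
    //
    // only `p.y` is reported as consumed (a move, since `String` is not
    // `Copy`); the field `x` is supplied by the literal itself, so `p.x`
    // is never taken from the base expression.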

    // Invoke the appropriate delegate calls for anything that gets
    // consumed or borrowed as part of the automatic adjustment
    // process.
    fn walk_adjustment(&mut self, expr: &hir::Expr) {
        // NOTE(@jroesch): mixed RefCell borrow causes crash
        let adjustments = self.mc.infcx.tables.borrow().expr_adjustments(expr).to_vec();
        let mut cmt = return_if_err!(self.mc.cat_expr_unadjusted(expr));
        for adjustment in adjustments {
            debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
            match adjustment.kind {
                adjustment::Adjust::NeverToAny |
                adjustment::Adjust::ReifyFnPointer |
                adjustment::Adjust::UnsafeFnPointer |
                adjustment::Adjust::ClosureFnPointer |
                adjustment::Adjust::MutToConstPointer |
                adjustment::Adjust::Unsize => {
                    // Creating a closure/fn-pointer or unsizing consumes
                    // the input and stores it into the resulting rvalue.
                    self.delegate_consume(expr.id, expr.span, cmt.clone());
                }

                adjustment::Adjust::Deref(None) => {}

                // Autoderefs for overloaded Deref calls in fact reference
                // their receiver. That is, if we have `(*x)` where `x`
                // is of type `Rc<T>`, then this is equivalent to
                // `x.deref()`. Since `deref()` is declared with `&self`,
                // this is an autoref of `x`.
                adjustment::Adjust::Deref(Some(ref deref)) => {
                    let bk = ty::BorrowKind::from_mutbl(deref.mutbl);
                    self.delegate.borrow(expr.id, expr.span, cmt.clone(),
                                         deref.region, bk, AutoRef);
                }

                adjustment::Adjust::Borrow(ref autoref) => {
                    self.walk_autoref(expr, cmt.clone(), autoref);
                }
            }
            cmt = return_if_err!(self.mc.cat_expr_adjusted(expr, cmt, &adjustment));
        }
    }

    /// Walks the autoref `autoref` applied to the autoderef'd
    /// `expr`. `cmt_base` is the mem-categorized form of `expr`
    /// after all relevant autoderefs have occurred.
    fn walk_autoref(&mut self,
                    expr: &hir::Expr,
                    cmt_base: mc::cmt<'tcx>,
                    autoref: &adjustment::AutoBorrow<'tcx>) {
        debug!("walk_autoref(expr.id={} cmt_base={:?} autoref={:?})",
               expr.id,
               cmt_base,
               autoref);

        match *autoref {
            adjustment::AutoBorrow::Ref(r, m) => {
                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoRef);
            }

            adjustment::AutoBorrow::RawPtr(m) => {
                debug!("walk_autoref: expr.id={} cmt_base={:?}",
                       expr.id,
                       cmt_base);

                // Converting from a `&T` to `*T` (or `&mut T` to `*mut T`) is
                // treated as borrowing it for the enclosing temporary
                // scope.
                let r = self.tcx().node_scope_region(expr.id);

                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoUnsafe);
            }
        }
    }

    fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
        let mut mode = Unknown;
        for pat in &arm.pats {
            self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
        }
        mode
    }

    fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
        for pat in &arm.pats {
            self.walk_pat(discr_cmt.clone(), &pat, mode);
        }

        if let Some(ref guard) = arm.guard {
            self.consume_expr(&guard);
        }

        self.consume_expr(&arm.body);
    }

    /// Walks a pat that occurs in isolation (i.e., the top level of a fn
    /// argument or `let` binding, *not* a match arm or nested pat).
    fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
        let mut mode = Unknown;
        self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
        let mode = mode.match_mode();
        self.walk_pat(cmt_discr, pat, mode);
    }

    /// Identifies any bindings within `pat` and accumulates within
    /// `mode` whether the overall pattern/match structure is a move,
    /// copy, or borrow.
    fn determine_pat_move_mode(&mut self,
                               cmt_discr: mc::cmt<'tcx>,
                               pat: &hir::Pat,
                               mode: &mut TrackMatchMode) {
        debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
               pat);
        return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
            match pat.node {
                PatKind::Binding(hir::BindByRef(..), ..) =>
                    mode.lub(BorrowingMatch),
                PatKind::Binding(hir::BindByValue(..), ..) => {
                    match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) {
                        Copy => mode.lub(CopyingMatch),
                        Move(..) => mode.lub(MovingMatch),
                    }
                }
                _ => {}
            }
        }));
    }
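
    // For example (illustrative only), for a scrutinee of type `(i32, String)`:
    //
    //     (_, _)       // no bindings                   => NonBindingMatch
    //     (n, _)       // `n` copies                    => CopyingMatch
    //     (n, s)       // `n` copies, `s` moves         => MovingMatch
    //     (ref n, s)   // borrow + move                 => Conflicting, which
    //                  // `match_mode` reports as MovingMatch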

    /// The core driver for walking a pattern; `match_mode` must be
    /// established up front, e.g. via `determine_pat_move_mode` (see
    /// also `walk_irrefutable_pat` for patterns that stand alone).
    fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
        debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);

        let tcx = &self.tcx();
        let mc = &self.mc;
        let infcx = self.mc.infcx;
        let delegate = &mut self.delegate;
        return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
            if let PatKind::Binding(bmode, def_id, ..) = pat.node {
                debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}", cmt_pat, pat, match_mode);

                // pat_ty: the type of the binding being produced.
                let pat_ty = return_if_err!(infcx.node_ty(pat.id));

                // Each match binding is effectively an assignment to the
                // binding being produced.
                let def = Def::Local(def_id);
                if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty, def) {
                    delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
                }

                // It is also a borrow or copy/move of the value being matched.
                match bmode {
                    hir::BindByRef(m) => {
                        if let ty::TyRef(r, _) = pat_ty.sty {
                            let bk = ty::BorrowKind::from_mutbl(m);
                            delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
                        }
                    }
                    hir::BindByValue(..) => {
                        let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
                        debug!("walk_pat binding consuming pat");
                        delegate.consume_pat(pat, cmt_pat, mode);
                    }
                }
            }
        }));

        // Do a second pass over the pattern, calling `matched_pat` on
        // the interior nodes (enum variants and structs), as opposed
        // to the above loop's visit of the bindings that form
        // the leaves of the pattern tree structure.
        return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
            let qpath = match pat.node {
                PatKind::Path(ref qpath) |
                PatKind::TupleStruct(ref qpath, ..) |
                PatKind::Struct(ref qpath, ..) => qpath,
                _ => return
            };
            let def = infcx.tables.borrow().qpath_def(qpath, pat.id);
            match def {
                Def::Variant(variant_did) |
                Def::VariantCtor(variant_did, ..) => {
                    let enum_did = tcx.parent_def_id(variant_did).unwrap();
                    let downcast_cmt = if tcx.adt_def(enum_did).is_univariant() {
                        cmt_pat
                    } else {
                        let cmt_pat_ty = cmt_pat.ty;
                        mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
                    };

                    debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
                    delegate.matched_pat(pat, downcast_cmt, match_mode);
                }
                Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) |
                Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => {
                    debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
                    delegate.matched_pat(pat, cmt_pat, match_mode);
                }
                _ => {}
            }
        }));
    }

    fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
        debug!("walk_captures({:?})", closure_expr);

        self.tcx().with_freevars(closure_expr.id, |freevars| {
            for freevar in freevars {
                let def_id = freevar.def.def_id();
                let id_var = self.tcx().hir.as_local_node_id(def_id).unwrap();
                let upvar_id = ty::UpvarId { var_id: id_var,
                                             closure_expr_id: closure_expr.id };
                let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap();
                let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
                                                                   fn_decl_span,
                                                                   freevar.def));
                match upvar_capture {
                    ty::UpvarCapture::ByValue => {
                        let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove);
                        self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
                    }
                    ty::UpvarCapture::ByRef(upvar_borrow) => {
                        self.delegate.borrow(closure_expr.id,
                                             fn_decl_span,
                                             cmt_var,
                                             upvar_borrow.region,
                                             upvar_borrow.kind,
                                             ClosureCapture(freevar.span));
                    }
                }
            }
        });
    }
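
    // For example (illustrative only), with `s: String` in scope, each of the
    // two closures below (taken on its own) is reported as follows:
    //
    //     let c = || s.len();        // `s` captured ByRef: a `borrow` with
    //                                //   `ty::ImmBorrow` and `ClosureCapture(..)`
    //     let d = move || s.len();   // `s` captured ByValue: a `consume` with
    //                                //   `Move(CaptureMove)` (or `Copy` for Copy types)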

    fn cat_captured_var(&mut self,
                        closure_id: ast::NodeId,
                        closure_span: Span,
                        upvar_def: Def)
                        -> mc::McResult<mc::cmt<'tcx>> {
        // Create the cmt for the variable being borrowed, from the
        // caller's perspective
        let var_id = self.tcx().hir.as_local_node_id(upvar_def.def_id()).unwrap();
        let var_ty = self.mc.infcx.node_ty(var_id)?;
        self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
    }
}

fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                                cmt: &mc::cmt<'tcx>,
                                move_reason: MoveReason)
                                -> ConsumeMode
{
    if infcx.type_moves_by_default(cmt.ty, cmt.span) {
        Move(move_reason)
    } else {
        Copy
    }
}
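
// For example (illustrative only; the `cmt_of_*` names are hypothetical):
// a value of type `i32` or `&T` does not move by default, so `copy_or_move`
// returns `Copy`, while a `String`, `Box<T>`, or other non-`Copy` type
// yields `Move(move_reason)`.
//
//     copy_or_move(infcx, &cmt_of_an_i32,   DirectRefMove)  // => Copy
//     copy_or_move(infcx, &cmt_of_a_string, DirectRefMove)  // => Move(DirectRefMove)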