]> git.lizzy.rs Git - rust.git/blob - src/librustc/middle/expr_use_visitor.rs
Changed issue number to 36105
[rust.git] / src / librustc / middle / expr_use_visitor.rs
1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! A different sort of visitor for walking fn bodies.  Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
14
15 pub use self::LoanCause::*;
16 pub use self::ConsumeMode::*;
17 pub use self::MoveReason::*;
18 pub use self::MatchMode::*;
19 use self::TrackMatchMode::*;
20 use self::OverloadedCallType::*;
21
22 use hir::pat_util;
23 use hir::def::Def;
24 use hir::def_id::{DefId};
25 use infer::InferCtxt;
26 use middle::mem_categorization as mc;
27 use ty::{self, TyCtxt, adjustment};
28
29 use hir::{self, PatKind};
30
31 use syntax::ast;
32 use syntax::ptr::P;
33 use syntax_pos::Span;
34
35 ///////////////////////////////////////////////////////////////////////////
36 // The Delegate trait
37
38 /// This trait defines the callbacks you can expect to receive when
39 /// employing the ExprUseVisitor.
pub trait Delegate<'tcx> {
    /// The value found at `cmt` is either copied or moved, depending
    /// on `mode`.
    fn consume(&mut self,
               consume_id: ast::NodeId,
               consume_span: Span,
               cmt: mc::cmt<'tcx>,
               mode: ConsumeMode);

    /// The value found at `cmt` has been determined to match the
    /// pattern binding `matched_pat`, and its subparts are being
    /// copied or moved depending on `mode`.  Note that `matched_pat`
    /// is called on all variant/structs in the pattern (i.e., the
    /// interior nodes of the pattern's tree structure) while
    /// `consume_pat` is called on the binding identifiers in the pattern
    /// (which are leaves of the pattern's tree structure).
    ///
    /// Note that variants/structs and identifiers are disjoint; thus
    /// `matched_pat` and `consume_pat` are never both called on the
    /// same input pattern structure (though `consume_pat` can be
    /// called on a subpart of an input passed to `matched_pat`).
    fn matched_pat(&mut self,
                   matched_pat: &hir::Pat,
                   cmt: mc::cmt<'tcx>,
                   mode: MatchMode);

    /// The value found at `cmt` is either copied or moved via the
    /// pattern binding `consume_pat`, depending on `mode`.
    fn consume_pat(&mut self,
                   consume_pat: &hir::Pat,
                   cmt: mc::cmt<'tcx>,
                   mode: ConsumeMode);

    /// The value found at `cmt` is being borrowed at the point
    /// `borrow_id` for the region `loan_region` with kind `bk`.
    fn borrow(&mut self,
              borrow_id: ast::NodeId,
              borrow_span: Span,
              cmt: mc::cmt<'tcx>,
              loan_region: ty::Region,
              bk: ty::BorrowKind,
              loan_cause: LoanCause);

    /// The local variable `id` is declared but not initialized.
    fn decl_without_init(&mut self,
                         id: ast::NodeId,
                         span: Span);

    /// The path at `assignee_cmt` is being assigned to.
    fn mutate(&mut self,
              assignment_id: ast::NodeId,
              assignment_span: Span,
              assignee_cmt: mc::cmt<'tcx>,
              mode: MutateMode);
}
95
/// Why a borrow was taken; reported to the `Delegate` with each
/// `borrow` callback so error messages can describe the cause.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum LoanCause {
    ClosureCapture(Span),
    AddrOf,                 // explicit `&expr` (see `ExprAddrOf` in `walk_expr`)
    AutoRef,                // implicit autoref during adjustment (see `walk_autoref`)
    AutoUnsafe,             // `&T` -> `*T` coercion (see `AutoUnsafe` in `walk_autoref`)
    RefBinding,
    OverloadedOperator,
    ClosureInvocation,      // calling an `Fn`/`FnMut` closure (see `walk_callee`)
    ForLoop,
    MatchDiscriminant       // borrow of a `match` discriminant (see `ExprMatch`)
}
108
/// How a value is consumed: by copy (for types that copy) or by move.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ConsumeMode {
    Copy,                // reference to x where x has a type that copies
    Move(MoveReason),    // reference to x where x has a type that moves
}
114
/// Why a move occurred; carried inside `ConsumeMode::Move`.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveReason {
    DirectRefMove,   // plain consumption of an expression (see `delegate_consume`)
    PatBindingMove,  // presumably a by-value pattern binding — defined outside this view
    CaptureMove,     // presumably a by-value closure capture — defined outside this view
}
121
/// Overall classification of how a `match` arm's pattern uses the
/// discriminant; computed per-arm via the `TrackMatchMode` lattice.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MatchMode {
    NonBindingMatch,
    BorrowingMatch,
    CopyingMatch,
    MovingMatch,
}
129
/// Accumulator used while inferring a pattern's overall `MatchMode`;
/// see the lattice diagram on `TrackMatchMode::lub`.
#[derive(Copy, Clone, PartialEq, Debug)]
enum TrackMatchMode {
    Unknown,             // no constituent modes folded in yet
    Definite(MatchMode), // all parts folded so far agree on one mode
    Conflicting,         // incompatible modes seen (e.g. borrowing + moving)
}
136
impl TrackMatchMode {
    // Builds up the whole match mode for a pattern from its constituent
    // parts.  The lattice looks like this:
    //
    //          Conflicting
    //            /     \
    //           /       \
    //      Borrowing   Moving
    //           \       /
    //            \     /
    //            Copying
    //               |
    //          NonBinding
    //               |
    //            Unknown
    //
    // examples:
    //
    // * `(_, some_int)` pattern is Copying, since
    //   NonBinding + Copying => Copying
    //
    // * `(some_int, some_box)` pattern is Moving, since
    //   Copying + Moving => Moving
    //
    // * `(ref x, some_box)` pattern is Conflicting, since
    //   Borrowing + Moving => Conflicting
    //
    // Note that the `Unknown` and `Conflicting` states are
    // represented separately from the other more interesting
    // `Definite` states, which simplifies logic here somewhat.
    //
    // CAUTION: the arms below are matched top-to-bottom and the order is
    // load-bearing (e.g. the `old == new` guard must be tried before the
    // NonBindingMatch/CopyingMatch absorption rules); do not reorder.
    fn lub(&mut self, mode: MatchMode) {
        *self = match (*self, mode) {
            // Note that clause order below is very significant.
            (Unknown, new) => Definite(new),
            (Definite(old), new) if old == new => Definite(old),

            (Definite(old), NonBindingMatch) => Definite(old),
            (Definite(NonBindingMatch), new) => Definite(new),

            (Definite(old), CopyingMatch) => Definite(old),
            (Definite(CopyingMatch), new) => Definite(new),

            (Definite(_), _) => Conflicting,
            (Conflicting, _) => *self,
        };
    }

    /// Collapses the accumulator into a final `MatchMode`: an empty
    /// fold (`Unknown`) means no bindings, hence `NonBindingMatch`.
    fn match_mode(&self) -> MatchMode {
        match *self {
            Unknown => NonBindingMatch,
            Definite(mode) => mode,
            Conflicting => {
                // Conservatively return MovingMatch to let the
                // compiler continue to make progress.
                MovingMatch
            }
        }
    }
}
196
/// The flavor of a write reported via `Delegate::mutate`.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MutateMode {
    Init,
    JustWrite,    // x = y
    WriteAndRead, // x += y
}
203
/// Which function trait an overloaded call resolved to.  Determines
/// whether the callee is imm-borrowed (`Fn`), mut-borrowed (`FnMut`),
/// or consumed (`FnOnce`) — see `walk_callee`.
#[derive(Copy, Clone)]
enum OverloadedCallType {
    FnOverloadedCall,
    FnMutOverloadedCall,
    FnOnceOverloadedCall,
}
210
211 impl OverloadedCallType {
212     fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType {
213         for &(maybe_function_trait, overloaded_call_type) in &[
214             (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
215             (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
216             (tcx.lang_items.fn_trait(), FnOverloadedCall)
217         ] {
218             match maybe_function_trait {
219                 Some(function_trait) if function_trait == trait_id => {
220                     return overloaded_call_type
221                 }
222                 _ => continue,
223             }
224         }
225
226         bug!("overloaded call didn't map to known function trait")
227     }
228
229     fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType {
230         let method = tcx.impl_or_trait_item(method_id);
231         OverloadedCallType::from_trait_id(tcx, method.container().id())
232     }
233 }
234
///////////////////////////////////////////////////////////////////////////
// The ExprUseVisitor type
//
// This is the code that actually walks the tree. Like
// mem_categorization, it requires a TYPER, which is a type that
// supplies types from the tree. After type checking is complete, you
// can just use the tcx as the typer.
pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    // Categorizes expressions into `cmt`s as they are walked.
    mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
    // Receives the callbacks describing how each value is used.
    delegate: &'a mut Delegate<'tcx>,
}
246
// If the TYPER results in an error, it's because the type check
// failed (or will fail, when the error is uncovered and reported
// during writeback). In this case, we just ignore this part of the
// code.
//
// Note that this macro appears similar to try!(), but, unlike try!(),
// it does not propagate the error.  Instead it returns `()` from the
// enclosing function, abandoning the rest of the walk.
macro_rules! return_if_err {
    ($inp: expr) => (
        match $inp {
            Ok(v) => v,
            Err(()) => {
                debug!("mc reported err");
                return
            }
        }
    )
}
265
/// Whether the elements of an overloaded operation are passed by value or by reference
/// (consumed by the overload handling in `walk_overloaded_operator`).
enum PassArgs {
    ByValue,
    ByRef,
}
271
272 impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
273     pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
274                infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
275                -> Self
276     {
277         ExprUseVisitor::with_options(delegate, infcx, mc::MemCategorizationOptions::default())
278     }
279
    /// Creates a visitor wired to `delegate` with explicit
    /// memory-categorization options.
    pub fn with_options(delegate: &'a mut (Delegate<'tcx>+'a),
                        infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
                        options: mc::MemCategorizationOptions)
               -> Self
    {
        ExprUseVisitor {
            mc: mc::MemCategorizationContext::with_options(infcx, options),
            delegate: delegate
        }
    }
290
    /// Walks a complete fn: first the argument patterns, then the body.
    pub fn walk_fn(&mut self,
                   decl: &hir::FnDecl,
                   body: &hir::Block) {
        self.walk_arg_patterns(decl, body);
        self.walk_block(body);
    }
297
298     fn walk_arg_patterns(&mut self,
299                          decl: &hir::FnDecl,
300                          body: &hir::Block) {
301         for arg in &decl.inputs {
302             let arg_ty = return_if_err!(self.mc.infcx.node_ty(arg.pat.id));
303
304             let fn_body_scope = self.tcx().region_maps.node_extent(body.id);
305             let arg_cmt = self.mc.cat_rvalue(
306                 arg.id,
307                 arg.pat.span,
308                 ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
309                 arg_ty);
310
311             self.walk_irrefutable_pat(arg_cmt, &arg.pat);
312         }
313     }
314
    /// Shorthand for the type context behind the inference context.
    fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
        self.mc.infcx.tcx
    }
318
    /// Reports a consume of `cmt` to the delegate, classifying it as a
    /// copy or a move via `copy_or_move`.
    fn delegate_consume(&mut self,
                        consume_id: ast::NodeId,
                        consume_span: Span,
                        cmt: mc::cmt<'tcx>) {
        debug!("delegate_consume(consume_id={}, cmt={:?})",
               consume_id, cmt);

        let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove);
        self.delegate.consume(consume_id, consume_span, cmt, mode);
    }
329
330     fn consume_exprs(&mut self, exprs: &[P<hir::Expr>]) {
331         for expr in exprs {
332             self.consume_expr(&expr);
333         }
334     }
335
    /// Indicates that `expr`'s value is consumed (copied or moved,
    /// depending on its type), then walks its subexpressions.
    pub fn consume_expr(&mut self, expr: &hir::Expr) {
        debug!("consume_expr(expr={:?})", expr);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate_consume(expr.id, expr.span, cmt);
        self.walk_expr(expr);
    }
343
    /// Indicates that the place denoted by `expr` is written to as part
    /// of `assignment_expr`, then walks `expr`'s subexpressions.
    fn mutate_expr(&mut self,
                   assignment_expr: &hir::Expr,
                   expr: &hir::Expr,
                   mode: MutateMode) {
        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
        self.walk_expr(expr);
    }
352
    /// Indicates that `expr` is borrowed for region `r` with kind `bk`,
    /// then walks its subexpressions.
    fn borrow_expr(&mut self,
                   expr: &hir::Expr,
                   r: ty::Region,
                   bk: ty::BorrowKind,
                   cause: LoanCause) {
        debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
               expr, r, bk);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);

        self.walk_expr(expr)
    }
366
    /// Walks an expression whose result is merely "selected from"
    /// (e.g. the base of a field access); unlike `consume_expr`, no
    /// consume is reported for the base itself.
    fn select_from_expr(&mut self, expr: &hir::Expr) {
        self.walk_expr(expr)
    }
370
    /// Walks `expr`'s subexpressions (and any adjustments applied to
    /// `expr` itself), reporting borrows/consumes/mutations for each
    /// according to the expression kind.  Note: the arms are matched in
    /// order, so the `UnDeref` arm must precede the general
    /// `ExprUnary` arm below.
    pub fn walk_expr(&mut self, expr: &hir::Expr) {
        debug!("walk_expr(expr={:?})", expr);

        self.walk_adjustment(expr);

        match expr.node {
            // Bare paths: any use is reported by the *enclosing* context
            // (consume_expr/borrow_expr), so nothing to do here.
            hir::ExprPath(..) => { }

            hir::ExprType(ref subexpr, _) => {
                self.walk_expr(&subexpr)
            }

            hir::ExprUnary(hir::UnDeref, ref base) => {      // *base
                if !self.walk_overloaded_operator(expr, &base, Vec::new(), PassArgs::ByRef) {
                    self.select_from_expr(&base);
                }
            }

            hir::ExprField(ref base, _) => {         // base.f
                self.select_from_expr(&base);
            }

            hir::ExprTupField(ref base, _) => {         // base.<n>
                self.select_from_expr(&base);
            }

            hir::ExprIndex(ref lhs, ref rhs) => {       // lhs[rhs]
                if !self.walk_overloaded_operator(expr,
                                                  &lhs,
                                                  vec![&rhs],
                                                  PassArgs::ByValue) {
                    self.select_from_expr(&lhs);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprCall(ref callee, ref args) => {    // callee(args)
                self.walk_callee(expr, &callee);
                self.consume_exprs(args);
            }

            hir::ExprMethodCall(_, _, ref args) => { // callee.m(args)
                self.consume_exprs(args);
            }

            hir::ExprStruct(_, ref fields, ref opt_with) => {
                self.walk_struct_expr(expr, fields, opt_with);
            }

            hir::ExprTup(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
                self.consume_expr(&cond_expr);
                self.walk_block(&then_blk);
                if let Some(ref else_expr) = *opt_else_expr {
                    self.consume_expr(&else_expr);
                }
            }

            hir::ExprMatch(ref discr, ref arms, _) => {
                let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
                self.borrow_expr(&discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);

                // treatment of the discriminant is handled while walking the arms:
                // first compute each arm's overall mode, then walk it with that mode.
                for arm in arms {
                    let mode = self.arm_move_mode(discr_cmt.clone(), arm);
                    let mode = mode.match_mode();
                    self.walk_arm(discr_cmt.clone(), arm, mode);
                }
            }

            hir::ExprVec(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprAddrOf(m, ref base) => {   // &base
                // make sure that the thing we are pointing out stays valid
                // for the lifetime `scope_r` of the resulting ptr:
                let expr_ty = return_if_err!(self.mc.infcx.node_ty(expr.id));
                // NOTE(review): when `expr_ty` is not `TyRef` (a type error),
                // `base` is not walked at all — presumably fine because typeck
                // reports the error; confirm if changing this.
                if let ty::TyRef(&r, _) = expr_ty.sty {
                    let bk = ty::BorrowKind::from_mutbl(m);
                    self.borrow_expr(&base, r, bk, AddrOf);
                }
            }

            hir::ExprInlineAsm(ref ia, ref outputs, ref inputs) => {
                // Indirect outputs are reads of a pointer (consume); direct
                // outputs are writes (possibly read-write for `+` constraints).
                for (o, output) in ia.outputs.iter().zip(outputs) {
                    if o.is_indirect {
                        self.consume_expr(output);
                    } else {
                        self.mutate_expr(expr, output,
                                         if o.is_rw {
                                             MutateMode::WriteAndRead
                                         } else {
                                             MutateMode::JustWrite
                                         });
                    }
                }
                self.consume_exprs(inputs);
            }

            hir::ExprBreak(..) |
            hir::ExprAgain(..) |
            hir::ExprLit(..) => {}

            hir::ExprLoop(ref blk, _) => {
                self.walk_block(&blk);
            }

            hir::ExprWhile(ref cond_expr, ref blk, _) => {
                self.consume_expr(&cond_expr);
                self.walk_block(&blk);
            }

            // All remaining unary ops (`UnDeref` was handled above).
            hir::ExprUnary(op, ref lhs) => {
                let pass_args = if op.is_by_value() {
                    PassArgs::ByValue
                } else {
                    PassArgs::ByRef
                };

                if !self.walk_overloaded_operator(expr, &lhs, Vec::new(), pass_args) {
                    self.consume_expr(&lhs);
                }
            }

            hir::ExprBinary(op, ref lhs, ref rhs) => {
                let pass_args = if op.node.is_by_value() {
                    PassArgs::ByValue
                } else {
                    PassArgs::ByRef
                };

                if !self.walk_overloaded_operator(expr, &lhs, vec![&rhs], pass_args) {
                    self.consume_expr(&lhs);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprBlock(ref blk) => {
                self.walk_block(&blk);
            }

            hir::ExprRet(ref opt_expr) => {
                if let Some(ref expr) = *opt_expr {
                    self.consume_expr(&expr);
                }
            }

            hir::ExprAssign(ref lhs, ref rhs) => {
                self.mutate_expr(expr, &lhs, MutateMode::JustWrite);
                self.consume_expr(&rhs);
            }

            hir::ExprCast(ref base, _) => {
                self.consume_expr(&base);
            }

            hir::ExprAssignOp(op, ref lhs, ref rhs) => {
                // NB All our assignment operations take the RHS by value
                assert!(op.node.is_by_value());

                if !self.walk_overloaded_operator(expr, lhs, vec![rhs], PassArgs::ByValue) {
                    self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprRepeat(ref base, ref count) => {
                self.consume_expr(&base);
                self.consume_expr(&count);
            }

            hir::ExprClosure(_, _, _, fn_decl_span) => {
                self.walk_captures(expr, fn_decl_span)
            }

            hir::ExprBox(ref base) => {
                self.consume_expr(&base);
            }
        }
    }
555
    /// Handles the callee of a call expression: ordinary fn items and fn
    /// pointers are consumed; overloaded calls (closures) borrow or
    /// consume the callee depending on which `Fn*` trait is involved.
    fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
        let callee_ty = return_if_err!(self.mc.infcx.expr_ty_adjusted(callee));
        debug!("walk_callee: callee={:?} callee_ty={:?}",
               callee, callee_ty);
        let call_scope = self.tcx().region_maps.node_extent(call.id);
        match callee_ty.sty {
            ty::TyFnDef(..) | ty::TyFnPtr(_) => {
                self.consume_expr(callee);
            }
            // Type error already reported elsewhere; nothing to walk.
            ty::TyError => { }
            _ => {
                let overloaded_call_type =
                    match self.mc.infcx.node_method_id(ty::MethodCall::expr(call.id)) {
                        Some(method_id) => {
                            OverloadedCallType::from_method_id(self.tcx(), method_id)
                        }
                        None => {
                            span_bug!(
                                callee.span,
                                "unexpected callee type {}",
                                callee_ty)
                        }
                    };
                match overloaded_call_type {
                    // `FnMut::call_mut` takes `&mut self`: mutable borrow
                    // of the callee for the duration of the call.
                    FnMutOverloadedCall => {
                        self.borrow_expr(callee,
                                         ty::ReScope(call_scope),
                                         ty::MutBorrow,
                                         ClosureInvocation);
                    }
                    // `Fn::call` takes `&self`: immutable borrow.
                    FnOverloadedCall => {
                        self.borrow_expr(callee,
                                         ty::ReScope(call_scope),
                                         ty::ImmBorrow,
                                         ClosureInvocation);
                    }
                    // `FnOnce::call_once` takes `self`: the callee is consumed.
                    FnOnceOverloadedCall => self.consume_expr(callee),
                }
            }
        }
    }
597
598     fn walk_stmt(&mut self, stmt: &hir::Stmt) {
599         match stmt.node {
600             hir::StmtDecl(ref decl, _) => {
601                 match decl.node {
602                     hir::DeclLocal(ref local) => {
603                         self.walk_local(&local);
604                     }
605
606                     hir::DeclItem(_) => {
607                         // we don't visit nested items in this visitor,
608                         // only the fn body we were given.
609                     }
610                 }
611             }
612
613             hir::StmtExpr(ref expr, _) |
614             hir::StmtSemi(ref expr, _) => {
615                 self.consume_expr(&expr);
616             }
617         }
618     }
619
    /// Walks a `let` statement.  Uninitialized declarations report each
    /// binding via `decl_without_init`; initialized ones walk the
    /// initializer and then treat the pattern as an irrefutable match
    /// against it.
    fn walk_local(&mut self, local: &hir::Local) {
        match local.init {
            None => {
                let delegate = &mut self.delegate;
                pat_util::pat_bindings(&local.pat, |_, id, span, _| {
                    delegate.decl_without_init(id, span);
                })
            }

            Some(ref expr) => {
                // Variable declarations with
                // initializers are considered
                // "assigns", which is handled by
                // `walk_pat`:
                self.walk_expr(&expr);
                let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
                self.walk_irrefutable_pat(init_cmt, &local.pat);
            }
        }
    }
640
    /// Walks the statements of `blk` in order, then consumes the tail
    /// expression (the block's value, copied or moved depending on its
    /// type) if one is present.
    fn walk_block(&mut self, blk: &hir::Block) {
        debug!("walk_block(blk.id={})", blk.id);

        for stmt in &blk.stmts {
            self.walk_stmt(stmt);
        }

        if let Some(ref tail_expr) = blk.expr {
            self.consume_expr(&tail_expr);
        }
    }
654
    /// Walks a struct literal: consumes each explicit field initializer,
    /// and for a functional-update base (`..with`) consumes only the
    /// fields of the base that are NOT overridden by explicit fields.
    fn walk_struct_expr(&mut self,
                        _expr: &hir::Expr,
                        fields: &[hir::Field],
                        opt_with: &Option<P<hir::Expr>>) {
        // Consume the expressions supplying values for each field.
        for field in fields {
            self.consume_expr(&field.expr);
        }

        let with_expr = match *opt_with {
            Some(ref w) => &**w,
            None => { return; }
        };

        let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));

        // Select just those fields of the `with`
        // expression that will actually be used
        if let ty::TyStruct(def, substs) = with_cmt.ty.sty {
            // Consume those fields of the with expression that are needed.
            for with_field in &def.struct_variant().fields {
                if !contains_field_named(with_field, fields) {
                    let cmt_field = self.mc.cat_field(
                        &*with_expr,
                        with_cmt.clone(),
                        with_field.name,
                        with_field.ty(self.tcx(), substs)
                    );
                    self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
                }
            }
        } else {
            // the base expression should always evaluate to a
            // struct; however, when EUV is run during typeck, it
            // may not. This will generate an error earlier in typeck,
            // so we can just ignore it.
            if !self.tcx().sess.has_errors() {
                span_bug!(
                    with_expr.span,
                    "with expression doesn't evaluate to a struct");
            }
        };

        // walk the with expression so that complex expressions
        // are properly handled.
        self.walk_expr(with_expr);

        // Whether one of the explicit field initializers names `field`.
        fn contains_field_named(field: ty::FieldDef,
                                fields: &[hir::Field])
                                -> bool
        {
            fields.iter().any(
                |f| f.name.node == field.name)
        }
    }
710
    // Invoke the appropriate delegate calls for anything that gets
    // consumed or borrowed as part of the automatic adjustment
    // process.
    fn walk_adjustment(&mut self, expr: &hir::Expr) {
        let infcx = self.mc.infcx;
        //NOTE(@jroesch): mixed RefCell borrow causes crash,
        // so the adjustment is cloned out before dispatching on it.
        let adj = infcx.adjustments().get(&expr.id).map(|x| x.clone());
        if let Some(adjustment) = adj {
            match adjustment {
                adjustment::AdjustNeverToAny(..) |
                adjustment::AdjustReifyFnPointer |
                adjustment::AdjustUnsafeFnPointer |
                adjustment::AdjustMutToConstPointer => {
                    // Creating a closure/fn-pointer or unsizing consumes
                    // the input and stores it into the resulting rvalue.
                    debug!("walk_adjustment: trivial adjustment");
                    let cmt_unadjusted =
                        return_if_err!(self.mc.cat_expr_unadjusted(expr));
                    self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
                }
                adjustment::AdjustDerefRef(ref adj) => {
                    self.walk_autoderefref(expr, adj);
                }
            }
        }
    }
737
    /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
    /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
    /// `deref()` is declared with `&self`, this is an autoref of `x`.
    /// Built-in derefs (no method recorded for the step) report nothing.
    fn walk_autoderefs(&mut self,
                       expr: &hir::Expr,
                       autoderefs: usize) {
        debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);

        for i in 0..autoderefs {
            let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
            if let Some(method_ty) = self.mc.infcx.node_method_ty(deref_id) {
                let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));

                // the method call infrastructure should have
                // replaced all late-bound regions with variables:
                let self_ty = method_ty.fn_sig().input(0);
                let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();

                // The `self` type of the deref method tells us the
                // mutability and region of the implicit borrow.
                let (m, r) = match self_ty.sty {
                    ty::TyRef(r, ref m) => (m.mutbl, r),
                    _ => span_bug!(expr.span,
                                   "bad overloaded deref type {:?}",
                                   method_ty)
                };
                let bk = ty::BorrowKind::from_mutbl(m);
                self.delegate.borrow(expr.id, expr.span, cmt,
                                     *r, bk, AutoRef);
            }
        }
    }
768
    /// Walks a deref/ref adjustment: first the autoderef steps, then the
    /// optional autoref, and finally a consume of the (possibly ref'd)
    /// result if an unsize step follows.
    fn walk_autoderefref(&mut self,
                         expr: &hir::Expr,
                         adj: &adjustment::AutoDerefRef<'tcx>) {
        debug!("walk_autoderefref expr={:?} adj={:?}",
               expr,
               adj);

        self.walk_autoderefs(expr, adj.autoderefs);

        let cmt_derefd =
            return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));

        let cmt_refd =
            self.walk_autoref(expr, cmt_derefd, adj.autoref);

        if adj.unsize.is_some() {
            // Unsizing consumes the thin pointer and produces a fat one.
            self.delegate_consume(expr.id, expr.span, cmt_refd);
        }
    }
789

    /// Walks the autoref `opt_autoref` applied to the autoderef'd
    /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
    /// after all relevant autoderefs have occurred. Because AutoRefs
    /// can be recursive, this function is recursive: it first walks
    /// deeply all the way down the autoref chain, and then processes
    /// the autorefs on the way out. At each point, it returns the
    /// `cmt` for the rvalue that will be produced by introducing an
    /// autoref.
    fn walk_autoref(&mut self,
                    expr: &hir::Expr,
                    cmt_base: mc::cmt<'tcx>,
                    opt_autoref: Option<adjustment::AutoRef<'tcx>>)
                    -> mc::cmt<'tcx>
    {
        debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
               expr.id,
               cmt_base,
               opt_autoref);

        let cmt_base_ty = cmt_base.ty;

        let autoref = match opt_autoref {
            Some(ref autoref) => autoref,
            None => {
                // No AutoRef.
                return cmt_base;
            }
        };

        match *autoref {
            // `&expr` / `&mut expr` inserted by the compiler: a borrow
            // for the adjustment's region and mutability.
            adjustment::AutoPtr(r, m) => {
                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     *r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoRef);
            }

            adjustment::AutoUnsafe(m) => {
                debug!("walk_autoref: expr.id={} cmt_base={:?}",
                       expr.id,
                       cmt_base);

                // Converting from a &T to *T (or &mut T to *mut T) is
                // treated as borrowing it for the enclosing temporary
                // scope.
                let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));

                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoUnsafe);
            }
        }

        // Construct the categorization for the result of the autoref.
        // This is always an rvalue, since we are producing a new
        // (temporary) indirection.

        let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);

        self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
    }
857
858
859     // When this returns true, it means that the expression *is* a
860     // method-call (i.e. via the operator-overload).  This true result
861     // also implies that walk_overloaded_operator already took care of
862     // recursively processing the input arguments, and thus the caller
863     // should not do so.
864     fn walk_overloaded_operator(&mut self,
865                                 expr: &hir::Expr,
866                                 receiver: &hir::Expr,
867                                 rhs: Vec<&hir::Expr>,
868                                 pass_args: PassArgs)
869                                 -> bool
870     {
871         if !self.mc.infcx.is_method_call(expr.id) {
872             return false;
873         }
874
875         match pass_args {
876             PassArgs::ByValue => {
877                 self.consume_expr(receiver);
878                 for &arg in &rhs {
879                     self.consume_expr(arg);
880                 }
881
882                 return true;
883             },
884             PassArgs::ByRef => {},
885         }
886
887         self.walk_expr(receiver);
888
889         // Arguments (but not receivers) to overloaded operator
890         // methods are implicitly autoref'd which sadly does not use
891         // adjustments, so we must hardcode the borrow here.
892
893         let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
894         let bk = ty::ImmBorrow;
895
896         for &arg in &rhs {
897             self.borrow_expr(arg, r, bk, OverloadedOperator);
898         }
899         return true;
900     }
901
902     fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
903         let mut mode = Unknown;
904         for pat in &arm.pats {
905             self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
906         }
907         mode
908     }
909
910     fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
911         for pat in &arm.pats {
912             self.walk_pat(discr_cmt.clone(), &pat, mode);
913         }
914
915         if let Some(ref guard) = arm.guard {
916             self.consume_expr(&guard);
917         }
918
919         self.consume_expr(&arm.body);
920     }
921
922     /// Walks a pat that occurs in isolation (i.e. top-level of fn
923     /// arg or let binding.  *Not* a match arm or nested pat.)
924     fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
925         let mut mode = Unknown;
926         self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
927         let mode = mode.match_mode();
928         self.walk_pat(cmt_discr, pat, mode);
929     }
930
931     /// Identifies any bindings within `pat` and accumulates within
932     /// `mode` whether the overall pattern/match structure is a move,
933     /// copy, or borrow.
934     fn determine_pat_move_mode(&mut self,
935                                cmt_discr: mc::cmt<'tcx>,
936                                pat: &hir::Pat,
937                                mode: &mut TrackMatchMode) {
938         debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
939                pat);
940         return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
941             match pat.node {
942                 PatKind::Binding(hir::BindByRef(..), _, _) =>
943                     mode.lub(BorrowingMatch),
944                 PatKind::Binding(hir::BindByValue(..), _, _) => {
945                     match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) {
946                         Copy => mode.lub(CopyingMatch),
947                         Move(..) => mode.lub(MovingMatch),
948                     }
949                 }
950                 _ => {}
951             }
952         }));
953     }
954
    /// The core driver for walking a pattern; `match_mode` must be
    /// established up front, e.g. via `determine_pat_move_mode` (see
    /// also `walk_irrefutable_pat` for patterns that stand alone).
    fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
        debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);

        // Split the borrows of `self` up front: the `cat_pattern`
        // closures below need `delegate` mutably while `mc`/`infcx`
        // are borrowed shared, which a direct `self.` access inside
        // the closures would not allow.
        let tcx = &self.tcx();
        let mc = &self.mc;
        let infcx = self.mc.infcx;
        let delegate = &mut self.delegate;

        // First pass: visit the *bindings* (the leaves of the
        // pattern), reporting an init-assignment for each binding
        // plus a borrow or copy/move of the matched value.
        return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
            if let PatKind::Binding(bmode, _, _) = pat.node {
                debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}", cmt_pat, pat, match_mode);

                // pat_ty: the type of the binding being produced.
                let pat_ty = return_if_err!(infcx.node_ty(pat.id));

                // Each match binding is effectively an assignment to the
                // binding being produced.
                if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty,
                                                    tcx.expect_def(pat.id)) {
                    delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
                }

                // It is also a borrow or copy/move of the value being matched.
                match bmode {
                    hir::BindByRef(m) => {
                        // `ref x` / `ref mut x`: a borrow with region and
                        // kind read off the binding's `&`-type.
                        if let ty::TyRef(&r, _) = pat_ty.sty {
                            let bk = ty::BorrowKind::from_mutbl(m);
                            delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
                        }
                    }
                    hir::BindByValue(..) => {
                        // Plain `x`: copies or moves depending on the type.
                        let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
                        debug!("walk_pat binding consuming pat");
                        delegate.consume_pat(pat, cmt_pat, mode);
                    }
                }
            }
        }));

        // Do a second pass over the pattern, calling `matched_pat` on
        // the interior nodes (enum variants and structs), as opposed
        // to the above loop's visit of the bindings that form
        // the leaves of the pattern tree structure.
        return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
            match tcx.expect_def_or_none(pat.id) {
                Some(Def::Variant(enum_did, variant_did)) => {
                    // Matching a multi-variant enum implies a downcast;
                    // a univariant enum needs none.
                    let downcast_cmt = if tcx.lookup_adt_def(enum_did).is_univariant() {
                        cmt_pat
                    } else {
                        let cmt_pat_ty = cmt_pat.ty;
                        mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
                    };

                    debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
                    delegate.matched_pat(pat, downcast_cmt, match_mode);
                }
                Some(Def::Struct(..)) | Some(Def::TyAlias(..)) | Some(Def::AssociatedTy(..)) => {
                    debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
                    delegate.matched_pat(pat, cmt_pat, match_mode);
                }
                _ => {}
            }
        }));
    }
1021
1022     fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
1023         debug!("walk_captures({:?})", closure_expr);
1024
1025         self.tcx().with_freevars(closure_expr.id, |freevars| {
1026             for freevar in freevars {
1027                 let id_var = freevar.def.var_id();
1028                 let upvar_id = ty::UpvarId { var_id: id_var,
1029                                              closure_expr_id: closure_expr.id };
1030                 let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap();
1031                 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1032                                                                    fn_decl_span,
1033                                                                    freevar.def));
1034                 match upvar_capture {
1035                     ty::UpvarCapture::ByValue => {
1036                         let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove);
1037                         self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
1038                     }
1039                     ty::UpvarCapture::ByRef(upvar_borrow) => {
1040                         self.delegate.borrow(closure_expr.id,
1041                                              fn_decl_span,
1042                                              cmt_var,
1043                                              upvar_borrow.region,
1044                                              upvar_borrow.kind,
1045                                              ClosureCapture(freevar.span));
1046                     }
1047                 }
1048             }
1049         });
1050     }
1051
1052     fn cat_captured_var(&mut self,
1053                         closure_id: ast::NodeId,
1054                         closure_span: Span,
1055                         upvar_def: Def)
1056                         -> mc::McResult<mc::cmt<'tcx>> {
1057         // Create the cmt for the variable being borrowed, from the
1058         // caller's perspective
1059         let var_id = upvar_def.var_id();
1060         let var_ty = self.mc.infcx.node_ty(var_id)?;
1061         self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
1062     }
1063 }
1064
1065 fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
1066                                 cmt: &mc::cmt<'tcx>,
1067                                 move_reason: MoveReason)
1068                                 -> ConsumeMode
1069 {
1070     if infcx.type_moves_by_default(cmt.ty, cmt.span) {
1071         Move(move_reason)
1072     } else {
1073         Copy
1074     }
1075 }