src/librustc/middle/expr_use_visitor.rs
1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! A different sort of visitor for walking fn bodies.  Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
14
15 pub use self::MutateMode::*;
16 pub use self::LoanCause::*;
17 pub use self::ConsumeMode::*;
18 pub use self::MoveReason::*;
19 pub use self::MatchMode::*;
20 use self::TrackMatchMode::*;
21 use self::OverloadedCallType::*;
22
23 use middle::{def, pat_util};
24 use middle::def_id::{DefId};
25 use middle::infer;
26 use middle::mem_categorization as mc;
27 use middle::ty;
28 use middle::ty::adjustment;
29
30 use rustc_front::hir;
31
32 use syntax::ast;
33 use syntax::ptr::P;
34 use syntax::codemap::Span;
35
36 ///////////////////////////////////////////////////////////////////////////
37 // The Delegate trait
38
39 /// This trait defines the callbacks you can expect to receive when
40 /// employing the ExprUseVisitor.
41 pub trait Delegate<'tcx> {
42     // The value found at `cmt` is either copied or moved, depending
43     // on mode.
44     fn consume(&mut self,
45                consume_id: ast::NodeId,
46                consume_span: Span,
47                cmt: mc::cmt<'tcx>,
48                mode: ConsumeMode);
49
50     // The value found at `cmt` has been determined to match the
51     // pattern binding `matched_pat`, and its subparts are being
52     // copied or moved depending on `mode`.  Note that `matched_pat`
53     // is called on all variants/structs in the pattern (i.e., the
54     // interior nodes of the pattern's tree structure) while
55     // `consume_pat` is called on the binding identifiers in the pattern
56     // (which are the leaves of the pattern's tree structure).
57     //
58     // Note that variants/structs and identifiers are disjoint; thus
59     // `matched_pat` and `consume_pat` are never both called on the
60     // same input pattern structure (though `consume_pat` can be
61     // called on a subpart of an input passed to `matched_pat`).
62     fn matched_pat(&mut self,
63                    matched_pat: &hir::Pat,
64                    cmt: mc::cmt<'tcx>,
65                    mode: MatchMode);
66
67     // The value found at `cmt` is either copied or moved via the
68     // pattern binding `consume_pat`, depending on mode.
69     fn consume_pat(&mut self,
70                    consume_pat: &hir::Pat,
71                    cmt: mc::cmt<'tcx>,
72                    mode: ConsumeMode);
73
74     // The value found at `cmt` is being borrowed at the point
75     // `borrow_id` for the region `loan_region` with kind `bk`.
76     fn borrow(&mut self,
77               borrow_id: ast::NodeId,
78               borrow_span: Span,
79               cmt: mc::cmt<'tcx>,
80               loan_region: ty::Region,
81               bk: ty::BorrowKind,
82               loan_cause: LoanCause);
83
84     // The local variable `id` is declared but not initialized.
85     fn decl_without_init(&mut self,
86                          id: ast::NodeId,
87                          span: Span);
88
89     // The path at `cmt` is being assigned to.
90     fn mutate(&mut self,
91               assignment_id: ast::NodeId,
92               assignment_span: Span,
93               assignee_cmt: mc::cmt<'tcx>,
94               mode: MutateMode);
95 }
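
// Illustrative sketch (the name `NopDelegate` is hypothetical, not part of
// the compiler): the smallest possible `Delegate`, one that simply ignores
// every callback. Real clients, such as the borrow checker, record these
// events instead of discarding them.
#[allow(dead_code)]
struct NopDelegate;

impl<'tcx> Delegate<'tcx> for NopDelegate {
    fn consume(&mut self, _: ast::NodeId, _: Span, _: mc::cmt<'tcx>, _: ConsumeMode) {}
    fn matched_pat(&mut self, _: &hir::Pat, _: mc::cmt<'tcx>, _: MatchMode) {}
    fn consume_pat(&mut self, _: &hir::Pat, _: mc::cmt<'tcx>, _: ConsumeMode) {}
    fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt<'tcx>,
              _: ty::Region, _: ty::BorrowKind, _: LoanCause) {}
    fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
    fn mutate(&mut self, _: ast::NodeId, _: Span, _: mc::cmt<'tcx>, _: MutateMode) {}
}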
96
97 #[derive(Copy, Clone, PartialEq, Debug)]
98 pub enum LoanCause {
99     ClosureCapture(Span),
100     AddrOf,
101     AutoRef,
102     AutoUnsafe,
103     RefBinding,
104     OverloadedOperator,
105     ClosureInvocation,
106     ForLoop,
107     MatchDiscriminant
108 }
109
110 #[derive(Copy, Clone, PartialEq, Debug)]
111 pub enum ConsumeMode {
112     Copy,                // reference to x where x has a type that copies
113     Move(MoveReason),    // reference to x where x has a type that moves
114 }
115
116 #[derive(Copy, Clone, PartialEq, Debug)]
117 pub enum MoveReason {
118     DirectRefMove,
119     PatBindingMove,
120     CaptureMove,
121 }
122
123 #[derive(Copy, Clone, PartialEq, Debug)]
124 pub enum MatchMode {
125     NonBindingMatch,
126     BorrowingMatch,
127     CopyingMatch,
128     MovingMatch,
129 }
130
131 #[derive(Copy, Clone, PartialEq, Debug)]
132 enum TrackMatchMode {
133     Unknown,
134     Definite(MatchMode),
135     Conflicting,
136 }
137
138 impl TrackMatchMode {
139     // Builds up the whole match mode for a pattern from its constituent
140     // parts.  The lattice looks like this:
141     //
142     //          Conflicting
143     //            /     \
144     //           /       \
145     //      Borrowing   Moving
146     //           \       /
147     //            \     /
148     //            Copying
149     //               |
150     //          NonBinding
151     //               |
152     //            Unknown
153     //
154     // examples:
155     //
156     // * `(_, some_int)` pattern is Copying, since
157     //   NonBinding + Copying => Copying
158     //
159     // * `(some_int, some_box)` pattern is Moving, since
160     //   Copying + Moving => Moving
161     //
162     // * `(ref x, some_box)` pattern is Conflicting, since
163     //   Borrowing + Moving => Conflicting
164     //
165     // Note that the `Unknown` and `Conflicting` states are
166     // represented separately from the other more interesting
167     // `Definite` states, which simplifies logic here somewhat.
168     fn lub(&mut self, mode: MatchMode) {
169         *self = match (*self, mode) {
170             // Note that clause order below is very significant.
171             (Unknown, new) => Definite(new),
172             (Definite(old), new) if old == new => Definite(old),
173
174             (Definite(old), NonBindingMatch) => Definite(old),
175             (Definite(NonBindingMatch), new) => Definite(new),
176
177             (Definite(old), CopyingMatch) => Definite(old),
178             (Definite(CopyingMatch), new) => Definite(new),
179
180             (Definite(_), _) => Conflicting,
181             (Conflicting, _) => *self,
182         };
183     }
184
185     fn match_mode(&self) -> MatchMode {
186         match *self {
187             Unknown => NonBindingMatch,
188             Definite(mode) => mode,
189             Conflicting => {
190                 // Conservatively return MovingMatch to let the
191                 // compiler continue to make progress.
192                 MovingMatch
193             }
194         }
195     }
196 }
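
// Illustrative test sketch: the three example patterns from the lattice
// comment above, checked against `lub` and `match_mode`.
#[test]
fn lub_matches_lattice_examples() {
    // `(_, some_int)`: NonBinding + Copying => Copying
    let mut m = Unknown;
    m.lub(NonBindingMatch);
    m.lub(CopyingMatch);
    assert_eq!(m.match_mode(), CopyingMatch);

    // `(some_int, some_box)`: Copying + Moving => Moving
    let mut m = Unknown;
    m.lub(CopyingMatch);
    m.lub(MovingMatch);
    assert_eq!(m.match_mode(), MovingMatch);

    // `(ref x, some_box)`: Borrowing + Moving => Conflicting, which
    // `match_mode` conservatively reports as MovingMatch.
    let mut m = Unknown;
    m.lub(BorrowingMatch);
    m.lub(MovingMatch);
    assert_eq!(m.match_mode(), MovingMatch);
}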
197
198 #[derive(Copy, Clone, PartialEq, Debug)]
199 pub enum MutateMode {
200     Init,
201     JustWrite,    // x = y
202     WriteAndRead, // x += y
203 }
204
205 #[derive(Copy, Clone)]
206 enum OverloadedCallType {
207     FnOverloadedCall,
208     FnMutOverloadedCall,
209     FnOnceOverloadedCall,
210 }
211
212 impl OverloadedCallType {
213     fn from_trait_id(tcx: &ty::ctxt, trait_id: DefId)
214                      -> OverloadedCallType {
215         for &(maybe_function_trait, overloaded_call_type) in &[
216             (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
217             (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
218             (tcx.lang_items.fn_trait(), FnOverloadedCall)
219         ] {
220             match maybe_function_trait {
221                 Some(function_trait) if function_trait == trait_id => {
222                     return overloaded_call_type
223                 }
224                 _ => continue,
225             }
226         }
227
228         tcx.sess.bug("overloaded call didn't map to known function trait")
229     }
230
231     fn from_method_id(tcx: &ty::ctxt, method_id: DefId)
232                       -> OverloadedCallType {
233         let method = tcx.impl_or_trait_item(method_id);
234         OverloadedCallType::from_trait_id(tcx, method.container().id())
235     }
236 }
237
238 ///////////////////////////////////////////////////////////////////////////
239 // The ExprUseVisitor type
240 //
241 // This is the code that actually walks the tree. Like
242 // mem_categorization, it requires a TYPER, which is a type that
243 // supplies types from the tree. After type checking is complete, you
244 // can just use the tcx as the typer.
245 //
246 // FIXME(stage0): the :'t here is probably only important for stage0
247 pub struct ExprUseVisitor<'d, 't, 'a: 't, 'tcx:'a+'d+'t> {
248     typer: &'t infer::InferCtxt<'a, 'tcx>,
249     mc: mc::MemCategorizationContext<'t, 'a, 'tcx>,
250     delegate: &'d mut Delegate<'tcx>,
251 }
252
253 // If the TYPER results in an error, it's because the type check
254 // failed (or will fail, when the error is uncovered and reported
255 // during writeback). In this case, we just ignore this part of the
256 // code.
257 //
258 // Note that this macro appears similar to try!(), but, unlike try!(),
259 // it does not propagate the error.
260 macro_rules! return_if_err {
261     ($inp: expr) => (
262         match $inp {
263             Ok(v) => v,
264             Err(()) => {
265                 debug!("mc reported err");
266                 return
267             }
268         }
269     )
270 }
271
272 /// Whether the arguments of an overloaded operation are passed by value or by reference
273 enum PassArgs {
274     ByValue,
275     ByRef,
276 }
277
278 impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {
279     pub fn new(delegate: &'d mut Delegate<'tcx>,
280                typer: &'t infer::InferCtxt<'a, 'tcx>)
281                -> ExprUseVisitor<'d,'t,'a,'tcx>
282     {
283         let result = ExprUseVisitor {
284             typer: typer,
285             mc: mc::MemCategorizationContext::new(typer),
286             delegate: delegate,
287         };
288
289         result
290     }
291
292     pub fn walk_fn(&mut self,
293                    decl: &hir::FnDecl,
294                    body: &hir::Block) {
295         self.walk_arg_patterns(decl, body);
296         self.walk_block(body);
297     }
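
    // Typical usage, as a sketch (the names `delegate`, `infcx`, `fn_decl`
    // and `fn_body` are placeholders; constructing the `InferCtxt` is the
    // caller's job, as e.g. borrowck does):
    //
    //     let mut euv = ExprUseVisitor::new(&mut delegate, &infcx);
    //     euv.walk_fn(fn_decl, fn_body);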
298
299     fn walk_arg_patterns(&mut self,
300                          decl: &hir::FnDecl,
301                          body: &hir::Block) {
302         for arg in &decl.inputs {
303             let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id));
304
305             let fn_body_scope = self.tcx().region_maps.node_extent(body.id);
306             let arg_cmt = self.mc.cat_rvalue(
307                 arg.id,
308                 arg.pat.span,
309                 ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
310                 arg_ty);
311
312             self.walk_irrefutable_pat(arg_cmt, &*arg.pat);
313         }
314     }
315
316     fn tcx(&self) -> &'t ty::ctxt<'tcx> {
317         self.typer.tcx
318     }
319
320     fn delegate_consume(&mut self,
321                         consume_id: ast::NodeId,
322                         consume_span: Span,
323                         cmt: mc::cmt<'tcx>) {
324         debug!("delegate_consume(consume_id={}, cmt={:?})",
325                consume_id, cmt);
326
327         let mode = copy_or_move(self.typer, &cmt, DirectRefMove);
328         self.delegate.consume(consume_id, consume_span, cmt, mode);
329     }
330
331     fn consume_exprs(&mut self, exprs: &Vec<P<hir::Expr>>) {
332         for expr in exprs {
333             self.consume_expr(&**expr);
334         }
335     }
336
337     pub fn consume_expr(&mut self, expr: &hir::Expr) {
338         debug!("consume_expr(expr={:?})", expr);
339
340         let cmt = return_if_err!(self.mc.cat_expr(expr));
341         self.delegate_consume(expr.id, expr.span, cmt);
342         self.walk_expr(expr);
343     }
344
345     fn mutate_expr(&mut self,
346                    assignment_expr: &hir::Expr,
347                    expr: &hir::Expr,
348                    mode: MutateMode) {
349         let cmt = return_if_err!(self.mc.cat_expr(expr));
350         self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
351         self.walk_expr(expr);
352     }
353
354     fn borrow_expr(&mut self,
355                    expr: &hir::Expr,
356                    r: ty::Region,
357                    bk: ty::BorrowKind,
358                    cause: LoanCause) {
359         debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
360                expr, r, bk);
361
362         let cmt = return_if_err!(self.mc.cat_expr(expr));
363         self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
364
365         // Note: Unlike consume, we can ignore ExprParen. cat_expr
366         // already skips over them, and walk_expr will visit any
367         // nested subexpressions.
368         self.walk_expr(expr)
369     }
370
371     fn select_from_expr(&mut self, expr: &hir::Expr) {
372         self.walk_expr(expr)
373     }
374
375     pub fn walk_expr(&mut self, expr: &hir::Expr) {
376         debug!("walk_expr(expr={:?})", expr);
377
378         self.walk_adjustment(expr);
379
380         match expr.node {
381             hir::ExprParen(ref subexpr) => {
382                 self.walk_expr(&**subexpr)
383             }
384
385             hir::ExprPath(..) => { }
386
387             hir::ExprUnary(hir::UnDeref, ref base) => {      // *base
388                 if !self.walk_overloaded_operator(expr, &**base, Vec::new(), PassArgs::ByRef) {
389                     self.select_from_expr(&**base);
390                 }
391             }
392
393             hir::ExprField(ref base, _) => {         // base.f
394                 self.select_from_expr(&**base);
395             }
396
397             hir::ExprTupField(ref base, _) => {         // base.<n>
398                 self.select_from_expr(&**base);
399             }
400
401             hir::ExprIndex(ref lhs, ref rhs) => {       // lhs[rhs]
402                 if !self.walk_overloaded_operator(expr,
403                                                   &**lhs,
404                                                   vec![&**rhs],
405                                                   PassArgs::ByValue) {
406                     self.select_from_expr(&**lhs);
407                     self.consume_expr(&**rhs);
408                 }
409             }
410
411             hir::ExprRange(ref start, ref end) => {
412                 start.as_ref().map(|e| self.consume_expr(&**e));
413                 end.as_ref().map(|e| self.consume_expr(&**e));
414             }
415
416             hir::ExprCall(ref callee, ref args) => {    // callee(args)
417                 self.walk_callee(expr, &**callee);
418                 self.consume_exprs(args);
419             }
420
421             hir::ExprMethodCall(_, _, ref args) => { // callee.m(args)
422                 self.consume_exprs(args);
423             }
424
425             hir::ExprStruct(_, ref fields, ref opt_with) => {
426                 self.walk_struct_expr(expr, fields, opt_with);
427             }
428
429             hir::ExprTup(ref exprs) => {
430                 self.consume_exprs(exprs);
431             }
432
433             hir::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
434                 self.consume_expr(&**cond_expr);
435                 self.walk_block(&**then_blk);
436                 if let Some(ref else_expr) = *opt_else_expr {
437                     self.consume_expr(&**else_expr);
438                 }
439             }
440
441             hir::ExprMatch(ref discr, ref arms, _) => {
442                 let discr_cmt = return_if_err!(self.mc.cat_expr(&**discr));
443                 self.borrow_expr(&**discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);
444
445                 // treatment of the discriminant is handled while walking the arms.
446                 for arm in arms {
447                     let mode = self.arm_move_mode(discr_cmt.clone(), arm);
448                     let mode = mode.match_mode();
449                     self.walk_arm(discr_cmt.clone(), arm, mode);
450                 }
451             }
452
453             hir::ExprVec(ref exprs) => {
454                 self.consume_exprs(exprs);
455             }
456
457             hir::ExprAddrOf(m, ref base) => {   // &base
458                 // make sure that the thing we are pointing at stays valid
459                 // for the lifetime `r` of the resulting ptr:
460                 let expr_ty = return_if_err!(self.typer.node_ty(expr.id));
461                 if let ty::TyRef(&r, _) = expr_ty.sty {
462                     let bk = ty::BorrowKind::from_mutbl(m);
463                     self.borrow_expr(&**base, r, bk, AddrOf);
464                 }
465             }
466
467             hir::ExprInlineAsm(ref ia) => {
468                 for &(_, ref input) in &ia.inputs {
469                     self.consume_expr(&**input);
470                 }
471
472                 for &(_, ref output, is_rw) in &ia.outputs {
473                     self.mutate_expr(expr, &**output,
474                                            if is_rw { WriteAndRead } else { JustWrite });
475                 }
476             }
477
478             hir::ExprBreak(..) |
479             hir::ExprAgain(..) |
480             hir::ExprLit(..) => {}
481
482             hir::ExprLoop(ref blk, _) => {
483                 self.walk_block(&**blk);
484             }
485
486             hir::ExprWhile(ref cond_expr, ref blk, _) => {
487                 self.consume_expr(&**cond_expr);
488                 self.walk_block(&**blk);
489             }
490
491             hir::ExprUnary(op, ref lhs) => {
492                 let pass_args = if ::rustc_front::util::is_by_value_unop(op) {
493                     PassArgs::ByValue
494                 } else {
495                     PassArgs::ByRef
496                 };
497
498                 if !self.walk_overloaded_operator(expr, &**lhs, Vec::new(), pass_args) {
499                     self.consume_expr(&**lhs);
500                 }
501             }
502
503             hir::ExprBinary(op, ref lhs, ref rhs) => {
504                 let pass_args = if ::rustc_front::util::is_by_value_binop(op.node) {
505                     PassArgs::ByValue
506                 } else {
507                     PassArgs::ByRef
508                 };
509
510                 if !self.walk_overloaded_operator(expr, &**lhs, vec![&**rhs], pass_args) {
511                     self.consume_expr(&**lhs);
512                     self.consume_expr(&**rhs);
513                 }
514             }
515
516             hir::ExprBlock(ref blk) => {
517                 self.walk_block(&**blk);
518             }
519
520             hir::ExprRet(ref opt_expr) => {
521                 if let Some(ref expr) = *opt_expr {
522                     self.consume_expr(&**expr);
523                 }
524             }
525
526             hir::ExprAssign(ref lhs, ref rhs) => {
527                 self.mutate_expr(expr, &**lhs, JustWrite);
528                 self.consume_expr(&**rhs);
529             }
530
531             hir::ExprCast(ref base, _) => {
532                 self.consume_expr(&**base);
533             }
534
535             hir::ExprAssignOp(_, ref lhs, ref rhs) => {
536                 // This will have to change if/when we support
537                 // overloaded operators for `+=` and so forth.
538                 self.mutate_expr(expr, &**lhs, WriteAndRead);
539                 self.consume_expr(&**rhs);
540             }
541
542             hir::ExprRepeat(ref base, ref count) => {
543                 self.consume_expr(&**base);
544                 self.consume_expr(&**count);
545             }
546
547             hir::ExprClosure(..) => {
548                 self.walk_captures(expr)
549             }
550
551             hir::ExprBox(ref place, ref base) => {
552                 match *place {
553                     Some(ref place) => self.consume_expr(&**place),
554                     None => {}
555                 }
556                 self.consume_expr(&**base);
557                 if place.is_some() {
558                     self.tcx().sess.span_bug(
559                         expr.span,
560                         "box with explicit place remains after expansion");
561                 }
562             }
563         }
564     }
565
566     fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
567         let callee_ty = return_if_err!(self.typer.expr_ty_adjusted(callee));
568         debug!("walk_callee: callee={:?} callee_ty={:?}",
569                callee, callee_ty);
570         let call_scope = self.tcx().region_maps.node_extent(call.id);
571         match callee_ty.sty {
572             ty::TyBareFn(..) => {
573                 self.consume_expr(callee);
574             }
575             ty::TyError => { }
576             _ => {
577                 let overloaded_call_type =
578                     match self.typer.node_method_id(ty::MethodCall::expr(call.id)) {
579                         Some(method_id) => {
580                             OverloadedCallType::from_method_id(self.tcx(), method_id)
581                         }
582                         None => {
583                             self.tcx().sess.span_bug(
584                                 callee.span,
585                                 &format!("unexpected callee type {}", callee_ty))
586                         }
587                     };
588                 match overloaded_call_type {
589                     FnMutOverloadedCall => {
590                         self.borrow_expr(callee,
591                                          ty::ReScope(call_scope),
592                                          ty::MutBorrow,
593                                          ClosureInvocation);
594                     }
595                     FnOverloadedCall => {
596                         self.borrow_expr(callee,
597                                          ty::ReScope(call_scope),
598                                          ty::ImmBorrow,
599                                          ClosureInvocation);
600                     }
601                     FnOnceOverloadedCall => self.consume_expr(callee),
602                 }
603             }
604         }
605     }
606
607     fn walk_stmt(&mut self, stmt: &hir::Stmt) {
608         match stmt.node {
609             hir::StmtDecl(ref decl, _) => {
610                 match decl.node {
611                     hir::DeclLocal(ref local) => {
612                         self.walk_local(&**local);
613                     }
614
615                     hir::DeclItem(_) => {
616                         // we don't visit nested items in this visitor,
617                         // only the fn body we were given.
618                     }
619                 }
620             }
621
622             hir::StmtExpr(ref expr, _) |
623             hir::StmtSemi(ref expr, _) => {
624                 self.consume_expr(&**expr);
625             }
626         }
627     }
628
629     fn walk_local(&mut self, local: &hir::Local) {
630         match local.init {
631             None => {
632                 let delegate = &mut self.delegate;
633                 pat_util::pat_bindings(&self.typer.tcx.def_map, &*local.pat,
634                                        |_, id, span, _| {
635                     delegate.decl_without_init(id, span);
636                 })
637             }
638
639             Some(ref expr) => {
640                 // Variable declarations with
641                 // initializers are considered
642                 // "assigns", which is handled by
643                 // `walk_pat`:
644                 self.walk_expr(&**expr);
645                 let init_cmt = return_if_err!(self.mc.cat_expr(&**expr));
646                 self.walk_irrefutable_pat(init_cmt, &*local.pat);
647             }
648         }
649     }
650
651     /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
652     /// depending on its type.
653     fn walk_block(&mut self, blk: &hir::Block) {
654         debug!("walk_block(blk.id={})", blk.id);
655
656         for stmt in &blk.stmts {
657             self.walk_stmt(&**stmt);
658         }
659
660         if let Some(ref tail_expr) = blk.expr {
661             self.consume_expr(&**tail_expr);
662         }
663     }
664
665     fn walk_struct_expr(&mut self,
666                         _expr: &hir::Expr,
667                         fields: &Vec<hir::Field>,
668                         opt_with: &Option<P<hir::Expr>>) {
669         // Consume the expressions supplying values for each field.
670         for field in fields {
671             self.consume_expr(&*field.expr);
672         }
673
674         let with_expr = match *opt_with {
675             Some(ref w) => &**w,
676             None => { return; }
677         };
678
679         let with_cmt = return_if_err!(self.mc.cat_expr(&*with_expr));
680
681         // Select just those fields of the `with`
682         // expression that will actually be used
683         if let ty::TyStruct(def, substs) = with_cmt.ty.sty {
684             // Consume those fields of the with expression that are needed.
685             for with_field in &def.struct_variant().fields {
686                 if !contains_field_named(with_field, fields) {
687                     let cmt_field = self.mc.cat_field(
688                         &*with_expr,
689                         with_cmt.clone(),
690                         with_field.name,
691                         with_field.ty(self.tcx(), substs)
692                     );
693                     self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
694                 }
695             }
696         } else {
697             // the base expression should always evaluate to a
698             // struct; however, when EUV is run during typeck, it
699             // may not. This will generate an error earlier in typeck,
700             // so we can just ignore it.
701             if !self.tcx().sess.has_errors() {
702                 self.tcx().sess.span_bug(
703                     with_expr.span,
704                     "with expression doesn't evaluate to a struct");
705             }
706         };
707
708         // walk the with expression so that complex expressions
709         // are properly handled.
710         self.walk_expr(with_expr);
711
712         fn contains_field_named(field: ty::FieldDef,
713                                 fields: &Vec<hir::Field>)
714                                 -> bool
715         {
716             fields.iter().any(
717                 |f| f.ident.node.name == field.name)
718         }
719     }
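
    // For example (a sketch): in `Foo { x: new_x, ..base }`, the expression
    // `new_x` is consumed as usual, while from `base` only the fields other
    // than `x` are consumed (via `delegate_consume` above); `base.x` itself
    // is never reported, because the struct literal does not use it.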
720
721     // Invoke the appropriate delegate calls for anything that gets
722     // consumed or borrowed as part of the automatic adjustment
723     // process.
724     fn walk_adjustment(&mut self, expr: &hir::Expr) {
725         let typer = self.typer;
726         // NOTE(@jroesch): a mixed RefCell borrow causes a crash
727         let adj = typer.adjustments().get(&expr.id).map(|x| x.clone());
728         if let Some(adjustment) = adj {
729             match adjustment {
730                 adjustment::AdjustReifyFnPointer |
731                 adjustment::AdjustUnsafeFnPointer => {
732                     // Creating a closure/fn-pointer or unsizing consumes
733                     // the input and stores it into the resulting rvalue.
734                     debug!("walk_adjustment(AdjustReifyFnPointer|AdjustUnsafeFnPointer)");
735                     let cmt_unadjusted =
736                         return_if_err!(self.mc.cat_expr_unadjusted(expr));
737                     self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
738                 }
739                 adjustment::AdjustDerefRef(ref adj) => {
740                     self.walk_autoderefref(expr, adj);
741                 }
742             }
743         }
744     }
745
746     /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
747     /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
748     /// `deref()` is declared with `&self`, this is an autoref of `x`.
749     fn walk_autoderefs(&mut self,
750                        expr: &hir::Expr,
751                        autoderefs: usize) {
752         debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);
753
754         for i in 0..autoderefs {
755             let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
756             match self.typer.node_method_ty(deref_id) {
757                 None => {}
758                 Some(method_ty) => {
759                     let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
760
761                     // the method call infrastructure should have
762                     // replaced all late-bound regions with variables:
763                     let self_ty = method_ty.fn_sig().input(0);
764                     let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
765
766                     let (m, r) = match self_ty.sty {
767                         ty::TyRef(r, ref m) => (m.mutbl, r),
768                         _ => self.tcx().sess.span_bug(expr.span,
769                                 &format!("bad overloaded deref type {:?}",
770                                     method_ty))
771                     };
772                     let bk = ty::BorrowKind::from_mutbl(m);
773                     self.delegate.borrow(expr.id, expr.span, cmt,
774                                          *r, bk, AutoRef);
775                 }
776             }
777         }
778     }
779
780     fn walk_autoderefref(&mut self,
781                          expr: &hir::Expr,
782                          adj: &adjustment::AutoDerefRef<'tcx>) {
783         debug!("walk_autoderefref expr={:?} adj={:?}",
784                expr,
785                adj);
786
787         self.walk_autoderefs(expr, adj.autoderefs);
788
789         let cmt_derefd =
790             return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));
791
792         let cmt_refd =
793             self.walk_autoref(expr, cmt_derefd, adj.autoref);
794
795         if adj.unsize.is_some() {
796             // Unsizing consumes the thin pointer and produces a fat one.
797             self.delegate_consume(expr.id, expr.span, cmt_refd);
798         }
799     }
800
801
802     /// Walks the autoref `opt_autoref` applied to the autoderef'd
803     /// `expr`. `cmt_base` is the mem-categorized form of `expr`
804     /// after all relevant autoderefs have occurred. Because AutoRefs
805     /// can be recursive, this function is recursive: it first walks
806     /// deeply all the way down the autoref chain, and then processes
807     /// the autorefs on the way out. At each point, it returns the
808     /// `cmt` for the rvalue that will be produced by introducing the
809     /// autoref.
810     fn walk_autoref(&mut self,
811                     expr: &hir::Expr,
812                     cmt_base: mc::cmt<'tcx>,
813                     opt_autoref: Option<adjustment::AutoRef<'tcx>>)
814                     -> mc::cmt<'tcx>
815     {
816         debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
817                expr.id,
818                cmt_base,
819                opt_autoref);
820
821         let cmt_base_ty = cmt_base.ty;
822
823         let autoref = match opt_autoref {
824             Some(ref autoref) => autoref,
825             None => {
826                 // No AutoRef.
827                 return cmt_base;
828             }
829         };
830
831         match *autoref {
832             adjustment::AutoPtr(r, m) => {
833                 self.delegate.borrow(expr.id,
834                                      expr.span,
835                                      cmt_base,
836                                      *r,
837                                      ty::BorrowKind::from_mutbl(m),
838                                      AutoRef);
839             }
840
841             adjustment::AutoUnsafe(m) => {
842                 debug!("walk_autoref: expr.id={} cmt_base={:?}",
843                        expr.id,
844                        cmt_base);
845
846                 // Converting from a &T to *T (or &mut T to *mut T) is
847                 // treated as borrowing it for the enclosing temporary
848                 // scope.
849                 let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
850
851                 self.delegate.borrow(expr.id,
852                                      expr.span,
853                                      cmt_base,
854                                      r,
855                                      ty::BorrowKind::from_mutbl(m),
856                                      AutoUnsafe);
857             }
858         }
859
860         // Construct the categorization for the result of the autoref.
861         // This is always an rvalue, since we are producing a new
862         // (temporary) indirection.
863
864         let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);
865
866         self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
867     }
868
869
870     // When this returns true, it means that the expression *is* a
871     // method call (i.e., one dispatched via operator overloading).  This true result
872     // also implies that walk_overloaded_operator already took care of
873     // recursively processing the input arguments, and thus the caller
874     // should not do so.
875     fn walk_overloaded_operator(&mut self,
876                                 expr: &hir::Expr,
877                                 receiver: &hir::Expr,
878                                 rhs: Vec<&hir::Expr>,
879                                 pass_args: PassArgs)
880                                 -> bool
881     {
882         if !self.typer.is_method_call(expr.id) {
883             return false;
884         }
885
886         match pass_args {
887             PassArgs::ByValue => {
888                 self.consume_expr(receiver);
889                 for &arg in &rhs {
890                     self.consume_expr(arg);
891                 }
892
893                 return true;
894             },
895             PassArgs::ByRef => {},
896         }
897
898         self.walk_expr(receiver);
899
900         // Arguments (but not receivers) to overloaded operator
901         // methods are implicitly autoref'd, which sadly does not use
902         // adjustments, so we must hardcode the borrow here.
903
904         let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
905         let bk = ty::ImmBorrow;
906
907         for &arg in &rhs {
908             self.borrow_expr(arg, r, bk, OverloadedOperator);
909         }
910         return true;
911     }
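
    // For example (a sketch): given `a == b` where `==` resolves to an
    // overloaded `PartialEq` impl, the `ExprBinary` arm above passes
    // `PassArgs::ByRef`, so the receiver `a` is walked normally while `b`
    // is reported to the delegate as an `ImmBorrow` for the scope of the
    // whole expression, with `OverloadedOperator` as the loan cause.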
912
913     fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
914         let mut mode = Unknown;
915         for pat in &arm.pats {
916             self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode);
917         }
918         mode
919     }
920
921     fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
922         for pat in &arm.pats {
923             self.walk_pat(discr_cmt.clone(), &**pat, mode);
924         }
925
926         if let Some(ref guard) = arm.guard {
927             self.consume_expr(&**guard);
928         }
929
930         self.consume_expr(&*arm.body);
931     }
932
933     /// Walks a pat that occurs in isolation (i.e. the top level of a fn
934     /// arg or a let binding; *not* a match arm or nested pat).
935     fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
936         let mut mode = Unknown;
937         self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
938         let mode = mode.match_mode();
939         self.walk_pat(cmt_discr, pat, mode);
940     }
941
942     /// Identifies any bindings within `pat` and accumulates within
943     /// `mode` whether the overall pattern/match structure is a move,
944     /// copy, or borrow.
945     fn determine_pat_move_mode(&mut self,
946                                cmt_discr: mc::cmt<'tcx>,
947                                pat: &hir::Pat,
948                                mode: &mut TrackMatchMode) {
949         debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
950                pat);
951         return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
952             let tcx = self.tcx();
953             let def_map = &self.tcx().def_map;
954             if pat_util::pat_is_binding(def_map, pat) {
955                 match pat.node {
956                     hir::PatIdent(hir::BindByRef(_), _, _) =>
957                         mode.lub(BorrowingMatch),
958                     hir::PatIdent(hir::BindByValue(_), _, _) => {
959                         match copy_or_move(self.typer, &cmt_pat, PatBindingMove) {
960                             Copy => mode.lub(CopyingMatch),
961                             Move(_) => mode.lub(MovingMatch),
962                         }
963                     }
964                     _ => {
965                         tcx.sess.span_bug(
966                             pat.span,
967                             "binding pattern not an identifier");
968                     }
969                 }
970             }
971         }));
972     }
973
974     /// The core driver for walking a pattern; `match_mode` must be
975     /// established up front, e.g. via `determine_pat_move_mode` (see
976     /// also `walk_irrefutable_pat` for patterns that stand alone).
977     fn walk_pat(&mut self,
978                 cmt_discr: mc::cmt<'tcx>,
979                 pat: &hir::Pat,
980                 match_mode: MatchMode) {
981         debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr,
982                pat);
983
984         let mc = &self.mc;
985         let typer = self.typer;
986         let def_map = &self.tcx().def_map;
987         let delegate = &mut self.delegate;
988         return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
989             if pat_util::pat_is_binding(def_map, pat) {
990                 let tcx = typer.tcx;
991
992                 debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}",
993                        cmt_pat,
994                        pat,
995                        match_mode);
996
997                 // pat_ty: the type of the binding being produced.
998                 let pat_ty = return_if_err!(typer.node_ty(pat.id));
999
1000                 // Each match binding is effectively an assignment to the
1001                 // binding being produced.
1002                 let def = def_map.borrow().get(&pat.id).unwrap().full_def();
1003                 match mc.cat_def(pat.id, pat.span, pat_ty, def) {
1004                     Ok(binding_cmt) => {
1005                         delegate.mutate(pat.id, pat.span, binding_cmt, Init);
1006                     }
1007                     Err(_) => { }
1008                 }
1009
1010                 // It is also a borrow or copy/move of the value being matched.
1011                 match pat.node {
1012                     hir::PatIdent(hir::BindByRef(m), _, _) => {
1013                         if let ty::TyRef(&r, _) = pat_ty.sty {
1014                             let bk = ty::BorrowKind::from_mutbl(m);
1015                             delegate.borrow(pat.id, pat.span, cmt_pat,
1016                                             r, bk, RefBinding);
1017                         }
1018                     }
1019                     hir::PatIdent(hir::BindByValue(_), _, _) => {
1020                         let mode = copy_or_move(typer, &cmt_pat, PatBindingMove);
1021                         debug!("walk_pat binding consuming pat");
1022                         delegate.consume_pat(pat, cmt_pat, mode);
1023                     }
1024                     _ => {
1025                         tcx.sess.span_bug(
1026                             pat.span,
1027                             "binding pattern not an identifier");
1028                     }
1029                 }
1030             } else {
1031                 match pat.node {
1032                     hir::PatVec(_, Some(ref slice_pat), _) => {
1033                         // The `slice_pat` here creates a slice into
1034                         // the original vector.  This is effectively a
1035                         // borrow of the elements of the vector being
1036                         // matched.
1037
1038                         let (slice_cmt, slice_mutbl, slice_r) =
1039                             return_if_err!(mc.cat_slice_pattern(cmt_pat, &**slice_pat));
1040
1041                         // Note: We declare here that the borrow
1042                         // occurs upon entering the `[...]`
1043                         // pattern. This implies that something like
1044                         // `[a; b]` where `a` is a move is illegal,
1045                         // because the borrow is already in effect.
1046                         // In fact such a move would be safe-ish, but
1047                         // it effectively *requires* that we use the
1048                         // nulling out semantics to indicate when a
1049                         // value has been moved, which we are trying
1050                         // to move away from.  Otherwise, how can we
1051                         // indicate that the first element in the
1052                         // vector has been moved?  Eventually, we
1053                         // could perhaps modify this rule to permit
1054                         // `[..a, b]` where `b` is a move, because in
1055                         // that case we can adjust the length of the
1056                         // original vec accordingly, but we'd have to
1057                         // make trans do the right thing, and it would
1058                         // only work for `Box<[T]>`s. It seems simpler
1059                         // to just require that people call
1060                         // `vec.pop()` or `vec.unshift()`.
1061                         let slice_bk = ty::BorrowKind::from_mutbl(slice_mutbl);
1062                         delegate.borrow(pat.id, pat.span,
1063                                         slice_cmt, slice_r,
1064                                         slice_bk, RefBinding);
1065                     }
1066                     _ => { }
1067                 }
1068             }
1069         }));
1070
1071         // Do a second pass over the pattern, calling `matched_pat` on
1072         // the interior nodes (enum variants and structs), as opposed
1073         // to the bindings visited by the pass above, which form
1074         // the leaves of the pattern's tree structure.
1075         return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
1076             let def_map = def_map.borrow();
1077             let tcx = typer.tcx;
1078
1079             match pat.node {
1080                 hir::PatEnum(_, _) | hir::PatQPath(..) |
1081                 hir::PatIdent(_, _, None) | hir::PatStruct(..) => {
1082                     match def_map.get(&pat.id).map(|d| d.full_def()) {
1083                         None => {
1084                             // no definition found: pat is not a
1085                             // struct or enum pattern.
1086                         }
1087
1088                         Some(def::DefVariant(enum_did, variant_did, _is_struct)) => {
1089                             let downcast_cmt =
1090                                 if tcx.lookup_adt_def(enum_did).is_univariant() {
1091                                     cmt_pat
1092                                 } else {
1093                                     let cmt_pat_ty = cmt_pat.ty;
1094                                     mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
1095                                 };
1096
1097                             debug!("variant downcast_cmt={:?} pat={:?}",
1098                                    downcast_cmt,
1099                                    pat);
1100
1101                             delegate.matched_pat(pat, downcast_cmt, match_mode);
1102                         }
1103
1104                         Some(def::DefStruct(..)) | Some(def::DefTy(_, false)) => {
1105                             // A struct (in either the value or type
1106                             // namespace; we encounter the former on
1107                             // e.g. patterns for unit structs).
1108
1109                             debug!("struct cmt_pat={:?} pat={:?}",
1110                                    cmt_pat,
1111                                    pat);
1112
1113                             delegate.matched_pat(pat, cmt_pat, match_mode);
1114                         }
1115
1116                         Some(def::DefConst(..)) |
1117                         Some(def::DefAssociatedConst(..)) |
1118                         Some(def::DefLocal(..)) => {
1119                             // This is a leaf (i.e. identifier binding
1120                             // or constant value to match); thus no
1121                             // `matched_pat` call.
1122                         }
1123
1124                         Some(def @ def::DefTy(_, true)) => {
1125                             // An enum's type -- should never be in a
1126                             // pattern.
1127
1128                             if !tcx.sess.has_errors() {
1129                                 let msg = format!("Pattern has unexpected type: {:?} and type {:?}",
1130                                                   def,
1131                                                   cmt_pat.ty);
1132                                 tcx.sess.span_bug(pat.span, &msg)
1133                             }
1134                         }
1135
1136                         Some(def) => {
1137                             // Remaining cases are e.g. DefFn, to
1138                             // which identifiers within patterns
1139                             // should not resolve. However, we do
1140                             // encounter this when using the
1141                             // expr-use-visitor during typeck. So just
1142                             // ignore it; an error should have been
1143                             // reported.
1144
1145                             if !tcx.sess.has_errors() {
1146                                 let msg = format!("Pattern has unexpected def: {:?} and type {:?}",
1147                                                   def,
1148                                                   cmt_pat.ty);
1149                                 tcx.sess.span_bug(pat.span, &msg[..])
1150                             }
1151                         }
1152                     }
1153                 }
1154
1155                 hir::PatIdent(_, _, Some(_)) => {
1156                     // Do nothing; this is a binding (not an enum
1157                     // variant or struct), and the cat_pattern call
1158                     // will visit the substructure recursively.
1159                 }
1160
1161                 hir::PatWild(_) | hir::PatTup(..) | hir::PatBox(..) |
1162                 hir::PatRegion(..) | hir::PatLit(..) | hir::PatRange(..) |
1163                 hir::PatVec(..) => {
1164                     // Similarly, each of these cases does not
1165                     // correspond to an enum variant or struct, so we
1166                     // do not do any `matched_pat` calls for these
1167                     // cases either.
1168                 }
1169             }
1170         }));
1171     }
1172
1173     fn walk_captures(&mut self, closure_expr: &hir::Expr) {
1174         debug!("walk_captures({:?})", closure_expr);
1175
1176         self.tcx().with_freevars(closure_expr.id, |freevars| {
1177             for freevar in freevars {
1178                 let id_var = freevar.def.def_id().node;
1179                 let upvar_id = ty::UpvarId { var_id: id_var,
1180                                              closure_expr_id: closure_expr.id };
1181                 let upvar_capture = self.typer.upvar_capture(upvar_id).unwrap();
1182                 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1183                                                                    closure_expr.span,
1184                                                                    freevar.def));
1185                 match upvar_capture {
1186                     ty::UpvarCapture::ByValue => {
1187                         let mode = copy_or_move(self.typer, &cmt_var, CaptureMove);
1188                         self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
1189                     }
1190                     ty::UpvarCapture::ByRef(upvar_borrow) => {
1191                         self.delegate.borrow(closure_expr.id,
1192                                              closure_expr.span,
1193                                              cmt_var,
1194                                              upvar_borrow.region,
1195                                              upvar_borrow.kind,
1196                                              ClosureCapture(freevar.span));
1197                     }
1198                 }
1199             }
1200         });
1201     }
1202
1203     fn cat_captured_var(&mut self,
1204                         closure_id: ast::NodeId,
1205                         closure_span: Span,
1206                         upvar_def: def::Def)
1207                         -> mc::McResult<mc::cmt<'tcx>> {
1208         // Create the cmt for the variable being borrowed, from the
1209         // caller's perspective
1210         let var_id = upvar_def.def_id().node;
1211         let var_ty = try!(self.typer.node_ty(var_id));
1212         self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
1213     }
1214 }
1215
1216 fn copy_or_move<'a, 'tcx>(typer: &infer::InferCtxt<'a, 'tcx>,
1217                       cmt: &mc::cmt<'tcx>,
1218                       move_reason: MoveReason)
1219                       -> ConsumeMode
1220 {
1221     if typer.type_moves_by_default(cmt.ty, cmt.span) {
1222         Move(move_reason)
1223     } else {
1224         Copy
1225     }
1226 }
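
// For example (a sketch): a binding whose type is `i32` does not move by
// default, so this returns `Copy`; a binding whose type is `String` or
// `Box<i32>` moves by default, so this returns `Move(move_reason)` with the
// reason supplied by the caller (`DirectRefMove`, `PatBindingMove`, or
// `CaptureMove`).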