1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! A different sort of visitor for walking fn bodies.  Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
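//!
//! A minimal sketch of how a client drives it (hypothetical `MyDelegate`
//! type; `infcx`, `decl` and `body` are assumed to already be in scope):
//!
//! ```ignore
//! // Implement `Delegate` to receive callbacks, then walk a fn body.
//! let mut delegate = MyDelegate::default();
//! ExprUseVisitor::new(&mut delegate, infcx).walk_fn(decl, body);
//! ```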
14
15 pub use self::LoanCause::*;
16 pub use self::ConsumeMode::*;
17 pub use self::MoveReason::*;
18 pub use self::MatchMode::*;
19 use self::TrackMatchMode::*;
20 use self::OverloadedCallType::*;
21
22 use hir::pat_util;
23 use hir::def::Def;
24 use hir::def_id::{DefId};
25 use infer::InferCtxt;
26 use middle::mem_categorization as mc;
27 use ty::{self, TyCtxt, adjustment};
28
29 use hir::{self, PatKind};
30
31 use syntax::ast;
32 use syntax::ptr::P;
33 use syntax::codemap::Span;
34
35 ///////////////////////////////////////////////////////////////////////////
36 // The Delegate trait
37
38 /// This trait defines the callbacks you can expect to receive when
39 /// employing the ExprUseVisitor.
40 pub trait Delegate<'tcx> {
41     // The value found at `cmt` is either copied or moved, depending
42     // on mode.
43     fn consume(&mut self,
44                consume_id: ast::NodeId,
45                consume_span: Span,
46                cmt: mc::cmt<'tcx>,
47                mode: ConsumeMode);
48
49     // The value found at `cmt` has been determined to match the
50     // pattern binding `matched_pat`, and its subparts are being
51     // copied or moved depending on `mode`.  Note that `matched_pat`
52     // is called on all variants/structs in the pattern (i.e., the
53     // interior nodes of the pattern's tree structure) while
54     // consume_pat is called on the binding identifiers in the pattern
55     // (which are leaves of the pattern's tree structure).
56     //
57     // Note that variants/structs and identifiers are disjoint; thus
58     // `matched_pat` and `consume_pat` are never both called on the
59     // same input pattern structure (though `consume_pat` can be
60     // called on a subpart of an input passed to `matched_pat`).
61     fn matched_pat(&mut self,
62                    matched_pat: &hir::Pat,
63                    cmt: mc::cmt<'tcx>,
64                    mode: MatchMode);
65
66     // The value found at `cmt` is either copied or moved via the
67     // pattern binding `consume_pat`, depending on mode.
68     fn consume_pat(&mut self,
69                    consume_pat: &hir::Pat,
70                    cmt: mc::cmt<'tcx>,
71                    mode: ConsumeMode);
72
73     // The value found at `borrow` is being borrowed at the point
74     // `borrow_id` for the region `loan_region` with kind `bk`.
75     fn borrow(&mut self,
76               borrow_id: ast::NodeId,
77               borrow_span: Span,
78               cmt: mc::cmt<'tcx>,
79               loan_region: ty::Region,
80               bk: ty::BorrowKind,
81               loan_cause: LoanCause);
82
83     // The local variable `id` is declared but not initialized.
84     fn decl_without_init(&mut self,
85                          id: ast::NodeId,
86                          span: Span);
87
88     // The path at `cmt` is being assigned to.
89     fn mutate(&mut self,
90               assignment_id: ast::NodeId,
91               assignment_span: Span,
92               assignee_cmt: mc::cmt<'tcx>,
93               mode: MutateMode);
94 }
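// A minimal no-op `Delegate` sketch (hypothetical type, shown only to make the
// callback surface concrete; the real delegates live in borrowck and friends):
//
//     struct IgnoreEverything;
//
//     impl<'tcx> Delegate<'tcx> for IgnoreEverything {
//         fn consume(&mut self, _: ast::NodeId, _: Span,
//                    _: mc::cmt<'tcx>, _: ConsumeMode) {}
//         fn matched_pat(&mut self, _: &hir::Pat, _: mc::cmt<'tcx>, _: MatchMode) {}
//         fn consume_pat(&mut self, _: &hir::Pat, _: mc::cmt<'tcx>, _: ConsumeMode) {}
//         fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt<'tcx>,
//                   _: ty::Region, _: ty::BorrowKind, _: LoanCause) {}
//         fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
//         fn mutate(&mut self, _: ast::NodeId, _: Span,
//                   _: mc::cmt<'tcx>, _: MutateMode) {}
//     }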
95
96 #[derive(Copy, Clone, PartialEq, Debug)]
97 pub enum LoanCause {
98     ClosureCapture(Span),
99     AddrOf,
100     AutoRef,
101     AutoUnsafe,
102     RefBinding,
103     OverloadedOperator,
104     ClosureInvocation,
105     ForLoop,
106     MatchDiscriminant
107 }
108
109 #[derive(Copy, Clone, PartialEq, Debug)]
110 pub enum ConsumeMode {
111     Copy,                // reference to x where x has a type that copies
112     Move(MoveReason),    // reference to x where x has a type that moves
113 }
114
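/// Why a `Move` was reported (a sketch based on the call sites below): a plain
/// use of a non-`Copy` value is a `DirectRefMove`, a by-value pattern binding
/// is a `PatBindingMove`, and a by-value closure capture is a `CaptureMove`.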
115 #[derive(Copy, Clone, PartialEq, Debug)]
116 pub enum MoveReason {
117     DirectRefMove,
118     PatBindingMove,
119     CaptureMove,
120 }
121
122 #[derive(Copy, Clone, PartialEq, Debug)]
123 pub enum MatchMode {
124     NonBindingMatch,
125     BorrowingMatch,
126     CopyingMatch,
127     MovingMatch,
128 }
129
130 #[derive(Copy, Clone, PartialEq, Debug)]
131 enum TrackMatchMode {
132     Unknown,
133     Definite(MatchMode),
134     Conflicting,
135 }
136
137 impl TrackMatchMode {
138     // Builds up the whole match mode for a pattern from its constituent
139     // parts.  The lattice looks like this:
140     //
141     //          Conflicting
142     //            /     \
143     //           /       \
144     //      Borrowing   Moving
145     //           \       /
146     //            \     /
147     //            Copying
148     //               |
149     //          NonBinding
150     //               |
151     //            Unknown
152     //
153     // examples:
154     //
155     // * `(_, some_int)` pattern is Copying, since
156     //   NonBinding + Copying => Copying
157     //
158     // * `(some_int, some_box)` pattern is Moving, since
159     //   Copying + Moving => Moving
160     //
161     // * `(ref x, some_box)` pattern is Conflicting, since
162     //   Borrowing + Moving => Conflicting
163     //
164     // Note that the `Unknown` and `Conflicting` states are
165     // represented separately from the other more interesting
166     // `Definite` states, which simplifies logic here somewhat.
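    //
    // A sketch of the accumulation for `(ref x, some_box)` (illustrative
    // only, not a real call site):
    //
    //     let mut mode = Unknown;
    //     mode.lub(BorrowingMatch);  // `ref x`    => Definite(BorrowingMatch)
    //     mode.lub(MovingMatch);     // `some_box` => Conflicting
    //     assert_eq!(mode.match_mode(), MovingMatch); // conservative fallback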
167     fn lub(&mut self, mode: MatchMode) {
168         *self = match (*self, mode) {
169             // Note that clause order below is very significant.
170             (Unknown, new) => Definite(new),
171             (Definite(old), new) if old == new => Definite(old),
172
173             (Definite(old), NonBindingMatch) => Definite(old),
174             (Definite(NonBindingMatch), new) => Definite(new),
175
176             (Definite(old), CopyingMatch) => Definite(old),
177             (Definite(CopyingMatch), new) => Definite(new),
178
179             (Definite(_), _) => Conflicting,
180             (Conflicting, _) => *self,
181         };
182     }
183
184     fn match_mode(&self) -> MatchMode {
185         match *self {
186             Unknown => NonBindingMatch,
187             Definite(mode) => mode,
188             Conflicting => {
189                 // Conservatively return MovingMatch to let the
190                 // compiler continue to make progress.
191                 MovingMatch
192             }
193         }
194     }
195 }
196
197 #[derive(Copy, Clone, PartialEq, Debug)]
198 pub enum MutateMode {
199     Init,
200     JustWrite,    // x = y
201     WriteAndRead, // x += y
202 }
203
204 #[derive(Copy, Clone)]
205 enum OverloadedCallType {
206     FnOverloadedCall,
207     FnMutOverloadedCall,
208     FnOnceOverloadedCall,
209 }
210
211 impl OverloadedCallType {
212     fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType {
213         for &(maybe_function_trait, overloaded_call_type) in &[
214             (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
215             (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
216             (tcx.lang_items.fn_trait(), FnOverloadedCall)
217         ] {
218             match maybe_function_trait {
219                 Some(function_trait) if function_trait == trait_id => {
220                     return overloaded_call_type
221                 }
222                 _ => continue,
223             }
224         }
225
226         bug!("overloaded call didn't map to known function trait")
227     }
228
229     fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType {
230         let method = tcx.impl_or_trait_item(method_id);
231         OverloadedCallType::from_trait_id(tcx, method.container().id())
232     }
233 }
234
235 ///////////////////////////////////////////////////////////////////////////
236 // The ExprUseVisitor type
237 //
238 // This is the code that actually walks the tree. Like
239 // mem_categorization, it requires a TYPER, which is a type that
240 // supplies types from the tree. After type checking is complete, you
241 // can just use the tcx as the typer.
242 pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
243     mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
244     delegate: &'a mut Delegate<'tcx>,
245 }
246
247 // If the TYPER results in an error, it's because the type check
248 // failed (or will fail, when the error is uncovered and reported
249 // during writeback). In this case, we just ignore this part of the
250 // code.
251 //
252 // Note that this macro appears similar to try!(), but, unlike try!(),
253 // it does not propagate the error.
254 macro_rules! return_if_err {
255     ($inp: expr) => (
256         match $inp {
257             Ok(v) => v,
258             Err(()) => {
259                 debug!("mc reported err");
260                 return
261             }
262         }
263     )
264 }
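// Typical use (a sketch; this exact pattern appears throughout the visitor):
//
//     let cmt = return_if_err!(self.mc.cat_expr(expr));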
265
266 /// Whether the elements of an overloaded operation are passed by value or by reference
267 enum PassArgs {
268     ByValue,
269     ByRef,
270 }
271
272 impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
273     pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
274                infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self
275     {
276         ExprUseVisitor {
277             mc: mc::MemCategorizationContext::new(infcx),
278             delegate: delegate
279         }
280     }
281
282     pub fn walk_fn(&mut self,
283                    decl: &hir::FnDecl,
284                    body: &hir::Block) {
285         self.walk_arg_patterns(decl, body);
286         self.walk_block(body);
287     }
288
289     fn walk_arg_patterns(&mut self,
290                          decl: &hir::FnDecl,
291                          body: &hir::Block) {
292         for arg in &decl.inputs {
293             let arg_ty = return_if_err!(self.mc.infcx.node_ty(arg.pat.id));
294
295             let fn_body_scope = self.tcx().region_maps.node_extent(body.id);
296             let arg_cmt = self.mc.cat_rvalue(
297                 arg.id,
298                 arg.pat.span,
299                 ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
300                 arg_ty);
301
302             self.walk_irrefutable_pat(arg_cmt, &arg.pat);
303         }
304     }
305
306     fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
307         self.mc.infcx.tcx
308     }
309
310     fn delegate_consume(&mut self,
311                         consume_id: ast::NodeId,
312                         consume_span: Span,
313                         cmt: mc::cmt<'tcx>) {
314         debug!("delegate_consume(consume_id={}, cmt={:?})",
315                consume_id, cmt);
316
317         let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove);
318         self.delegate.consume(consume_id, consume_span, cmt, mode);
319     }
320
321     fn consume_exprs(&mut self, exprs: &[P<hir::Expr>]) {
322         for expr in exprs {
323             self.consume_expr(&expr);
324         }
325     }
326
327     pub fn consume_expr(&mut self, expr: &hir::Expr) {
328         debug!("consume_expr(expr={:?})", expr);
329
330         let cmt = return_if_err!(self.mc.cat_expr(expr));
331         self.delegate_consume(expr.id, expr.span, cmt);
332         self.walk_expr(expr);
333     }
334
335     fn mutate_expr(&mut self,
336                    assignment_expr: &hir::Expr,
337                    expr: &hir::Expr,
338                    mode: MutateMode) {
339         let cmt = return_if_err!(self.mc.cat_expr(expr));
340         self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
341         self.walk_expr(expr);
342     }
343
344     fn borrow_expr(&mut self,
345                    expr: &hir::Expr,
346                    r: ty::Region,
347                    bk: ty::BorrowKind,
348                    cause: LoanCause) {
349         debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
350                expr, r, bk);
351
352         let cmt = return_if_err!(self.mc.cat_expr(expr));
353         self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
354
355         self.walk_expr(expr)
356     }
357
358     fn select_from_expr(&mut self, expr: &hir::Expr) {
359         self.walk_expr(expr)
360     }
361
362     pub fn walk_expr(&mut self, expr: &hir::Expr) {
363         debug!("walk_expr(expr={:?})", expr);
364
365         self.walk_adjustment(expr);
366
367         match expr.node {
368             hir::ExprPath(..) => { }
369
370             hir::ExprType(ref subexpr, _) => {
371                 self.walk_expr(&subexpr)
372             }
373
374             hir::ExprUnary(hir::UnDeref, ref base) => {      // *base
375                 if !self.walk_overloaded_operator(expr, &base, Vec::new(), PassArgs::ByRef) {
376                     self.select_from_expr(&base);
377                 }
378             }
379
380             hir::ExprField(ref base, _) => {         // base.f
381                 self.select_from_expr(&base);
382             }
383
384             hir::ExprTupField(ref base, _) => {         // base.<n>
385                 self.select_from_expr(&base);
386             }
387
388             hir::ExprIndex(ref lhs, ref rhs) => {       // lhs[rhs]
389                 if !self.walk_overloaded_operator(expr,
390                                                   &lhs,
391                                                   vec![&rhs],
392                                                   PassArgs::ByValue) {
393                     self.select_from_expr(&lhs);
394                     self.consume_expr(&rhs);
395                 }
396             }
397
398             hir::ExprCall(ref callee, ref args) => {    // callee(args)
399                 self.walk_callee(expr, &callee);
400                 self.consume_exprs(args);
401             }
402
403             hir::ExprMethodCall(_, _, ref args) => { // callee.m(args)
404                 self.consume_exprs(args);
405             }
406
407             hir::ExprStruct(_, ref fields, ref opt_with) => {
408                 self.walk_struct_expr(expr, fields, opt_with);
409             }
410
411             hir::ExprTup(ref exprs) => {
412                 self.consume_exprs(exprs);
413             }
414
415             hir::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
416                 self.consume_expr(&cond_expr);
417                 self.walk_block(&then_blk);
418                 if let Some(ref else_expr) = *opt_else_expr {
419                     self.consume_expr(&else_expr);
420                 }
421             }
422
423             hir::ExprMatch(ref discr, ref arms, _) => {
424                 let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
425                 self.borrow_expr(&discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);
426
427                 // treatment of the discriminant is handled while walking the arms.
428                 for arm in arms {
429                     let mode = self.arm_move_mode(discr_cmt.clone(), arm);
430                     let mode = mode.match_mode();
431                     self.walk_arm(discr_cmt.clone(), arm, mode);
432                 }
433             }
434
435             hir::ExprVec(ref exprs) => {
436                 self.consume_exprs(exprs);
437             }
438
439             hir::ExprAddrOf(m, ref base) => {   // &base
440         // make sure that the thing we are pointing at stays valid
441         // for the lifetime `r` of the resulting ptr:
442                 let expr_ty = return_if_err!(self.mc.infcx.node_ty(expr.id));
443                 if let ty::TyRef(&r, _) = expr_ty.sty {
444                     let bk = ty::BorrowKind::from_mutbl(m);
445                     self.borrow_expr(&base, r, bk, AddrOf);
446                 }
447             }
448
449             hir::ExprInlineAsm(ref ia, ref outputs, ref inputs) => {
450                 for (o, output) in ia.outputs.iter().zip(outputs) {
451                     if o.is_indirect {
452                         self.consume_expr(output);
453                     } else {
454                         self.mutate_expr(expr, output,
455                                          if o.is_rw {
456                                              MutateMode::WriteAndRead
457                                          } else {
458                                              MutateMode::JustWrite
459                                          });
460                     }
461                 }
462                 self.consume_exprs(inputs);
463             }
464
465             hir::ExprBreak(..) |
466             hir::ExprAgain(..) |
467             hir::ExprLit(..) => {}
468
469             hir::ExprLoop(ref blk, _) => {
470                 self.walk_block(&blk);
471             }
472
473             hir::ExprWhile(ref cond_expr, ref blk, _) => {
474                 self.consume_expr(&cond_expr);
475                 self.walk_block(&blk);
476             }
477
478             hir::ExprUnary(op, ref lhs) => {
479                 let pass_args = if op.is_by_value() {
480                     PassArgs::ByValue
481                 } else {
482                     PassArgs::ByRef
483                 };
484
485                 if !self.walk_overloaded_operator(expr, &lhs, Vec::new(), pass_args) {
486                     self.consume_expr(&lhs);
487                 }
488             }
489
490             hir::ExprBinary(op, ref lhs, ref rhs) => {
491                 let pass_args = if op.node.is_by_value() {
492                     PassArgs::ByValue
493                 } else {
494                     PassArgs::ByRef
495                 };
496
497                 if !self.walk_overloaded_operator(expr, &lhs, vec![&rhs], pass_args) {
498                     self.consume_expr(&lhs);
499                     self.consume_expr(&rhs);
500                 }
501             }
502
503             hir::ExprBlock(ref blk) => {
504                 self.walk_block(&blk);
505             }
506
507             hir::ExprRet(ref opt_expr) => {
508                 if let Some(ref expr) = *opt_expr {
509                     self.consume_expr(&expr);
510                 }
511             }
512
513             hir::ExprAssign(ref lhs, ref rhs) => {
514                 self.mutate_expr(expr, &lhs, MutateMode::JustWrite);
515                 self.consume_expr(&rhs);
516             }
517
518             hir::ExprCast(ref base, _) => {
519                 self.consume_expr(&base);
520             }
521
522             hir::ExprAssignOp(op, ref lhs, ref rhs) => {
523                 // NB All our assignment operations take the RHS by value
524                 assert!(op.node.is_by_value());
525
526                 if !self.walk_overloaded_operator(expr, lhs, vec![rhs], PassArgs::ByValue) {
527                     self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
528                     self.consume_expr(&rhs);
529                 }
530             }
531
532             hir::ExprRepeat(ref base, ref count) => {
533                 self.consume_expr(&base);
534                 self.consume_expr(&count);
535             }
536
537             hir::ExprClosure(_, _, _, fn_decl_span) => {
538                 self.walk_captures(expr, fn_decl_span)
539             }
540
541             hir::ExprBox(ref base) => {
542                 self.consume_expr(&base);
543             }
544         }
545     }
546
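    // An ordinary fn or fn-pointer callee is simply consumed. An overloaded
    // call (e.g. calling a closure) instead uses the callee according to the
    // `Fn*` trait involved: `Fn` => shared borrow, `FnMut` => mutable borrow,
    // `FnOnce` => consume.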
547     fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
548         let callee_ty = return_if_err!(self.mc.infcx.expr_ty_adjusted(callee));
549         debug!("walk_callee: callee={:?} callee_ty={:?}",
550                callee, callee_ty);
551         let call_scope = self.tcx().region_maps.node_extent(call.id);
552         match callee_ty.sty {
553             ty::TyFnDef(..) | ty::TyFnPtr(_) => {
554                 self.consume_expr(callee);
555             }
556             ty::TyError => { }
557             _ => {
558                 let overloaded_call_type =
559                     match self.mc.infcx.node_method_id(ty::MethodCall::expr(call.id)) {
560                         Some(method_id) => {
561                             OverloadedCallType::from_method_id(self.tcx(), method_id)
562                         }
563                         None => {
564                             span_bug!(
565                                 callee.span,
566                                 "unexpected callee type {}",
567                                 callee_ty)
568                         }
569                     };
570                 match overloaded_call_type {
571                     FnMutOverloadedCall => {
572                         self.borrow_expr(callee,
573                                          ty::ReScope(call_scope),
574                                          ty::MutBorrow,
575                                          ClosureInvocation);
576                     }
577                     FnOverloadedCall => {
578                         self.borrow_expr(callee,
579                                          ty::ReScope(call_scope),
580                                          ty::ImmBorrow,
581                                          ClosureInvocation);
582                     }
583                     FnOnceOverloadedCall => self.consume_expr(callee),
584                 }
585             }
586         }
587     }
588
589     fn walk_stmt(&mut self, stmt: &hir::Stmt) {
590         match stmt.node {
591             hir::StmtDecl(ref decl, _) => {
592                 match decl.node {
593                     hir::DeclLocal(ref local) => {
594                         self.walk_local(&local);
595                     }
596
597                     hir::DeclItem(_) => {
598                         // we don't visit nested items in this visitor,
599                         // only the fn body we were given.
600                     }
601                 }
602             }
603
604             hir::StmtExpr(ref expr, _) |
605             hir::StmtSemi(ref expr, _) => {
606                 self.consume_expr(&expr);
607             }
608         }
609     }
610
611     fn walk_local(&mut self, local: &hir::Local) {
612         match local.init {
613             None => {
614                 let delegate = &mut self.delegate;
615                 pat_util::pat_bindings(&local.pat, |_, id, span, _| {
616                     delegate.decl_without_init(id, span);
617                 })
618             }
619
620             Some(ref expr) => {
621                 // Variable declarations with
622                 // initializers are considered
623                 // "assigns", which is handled by
624                 // `walk_pat`:
625                 self.walk_expr(&expr);
626                 let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
627                 self.walk_irrefutable_pat(init_cmt, &local.pat);
628             }
629         }
630     }
631
632     /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
633     /// depending on its type.
634     fn walk_block(&mut self, blk: &hir::Block) {
635         debug!("walk_block(blk.id={})", blk.id);
636
637         for stmt in &blk.stmts {
638             self.walk_stmt(stmt);
639         }
640
641         if let Some(ref tail_expr) = blk.expr {
642             self.consume_expr(&tail_expr);
643         }
644     }
645
646     fn walk_struct_expr(&mut self,
647                         _expr: &hir::Expr,
648                         fields: &[hir::Field],
649                         opt_with: &Option<P<hir::Expr>>) {
650         // Consume the expressions supplying values for each field.
651         for field in fields {
652             self.consume_expr(&field.expr);
653         }
654
655         let with_expr = match *opt_with {
656             Some(ref w) => &**w,
657             None => { return; }
658         };
659
660         let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));
661
662         // Select just those fields of the `with`
663         // expression that will actually be used
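        //
        // For instance (a sketch), given
        //
        //     struct Point { x: i32, y: String }
        //     let p2 = Point { x: 3, ..p1 };
        //
        // only `p1.y` is consumed from the `with` expression; `x` was supplied
        // explicitly and already consumed by the loop over `fields` above.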
664         if let ty::TyStruct(def, substs) = with_cmt.ty.sty {
665             // Consume those fields of the with expression that are needed.
666             for with_field in &def.struct_variant().fields {
667                 if !contains_field_named(with_field, fields) {
668                     let cmt_field = self.mc.cat_field(
669                         &*with_expr,
670                         with_cmt.clone(),
671                         with_field.name,
672                         with_field.ty(self.tcx(), substs)
673                     );
674                     self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
675                 }
676             }
677         } else {
678             // the base expression should always evaluate to a
679             // struct; however, when EUV is run during typeck, it
680             // may not. This will generate an error earlier in typeck,
681             // so we can just ignore it.
682             if !self.tcx().sess.has_errors() {
683                 span_bug!(
684                     with_expr.span,
685                     "with expression doesn't evaluate to a struct");
686             }
687         };
688
689         // walk the with expression so that complex expressions
690         // are properly handled.
691         self.walk_expr(with_expr);
692
693         fn contains_field_named(field: ty::FieldDef,
694                                 fields: &[hir::Field])
695                                 -> bool
696         {
697             fields.iter().any(
698                 |f| f.name.node == field.name)
699         }
700     }
701
702     // Invoke the appropriate delegate calls for anything that gets
703     // consumed or borrowed as part of the automatic adjustment
704     // process.
705     fn walk_adjustment(&mut self, expr: &hir::Expr) {
706         let infcx = self.mc.infcx;
707         //NOTE(@jroesch): mixed RefCell borrow causes crash
708         let adj = infcx.adjustments().get(&expr.id).map(|x| x.clone());
709         if let Some(adjustment) = adj {
710             match adjustment {
711                 adjustment::AdjustReifyFnPointer |
712                 adjustment::AdjustUnsafeFnPointer |
713                 adjustment::AdjustMutToConstPointer => {
714                     // Creating a closure/fn-pointer or unsizing consumes
715                     // the input and stores it into the resulting rvalue.
716                     debug!("walk_adjustment: trivial adjustment");
717                     let cmt_unadjusted =
718                         return_if_err!(self.mc.cat_expr_unadjusted(expr));
719                     self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
720                 }
721                 adjustment::AdjustDerefRef(ref adj) => {
722                     self.walk_autoderefref(expr, adj);
723                 }
724             }
725         }
726     }
727
728     /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
729     /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
730     /// `deref()` is declared with `&self`, this is an autoref of `x`.
731     fn walk_autoderefs(&mut self,
732                        expr: &hir::Expr,
733                        autoderefs: usize) {
734         debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);
735
736         for i in 0..autoderefs {
737             let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
738             match self.mc.infcx.node_method_ty(deref_id) {
739                 None => {}
740                 Some(method_ty) => {
741                     let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
742
743                     // the method call infrastructure should have
744                     // replaced all late-bound regions with variables:
745                     let self_ty = method_ty.fn_sig().input(0);
746                     let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
747
748                     let (m, r) = match self_ty.sty {
749                         ty::TyRef(r, ref m) => (m.mutbl, r),
750                         _ => span_bug!(expr.span,
751                                 "bad overloaded deref type {:?}",
752                                 method_ty)
753                     };
754                     let bk = ty::BorrowKind::from_mutbl(m);
755                     self.delegate.borrow(expr.id, expr.span, cmt,
756                                          *r, bk, AutoRef);
757                 }
758             }
759         }
760     }
761
762     fn walk_autoderefref(&mut self,
763                          expr: &hir::Expr,
764                          adj: &adjustment::AutoDerefRef<'tcx>) {
765         debug!("walk_autoderefref expr={:?} adj={:?}",
766                expr,
767                adj);
768
769         self.walk_autoderefs(expr, adj.autoderefs);
770
771         let cmt_derefd =
772             return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));
773
774         let cmt_refd =
775             self.walk_autoref(expr, cmt_derefd, adj.autoref);
776
777         if adj.unsize.is_some() {
778             // Unsizing consumes the thin pointer and produces a fat one.
779             self.delegate_consume(expr.id, expr.span, cmt_refd);
780         }
781     }
782
783
784     /// Walks the autoref `opt_autoref` applied to the autoderef'd
785     /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
786     /// after all relevant autoderefs have occurred. Because AutoRefs
787     /// can be recursive, this function is recursive: it first walks
788     /// deeply all the way down the autoref chain, and then processes
789     /// the autorefs on the way out. At each point, it returns the
790     /// `cmt` for the rvalue that will be produced by introducing an
791     /// autoref.
792     fn walk_autoref(&mut self,
793                     expr: &hir::Expr,
794                     cmt_base: mc::cmt<'tcx>,
795                     opt_autoref: Option<adjustment::AutoRef<'tcx>>)
796                     -> mc::cmt<'tcx>
797     {
798         debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
799                expr.id,
800                cmt_base,
801                opt_autoref);
802
803         let cmt_base_ty = cmt_base.ty;
804
805         let autoref = match opt_autoref {
806             Some(ref autoref) => autoref,
807             None => {
808                 // No AutoRef.
809                 return cmt_base;
810             }
811         };
812
813         match *autoref {
814             adjustment::AutoPtr(r, m) => {
815                 self.delegate.borrow(expr.id,
816                                      expr.span,
817                                      cmt_base,
818                                      *r,
819                                      ty::BorrowKind::from_mutbl(m),
820                                      AutoRef);
821             }
822
823             adjustment::AutoUnsafe(m) => {
824                 debug!("walk_autoref: expr.id={} cmt_base={:?}",
825                        expr.id,
826                        cmt_base);
827
828                 // Converting from a &T to *T (or &mut T to *mut T) is
829                 // treated as borrowing it for the enclosing temporary
830                 // scope.
831                 let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
832
833                 self.delegate.borrow(expr.id,
834                                      expr.span,
835                                      cmt_base,
836                                      r,
837                                      ty::BorrowKind::from_mutbl(m),
838                                      AutoUnsafe);
839             }
840         }
841
842         // Construct the categorization for the result of the autoref.
843         // This is always an rvalue, since we are producing a new
844         // (temporary) indirection.
845
846         let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);
847
848         self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
849     }
850
851
852     // When this returns true, it means that the expression *is* a
853     // method-call (i.e. via the operator-overload).  This true result
854     // also implies that walk_overloaded_operator already took care of
855     // recursively processing the input arguments, and thus the caller
856     // should not do so.
857     fn walk_overloaded_operator(&mut self,
858                                 expr: &hir::Expr,
859                                 receiver: &hir::Expr,
860                                 rhs: Vec<&hir::Expr>,
861                                 pass_args: PassArgs)
862                                 -> bool
863     {
864         if !self.mc.infcx.is_method_call(expr.id) {
865             return false;
866         }
867
868         match pass_args {
869             PassArgs::ByValue => {
870                 self.consume_expr(receiver);
871                 for &arg in &rhs {
872                     self.consume_expr(arg);
873                 }
874
875                 return true;
876             },
877             PassArgs::ByRef => {},
878         }
879
880         self.walk_expr(receiver);
881
882         // Arguments (but not receivers) to overloaded operator
883         // methods are implicitly autoref'd which sadly does not use
884         // adjustments, so we must hardcode the borrow here.
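        //
        // For instance (illustrative), in `a < b` with an overloaded
        // comparison, `a` is walked as the receiver above while `b` is
        // reported to the delegate as an `ImmBorrow` for the operator
        // expression's scope.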
885
886         let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
887         let bk = ty::ImmBorrow;
888
889         for &arg in &rhs {
890             self.borrow_expr(arg, r, bk, OverloadedOperator);
891         }
892         return true;
893     }
894
895     fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
896         let mut mode = Unknown;
897         for pat in &arm.pats {
898             self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
899         }
900         mode
901     }
902
903     fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
904         for pat in &arm.pats {
905             self.walk_pat(discr_cmt.clone(), &pat, mode);
906         }
907
908         if let Some(ref guard) = arm.guard {
909             self.consume_expr(&guard);
910         }
911
912         self.consume_expr(&arm.body);
913     }
914
915     /// Walks a pat that occurs in isolation (i.e. the top-level of a fn
916     /// arg or a let binding); *not* a match arm or nested pat.
917     fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
918         let mut mode = Unknown;
919         self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
920         let mode = mode.match_mode();
921         self.walk_pat(cmt_discr, pat, mode);
922     }
923
924     /// Identifies any bindings within `pat` and accumulates within
925     /// `mode` whether the overall pattern/match structure is a move,
926     /// copy, or borrow.
927     fn determine_pat_move_mode(&mut self,
928                                cmt_discr: mc::cmt<'tcx>,
929                                pat: &hir::Pat,
930                                mode: &mut TrackMatchMode) {
931         debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
932                pat);
933         return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
934             match pat.node {
935                 PatKind::Binding(hir::BindByRef(..), _, _) =>
936                     mode.lub(BorrowingMatch),
937                 PatKind::Binding(hir::BindByValue(..), _, _) => {
938                     match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) {
939                         Copy => mode.lub(CopyingMatch),
940                         Move(..) => mode.lub(MovingMatch),
941                     }
942                 }
943                 _ => {}
944             }
945         }));
946     }
947
948     /// The core driver for walking a pattern; `match_mode` must be
949     /// established up front, e.g. via `determine_pat_move_mode` (see
950     /// also `walk_irrefutable_pat` for patterns that stand alone).
951     fn walk_pat(&mut self,
952                 cmt_discr: mc::cmt<'tcx>,
953                 pat: &hir::Pat,
954                 match_mode: MatchMode) {
955         debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr,
956                pat);
957
958         let tcx = &self.tcx();
959         let mc = &self.mc;
960         let infcx = self.mc.infcx;
961         let delegate = &mut self.delegate;
962         return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
963             match pat.node {
964                 PatKind::Binding(bmode, _, _) => {
965                     debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}",
966                            cmt_pat,
967                            pat,
968                            match_mode);
969
970                     // pat_ty: the type of the binding being produced.
971                     let pat_ty = return_if_err!(infcx.node_ty(pat.id));
972
973                     // Each match binding is effectively an assignment to the
974                     // binding being produced.
975                     if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty,
976                                                         tcx.expect_def(pat.id)) {
977                         delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
978                     }
979
980                     // It is also a borrow or copy/move of the value being matched.
981                     match bmode {
982                         hir::BindByRef(m) => {
983                             if let ty::TyRef(&r, _) = pat_ty.sty {
984                                 let bk = ty::BorrowKind::from_mutbl(m);
985                                 delegate.borrow(pat.id, pat.span, cmt_pat,
986                                                 r, bk, RefBinding);
987                             }
988                         }
989                         hir::BindByValue(..) => {
990                             let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
991                             debug!("walk_pat binding consuming pat");
992                             delegate.consume_pat(pat, cmt_pat, mode);
993                         }
994                     }
995                 }
996                 _ => {}
997             }
998         }));
999
1000         // Do a second pass over the pattern, calling `matched_pat` on
1001         // the interior nodes (enum variants and structs), as opposed
1002         // to the above loop's visit of the bindings that form
1003         // the leaves of the pattern tree structure.
1004         return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
1005             match pat.node {
1006                 PatKind::Struct(..) | PatKind::TupleStruct(..) |
1007                 PatKind::Path(..) | PatKind::QPath(..) => {
1008                     match tcx.expect_def(pat.id) {
1009                         Def::Variant(enum_did, variant_did) => {
1010                             let downcast_cmt =
1011                                 if tcx.lookup_adt_def(enum_did).is_univariant() {
1012                                     cmt_pat
1013                                 } else {
1014                                     let cmt_pat_ty = cmt_pat.ty;
1015                                     mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
1016                                 };
1017
1018                             debug!("variant downcast_cmt={:?} pat={:?}",
1019                                    downcast_cmt,
1020                                    pat);
1021
1022                             delegate.matched_pat(pat, downcast_cmt, match_mode);
1023                         }
1024
1025                         Def::Struct(..) | Def::TyAlias(..) => {
1026                             // A struct (in either the value or type
1027                             // namespace; we encounter the former on
1028                             // e.g. patterns for unit structs).
1029
1030                             debug!("struct cmt_pat={:?} pat={:?}",
1031                                    cmt_pat,
1032                                    pat);
1033
1034                             delegate.matched_pat(pat, cmt_pat, match_mode);
1035                         }
1036
1037                         Def::Const(..) | Def::AssociatedConst(..) => {
1038                             // This is a leaf (i.e. identifier binding
1039                             // or constant value to match); thus no
1040                             // `matched_pat` call.
1041                         }
1042
1043                         def => {
1044                             // An enum type should never be in a pattern.
1045                             // Remaining cases are e.g. Def::Fn, to
1046                             // which identifiers within patterns
1047                             // should not resolve. However, we do
1048                             // encounter this when using the
1049                             // expr-use-visitor during typeck. So just
1050                             // ignore it, an error should have been
1051                             // reported.
1052
1053                             if !tcx.sess.has_errors() {
1054                                 span_bug!(pat.span,
1055                                           "Pattern has unexpected def: {:?} and type {:?}",
1056                                           def,
1057                                           cmt_pat.ty);
1058                             }
1059                         }
1060                     }
1061                 }
1062
1063                 PatKind::Wild | PatKind::Tuple(..) | PatKind::Box(..) |
1064                 PatKind::Ref(..) | PatKind::Lit(..) | PatKind::Range(..) |
1065                 PatKind::Vec(..) | PatKind::Binding(..) => {
1066                     // Each of these cases does not
1067                     // correspond to an enum variant or struct, so we
1068                     // do not do any `matched_pat` calls for these
1069                     // cases either.
1070                 }
1071             }
1072         }));
1073     }
1074
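    /// Reports how a closure's captured variables are used: a by-value capture
    /// is consumed (copied or moved into the closure), a by-ref capture is
    /// borrowed with the recorded upvar region and kind. For instance
    /// (illustrative), `move || drop(s)` consumes `s`, while `|| s.len()`
    /// only borrows it.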
1075     fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
1076         debug!("walk_captures({:?})", closure_expr);
1077
1078         self.tcx().with_freevars(closure_expr.id, |freevars| {
1079             for freevar in freevars {
1080                 let id_var = freevar.def.var_id();
1081                 let upvar_id = ty::UpvarId { var_id: id_var,
1082                                              closure_expr_id: closure_expr.id };
1083                 let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap();
1084                 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1085                                                                    fn_decl_span,
1086                                                                    freevar.def));
1087                 match upvar_capture {
1088                     ty::UpvarCapture::ByValue => {
1089                         let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove);
1090                         self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
1091                     }
1092                     ty::UpvarCapture::ByRef(upvar_borrow) => {
1093                         self.delegate.borrow(closure_expr.id,
1094                                              fn_decl_span,
1095                                              cmt_var,
1096                                              upvar_borrow.region,
1097                                              upvar_borrow.kind,
1098                                              ClosureCapture(freevar.span));
1099                     }
1100                 }
1101             }
1102         });
1103     }
1104
1105     fn cat_captured_var(&mut self,
1106                         closure_id: ast::NodeId,
1107                         closure_span: Span,
1108                         upvar_def: Def)
1109                         -> mc::McResult<mc::cmt<'tcx>> {
1110         // Create the cmt for the variable being borrowed, from the
1111         // caller's perspective
1112         let var_id = upvar_def.var_id();
1113         let var_ty = self.mc.infcx.node_ty(var_id)?;
1114         self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
1115     }
1116 }
1117
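/// Classifies a use of the value at `cmt` as a copy or a move, depending on
/// whether its type moves by default. For instance (illustrative), an `i32`
/// yields `Copy`, while a `Box<T>` or `String` yields `Move(move_reason)`.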
1118 fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
1119                                 cmt: &mc::cmt<'tcx>,
1120                                 move_reason: MoveReason)
1121                                 -> ConsumeMode
1122 {
1123     if infcx.type_moves_by_default(cmt.ty, cmt.span) {
1124         Move(move_reason)
1125     } else {
1126         Copy
1127     }
1128 }