1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A different sort of visitor for walking fn bodies. Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
15 pub use self::MutateMode::*;
16 pub use self::LoanCause::*;
17 pub use self::ConsumeMode::*;
18 pub use self::MoveReason::*;
19 pub use self::MatchMode::*;
20 use self::TrackMatchMode::*;
21 use self::OverloadedCallType::*;
23 use middle::{def, pat_util};
24 use middle::def_id::{DefId};
26 use middle::mem_categorization as mc;
28 use middle::ty::adjustment;
34 use syntax::codemap::Span;
36 ///////////////////////////////////////////////////////////////////////////
39 /// This trait defines the callbacks you can expect to receive when
40 /// employing the ExprUseVisitor.
41 pub trait Delegate<'tcx> {
42 // The value found at `cmt` is either copied or moved, depending
45 consume_id: ast::NodeId,
50 // The value found at `cmt` has been determined to match the
51 // pattern binding `matched_pat`, and its subparts are being
52 // copied or moved depending on `mode`. Note that `matched_pat`
53 // is called on all variant/structs in the pattern (i.e., the
54 // interior nodes of the pattern's tree structure) while
55 // consume_pat is called on the binding identifiers in the pattern
56 // (which are leaves of the pattern's tree structure).
58 // Note that variants/structs and identifiers are disjoint; thus
59 // `matched_pat` and `consume_pat` are never both called on the
60 // same input pattern structure (though `consume_pat` can be
61 // called on a subpart of an input passed to `matched_pat`).
62 fn matched_pat(&mut self,
63 matched_pat: &hir::Pat,
67 // The value found at `cmt` is either copied or moved via the
68 // pattern binding `consume_pat`, depending on mode.
69 fn consume_pat(&mut self,
70 consume_pat: &hir::Pat,
74 // The value found at `borrow` is being borrowed at the point
75 // `borrow_id` for the region `loan_region` with kind `bk`.
77 borrow_id: ast::NodeId,
80 loan_region: ty::Region,
82 loan_cause: LoanCause);
84 // The local variable `id` is declared but not initialized.
85 fn decl_without_init(&mut self,
89 // The path at `cmt` is being assigned to.
91 assignment_id: ast::NodeId,
92 assignment_span: Span,
93 assignee_cmt: mc::cmt<'tcx>,
97 #[derive(Copy, Clone, PartialEq, Debug)]
110 #[derive(Copy, Clone, PartialEq, Debug)]
111 pub enum ConsumeMode {
112 Copy, // reference to x where x has a type that copies
113 Move(MoveReason), // reference to x where x has a type that moves
116 #[derive(Copy, Clone, PartialEq, Debug)]
117 pub enum MoveReason {
123 #[derive(Copy, Clone, PartialEq, Debug)]
131 #[derive(Copy, Clone, PartialEq, Debug)]
132 enum TrackMatchMode {
138 impl TrackMatchMode {
139 // Builds up the whole match mode for a pattern from its constituent
140 // parts. The lattice looks like this:
156 // * `(_, some_int)` pattern is Copying, since
157 // NonBinding + Copying => Copying
159 // * `(some_int, some_box)` pattern is Moving, since
160 // Copying + Moving => Moving
162 // * `(ref x, some_box)` pattern is Conflicting, since
163 // Borrowing + Moving => Conflicting
165 // Note that the `Unknown` and `Conflicting` states are
166 // represented separately from the other more interesting
167 // `Definite` states, which simplifies logic here somewhat.
// Least-upper-bound in the lattice sketched above: folds one more
// sub-pattern's `MatchMode` into the running summary held in `self`.
168 fn lub(&mut self, mode: MatchMode) {
169 *self = match (*self, mode) {
170 // Note that clause order below is very significant.
// First observation: adopt the new mode as-is.
171 (Unknown, new) => Definite(new),
// Re-observing the same mode changes nothing.
172 (Definite(old), new) if old == new => Definite(old),
// NonBindingMatch acts as the identity element: it never
// displaces an established mode and is displaced by anything.
174 (Definite(old), NonBindingMatch) => Definite(old),
175 (Definite(NonBindingMatch), new) => Definite(new),
// CopyingMatch is likewise absorbed by any stronger mode.
177 (Definite(old), CopyingMatch) => Definite(old),
178 (Definite(CopyingMatch), new) => Definite(new),
// Any remaining mix (e.g. Borrowing + Moving) is a conflict.
180 (Definite(_), _) => Conflicting,
// Conflicting is the lattice top: it absorbs everything.
181 (Conflicting, _) => *self,
// Collapses the tracked state into a plain `MatchMode` result.
185 fn match_mode(&self) -> MatchMode {
// No bindings were ever observed for this pattern.
187 Unknown => NonBindingMatch,
188 Definite(mode) => mode,
190 // Conservatively return MovingMatch to let the
191 // compiler continue to make progress.
198 #[derive(Copy, Clone, PartialEq, Debug)]
199 pub enum MutateMode {
202 WriteAndRead, // x += y
205 #[derive(Copy, Clone)]
206 enum OverloadedCallType {
209 FnOnceOverloadedCall,
212 impl OverloadedCallType {
// Maps one of the three call-operator lang-item traits (`FnOnce`,
// `FnMut`, `Fn`) to the matching `OverloadedCallType` variant;
// reports a compiler bug via `tcx.sess.bug` if `trait_id` is none
// of the three.
213 fn from_trait_id(tcx: &ty::ctxt, trait_id: DefId)
214 -> OverloadedCallType {
215 for &(maybe_function_trait, overloaded_call_type) in &[
216 (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
217 (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
218 (tcx.lang_items.fn_trait(), FnOverloadedCall)
// Each lang item is optional; only a defined one can match.
220 match maybe_function_trait {
221 Some(function_trait) if function_trait == trait_id => {
222 return overloaded_call_type
228 tcx.sess.bug("overloaded call didn't map to known function trait")
// Classifies an overloaded-call method by the trait containing it:
// looks up the method item and delegates to `from_trait_id` with
// the method's container (expected to be one of the Fn* traits).
231 fn from_method_id(tcx: &ty::ctxt, method_id: DefId)
232 -> OverloadedCallType {
233 let method = tcx.impl_or_trait_item(method_id);
234 OverloadedCallType::from_trait_id(tcx, method.container().id())
238 ///////////////////////////////////////////////////////////////////////////
239 // The ExprUseVisitor type
241 // This is the code that actually walks the tree. Like
242 // mem_categorization, it requires a TYPER, which is a type that
243 // supplies types from the tree. After type checking is complete, you
244 // can just use the tcx as the typer.
246 // FIXME(stage0): the :'t here is probably only important for stage0
247 pub struct ExprUseVisitor<'d, 't, 'a: 't, 'tcx:'a+'d+'t> {
248 typer: &'t infer::InferCtxt<'a, 'tcx>,
249 mc: mc::MemCategorizationContext<'t, 'a, 'tcx>,
250 delegate: &'d mut Delegate<'tcx>,
253 // If the TYPER results in an error, it's because the type check
254 // failed (or will fail, when the error is uncovered and reported
255 // during writeback). In this case, we just ignore this part of the
258 // Note that this macro appears similar to try!(), but, unlike try!(),
259 // it does not propagate the error.
260 macro_rules! return_if_err {
265 debug!("mc reported err");
272 /// Whether the elements of an overloaded operation are passed by value or by reference
278 impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {
279 pub fn new(delegate: &'d mut Delegate<'tcx>,
280 typer: &'t infer::InferCtxt<'a, 'tcx>)
281 -> ExprUseVisitor<'d,'t,'a,'tcx>
283 let result = ExprUseVisitor {
285 mc: mc::MemCategorizationContext::new(typer),
292 pub fn walk_fn(&mut self,
295 self.walk_arg_patterns(decl, body);
296 self.walk_block(body);
299 fn walk_arg_patterns(&mut self,
302 for arg in &decl.inputs {
303 let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id));
305 let fn_body_scope = self.tcx().region_maps.node_extent(body.id);
306 let arg_cmt = self.mc.cat_rvalue(
309 ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
312 self.walk_irrefutable_pat(arg_cmt, &*arg.pat);
316 fn tcx(&self) -> &'t ty::ctxt<'tcx> {
320 fn delegate_consume(&mut self,
321 consume_id: ast::NodeId,
323 cmt: mc::cmt<'tcx>) {
324 debug!("delegate_consume(consume_id={}, cmt={:?})",
327 let mode = copy_or_move(self.typer, &cmt, DirectRefMove);
328 self.delegate.consume(consume_id, consume_span, cmt, mode);
331 fn consume_exprs(&mut self, exprs: &Vec<P<hir::Expr>>) {
333 self.consume_expr(&**expr);
337 pub fn consume_expr(&mut self, expr: &hir::Expr) {
338 debug!("consume_expr(expr={:?})", expr);
340 let cmt = return_if_err!(self.mc.cat_expr(expr));
341 self.delegate_consume(expr.id, expr.span, cmt);
342 self.walk_expr(expr);
345 fn mutate_expr(&mut self,
346 assignment_expr: &hir::Expr,
349 let cmt = return_if_err!(self.mc.cat_expr(expr));
350 self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
351 self.walk_expr(expr);
354 fn borrow_expr(&mut self,
359 debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
362 let cmt = return_if_err!(self.mc.cat_expr(expr));
363 self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
365 // Note: Unlike consume, we can ignore ExprParen. cat_expr
366 // already skips over them, and walk will uncover any
367 // attachments or whatever.
371 fn select_from_expr(&mut self, expr: &hir::Expr) {
375 pub fn walk_expr(&mut self, expr: &hir::Expr) {
376 debug!("walk_expr(expr={:?})", expr);
378 self.walk_adjustment(expr);
381 hir::ExprParen(ref subexpr) => {
382 self.walk_expr(&**subexpr)
385 hir::ExprPath(..) => { }
387 hir::ExprUnary(hir::UnDeref, ref base) => { // *base
388 if !self.walk_overloaded_operator(expr, &**base, Vec::new(), PassArgs::ByRef) {
389 self.select_from_expr(&**base);
393 hir::ExprField(ref base, _) => { // base.f
394 self.select_from_expr(&**base);
397 hir::ExprTupField(ref base, _) => { // base.<n>
398 self.select_from_expr(&**base);
401 hir::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
402 if !self.walk_overloaded_operator(expr,
406 self.select_from_expr(&**lhs);
407 self.consume_expr(&**rhs);
411 hir::ExprRange(ref start, ref end) => {
412 start.as_ref().map(|e| self.consume_expr(&**e));
413 end.as_ref().map(|e| self.consume_expr(&**e));
416 hir::ExprCall(ref callee, ref args) => { // callee(args)
417 self.walk_callee(expr, &**callee);
418 self.consume_exprs(args);
421 hir::ExprMethodCall(_, _, ref args) => { // callee.m(args)
422 self.consume_exprs(args);
425 hir::ExprStruct(_, ref fields, ref opt_with) => {
426 self.walk_struct_expr(expr, fields, opt_with);
429 hir::ExprTup(ref exprs) => {
430 self.consume_exprs(exprs);
433 hir::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
434 self.consume_expr(&**cond_expr);
435 self.walk_block(&**then_blk);
436 if let Some(ref else_expr) = *opt_else_expr {
437 self.consume_expr(&**else_expr);
441 hir::ExprMatch(ref discr, ref arms, _) => {
442 let discr_cmt = return_if_err!(self.mc.cat_expr(&**discr));
443 self.borrow_expr(&**discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);
445 // treatment of the discriminant is handled while walking the arms.
447 let mode = self.arm_move_mode(discr_cmt.clone(), arm);
448 let mode = mode.match_mode();
449 self.walk_arm(discr_cmt.clone(), arm, mode);
453 hir::ExprVec(ref exprs) => {
454 self.consume_exprs(exprs);
457 hir::ExprAddrOf(m, ref base) => { // &base
458 // make sure that the thing we are pointing out stays valid
459 // for the lifetime `scope_r` of the resulting ptr:
460 let expr_ty = return_if_err!(self.typer.node_ty(expr.id));
461 if let ty::TyRef(&r, _) = expr_ty.sty {
462 let bk = ty::BorrowKind::from_mutbl(m);
463 self.borrow_expr(&**base, r, bk, AddrOf);
467 hir::ExprInlineAsm(ref ia) => {
468 for &(_, ref input) in &ia.inputs {
469 self.consume_expr(&**input);
472 for &(_, ref output, is_rw) in &ia.outputs {
473 self.mutate_expr(expr, &**output,
474 if is_rw { WriteAndRead } else { JustWrite });
480 hir::ExprLit(..) => {}
482 hir::ExprLoop(ref blk, _) => {
483 self.walk_block(&**blk);
486 hir::ExprWhile(ref cond_expr, ref blk, _) => {
487 self.consume_expr(&**cond_expr);
488 self.walk_block(&**blk);
491 hir::ExprUnary(op, ref lhs) => {
492 let pass_args = if ::rustc_front::util::is_by_value_unop(op) {
498 if !self.walk_overloaded_operator(expr, &**lhs, Vec::new(), pass_args) {
499 self.consume_expr(&**lhs);
503 hir::ExprBinary(op, ref lhs, ref rhs) => {
504 let pass_args = if ::rustc_front::util::is_by_value_binop(op.node) {
510 if !self.walk_overloaded_operator(expr, &**lhs, vec![&**rhs], pass_args) {
511 self.consume_expr(&**lhs);
512 self.consume_expr(&**rhs);
516 hir::ExprBlock(ref blk) => {
517 self.walk_block(&**blk);
520 hir::ExprRet(ref opt_expr) => {
521 if let Some(ref expr) = *opt_expr {
522 self.consume_expr(&**expr);
526 hir::ExprAssign(ref lhs, ref rhs) => {
527 self.mutate_expr(expr, &**lhs, JustWrite);
528 self.consume_expr(&**rhs);
531 hir::ExprCast(ref base, _) => {
532 self.consume_expr(&**base);
535 hir::ExprAssignOp(_, ref lhs, ref rhs) => {
536 // This will have to change if/when we support
537 // overloaded operators for `+=` and so forth.
538 self.mutate_expr(expr, &**lhs, WriteAndRead);
539 self.consume_expr(&**rhs);
542 hir::ExprRepeat(ref base, ref count) => {
543 self.consume_expr(&**base);
544 self.consume_expr(&**count);
547 hir::ExprClosure(..) => {
548 self.walk_captures(expr)
551 hir::ExprBox(ref place, ref base) => {
553 Some(ref place) => self.consume_expr(&**place),
556 self.consume_expr(&**base);
558 self.tcx().sess.span_bug(
560 "box with explicit place remains after expansion");
566 fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
567 let callee_ty = return_if_err!(self.typer.expr_ty_adjusted(callee));
568 debug!("walk_callee: callee={:?} callee_ty={:?}",
570 let call_scope = self.tcx().region_maps.node_extent(call.id);
571 match callee_ty.sty {
572 ty::TyBareFn(..) => {
573 self.consume_expr(callee);
577 let overloaded_call_type =
578 match self.typer.node_method_id(ty::MethodCall::expr(call.id)) {
580 OverloadedCallType::from_method_id(self.tcx(), method_id)
583 self.tcx().sess.span_bug(
585 &format!("unexpected callee type {}", callee_ty))
588 match overloaded_call_type {
589 FnMutOverloadedCall => {
590 self.borrow_expr(callee,
591 ty::ReScope(call_scope),
595 FnOverloadedCall => {
596 self.borrow_expr(callee,
597 ty::ReScope(call_scope),
601 FnOnceOverloadedCall => self.consume_expr(callee),
607 fn walk_stmt(&mut self, stmt: &hir::Stmt) {
609 hir::StmtDecl(ref decl, _) => {
611 hir::DeclLocal(ref local) => {
612 self.walk_local(&**local);
615 hir::DeclItem(_) => {
616 // we don't visit nested items in this visitor,
617 // only the fn body we were given.
622 hir::StmtExpr(ref expr, _) |
623 hir::StmtSemi(ref expr, _) => {
624 self.consume_expr(&**expr);
629 fn walk_local(&mut self, local: &hir::Local) {
632 let delegate = &mut self.delegate;
633 pat_util::pat_bindings(&self.typer.tcx.def_map, &*local.pat,
635 delegate.decl_without_init(id, span);
640 // Variable declarations with
641 // initializers are considered
642 // "assigns", which is handled by
644 self.walk_expr(&**expr);
645 let init_cmt = return_if_err!(self.mc.cat_expr(&**expr));
646 self.walk_irrefutable_pat(init_cmt, &*local.pat);
651 /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
652 /// depending on its type.
653 fn walk_block(&mut self, blk: &hir::Block) {
654 debug!("walk_block(blk.id={})", blk.id);
// Visit each statement for the consumes/borrows/mutations it causes.
656 for stmt in &blk.stmts {
657 self.walk_stmt(&**stmt);
// The tail expression, if any, is the block's value, and is
// consumed (copied or moved depending on its type).
660 if let Some(ref tail_expr) = blk.expr {
661 self.consume_expr(&**tail_expr);
665 fn walk_struct_expr(&mut self,
667 fields: &Vec<hir::Field>,
668 opt_with: &Option<P<hir::Expr>>) {
669 // Consume the expressions supplying values for each field.
670 for field in fields {
671 self.consume_expr(&*field.expr);
674 let with_expr = match *opt_with {
679 let with_cmt = return_if_err!(self.mc.cat_expr(&*with_expr));
681 // Select just those fields of the `with`
682 // expression that will actually be used
683 if let ty::TyStruct(def, substs) = with_cmt.ty.sty {
684 // Consume those fields of the with expression that are needed.
685 for with_field in &def.struct_variant().fields {
686 if !contains_field_named(with_field, fields) {
687 let cmt_field = self.mc.cat_field(
691 with_field.ty(self.tcx(), substs)
693 self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
697 // the base expression should always evaluate to a
698 // struct; however, when EUV is run during typeck, it
699 // may not. This will generate an error earlier in typeck,
700 // so we can just ignore it.
701 if !self.tcx().sess.has_errors() {
702 self.tcx().sess.span_bug(
704 "with expression doesn't evaluate to a struct");
708 // walk the with expression so that complex expressions
709 // are properly handled.
710 self.walk_expr(with_expr);
712 fn contains_field_named(field: ty::FieldDef,
713 fields: &Vec<hir::Field>)
717 |f| f.ident.node.name == field.name)
721 // Invoke the appropriate delegate calls for anything that gets
722 // consumed or borrowed as part of the automatic adjustment
724 fn walk_adjustment(&mut self, expr: &hir::Expr) {
725 let typer = self.typer;
726 //NOTE(@jroesch): mixed RefCell borrow causes crash
727 let adj = typer.adjustments().get(&expr.id).map(|x| x.clone());
728 if let Some(adjustment) = adj {
730 adjustment::AdjustReifyFnPointer |
731 adjustment::AdjustUnsafeFnPointer => {
732 // Creating a closure/fn-pointer or unsizing consumes
733 // the input and stores it into the resulting rvalue.
734 debug!("walk_adjustment(AdjustReifyFnPointer|AdjustUnsafeFnPointer)");
736 return_if_err!(self.mc.cat_expr_unadjusted(expr));
737 self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
739 adjustment::AdjustDerefRef(ref adj) => {
740 self.walk_autoderefref(expr, adj);
746 /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
747 /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
748 /// `deref()` is declared with `&self`, this is an autoref of `x`.
749 fn walk_autoderefs(&mut self,
752 debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);
754 for i in 0..autoderefs {
755 let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
756 match self.typer.node_method_ty(deref_id) {
759 let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
761 // the method call infrastructure should have
762 // replaced all late-bound regions with variables:
763 let self_ty = method_ty.fn_sig().input(0);
764 let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
766 let (m, r) = match self_ty.sty {
767 ty::TyRef(r, ref m) => (m.mutbl, r),
768 _ => self.tcx().sess.span_bug(expr.span,
769 &format!("bad overloaded deref type {:?}",
772 let bk = ty::BorrowKind::from_mutbl(m);
773 self.delegate.borrow(expr.id, expr.span, cmt,
780 fn walk_autoderefref(&mut self,
782 adj: &adjustment::AutoDerefRef<'tcx>) {
783 debug!("walk_autoderefref expr={:?} adj={:?}",
787 self.walk_autoderefs(expr, adj.autoderefs);
790 return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));
793 self.walk_autoref(expr, cmt_derefd, adj.autoref);
795 if adj.unsize.is_some() {
796 // Unsizing consumes the thin pointer and produces a fat one.
797 self.delegate_consume(expr.id, expr.span, cmt_refd);
802 /// Walks the autoref `opt_autoref` applied to the autoderef'd
803 /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
804 /// after all relevant autoderefs have occurred. Because AutoRefs
805 /// can be recursive, this function is recursive: it first walks
806 /// deeply all the way down the autoref chain, and then processes
807 /// the autorefs on the way out. At each point, it returns the
808 /// `cmt` for the rvalue that will be produced by introducing an
810 fn walk_autoref(&mut self,
812 cmt_base: mc::cmt<'tcx>,
813 opt_autoref: Option<adjustment::AutoRef<'tcx>>)
816 debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
821 let cmt_base_ty = cmt_base.ty;
823 let autoref = match opt_autoref {
824 Some(ref autoref) => autoref,
832 adjustment::AutoPtr(r, m) => {
833 self.delegate.borrow(expr.id,
837 ty::BorrowKind::from_mutbl(m),
841 adjustment::AutoUnsafe(m) => {
842 debug!("walk_autoref: expr.id={} cmt_base={:?}",
846 // Converting from a &T to *T (or &mut T to *mut T) is
847 // treated as borrowing it for the enclosing temporary
849 let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
851 self.delegate.borrow(expr.id,
855 ty::BorrowKind::from_mutbl(m),
860 // Construct the categorization for the result of the autoref.
861 // This is always an rvalue, since we are producing a new
862 // (temporary) indirection.
864 let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);
866 self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
870 // When this returns true, it means that the expression *is* a
871 // method-call (i.e. via the operator-overload). This true result
872 // also implies that walk_overloaded_operator already took care of
873 // recursively processing the input arguments, and thus the caller
875 fn walk_overloaded_operator(&mut self,
877 receiver: &hir::Expr,
878 rhs: Vec<&hir::Expr>,
882 if !self.typer.is_method_call(expr.id) {
887 PassArgs::ByValue => {
888 self.consume_expr(receiver);
890 self.consume_expr(arg);
895 PassArgs::ByRef => {},
898 self.walk_expr(receiver);
900 // Arguments (but not receivers) to overloaded operator
901 // methods are implicitly autoref'd which sadly does not use
902 // adjustments, so we must hardcode the borrow here.
904 let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
905 let bk = ty::ImmBorrow;
908 self.borrow_expr(arg, r, bk, OverloadedOperator);
// Computes the overall `TrackMatchMode` for one match arm by
// lub-ing together the move mode of each of the arm's `|`-patterns
// against the discriminant's categorization.
913 fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
914 let mut mode = Unknown;
915 for pat in &arm.pats {
916 self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode);
921 fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
922 for pat in &arm.pats {
923 self.walk_pat(discr_cmt.clone(), &**pat, mode);
926 if let Some(ref guard) = arm.guard {
927 self.consume_expr(&**guard);
930 self.consume_expr(&*arm.body);
933 /// Walks a pat that occurs in isolation (i.e. top-level of fn
934 /// arg or let binding); *not* a match arm or nested pat.
935 fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
// First pass: classify how the pattern's bindings use the value
// (borrowing / copying / moving), accumulated into `mode`.
936 let mut mode = Unknown;
937 self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
938 let mode = mode.match_mode();
// Second pass: report the actual consumes/borrows to the delegate
// using the mode established above.
939 self.walk_pat(cmt_discr, pat, mode);
942 /// Identifies any bindings within `pat` and accumulates within
943 /// `mode` whether the overall pattern/match structure is a move,
945 fn determine_pat_move_mode(&mut self,
946 cmt_discr: mc::cmt<'tcx>,
948 mode: &mut TrackMatchMode) {
949 debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
951 return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
952 let tcx = self.tcx();
953 let def_map = &self.tcx().def_map;
954 if pat_util::pat_is_binding(def_map, pat) {
956 hir::PatIdent(hir::BindByRef(_), _, _) =>
957 mode.lub(BorrowingMatch),
958 hir::PatIdent(hir::BindByValue(_), _, _) => {
959 match copy_or_move(self.typer, &cmt_pat, PatBindingMove) {
960 Copy => mode.lub(CopyingMatch),
961 Move(_) => mode.lub(MovingMatch),
967 "binding pattern not an identifier");
974 /// The core driver for walking a pattern; `match_mode` must be
975 /// established up front, e.g. via `determine_pat_move_mode` (see
976 /// also `walk_irrefutable_pat` for patterns that stand alone).
977 fn walk_pat(&mut self,
978 cmt_discr: mc::cmt<'tcx>,
980 match_mode: MatchMode) {
981 debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr,
985 let typer = self.typer;
986 let def_map = &self.tcx().def_map;
987 let delegate = &mut self.delegate;
988 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
989 if pat_util::pat_is_binding(def_map, pat) {
992 debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}",
997 // pat_ty: the type of the binding being produced.
998 let pat_ty = return_if_err!(typer.node_ty(pat.id));
1000 // Each match binding is effectively an assignment to the
1001 // binding being produced.
1002 let def = def_map.borrow().get(&pat.id).unwrap().full_def();
1003 match mc.cat_def(pat.id, pat.span, pat_ty, def) {
1004 Ok(binding_cmt) => {
1005 delegate.mutate(pat.id, pat.span, binding_cmt, Init);
1010 // It is also a borrow or copy/move of the value being matched.
1012 hir::PatIdent(hir::BindByRef(m), _, _) => {
1013 if let ty::TyRef(&r, _) = pat_ty.sty {
1014 let bk = ty::BorrowKind::from_mutbl(m);
1015 delegate.borrow(pat.id, pat.span, cmt_pat,
1019 hir::PatIdent(hir::BindByValue(_), _, _) => {
1020 let mode = copy_or_move(typer, &cmt_pat, PatBindingMove);
1021 debug!("walk_pat binding consuming pat");
1022 delegate.consume_pat(pat, cmt_pat, mode);
1027 "binding pattern not an identifier");
1032 hir::PatVec(_, Some(ref slice_pat), _) => {
1033 // The `slice_pat` here creates a slice into
1034 // the original vector. This is effectively a
1035 // borrow of the elements of the vector being
1038 let (slice_cmt, slice_mutbl, slice_r) =
1039 return_if_err!(mc.cat_slice_pattern(cmt_pat, &**slice_pat));
1041 // Note: We declare here that the borrow
1042 // occurs upon entering the `[...]`
1043 // pattern. This implies that something like
1044 // `[a; b]` where `a` is a move is illegal,
1045 // because the borrow is already in effect.
1046 // In fact such a move would be safe-ish, but
1047 // it effectively *requires* that we use the
1048 // nulling out semantics to indicate when a
1049 // value has been moved, which we are trying
1050 // to move away from. Otherwise, how can we
1051 // indicate that the first element in the
1052 // vector has been moved? Eventually, we
1053 // could perhaps modify this rule to permit
1054 // `[..a, b]` where `b` is a move, because in
1055 // that case we can adjust the length of the
1056 // original vec accordingly, but we'd have to
1057 // make trans do the right thing, and it would
1058 // only work for `Box<[T]>`s. It seems simpler
1059 // to just require that people call
1060 // `vec.pop()` or `vec.unshift()`.
1061 let slice_bk = ty::BorrowKind::from_mutbl(slice_mutbl);
1062 delegate.borrow(pat.id, pat.span,
1064 slice_bk, RefBinding);
1071 // Do a second pass over the pattern, calling `matched_pat` on
1072 // the interior nodes (enum variants and structs), as opposed
1073 // to the above loop's visit of the bindings that form
1074 // the leaves of the pattern tree structure.
1075 return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
1076 let def_map = def_map.borrow();
1077 let tcx = typer.tcx;
1080 hir::PatEnum(_, _) | hir::PatQPath(..) |
1081 hir::PatIdent(_, _, None) | hir::PatStruct(..) => {
1082 match def_map.get(&pat.id).map(|d| d.full_def()) {
1084 // no definition found: pat is not a
1085 // struct or enum pattern.
1088 Some(def::DefVariant(enum_did, variant_did, _is_struct)) => {
1090 if tcx.lookup_adt_def(enum_did).is_univariant() {
1093 let cmt_pat_ty = cmt_pat.ty;
1094 mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
1097 debug!("variant downcast_cmt={:?} pat={:?}",
1101 delegate.matched_pat(pat, downcast_cmt, match_mode);
1104 Some(def::DefStruct(..)) | Some(def::DefTy(_, false)) => {
1105 // A struct (in either the value or type
1106 // namespace; we encounter the former on
1107 // e.g. patterns for unit structs).
1109 debug!("struct cmt_pat={:?} pat={:?}",
1113 delegate.matched_pat(pat, cmt_pat, match_mode);
1116 Some(def::DefConst(..)) |
1117 Some(def::DefAssociatedConst(..)) |
1118 Some(def::DefLocal(..)) => {
1119 // This is a leaf (i.e. identifier binding
1120 // or constant value to match); thus no
1121 // `matched_pat` call.
1124 Some(def @ def::DefTy(_, true)) => {
1125 // An enum's type -- should never be in a
1128 if !tcx.sess.has_errors() {
1129 let msg = format!("Pattern has unexpected type: {:?} and type {:?}",
1132 tcx.sess.span_bug(pat.span, &msg)
1137 // Remaining cases are e.g. DefFn, to
1138 // which identifiers within patterns
1139 // should not resolve. However, we do
1140 // encounter this when using the
1141 // expr-use-visitor during typeck. So just
1142 // ignore it, an error should have been
1145 if !tcx.sess.has_errors() {
1146 let msg = format!("Pattern has unexpected def: {:?} and type {:?}",
1149 tcx.sess.span_bug(pat.span, &msg[..])
1155 hir::PatIdent(_, _, Some(_)) => {
1156 // Do nothing; this is a binding (not a enum
1157 // variant or struct), and the cat_pattern call
1158 // will visit the substructure recursively.
1161 hir::PatWild(_) | hir::PatTup(..) | hir::PatBox(..) |
1162 hir::PatRegion(..) | hir::PatLit(..) | hir::PatRange(..) |
1163 hir::PatVec(..) => {
1164 // Similarly, each of these cases does not
1165 // correspond to a enum variant or struct, so we
1166 // do not do any `matched_pat` calls for these
1173 fn walk_captures(&mut self, closure_expr: &hir::Expr) {
1174 debug!("walk_captures({:?})", closure_expr);
1176 self.tcx().with_freevars(closure_expr.id, |freevars| {
1177 for freevar in freevars {
1178 let id_var = freevar.def.def_id().node;
1179 let upvar_id = ty::UpvarId { var_id: id_var,
1180 closure_expr_id: closure_expr.id };
1181 let upvar_capture = self.typer.upvar_capture(upvar_id).unwrap();
1182 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1185 match upvar_capture {
1186 ty::UpvarCapture::ByValue => {
1187 let mode = copy_or_move(self.typer, &cmt_var, CaptureMove);
1188 self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
1190 ty::UpvarCapture::ByRef(upvar_borrow) => {
1191 self.delegate.borrow(closure_expr.id,
1194 upvar_borrow.region,
1196 ClosureCapture(freevar.span));
// Categorizes a closure upvar from the *caller's* point of view:
// produces the `cmt` for the captured variable as it exists outside
// the closure, so the capture can be reported as a consume/borrow.
1203 fn cat_captured_var(&mut self,
1204 closure_id: ast::NodeId,
1206 upvar_def: def::Def)
1207 -> mc::McResult<mc::cmt<'tcx>> {
1208 // Create the cmt for the variable being borrowed, from the
1209 // caller's perspective
1210 let var_id = upvar_def.def_id().node;
1211 let var_ty = try!(self.typer.node_ty(var_id));
1212 self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
1216 fn copy_or_move<'a, 'tcx>(typer: &infer::InferCtxt<'a, 'tcx>,
1217 cmt: &mc::cmt<'tcx>,
1218 move_reason: MoveReason)
1221 if typer.type_moves_by_default(cmt.ty, cmt.span) {