1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A different sort of visitor for walking fn bodies. Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
15 pub use self::LoanCause::*;
16 pub use self::ConsumeMode::*;
17 pub use self::MoveReason::*;
18 pub use self::MatchMode::*;
19 use self::TrackMatchMode::*;
20 use self::OverloadedCallType::*;
23 use hir::def_id::{DefId};
25 use middle::mem_categorization as mc;
26 use ty::{self, TyCtxt, adjustment};
28 use hir::{self, PatKind};
34 ///////////////////////////////////////////////////////////////////////////
37 /// This trait defines the callbacks you can expect to receive when
38 /// employing the ExprUseVisitor.
// NOTE(review): this extract is a numbered listing with original lines missing
// (the inline numbering skips); the method signatures below are partial.
// Verify against the full source before acting on any of them.
39 pub trait Delegate<'tcx> {
40 // The value found at `cmt` is either copied or moved, depending
43 consume_id: ast::NodeId,
48 // The value found at `cmt` has been determined to match the
49 // pattern binding `matched_pat`, and its subparts are being
50 // copied or moved depending on `mode`. Note that `matched_pat`
51 // is called on all variant/structs in the pattern (i.e., the
52 // interior nodes of the pattern's tree structure) while
53 // consume_pat is called on the binding identifiers in the pattern
54 // (which are leaves of the pattern's tree structure).
56 // Note that variants/structs and identifiers are disjoint; thus
57 // `matched_pat` and `consume_pat` are never both called on the
58 // same input pattern structure (though of course `consume_pat` can be
59 // called on a subpart of an input passed to `matched_pat`).
60 fn matched_pat(&mut self,
61 matched_pat: &hir::Pat,
65 // The value found at `cmt` is either copied or moved via the
66 // pattern binding `consume_pat`, depending on mode.
67 fn consume_pat(&mut self,
68 consume_pat: &hir::Pat,
72 // The value found at `borrow` is being borrowed at the point
73 // `borrow_id` for the region `loan_region` with kind `bk`.
75 borrow_id: ast::NodeId,
78 loan_region: &'tcx ty::Region,
80 loan_cause: LoanCause);
82 // The local variable `id` is declared but not initialized.
83 fn decl_without_init(&mut self,
87 // The path at `cmt` is being assigned to.
89 assignment_id: ast::NodeId,
90 assignment_span: Span,
91 assignee_cmt: mc::cmt<'tcx>,
// NOTE(review): the derives below belong to enums whose bodies are partly or
// wholly missing from this extract (e.g. the first derive's enum — presumably
// `LoanCause` per the re-export above — is absent entirely). Variant lists
// shown here are incomplete; confirm against the full source.
95 #[derive(Copy, Clone, PartialEq, Debug)]
108 #[derive(Copy, Clone, PartialEq, Debug)]
109 pub enum ConsumeMode {
110 Copy, // reference to x where x has a type that copies
111 Move(MoveReason), // reference to x where x has a type that moves
114 #[derive(Copy, Clone, PartialEq, Debug)]
115 pub enum MoveReason {
121 #[derive(Copy, Clone, PartialEq, Debug)]
129 #[derive(Copy, Clone, PartialEq, Debug)]
130 enum TrackMatchMode {
// NOTE(review): `lub` computes a least-upper-bound over match modes; the
// lattice diagram originally between the comment lines below is missing from
// this extract, as are the closing braces of both methods.
136 impl TrackMatchMode {
137 // Builds up the whole match mode for a pattern from its constituent
138 // parts. The lattice looks like this:
154 // * `(_, some_int)` pattern is Copying, since
155 // NonBinding + Copying => Copying
157 // * `(some_int, some_box)` pattern is Moving, since
158 // Copying + Moving => Moving
160 // * `(ref x, some_box)` pattern is Conflicting, since
161 // Borrowing + Moving => Conflicting
163 // Note that the `Unknown` and `Conflicting` states are
164 // represented separately from the other more interesting
165 // `Definite` states, which simplifies logic here somewhat.
166 fn lub(&mut self, mode: MatchMode) {
167 *self = match (*self, mode) {
168 // Note that clause order below is very significant.
169 (Unknown, new) => Definite(new),
170 (Definite(old), new) if old == new => Definite(old),
172 (Definite(old), NonBindingMatch) => Definite(old),
173 (Definite(NonBindingMatch), new) => Definite(new),
175 (Definite(old), CopyingMatch) => Definite(old),
176 (Definite(CopyingMatch), new) => Definite(new),
178 (Definite(_), _) => Conflicting,
179 (Conflicting, _) => *self,
// Collapses the accumulated state back to a plain MatchMode; the
// `Conflicting` arm (missing here) conservatively yields MovingMatch.
183 fn match_mode(&self) -> MatchMode {
185 Unknown => NonBindingMatch,
186 Definite(mode) => mode,
188 // Conservatively return MovingMatch to let the
189 // compiler continue to make progress.
// NOTE(review): both enums below are missing variants in this extract
// (`MutateMode` shows only `WriteAndRead`; `OverloadedCallType` shows only
// `FnOnceOverloadedCall`, though `FnOverloadedCall` and `FnMutOverloadedCall`
// are referenced by the impl further down). Confirm against the full source.
196 #[derive(Copy, Clone, PartialEq, Debug)]
197 pub enum MutateMode {
200 WriteAndRead, // x += y
203 #[derive(Copy, Clone)]
204 enum OverloadedCallType {
207 FnOnceOverloadedCall,
// NOTE(review): maps a Fn/FnMut/FnOnce lang-item trait (or a method thereof)
// to the corresponding OverloadedCallType. Loop/brace closings are missing
// from this extract.
210 impl OverloadedCallType {
211 fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType {
212 for &(maybe_function_trait, overloaded_call_type) in &[
213 (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
214 (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
215 (tcx.lang_items.fn_trait(), FnOverloadedCall)
217 match maybe_function_trait {
218 Some(function_trait) if function_trait == trait_id => {
219 return overloaded_call_type
// Reached only if the trait id matched none of the three fn traits.
225 bug!("overloaded call didn't map to known function trait")
// Resolves the method to its container (the trait), then classifies it.
228 fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType {
229 let method = tcx.associated_item(method_id);
230 OverloadedCallType::from_trait_id(tcx, method.container.id())
234 ///////////////////////////////////////////////////////////////////////////
235 // The ExprUseVisitor type
237 // This is the code that actually walks the tree. Like
238 // mem_categorization, it requires a TYPER, which is a type that
239 // supplies types from the tree. After type checking is complete, you
240 // can just use the tcx as the typer.
// NOTE(review): struct fields shown are partial in this extract.
241 pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
242 mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
243 delegate: &'a mut Delegate<'tcx>,
246 // If the TYPER results in an error, it's because the type check
247 // failed (or will fail, when the error is uncovered and reported
248 // during writeback). In this case, we just ignore this part of the
251 // Note that this macro appears similar to try!(), but, unlike try!(),
252 // it does not propagate the error.
// NOTE(review): macro body is almost entirely missing from this extract;
// from its uses below it unwraps an Ok value and early-returns on Err.
253 macro_rules! return_if_err {
258 debug!("mc reported err");
265 /// Whether the elements of an overloaded operation are passed by value or by reference
// NOTE(review): the entire impl below is an incomplete numbered listing —
// many original lines (closing braces, match scrutinees, argument lists)
// are missing. Comments added here describe only what the visible lines
// demonstrate; confirm details against the full source.
271 impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
// Constructors: `new` delegates to `with_options` with default
// mem-categorization options.
272 pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
273 infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
276 ExprUseVisitor::with_options(delegate, infcx, mc::MemCategorizationOptions::default())
279 pub fn with_options(delegate: &'a mut (Delegate<'tcx>+'a),
280 infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
281 options: mc::MemCategorizationOptions)
285 mc: mc::MemCategorizationContext::with_options(infcx, options),
// Entry point: walks each argument pattern as an irrefutable pat,
// then consumes the body expression.
290 pub fn consume_body(&mut self, body: &hir::Body) {
291 debug!("consume_body(body={:?})", body);
293 for arg in &body.arguments {
294 let arg_ty = return_if_err!(self.mc.infcx.node_ty(arg.pat.id));
296 let fn_body_scope_r = self.tcx().node_scope_region(body.value.id);
297 let arg_cmt = self.mc.cat_rvalue(
300 fn_body_scope_r, // Args live only as long as the fn body.
304 self.walk_irrefutable_pat(arg_cmt, &arg.pat);
307 self.consume_expr(&body.value);
310 fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
// Reports a consume (copy or move, decided by copy_or_move) to the delegate.
314 fn delegate_consume(&mut self,
315 consume_id: ast::NodeId,
317 cmt: mc::cmt<'tcx>) {
318 debug!("delegate_consume(consume_id={}, cmt={:?})",
321 let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove);
322 self.delegate.consume(consume_id, consume_span, cmt, mode);
325 fn consume_exprs(&mut self, exprs: &[hir::Expr]) {
327 self.consume_expr(&expr);
// Categorizes `expr`, reports its consumption, then recurses into it.
331 pub fn consume_expr(&mut self, expr: &hir::Expr) {
332 debug!("consume_expr(expr={:?})", expr);
334 let cmt = return_if_err!(self.mc.cat_expr(expr));
335 self.delegate_consume(expr.id, expr.span, cmt);
336 self.walk_expr(expr);
// Reports a mutation of `expr` (attributed to `assignment_expr`).
339 fn mutate_expr(&mut self,
340 assignment_expr: &hir::Expr,
343 let cmt = return_if_err!(self.mc.cat_expr(expr));
344 self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
345 self.walk_expr(expr);
// Reports a borrow of `expr` with the given region/kind/cause.
348 fn borrow_expr(&mut self,
353 debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
356 let cmt = return_if_err!(self.mc.cat_expr(expr));
357 self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
362 fn select_from_expr(&mut self, expr: &hir::Expr) {
// The main dispatch: walks adjustments first, then matches on the
// expression kind to classify each subexpression as consumed,
// mutated, borrowed, or merely selected-from.
366 pub fn walk_expr(&mut self, expr: &hir::Expr) {
367 debug!("walk_expr(expr={:?})", expr);
369 self.walk_adjustment(expr);
372 hir::ExprPath(_) => { }
374 hir::ExprType(ref subexpr, _) => {
375 self.walk_expr(&subexpr)
378 hir::ExprUnary(hir::UnDeref, ref base) => { // *base
379 if !self.walk_overloaded_operator(expr, &base, Vec::new(), PassArgs::ByRef) {
380 self.select_from_expr(&base);
384 hir::ExprField(ref base, _) => { // base.f
385 self.select_from_expr(&base);
388 hir::ExprTupField(ref base, _) => { // base.<n>
389 self.select_from_expr(&base);
392 hir::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
393 if !self.walk_overloaded_operator(expr,
397 self.select_from_expr(&lhs);
398 self.consume_expr(&rhs);
402 hir::ExprCall(ref callee, ref args) => { // callee(args)
403 self.walk_callee(expr, &callee);
404 self.consume_exprs(args);
407 hir::ExprMethodCall(.., ref args) => { // callee.m(args)
408 self.consume_exprs(args);
411 hir::ExprStruct(_, ref fields, ref opt_with) => {
412 self.walk_struct_expr(fields, opt_with);
415 hir::ExprTup(ref exprs) => {
416 self.consume_exprs(exprs);
419 hir::ExprIf(ref cond_expr, ref then_expr, ref opt_else_expr) => {
420 self.consume_expr(&cond_expr);
421 self.walk_expr(&then_expr);
422 if let Some(ref else_expr) = *opt_else_expr {
423 self.consume_expr(&else_expr);
// The discriminant gets an ImmBorrow with an empty region here; the
// real borrow treatment happens per-arm below.
427 hir::ExprMatch(ref discr, ref arms, _) => {
428 let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
429 let r = self.tcx().types.re_empty;
430 self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant);
432 // treatment of the discriminant is handled while walking the arms.
434 let mode = self.arm_move_mode(discr_cmt.clone(), arm);
435 let mode = mode.match_mode();
436 self.walk_arm(discr_cmt.clone(), arm, mode);
440 hir::ExprArray(ref exprs) => {
441 self.consume_exprs(exprs);
444 hir::ExprAddrOf(m, ref base) => { // &base
445 // make sure that the thing we are pointing out stays valid
446 // for the lifetime `scope_r` of the resulting ptr:
447 let expr_ty = return_if_err!(self.mc.infcx.node_ty(expr.id));
448 if let ty::TyRef(r, _) = expr_ty.sty {
449 let bk = ty::BorrowKind::from_mutbl(m);
450 self.borrow_expr(&base, r, bk, AddrOf);
454 hir::ExprInlineAsm(ref ia, ref outputs, ref inputs) => {
455 for (o, output) in ia.outputs.iter().zip(outputs) {
457 self.consume_expr(output);
// Read-write asm outputs are WriteAndRead; plain outputs JustWrite.
459 self.mutate_expr(expr, output,
461 MutateMode::WriteAndRead
463 MutateMode::JustWrite
467 self.consume_exprs(inputs);
471 hir::ExprLit(..) => {}
473 hir::ExprLoop(ref blk, _, _) => {
474 self.walk_block(&blk);
477 hir::ExprWhile(ref cond_expr, ref blk, _) => {
478 self.consume_expr(&cond_expr);
479 self.walk_block(&blk);
482 hir::ExprUnary(op, ref lhs) => {
483 let pass_args = if op.is_by_value() {
489 if !self.walk_overloaded_operator(expr, &lhs, Vec::new(), pass_args) {
490 self.consume_expr(&lhs);
494 hir::ExprBinary(op, ref lhs, ref rhs) => {
495 let pass_args = if op.node.is_by_value() {
501 if !self.walk_overloaded_operator(expr, &lhs, vec![&rhs], pass_args) {
502 self.consume_expr(&lhs);
503 self.consume_expr(&rhs);
507 hir::ExprBlock(ref blk) => {
508 self.walk_block(&blk);
511 hir::ExprBreak(_, ref opt_expr) | hir::ExprRet(ref opt_expr) => {
512 if let Some(ref expr) = *opt_expr {
513 self.consume_expr(&expr);
517 hir::ExprAssign(ref lhs, ref rhs) => {
518 self.mutate_expr(expr, &lhs, MutateMode::JustWrite);
519 self.consume_expr(&rhs);
522 hir::ExprCast(ref base, _) => {
523 self.consume_expr(&base);
526 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
527 // NB All our assignment operations take the RHS by value
528 assert!(op.node.is_by_value());
530 if !self.walk_overloaded_operator(expr, lhs, vec![rhs], PassArgs::ByValue) {
531 self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
532 self.consume_expr(&rhs);
536 hir::ExprRepeat(ref base, _) => {
537 self.consume_expr(&base);
540 hir::ExprClosure(.., fn_decl_span) => {
541 self.walk_captures(expr, fn_decl_span)
544 hir::ExprBox(ref base) => {
545 self.consume_expr(&base);
// Plain fns/fn-pointers: the callee is consumed. Overloaded calls
// (Fn/FnMut/FnOnce) borrow or consume the callee per the trait used.
550 fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
551 let callee_ty = return_if_err!(self.mc.infcx.expr_ty_adjusted(callee));
552 debug!("walk_callee: callee={:?} callee_ty={:?}",
554 match callee_ty.sty {
555 ty::TyFnDef(..) | ty::TyFnPtr(_) => {
556 self.consume_expr(callee);
560 let overloaded_call_type =
561 match self.mc.infcx.node_method_id(ty::MethodCall::expr(call.id)) {
563 OverloadedCallType::from_method_id(self.tcx(), method_id)
568 "unexpected callee type {}",
572 match overloaded_call_type {
573 FnMutOverloadedCall => {
574 let call_scope_r = self.tcx().node_scope_region(call.id);
575 self.borrow_expr(callee,
580 FnOverloadedCall => {
581 let call_scope_r = self.tcx().node_scope_region(call.id);
582 self.borrow_expr(callee,
587 FnOnceOverloadedCall => self.consume_expr(callee),
593 fn walk_stmt(&mut self, stmt: &hir::Stmt) {
595 hir::StmtDecl(ref decl, _) => {
597 hir::DeclLocal(ref local) => {
598 self.walk_local(&local);
601 hir::DeclItem(_) => {
602 // we don't visit nested items in this visitor,
603 // only the fn body we were given.
608 hir::StmtExpr(ref expr, _) |
609 hir::StmtSemi(ref expr, _) => {
610 self.consume_expr(&expr);
// Uninitialized `let`s report decl_without_init per binding;
// initialized `let`s walk the init expr and the pattern.
615 fn walk_local(&mut self, local: &hir::Local) {
618 let delegate = &mut self.delegate;
619 local.pat.each_binding(|_, id, span, _| {
620 delegate.decl_without_init(id, span);
625 // Variable declarations with
626 // initializers are considered
627 // "assigns", which is handled by
629 self.walk_expr(&expr);
630 let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
631 self.walk_irrefutable_pat(init_cmt, &local.pat);
636 /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
637 /// depending on its type.
638 fn walk_block(&mut self, blk: &hir::Block) {
639 debug!("walk_block(blk.id={})", blk.id);
641 for stmt in &blk.stmts {
642 self.walk_stmt(stmt);
645 if let Some(ref tail_expr) = blk.expr {
646 self.consume_expr(&tail_expr);
// Struct literals: each explicit field is consumed; for `..base`
// functional update, only the fields NOT listed are consumed out of
// the base expression.
650 fn walk_struct_expr(&mut self,
651 fields: &[hir::Field],
652 opt_with: &Option<P<hir::Expr>>) {
653 // Consume the expressions supplying values for each field.
654 for field in fields {
655 self.consume_expr(&field.expr);
658 let with_expr = match *opt_with {
663 let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));
665 // Select just those fields of the `with`
666 // expression that will actually be used
667 match with_cmt.ty.sty {
668 ty::TyAdt(adt, substs) if adt.is_struct() => {
669 // Consume those fields of the with expression that are needed.
670 for with_field in &adt.struct_variant().fields {
671 if !contains_field_named(with_field, fields) {
672 let cmt_field = self.mc.cat_field(
676 with_field.ty(self.tcx(), substs)
678 self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
683 // the base expression should always evaluate to a
684 // struct; however, when EUV is run during typeck, it
685 // may not. This will generate an error earlier in typeck,
686 // so we can just ignore it.
687 if !self.tcx().sess.has_errors() {
690 "with expression doesn't evaluate to a struct");
695 // walk the with expression so that complex expressions
696 // are properly handled.
697 self.walk_expr(with_expr);
// Nested helper: does the literal's field list name this struct field?
699 fn contains_field_named(field: &ty::FieldDef,
700 fields: &[hir::Field])
704 |f| f.name.node == field.name)
708 // Invoke the appropriate delegate calls for anything that gets
709 // consumed or borrowed as part of the automatic adjustment
711 fn walk_adjustment(&mut self, expr: &hir::Expr) {
712 let infcx = self.mc.infcx;
713 //NOTE(@jroesch): mixed RefCell borrow causes crash
714 let adj = infcx.tables.borrow().adjustments.get(&expr.id).map(|x| x.clone());
715 if let Some(adjustment) = adj {
716 match adjustment.kind {
717 adjustment::Adjust::NeverToAny |
718 adjustment::Adjust::ReifyFnPointer |
719 adjustment::Adjust::UnsafeFnPointer |
720 adjustment::Adjust::ClosureFnPointer |
721 adjustment::Adjust::MutToConstPointer => {
722 // Creating a closure/fn-pointer or unsizing consumes
723 // the input and stores it into the resulting rvalue.
724 debug!("walk_adjustment: trivial adjustment");
726 return_if_err!(self.mc.cat_expr_unadjusted(expr));
727 self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
729 adjustment::Adjust::DerefRef { autoderefs, autoref, unsize } => {
730 debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
732 self.walk_autoderefs(expr, autoderefs);
735 return_if_err!(self.mc.cat_expr_autoderefd(expr, autoderefs));
738 self.walk_autoref(expr, cmt_derefd, autoref);
741 // Unsizing consumes the thin pointer and produces a fat one.
742 self.delegate_consume(expr.id, expr.span, cmt_refd);
749 /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
750 /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
751 /// `deref()` is declared with `&self`, this is an autoref of `x`.
752 fn walk_autoderefs(&mut self,
755 debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);
757 for i in 0..autoderefs {
758 let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
759 if let Some(method_ty) = self.mc.infcx.node_method_ty(deref_id) {
760 let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
762 // the method call infrastructure should have
763 // replaced all late-bound regions with variables:
764 let self_ty = method_ty.fn_sig().input(0);
765 let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
767 let (m, r) = match self_ty.sty {
768 ty::TyRef(r, ref m) => (m.mutbl, r),
769 _ => span_bug!(expr.span,
770 "bad overloaded deref type {:?}",
773 let bk = ty::BorrowKind::from_mutbl(m);
774 self.delegate.borrow(expr.id, expr.span, cmt,
780 /// Walks the autoref `opt_autoref` applied to the autoderef'd
781 /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
782 /// after all relevant autoderefs have occurred. Because AutoRefs
783 /// can be recursive, this function is recursive: it first walks
784 /// deeply all the way down the autoref chain, and then processes
785 /// the autorefs on the way out. At each point, it returns the
786 /// `cmt` for the rvalue that will be produced by introducing an
788 fn walk_autoref(&mut self,
790 cmt_base: mc::cmt<'tcx>,
791 opt_autoref: Option<adjustment::AutoBorrow<'tcx>>)
794 debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
799 let cmt_base_ty = cmt_base.ty;
801 let autoref = match opt_autoref {
802 Some(ref autoref) => autoref,
810 adjustment::AutoBorrow::Ref(r, m) => {
811 self.delegate.borrow(expr.id,
815 ty::BorrowKind::from_mutbl(m),
819 adjustment::AutoBorrow::RawPtr(m) => {
820 debug!("walk_autoref: expr.id={} cmt_base={:?}",
824 // Converting from a &T to *T (or &mut T to *mut T) is
825 // treated as borrowing it for the enclosing temporary
827 let r = self.tcx().node_scope_region(expr.id);
829 self.delegate.borrow(expr.id,
833 ty::BorrowKind::from_mutbl(m),
838 // Construct the categorization for the result of the autoref.
839 // This is always an rvalue, since we are producing a new
840 // (temporary) indirection.
842 let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);
844 self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
848 // When this returns true, it means that the expression *is* a
849 // method-call (i.e. via the operator-overload). This true result
850 // also implies that walk_overloaded_operator already took care of
851 // recursively processing the input arguments, and thus the caller
853 fn walk_overloaded_operator(&mut self,
855 receiver: &hir::Expr,
856 rhs: Vec<&hir::Expr>,
860 if !self.mc.infcx.is_method_call(expr.id) {
865 PassArgs::ByValue => {
866 self.consume_expr(receiver);
868 self.consume_expr(arg);
873 PassArgs::ByRef => {},
876 self.walk_expr(receiver);
878 // Arguments (but not receivers) to overloaded operator
879 // methods are implicitly autoref'd which sadly does not use
880 // adjustments, so we must hardcode the borrow here.
882 let r = self.tcx().node_scope_region(expr.id);
883 let bk = ty::ImmBorrow;
886 self.borrow_expr(arg, r, bk, OverloadedOperator);
// Folds the move modes of all patterns of one arm into a TrackMatchMode.
891 fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
892 let mut mode = Unknown;
893 for pat in &arm.pats {
894 self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
899 fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
900 for pat in &arm.pats {
901 self.walk_pat(discr_cmt.clone(), &pat, mode);
904 if let Some(ref guard) = arm.guard {
905 self.consume_expr(&guard);
908 self.consume_expr(&arm.body);
911 /// Walks a pat that occurs in isolation (i.e. top-level of fn
912 /// arg or let binding. *Not* a match arm or nested pat.)
913 fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
914 let mut mode = Unknown;
915 self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
916 let mode = mode.match_mode();
917 self.walk_pat(cmt_discr, pat, mode);
920 /// Identifies any bindings within `pat` and accumulates within
921 /// `mode` whether the overall pattern/match structure is a move,
923 fn determine_pat_move_mode(&mut self,
924 cmt_discr: mc::cmt<'tcx>,
926 mode: &mut TrackMatchMode) {
927 debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
929 return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
931 PatKind::Binding(hir::BindByRef(..), ..) =>
932 mode.lub(BorrowingMatch),
933 PatKind::Binding(hir::BindByValue(..), ..) => {
934 match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) {
935 Copy => mode.lub(CopyingMatch),
936 Move(..) => mode.lub(MovingMatch),
944 /// The core driver for walking a pattern; `match_mode` must be
945 /// established up front, e.g. via `determine_pat_move_mode` (see
946 /// also `walk_irrefutable_pat` for patterns that stand alone).
947 fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
948 debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);
950 let tcx = &self.tcx();
952 let infcx = self.mc.infcx;
953 let delegate = &mut self.delegate;
954 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
955 if let PatKind::Binding(bmode, def_id, ..) = pat.node {
956 debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}", cmt_pat, pat, match_mode);
958 // pat_ty: the type of the binding being produced.
959 let pat_ty = return_if_err!(infcx.node_ty(pat.id));
961 // Each match binding is effectively an assignment to the
962 // binding being produced.
963 let def = Def::Local(def_id);
964 if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty, def) {
965 delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
968 // It is also a borrow or copy/move of the value being matched.
970 hir::BindByRef(m) => {
971 if let ty::TyRef(r, _) = pat_ty.sty {
972 let bk = ty::BorrowKind::from_mutbl(m);
973 delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
976 hir::BindByValue(..) => {
977 let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
978 debug!("walk_pat binding consuming pat");
979 delegate.consume_pat(pat, cmt_pat, mode);
985 // Do a second pass over the pattern, calling `matched_pat` on
986 // the interior nodes (enum variants and structs), as opposed
987 // to the above loop's visit of the bindings that form
988 // the leaves of the pattern tree structure.
989 return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
990 let qpath = match pat.node {
991 PatKind::Path(ref qpath) |
992 PatKind::TupleStruct(ref qpath, ..) |
993 PatKind::Struct(ref qpath, ..) => qpath,
996 let def = infcx.tables.borrow().qpath_def(qpath, pat.id);
998 Def::Variant(variant_did) |
999 Def::VariantCtor(variant_did, ..) => {
1000 let enum_did = tcx.parent_def_id(variant_did).unwrap();
1001 let downcast_cmt = if tcx.adt_def(enum_did).is_univariant() {
1004 let cmt_pat_ty = cmt_pat.ty;
1005 mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
1008 debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
1009 delegate.matched_pat(pat, downcast_cmt, match_mode);
1011 Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) |
1012 Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => {
1013 debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
1014 delegate.matched_pat(pat, cmt_pat, match_mode);
// Closure captures: each free variable is either consumed (by-value
// capture, copy vs. move per copy_or_move) or borrowed (by-ref capture).
1021 fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
1022 debug!("walk_captures({:?})", closure_expr);
1024 self.tcx().with_freevars(closure_expr.id, |freevars| {
1025 for freevar in freevars {
1026 let def_id = freevar.def.def_id();
1027 let id_var = self.tcx().hir.as_local_node_id(def_id).unwrap();
1028 let upvar_id = ty::UpvarId { var_id: id_var,
1029 closure_expr_id: closure_expr.id };
1030 let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap();
1031 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1034 match upvar_capture {
1035 ty::UpvarCapture::ByValue => {
1036 let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove);
1037 self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
1039 ty::UpvarCapture::ByRef(upvar_borrow) => {
1040 self.delegate.borrow(closure_expr.id,
1043 upvar_borrow.region,
1045 ClosureCapture(freevar.span));
1052 fn cat_captured_var(&mut self,
1053 closure_id: ast::NodeId,
1056 -> mc::McResult<mc::cmt<'tcx>> {
1057 // Create the cmt for the variable being borrowed, from the
1058 // caller's perspective
1059 let var_id = self.tcx().hir.as_local_node_id(upvar_def.def_id()).unwrap();
1060 let var_ty = self.mc.infcx.node_ty(var_id)?;
1061 self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
1065 fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
1066 cmt: &mc::cmt<'tcx>,
1067 move_reason: MoveReason)
1070 if infcx.type_moves_by_default(cmt.ty, cmt.span) {