1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A different sort of visitor for walking fn bodies. Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
15 pub use self::LoanCause::*;
16 pub use self::ConsumeMode::*;
17 pub use self::MoveReason::*;
18 pub use self::MatchMode::*;
19 use self::TrackMatchMode::*;
20 use self::OverloadedCallType::*;
24 use hir::def_id::{DefId};
26 use middle::mem_categorization as mc;
27 use ty::{self, TyCtxt, adjustment};
29 use hir::{self, PatKind};
33 use syntax::codemap::Span;
35 ///////////////////////////////////////////////////////////////////////////
38 /// This trait defines the callbacks you can expect to receive when
39 /// employing the ExprUseVisitor.
// NOTE(review): this is a numbered excerpt with elided lines; several
// callback signatures below are only partially visible. Annotations
// describe only what the visible text establishes.
40 pub trait Delegate<'tcx> {
41 // The value found at `cmt` is either copied or moved, depending
// (elided: remainder of the `consume` callback signature)
44 consume_id: ast::NodeId,
49 // The value found at `cmt` has been determined to match the
50 // pattern binding `matched_pat`, and its subparts are being
51 // copied or moved depending on `mode`. Note that `matched_pat`
52 // is called on all variant/structs in the pattern (i.e., the
53 // interior nodes of the pattern's tree structure) while
54 // consume_pat is called on the binding identifiers in the pattern
55 // (which are leaves of the pattern's tree structure).
57 // Note that variants/structs and identifiers are disjoint; thus
58 // `matched_pat` and `consume_pat` are never both called on the
59 // same input pattern structure (though, of course, `consume_pat` can be
60 // called on a subpart of an input passed to `matched_pat`).
61 fn matched_pat(&mut self,
62 matched_pat: &hir::Pat,
66 // The value found at `cmt` is either copied or moved via the
67 // pattern binding `consume_pat`, depending on mode.
68 fn consume_pat(&mut self,
69 consume_pat: &hir::Pat,
73 // The value found at `borrow` is being borrowed at the point
74 // `borrow_id` for the region `loan_region` with kind `bk`.
76 borrow_id: ast::NodeId,
79 loan_region: ty::Region,
81 loan_cause: LoanCause);
83 // The local variable `id` is declared but not initialized.
84 fn decl_without_init(&mut self,
88 // The path at `cmt` is being assigned to.
// (elided: `mutate` signature header; visible parameters follow)
90 assignment_id: ast::NodeId,
91 assignment_span: Span,
92 assignee_cmt: mc::cmt<'tcx>,
// (elided: this derive belongs to an enum — presumably `LoanCause`,
// re-exported above — whose body is not visible in this excerpt.)
96 #[derive(Copy, Clone, PartialEq, Debug)]
// How a value is consumed when referenced: by copy (for `Copy` types)
// or by move (everything else).
109 #[derive(Copy, Clone, PartialEq, Debug)]
110 pub enum ConsumeMode {
111 Copy, // reference to x where x has a type that copies
112 Move(MoveReason), // reference to x where x has a type that moves
// Why a move occurred; variants elided in this excerpt (see the
// `DirectRefMove` / `PatBindingMove` / `CaptureMove` uses below).
115 #[derive(Copy, Clone, PartialEq, Debug)]
116 pub enum MoveReason {
// (elided: derive for the next enum, presumably `MatchMode`.)
122 #[derive(Copy, Clone, PartialEq, Debug)]
// Accumulator used while scanning a pattern's bindings to decide the
// overall MatchMode. `lub` folds per-binding modes into the lattice.
130 #[derive(Copy, Clone, PartialEq, Debug)]
131 enum TrackMatchMode {
137 impl TrackMatchMode {
138 // Builds up the whole match mode for a pattern from its constituent
139 // parts. The lattice looks like this:
// (elided: lattice diagram and earlier examples)
155 // * `(_, some_int)` pattern is Copying, since
156 // NonBinding + Copying => Copying
158 // * `(some_int, some_box)` pattern is Moving, since
159 // Copying + Moving => Moving
161 // * `(ref x, some_box)` pattern is Conflicting, since
162 // Borrowing + Moving => Conflicting
164 // Note that the `Unknown` and `Conflicting` states are
165 // represented separately from the other more interesting
166 // `Definite` states, which simplifies logic here somewhat.
167 fn lub(&mut self, mode: MatchMode) {
168 *self = match (*self, mode) {
169 // Note that clause order below is very significant.
170 (Unknown, new) => Definite(new),
171 (Definite(old), new) if old == new => Definite(old),
// NonBindingMatch and CopyingMatch are absorbed by any other
// Definite mode; mixing anything else is a conflict.
173 (Definite(old), NonBindingMatch) => Definite(old),
174 (Definite(NonBindingMatch), new) => Definite(new),
176 (Definite(old), CopyingMatch) => Definite(old),
177 (Definite(CopyingMatch), new) => Definite(new),
179 (Definite(_), _) => Conflicting,
180 (Conflicting, _) => *self,
// Collapses the accumulator to a final MatchMode.
184 fn match_mode(&self) -> MatchMode {
186 Unknown => NonBindingMatch,
187 Definite(mode) => mode,
189 // Conservatively return MovingMatch to let the
190 // compiler continue to make progress.
// How an assignment target is used; variants partially elided
// (`Init` and `JustWrite` are referenced elsewhere in this file).
197 #[derive(Copy, Clone, PartialEq, Debug)]
198 pub enum MutateMode {
201 WriteAndRead, // x += y
// Which Fn* trait an overloaded call goes through; other variants
// (`FnOverloadedCall`, `FnMutOverloadedCall`) are elided here but
// used in `from_trait_id` and `walk_callee` below.
204 #[derive(Copy, Clone)]
205 enum OverloadedCallType {
208 FnOnceOverloadedCall,
211 impl OverloadedCallType {
// Maps one of the Fn/FnMut/FnOnce lang-item traits to the
// corresponding OverloadedCallType; bugs out on anything else.
212 fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType {
213 for &(maybe_function_trait, overloaded_call_type) in &[
214 (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
215 (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
216 (tcx.lang_items.fn_trait(), FnOverloadedCall)
218 match maybe_function_trait {
219 Some(function_trait) if function_trait == trait_id => {
220 return overloaded_call_type
226 bug!("overloaded call didn't map to known function trait")
// Resolves a method's containing trait, then classifies it as above.
229 fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType {
230 let method = tcx.impl_or_trait_item(method_id);
231 OverloadedCallType::from_trait_id(tcx, method.container().id())
235 ///////////////////////////////////////////////////////////////////////////
236 // The ExprUseVisitor type
238 // This is the code that actually walks the tree. Like
239 // mem_categorization, it requires a TYPER, which is a type that
240 // supplies types from the tree. After type checking is complete, you
241 // can just use the tcx as the typer.
242 pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
243 mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
244 delegate: &'a mut Delegate<'tcx>,
247 // If the TYPER results in an error, it's because the type check
248 // failed (or will fail, when the error is uncovered and reported
249 // during writeback). In this case, we just ignore this part of the
// (elided: end of the sentence — presumably "...of the code".)
252 // Note that this macro appears similar to try!(), but, unlike try!(),
253 // it does not propagate the error.
254 macro_rules! return_if_err {
259 debug!("mc reported err");
// (elided: the `PassArgs` enum this doc comment describes.)
266 /// Whether the elements of an overloaded operation are passed by value or by reference
272 impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
// Constructs a visitor that reports uses to `delegate`, using the
// inference context for type lookups.
273 pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
274 infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self
277 mc: mc::MemCategorizationContext::new(infcx),
// Entry point: walk a fn's argument patterns, then its body.
282 pub fn walk_fn(&mut self,
285 self.walk_arg_patterns(decl, body);
286 self.walk_block(body);
289 fn walk_arg_patterns(&mut self,
292 for arg in &decl.inputs {
293 let arg_ty = return_if_err!(self.mc.infcx.node_ty(arg.pat.id));
295 let fn_body_scope = self.tcx().region_maps.node_extent(body.id);
// Treat each argument as an rvalue scoped to the fn body.
296 let arg_cmt = self.mc.cat_rvalue(
299 ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
302 self.walk_irrefutable_pat(arg_cmt, &arg.pat);
306 fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
// Reports a consume of `cmt`, classifying it as copy vs. move from
// its type (see `copy_or_move` at the bottom of the file).
310 fn delegate_consume(&mut self,
311 consume_id: ast::NodeId,
313 cmt: mc::cmt<'tcx>) {
314 debug!("delegate_consume(consume_id={}, cmt={:?})",
317 let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove);
318 self.delegate.consume(consume_id, consume_span, cmt, mode);
321 fn consume_exprs(&mut self, exprs: &[P<hir::Expr>]) {
323 self.consume_expr(&expr);
// Consume `expr` (copy or move of its value), then walk it.
327 pub fn consume_expr(&mut self, expr: &hir::Expr) {
328 debug!("consume_expr(expr={:?})", expr);
330 let cmt = return_if_err!(self.mc.cat_expr(expr));
331 self.delegate_consume(expr.id, expr.span, cmt);
332 self.walk_expr(expr);
// Reports a mutation of `expr` (attributed to `assignment_expr`'s
// id/span), then walks it.
335 fn mutate_expr(&mut self,
336 assignment_expr: &hir::Expr,
339 let cmt = return_if_err!(self.mc.cat_expr(expr));
340 self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
341 self.walk_expr(expr);
// Reports a borrow of `expr` with region `r` and kind `bk`.
344 fn borrow_expr(&mut self,
349 debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
352 let cmt = return_if_err!(self.mc.cat_expr(expr));
353 self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
// Used for `base` in `base.f`, `base[i]`, `*base`: the base is read
// but not consumed. (Body elided in this excerpt.)
358 fn select_from_expr(&mut self, expr: &hir::Expr) {
// The main dispatch: records any adjustment-induced uses, then walks
// each expression form, classifying subexpressions as consumed,
// selected (read-through), mutated, or borrowed.
// NOTE(review): several match-arm bodies and closing braces are elided
// in this excerpt.
362 pub fn walk_expr(&mut self, expr: &hir::Expr) {
363 debug!("walk_expr(expr={:?})", expr);
365 self.walk_adjustment(expr);
368 hir::ExprPath(..) => { }
370 hir::ExprType(ref subexpr, _) => {
371 self.walk_expr(&subexpr)
374 hir::ExprUnary(hir::UnDeref, ref base) => { // *base
// If deref is overloaded, the operator walk handles the base;
// otherwise the base is merely selected from.
375 if !self.walk_overloaded_operator(expr, &base, Vec::new(), PassArgs::ByRef) {
376 self.select_from_expr(&base);
380 hir::ExprField(ref base, _) => { // base.f
381 self.select_from_expr(&base);
384 hir::ExprTupField(ref base, _) => { // base.<n>
385 self.select_from_expr(&base);
388 hir::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
389 if !self.walk_overloaded_operator(expr,
393 self.select_from_expr(&lhs);
394 self.consume_expr(&rhs);
398 hir::ExprCall(ref callee, ref args) => { // callee(args)
399 self.walk_callee(expr, &callee);
400 self.consume_exprs(args);
403 hir::ExprMethodCall(_, _, ref args) => { // callee.m(args)
404 self.consume_exprs(args);
407 hir::ExprStruct(_, ref fields, ref opt_with) => {
408 self.walk_struct_expr(expr, fields, opt_with);
411 hir::ExprTup(ref exprs) => {
412 self.consume_exprs(exprs);
415 hir::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
416 self.consume_expr(&cond_expr);
417 self.walk_block(&then_blk);
418 if let Some(ref else_expr) = *opt_else_expr {
419 self.consume_expr(&else_expr);
423 hir::ExprMatch(ref discr, ref arms, _) => {
// The discriminant itself is only borrowed here; per-arm
// copy/move of its parts is handled while walking the arms.
424 let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
425 self.borrow_expr(&discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);
427 // treatment of the discriminant is handled while walking the arms.
429 let mode = self.arm_move_mode(discr_cmt.clone(), arm);
430 let mode = mode.match_mode();
431 self.walk_arm(discr_cmt.clone(), arm, mode);
435 hir::ExprVec(ref exprs) => {
436 self.consume_exprs(exprs);
439 hir::ExprAddrOf(m, ref base) => { // &base
440 // make sure that the thing we are pointing out stays valid
441 // for the lifetime `scope_r` of the resulting ptr:
442 let expr_ty = return_if_err!(self.mc.infcx.node_ty(expr.id));
443 if let ty::TyRef(&r, _) = expr_ty.sty {
444 let bk = ty::BorrowKind::from_mutbl(m);
445 self.borrow_expr(&base, r, bk, AddrOf);
449 hir::ExprInlineAsm(ref ia, ref outputs, ref inputs) => {
// Outputs are writes (read+write when elided condition holds —
// the branch selector between the two modes is elided here);
// inputs are consumed.
450 for (o, output) in ia.outputs.iter().zip(outputs) {
452 self.consume_expr(output);
454 self.mutate_expr(expr, output,
456 MutateMode::WriteAndRead
458 MutateMode::JustWrite
462 self.consume_exprs(inputs);
467 hir::ExprLit(..) => {}
469 hir::ExprLoop(ref blk, _) => {
470 self.walk_block(&blk);
473 hir::ExprWhile(ref cond_expr, ref blk, _) => {
474 self.consume_expr(&cond_expr);
475 self.walk_block(&blk);
478 hir::ExprUnary(op, ref lhs) => {
479 let pass_args = if op.is_by_value() {
485 if !self.walk_overloaded_operator(expr, &lhs, Vec::new(), pass_args) {
486 self.consume_expr(&lhs);
490 hir::ExprBinary(op, ref lhs, ref rhs) => {
491 let pass_args = if op.node.is_by_value() {
497 if !self.walk_overloaded_operator(expr, &lhs, vec![&rhs], pass_args) {
498 self.consume_expr(&lhs);
499 self.consume_expr(&rhs);
503 hir::ExprBlock(ref blk) => {
504 self.walk_block(&blk);
507 hir::ExprRet(ref opt_expr) => {
508 if let Some(ref expr) = *opt_expr {
509 self.consume_expr(&expr);
513 hir::ExprAssign(ref lhs, ref rhs) => {
514 self.mutate_expr(expr, &lhs, MutateMode::JustWrite);
515 self.consume_expr(&rhs);
518 hir::ExprCast(ref base, _) => {
519 self.consume_expr(&base);
522 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
523 // NB All our assignment operations take the RHS by value
524 assert!(op.node.is_by_value());
526 if !self.walk_overloaded_operator(expr, lhs, vec![rhs], PassArgs::ByValue) {
527 self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
528 self.consume_expr(&rhs);
532 hir::ExprRepeat(ref base, ref count) => {
533 self.consume_expr(&base);
534 self.consume_expr(&count);
537 hir::ExprClosure(_, _, _, fn_decl_span) => {
538 self.walk_captures(expr, fn_decl_span)
541 hir::ExprBox(ref base) => {
542 self.consume_expr(&base);
// Walks the callee of a call expression. Plain fn items/pointers are
// simply consumed; closures go through the Fn/FnMut/FnOnce traits and
// are borrowed or consumed accordingly.
547 fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
548 let callee_ty = return_if_err!(self.mc.infcx.expr_ty_adjusted(callee));
549 debug!("walk_callee: callee={:?} callee_ty={:?}",
551 let call_scope = self.tcx().region_maps.node_extent(call.id);
552 match callee_ty.sty {
553 ty::TyFnDef(..) | ty::TyFnPtr(_) => {
554 self.consume_expr(callee);
// Otherwise this is an overloaded call: classify by the method
// the type checker resolved for it.
558 let overloaded_call_type =
559 match self.mc.infcx.node_method_id(ty::MethodCall::expr(call.id)) {
561 OverloadedCallType::from_method_id(self.tcx(), method_id)
566 "unexpected callee type {}",
570 match overloaded_call_type {
571 FnMutOverloadedCall => {
// FnMut/Fn calls borrow the callee for the call scope
// (mutably / immutably; the borrow-kind args are elided).
572 self.borrow_expr(callee,
573 ty::ReScope(call_scope),
577 FnOverloadedCall => {
578 self.borrow_expr(callee,
579 ty::ReScope(call_scope),
583 FnOnceOverloadedCall => self.consume_expr(callee),
589 fn walk_stmt(&mut self, stmt: &hir::Stmt) {
591 hir::StmtDecl(ref decl, _) => {
593 hir::DeclLocal(ref local) => {
594 self.walk_local(&local);
597 hir::DeclItem(_) => {
598 // we don't visit nested items in this visitor,
599 // only the fn body we were given.
// Expression and semicolon statements: the value is consumed.
604 hir::StmtExpr(ref expr, _) |
605 hir::StmtSemi(ref expr, _) => {
606 self.consume_expr(&expr);
// A `let`: with no initializer, each binding is reported as
// declared-without-init; with one, the initializer is walked and
// the pattern treated as irrefutable.
611 fn walk_local(&mut self, local: &hir::Local) {
614 let delegate = &mut self.delegate;
615 pat_util::pat_bindings(&local.pat, |_, id, span, _| {
616 delegate.decl_without_init(id, span);
621 // Variable declarations with
622 // initializers are considered
623 // "assigns", which is handled by
// (elided: end of sentence — handled by the pattern walk below.)
625 self.walk_expr(&expr);
626 let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
627 self.walk_irrefutable_pat(init_cmt, &local.pat);
632 /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
633 /// depending on its type.
634 fn walk_block(&mut self, blk: &hir::Block) {
635 debug!("walk_block(blk.id={})", blk.id);
636 // Walk every statement, then consume the trailing tail expression
637 // (the block's value), if any.
637 for stmt in &blk.stmts {
638 self.walk_stmt(stmt);
641 if let Some(ref tail_expr) = blk.expr {
642 self.consume_expr(&tail_expr);
// Handles `S { f: e, .., ..base }`: consumes each explicit field
// initializer, then consumes only the fields of the `..base`
// expression that the literal did not already supply.
646 fn walk_struct_expr(&mut self,
648 fields: &[hir::Field],
649 opt_with: &Option<P<hir::Expr>>) {
650 // Consume the expressions supplying values for each field.
651 for field in fields {
652 self.consume_expr(&field.expr);
655 let with_expr = match *opt_with {
660 let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));
662 // Select just those fields of the `with`
663 // expression that will actually be used
664 if let ty::TyStruct(def, substs) = with_cmt.ty.sty {
665 // Consume those fields of the with expression that are needed.
666 for with_field in &def.struct_variant().fields {
667 if !contains_field_named(with_field, fields) {
668 let cmt_field = self.mc.cat_field(
672 with_field.ty(self.tcx(), substs)
674 self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
678 // the base expression should always evaluate to a
679 // struct; however, when EUV is run during typeck, it
680 // may not. This will generate an error earlier in typeck,
681 // so we can just ignore it.
682 if !self.tcx().sess.has_errors() {
685 "with expression doesn't evaluate to a struct");
689 // walk the with expression so that complex expressions
690 // are properly handled.
691 self.walk_expr(with_expr);
// True if `fields` (the explicit initializers) names `field`.
693 fn contains_field_named(field: ty::FieldDef,
694 fields: &[hir::Field])
698 |f| f.name.node == field.name)
702 // Invoke the appropriate delegate calls for anything that gets
703 // consumed or borrowed as part of the automatic adjustment
// (elided: end of sentence — "...machinery", presumably.)
705 fn walk_adjustment(&mut self, expr: &hir::Expr) {
706 let infcx = self.mc.infcx;
707 //NOTE(@jroesch): mixed RefCell borrow causes crash
// Clone the adjustment out of the table so the RefCell borrow is
// released before we recurse (see note above).
708 let adj = infcx.adjustments().get(&expr.id).map(|x| x.clone());
709 if let Some(adjustment) = adj {
711 adjustment::AdjustReifyFnPointer |
712 adjustment::AdjustUnsafeFnPointer |
713 adjustment::AdjustMutToConstPointer => {
714 // Creating a closure/fn-pointer or unsizing consumes
715 // the input and stores it into the resulting rvalue.
716 debug!("walk_adjustment: trivial adjustment");
718 return_if_err!(self.mc.cat_expr_unadjusted(expr));
719 self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
721 adjustment::AdjustDerefRef(ref adj) => {
722 self.walk_autoderefref(expr, adj);
728 /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
729 /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
730 /// `deref()` is declared with `&self`, this is an autoref of `x`.
731 fn walk_autoderefs(&mut self,
734 debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);
736 for i in 0..autoderefs {
737 let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
738 match self.mc.infcx.node_method_ty(deref_id) {
// Overloaded deref: derive the borrow kind/region from the
// `&self` type of the resolved Deref method.
741 let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
743 // the method call infrastructure should have
744 // replaced all late-bound regions with variables:
745 let self_ty = method_ty.fn_sig().input(0);
746 let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
748 let (m, r) = match self_ty.sty {
749 ty::TyRef(r, ref m) => (m.mutbl, r),
750 _ => span_bug!(expr.span,
751 "bad overloaded deref type {:?}",
754 let bk = ty::BorrowKind::from_mutbl(m);
755 self.delegate.borrow(expr.id, expr.span, cmt,
// Walks an AutoDerefRef adjustment: autoderefs first, then the
// optional autoref, then a consume if an unsize follows.
762 fn walk_autoderefref(&mut self,
764 adj: &adjustment::AutoDerefRef<'tcx>) {
765 debug!("walk_autoderefref expr={:?} adj={:?}",
769 self.walk_autoderefs(expr, adj.autoderefs);
772 return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));
775 self.walk_autoref(expr, cmt_derefd, adj.autoref);
777 if adj.unsize.is_some() {
778 // Unsizing consumes the thin pointer and produces a fat one.
779 self.delegate_consume(expr.id, expr.span, cmt_refd);
784 /// Walks the autoref `opt_autoref` applied to the autoderef'd
785 /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
786 /// after all relevant autoderefs have occurred. Because AutoRefs
787 /// can be recursive, this function is recursive: it first walks
788 /// deeply all the way down the autoref chain, and then processes
789 /// the autorefs on the way out. At each point, it returns the
790 /// `cmt` for the rvalue that will be produced by introduced an
// (elided: end of doc sentence — "...by the introduced autoref".)
792 fn walk_autoref(&mut self,
794 cmt_base: mc::cmt<'tcx>,
795 opt_autoref: Option<adjustment::AutoRef<'tcx>>)
798 debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
803 let cmt_base_ty = cmt_base.ty;
805 let autoref = match opt_autoref {
806 Some(ref autoref) => autoref,
// `&`/`&mut` autoref: report a borrow of the base for region `r`.
814 adjustment::AutoPtr(r, m) => {
815 self.delegate.borrow(expr.id,
819 ty::BorrowKind::from_mutbl(m),
823 adjustment::AutoUnsafe(m) => {
824 debug!("walk_autoref: expr.id={} cmt_base={:?}",
828 // Converting from a &T to *T (or &mut T to *mut T) is
829 // treated as borrowing it for the enclosing temporary
// (elided: end of sentence — "...scope", presumably.)
831 let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
833 self.delegate.borrow(expr.id,
837 ty::BorrowKind::from_mutbl(m),
842 // Construct the categorization for the result of the autoref.
843 // This is always an rvalue, since we are producing a new
844 // (temporary) indirection.
846 let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);
848 self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
852 // When this returns true, it means that the expression *is* a
853 // method-call (i.e. via the operator-overload). This true result
854 // also implies that walk_overloaded_operator already took care of
855 // recursively processing the input arguments, and thus the caller
// (elided: end of sentence — the caller need not process them again.)
857 fn walk_overloaded_operator(&mut self,
859 receiver: &hir::Expr,
860 rhs: Vec<&hir::Expr>,
// Returns false immediately when the expression is not actually an
// overloaded-operator method call.
864 if !self.mc.infcx.is_method_call(expr.id) {
869 PassArgs::ByValue => {
870 self.consume_expr(receiver);
872 self.consume_expr(arg);
877 PassArgs::ByRef => {},
880 self.walk_expr(receiver);
882 // Arguments (but not receivers) to overloaded operator
883 // methods are implicitly autoref'd which sadly does not use
884 // adjustments, so we must hardcode the borrow here.
886 let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
887 let bk = ty::ImmBorrow;
890 self.borrow_expr(arg, r, bk, OverloadedOperator);
// Computes the aggregate TrackMatchMode over all patterns of one arm.
895 fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
896 let mut mode = Unknown;
897 for pat in &arm.pats {
898 self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
// Walks one arm with the pre-computed mode: patterns, then the
// optional guard and the body (both consumed).
903 fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
904 for pat in &arm.pats {
905 self.walk_pat(discr_cmt.clone(), &pat, mode);
908 if let Some(ref guard) = arm.guard {
909 self.consume_expr(&guard);
912 self.consume_expr(&arm.body);
915 /// Walks a pat that occurs in isolation (i.e. top-level of fn
916 /// arg or let binding. *Not* a match arm or nested pat.)
917 fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
918 let mut mode = Unknown;
919 self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
920 let mode = mode.match_mode();
921 self.walk_pat(cmt_discr, pat, mode);
924 /// Identifies any bindings within `pat` and accumulates within
925 /// `mode` whether the overall pattern/match structure is a move,
// (elided: end of doc sentence — "...copy, or borrow", presumably.)
927 fn determine_pat_move_mode(&mut self,
928 cmt_discr: mc::cmt<'tcx>,
930 mode: &mut TrackMatchMode) {
931 debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
// For each binding: `ref` bindings contribute a borrow; by-value
// bindings contribute copy or move depending on the bound type.
933 return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
935 PatKind::Binding(hir::BindByRef(..), _, _) =>
936 mode.lub(BorrowingMatch),
937 PatKind::Binding(hir::BindByValue(..), _, _) => {
938 match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) {
939 Copy => mode.lub(CopyingMatch),
940 Move(..) => mode.lub(MovingMatch),
948 /// The core driver for walking a pattern; `match_mode` must be
949 /// established up front, e.g. via `determine_pat_move_mode` (see
950 /// also `walk_irrefutable_pat` for patterns that stand alone).
951 fn walk_pat(&mut self,
952 cmt_discr: mc::cmt<'tcx>,
954 match_mode: MatchMode) {
955 debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr,
958 let tcx = &self.tcx();
960 let infcx = self.mc.infcx;
961 let delegate = &mut self.delegate;
// First pass: visit the binding leaves, reporting an init-mutate of
// each binding plus a borrow or copy/move of the matched value.
962 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
964 PatKind::Binding(bmode, _, _) => {
965 debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}",
970 // pat_ty: the type of the binding being produced.
971 let pat_ty = return_if_err!(infcx.node_ty(pat.id));
973 // Each match binding is effectively an assignment to the
974 // binding being produced.
975 if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty,
976 tcx.expect_def(pat.id)) {
977 delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
980 // It is also a borrow or copy/move of the value being matched.
982 hir::BindByRef(m) => {
983 if let ty::TyRef(&r, _) = pat_ty.sty {
984 let bk = ty::BorrowKind::from_mutbl(m);
985 delegate.borrow(pat.id, pat.span, cmt_pat,
989 hir::BindByValue(..) => {
990 let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
991 debug!("walk_pat binding consuming pat");
992 delegate.consume_pat(pat, cmt_pat, mode);
1000 // Do a second pass over the pattern, calling `matched_pat` on
1001 // the interior nodes (enum variants and structs), as opposed
1002 // to the above loop's visit of the bindings that form
1003 // the leaves of the pattern tree structure.
1004 return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
1006 PatKind::Struct(..) | PatKind::TupleStruct(..) |
1007 PatKind::Path(..) | PatKind::QPath(..) => {
1008 match tcx.expect_def(pat.id) {
1009 Def::Variant(enum_did, variant_did) => {
// Univariant enums need no downcast; otherwise wrap the
// cmt in a downcast to the matched variant.
1011 if tcx.lookup_adt_def(enum_did).is_univariant() {
1014 let cmt_pat_ty = cmt_pat.ty;
1015 mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
1018 debug!("variant downcast_cmt={:?} pat={:?}",
1022 delegate.matched_pat(pat, downcast_cmt, match_mode);
1025 Def::Struct(..) | Def::TyAlias(..) => {
1026 // A struct (in either the value or type
1027 // namespace; we encounter the former on
1028 // e.g. patterns for unit structs).
1030 debug!("struct cmt_pat={:?} pat={:?}",
1034 delegate.matched_pat(pat, cmt_pat, match_mode);
1037 Def::Const(..) | Def::AssociatedConst(..) => {
1038 // This is a leaf (i.e. identifier binding
1039 // or constant value to match); thus no
1040 // `matched_pat` call.
1044 // An enum type should never be in a pattern.
1045 // Remaining cases are e.g. Def::Fn, to
1046 // which identifiers within patterns
1047 // should not resolve. However, we do
1048 // encounter this when using the
1049 // expr-use-visitor during typeck. So just
1050 // ignore it, an error should have been
// (elided: end of sentence — "...reported", presumably.)
1053 if !tcx.sess.has_errors() {
1055 "Pattern has unexpected def: {:?} and type {:?}",
1063 PatKind::Wild | PatKind::Tuple(..) | PatKind::Box(..) |
1064 PatKind::Ref(..) | PatKind::Lit(..) | PatKind::Range(..) |
1065 PatKind::Vec(..) | PatKind::Binding(..) => {
1066 // Each of these cases does not
1067 // correspond to an enum variant or struct, so we
1068 // do not do any `matched_pat` calls for these
// For each free variable captured by a closure expression, reports
// either a consume (by-value capture) or a borrow (by-ref capture),
// as recorded in the inference context's upvar-capture table.
1075 fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
1076 debug!("walk_captures({:?})", closure_expr);
1078 self.tcx().with_freevars(closure_expr.id, |freevars| {
1079 for freevar in freevars {
1080 let id_var = freevar.def.var_id();
1081 let upvar_id = ty::UpvarId { var_id: id_var,
1082 closure_expr_id: closure_expr.id };
1083 let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap();
1084 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1087 match upvar_capture {
1088 ty::UpvarCapture::ByValue => {
1089 let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove);
1090 self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
1092 ty::UpvarCapture::ByRef(upvar_borrow) => {
1093 self.delegate.borrow(closure_expr.id,
1096 upvar_borrow.region,
1098 ClosureCapture(freevar.span));
1105 fn cat_captured_var(&mut self,
1106 closure_id: ast::NodeId,
1109 -> mc::McResult<mc::cmt<'tcx>> {
1110 // Create the cmt for the variable being borrowed, from the
1111 // caller's perspective
1112 let var_id = upvar_def.var_id();
1113 let var_ty = self.mc.infcx.node_ty(var_id)?;
1114 self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
1118 fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
1119 cmt: &mc::cmt<'tcx>,
1120 move_reason: MoveReason)
1123 if infcx.type_moves_by_default(cmt.ty, cmt.span) {