1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A different sort of visitor for walking fn bodies. Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
15 pub use self::MutateMode::*;
16 pub use self::LoanCause::*;
17 pub use self::ConsumeMode::*;
18 pub use self::MoveReason::*;
19 pub use self::MatchMode::*;
20 use self::TrackMatchMode::*;
21 use self::OverloadedCallType::*;
23 use middle::{def, region, pat_util};
24 use middle::mem_categorization as mc;
25 use middle::mem_categorization::Typer;
26 use middle::ty::{mod};
27 use middle::ty::{MethodCall, MethodObject, MethodTraitObject};
28 use middle::ty::{MethodOrigin, MethodParam, MethodTypeParam};
29 use middle::ty::{MethodStatic, MethodStaticUnboxedClosure};
30 use util::ppaux::Repr;
33 use syntax::{ast, ast_util};
35 use syntax::codemap::Span;
37 ///////////////////////////////////////////////////////////////////////////
// NOTE(review): this listing carries embedded original line numbers with gaps —
// lines have been elided, so every signature below is an incomplete fragment.
// Restore the full text from version control before modifying.
40 /// This trait defines the callbacks you can expect to receive when
41 /// employing the ExprUseVisitor.
42 pub trait Delegate<'tcx> {
43 // The value found at `cmt` is either copied or moved, depending
// NOTE(review): the `consume` callback's full signature is elided; only one
// parameter is visible here.
46 consume_id: ast::NodeId,
51 // The value found at `cmt` has been determined to match the
52 // pattern binding `matched_pat`, and its subparts are being
53 // copied or moved depending on `mode`. Note that `matched_pat`
54 // is called on all variant/structs in the pattern (i.e., the
55 // interior nodes of the pattern's tree structure) while
56 // consume_pat is called on the binding identifiers in the pattern
57 // (which are leaves of the pattern's tree structure).
59 // Note that variants/structs and identifiers are disjoint; thus
60 // `matched_pat` and `consume_pat` are never both called on the
61 // same input pattern structure (though of `consume_pat` can be
62 // called on a subpart of an input passed to `matched_pat).
63 fn matched_pat(&mut self,
64 matched_pat: &ast::Pat,
68 // The value found at `cmt` is either copied or moved via the
69 // pattern binding `consume_pat`, depending on mode.
70 fn consume_pat(&mut self,
71 consume_pat: &ast::Pat,
75 // The value found at `borrow` is being borrowed at the point
76 // `borrow_id` for the region `loan_region` with kind `bk`.
// NOTE(review): the `borrow` callback's name/opening line is elided here.
78 borrow_id: ast::NodeId,
81 loan_region: ty::Region,
83 loan_cause: LoanCause);
85 // The local variable `id` is declared but not initialized.
86 fn decl_without_init(&mut self,
90 // The path at `cmt` is being assigned to.
// NOTE(review): presumably a `mutate` callback — its `fn` line is elided; verify.
92 assignment_id: ast::NodeId,
93 assignment_span: Span,
94 assignee_cmt: mc::cmt<'tcx>,
// NOTE(review): elided listing — the enum below (variant `ClosureCapture`)
// appears to be `LoanCause` based on the re-exports at the top of the file,
// but its `pub enum` line is missing; confirm against full source.
98 #[deriving(Copy, PartialEq, Show)]
100 ClosureCapture(Span),
// How a value is consumed: by copy (for types that copy) or by move.
110 #[deriving(Copy, PartialEq, Show)]
111 pub enum ConsumeMode {
112 Copy, // reference to x where x has a type that copies
113 Move(MoveReason), // reference to x where x has a type that moves
// Why a move occurred; variants elided in this listing.
116 #[deriving(Copy, PartialEq, Show)]
117 pub enum MoveReason {
123 #[deriving(Copy, PartialEq, Show)]
// Accumulator used while classifying a whole match pattern; variants
// (Unknown / Definite / Conflicting, per the impl below) are elided here.
131 #[deriving(PartialEq,Show)]
132 enum TrackMatchMode<T> {
// Manual Copy impl (deriving would require T: Copy, which is not needed here
// since the T only appears — presumably — in a phantom/span position; verify).
138 impl<T> kinds::Copy for TrackMatchMode<T> {}
140 impl<T> TrackMatchMode<T> {
141 // Builds up the whole match mode for a pattern from its constituent
142 // parts. The lattice looks like this:
// NOTE(review): the lattice diagram and several examples are elided from
// this listing (gap in the embedded numbering).
158 // * `(_, some_int)` pattern is Copying, since
159 // NonBinding + Copying => Copying
161 // * `(some_int, some_box)` pattern is Moving, since
162 // Copying + Moving => Moving
164 // * `(ref x, some_box)` pattern is Conflicting, since
165 // Borrowing + Moving => Conflicting
167 // Note that the `Unknown` and `Conflicting` states are
168 // represented separately from the other more interesting
169 // `Definite` states, which simplifies logic here somewhat.
// Least-upper-bound in the mode lattice: folds `mode` into `self`.
170 fn lub(&mut self, mode: MatchMode) {
171 *self = match (*self, mode) {
172 // Note that clause order below is very significant.
173 (Unknown, new) => Definite(new),
174 (Definite(old), new) if old == new => Definite(old),
176 (Definite(old), NonBindingMatch) => Definite(old),
177 (Definite(NonBindingMatch), new) => Definite(new),
179 (Definite(old), CopyingMatch) => Definite(old),
180 (Definite(CopyingMatch), new) => Definite(new),
182 (Definite(_), _) => Conflicting,
183 (Conflicting, _) => *self,
// Collapses the accumulated state to a concrete MatchMode.
// NOTE(review): the `match` scrutinee line and Conflicting arm are elided.
187 fn match_mode(&self) -> MatchMode {
189 Unknown => NonBindingMatch,
190 Definite(mode) => mode,
192 // Conservatively return MovingMatch to let the
193 // compiler continue to make progress.
// How an assignment target is used; only `WriteAndRead` (compound assignment)
// is visible — other variants (e.g. the `JustWrite`/`Init` used below) are
// elided from this listing.
200 #[deriving(Copy, PartialEq, Show)]
201 pub enum MutateMode {
204 WriteAndRead, // x += y
// Which overloaded-call trait (Fn/FnMut/FnOnce) a call expression resolves to;
// only FnOnceOverloadedCall is visible here.
208 enum OverloadedCallType {
211 FnOnceOverloadedCall,
// NOTE(review): elided listing — closing braces and some match arms of the
// methods below are missing; recover full source before editing.
214 impl OverloadedCallType {
// Maps one of the fn-family lang-item trait DefIds to its call type, or ICEs
// if the trait is not Fn/FnMut/FnOnce.
215 fn from_trait_id(tcx: &ty::ctxt, trait_id: ast::DefId)
216 -> OverloadedCallType {
217 for &(maybe_function_trait, overloaded_call_type) in [
218 (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
219 (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
220 (tcx.lang_items.fn_trait(), FnOverloadedCall)
222 match maybe_function_trait {
223 Some(function_trait) if function_trait == trait_id => {
224 return overloaded_call_type
230 tcx.sess.bug("overloaded call didn't map to known function trait")
// Resolves a statically-dispatched overloaded-call method to its call type by
// walking method -> impl -> implemented trait.
233 fn from_method_id(tcx: &ty::ctxt, method_id: ast::DefId)
234 -> OverloadedCallType {
235 let method_descriptor = match ty::impl_or_trait_item(tcx, method_id) {
236 ty::MethodTraitItem(ref method_descriptor) => {
237 (*method_descriptor).clone()
239 ty::TypeTraitItem(_) => {
240 tcx.sess.bug("overloaded call method wasn't in method map")
243 let impl_id = match method_descriptor.container {
244 ty::TraitContainer(_) => {
245 tcx.sess.bug("statically resolved overloaded call method \
246 belonged to a trait?!")
248 ty::ImplContainer(impl_id) => impl_id,
250 let trait_ref = match ty::impl_trait_ref(tcx, impl_id) {
252 tcx.sess.bug("statically resolved overloaded call impl \
253 didn't implement a trait?!")
255 Some(ref trait_ref) => (*trait_ref).clone(),
257 OverloadedCallType::from_trait_id(tcx, trait_ref.def_id)
// Resolves an unboxed-closure call; body largely elided in this listing.
260 fn from_unboxed_closure(tcx: &ty::ctxt, closure_did: ast::DefId)
261 -> OverloadedCallType {
266 .expect("OverloadedCallType::from_unboxed_closure: didn't \
270 OverloadedCallType::from_trait_id(tcx, trait_did)
// Dispatches on the method-resolution origin to the appropriate helper above.
273 fn from_method_origin(tcx: &ty::ctxt, origin: &MethodOrigin)
274 -> OverloadedCallType {
276 MethodStatic(def_id) => {
277 OverloadedCallType::from_method_id(tcx, def_id)
279 MethodStaticUnboxedClosure(def_id) => {
280 OverloadedCallType::from_unboxed_closure(tcx, def_id)
282 MethodTypeParam(MethodParam { ref trait_ref, .. }) |
283 MethodTraitObject(MethodObject { ref trait_ref, .. }) => {
284 OverloadedCallType::from_trait_id(tcx, trait_ref.def_id)
290 ///////////////////////////////////////////////////////////////////////////
291 // The ExprUseVisitor type
293 // This is the code that actually walks the tree. Like
294 // mem_categorization, it requires a TYPER, which is a type that
295 // supplies types from the tree. After type checking is complete, you
296 // can just use the tcx as the typer.
// The visitor itself: pairs a memory-categorization context with the
// user-supplied Delegate that receives consume/borrow/mutate callbacks.
// NOTE(review): a `typer` field is referenced throughout the impl below but
// its declaration is elided from this listing.
298 pub struct ExprUseVisitor<'d,'t,'tcx:'t,TYPER:'t> {
300 mc: mc::MemCategorizationContext<'t,TYPER>,
301 delegate: &'d mut (Delegate<'tcx>+'d),
304 // If the TYPER results in an error, it's because the type check
305 // failed (or will fail, when the error is uncovered and reported
306 // during writeback). In this case, we just ignore this part of the
309 // Note that this macro appears similar to try!(), but, unlike try!(),
310 // it does not propagate the error.
// Macro body elided: on Err it evidently returns early without propagating.
311 macro_rules! return_if_err {
// NOTE(review): this doc comment belongs to a `PassArgs` enum whose definition
// is elided from the listing (it is used as PassArgs::ByValue / ByRef below).
320 /// Whether the elements of an overloaded operation are passed by value or by reference
326 impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
// Constructor; the `typer` parameter line is elided.
327 pub fn new(delegate: &'d mut Delegate<'tcx>,
329 -> ExprUseVisitor<'d,'t,'tcx,TYPER> {
332 mc: mc::MemCategorizationContext::new(typer),
// Entry point: walks a complete fn (argument patterns, then the body block).
337 pub fn walk_fn(&mut self,
340 self.walk_arg_patterns(decl, body);
341 self.walk_block(body);
// Treats each fn argument as an rvalue scoped to the fn body and walks its
// (irrefutable) pattern.
344 fn walk_arg_patterns(&mut self,
347 for arg in decl.inputs.iter() {
348 let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id));
350 let fn_body_scope = region::CodeExtent::from_node_id(body.id);
351 let arg_cmt = self.mc.cat_rvalue(
354 ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
357 self.walk_irrefutable_pat(arg_cmt, &*arg.pat);
// Accessor for the type context; body elided.
361 fn tcx(&self) -> &'t ty::ctxt<'tcx> {
// Classifies `cmt` as copy-or-move and reports the consumption to the delegate.
365 fn delegate_consume(&mut self,
366 consume_id: ast::NodeId,
368 cmt: mc::cmt<'tcx>) {
369 let mode = copy_or_move(self.typer, &cmt, DirectRefMove);
370 self.delegate.consume(consume_id, consume_span, cmt, mode);
// Consumes each expression in a list (e.g. call arguments, tuple elements).
373 fn consume_exprs(&mut self, exprs: &Vec<P<ast::Expr>>) {
374 for expr in exprs.iter() {
375 self.consume_expr(&**expr);
// Reports `expr`'s value as consumed, then recurses into its subexpressions.
379 pub fn consume_expr(&mut self, expr: &ast::Expr) {
380 debug!("consume_expr(expr={})", expr.repr(self.tcx()));
382 let cmt = return_if_err!(self.mc.cat_expr(expr));
383 self.delegate_consume(expr.id, expr.span, cmt);
384 self.walk_expr(expr);
// Reports `expr` as mutated (attributed to `assignment_expr`), then walks it.
// NOTE(review): the `expr`/`mode` parameter lines are elided.
387 fn mutate_expr(&mut self,
388 assignment_expr: &ast::Expr,
391 let cmt = return_if_err!(self.mc.cat_expr(expr));
392 self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
393 self.walk_expr(expr);
// Reports `expr` as borrowed with region `r` and kind `bk`, then walks it.
// Parameter lines partially elided.
396 fn borrow_expr(&mut self,
401 debug!("borrow_expr(expr={}, r={}, bk={})",
402 expr.repr(self.tcx()), r.repr(self.tcx()), bk.repr(self.tcx()));
404 let cmt = return_if_err!(self.mc.cat_expr(expr));
405 self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
407 // Note: Unlike consume, we can ignore ExprParen. cat_expr
408 // already skips over them, and walk will uncover any
409 // attachments or whatever.
// Body elided; presumably walks an expression used as the base of a
// field/index projection — verify against full source.
413 fn select_from_expr(&mut self, expr: &ast::Expr) {
// The main expression dispatch: applies adjustments, then handles each
// expression kind, routing subexpressions to consume/borrow/mutate/select.
// NOTE(review): many match-arm closing braces and the `match expr.node`
// scrutinee line are elided from this listing.
417 pub fn walk_expr(&mut self, expr: &ast::Expr) {
418 debug!("walk_expr(expr={})", expr.repr(self.tcx()));
420 self.walk_adjustment(expr);
423 ast::ExprParen(ref subexpr) => {
424 self.walk_expr(&**subexpr)
427 ast::ExprPath(..) => { }
429 ast::ExprUnary(ast::UnDeref, ref base) => { // *base
430 if !self.walk_overloaded_operator(expr, &**base, Vec::new(), PassArgs::ByRef) {
431 self.select_from_expr(&**base);
435 ast::ExprField(ref base, _) => { // base.f
436 self.select_from_expr(&**base);
439 ast::ExprTupField(ref base, _) => { // base.<n>
440 self.select_from_expr(&**base);
443 ast::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
// NOTE(review): this nested ExprRange arm appears to handle the old slicing
// syntax inside an index expression; surrounding lines are elided.
445 ast::ExprRange(ref start, ref end) => {
446 // Hacked slicing syntax (KILLME).
447 let args = match (start, end) {
448 (&Some(ref e1), &Some(ref e2)) => vec![&**e1, &**e2],
449 (&Some(ref e), &None) => vec![&**e],
450 (&None, &Some(ref e)) => vec![&**e],
451 (&None, &None) => Vec::new()
454 self.walk_overloaded_operator(expr, &**lhs, args, PassArgs::ByRef);
458 if !self.walk_overloaded_operator(expr,
462 self.select_from_expr(&**lhs);
463 self.consume_expr(&**rhs);
469 ast::ExprRange(ref start, ref end) => {
470 start.as_ref().map(|e| self.consume_expr(&**e));
471 end.as_ref().map(|e| self.consume_expr(&**e));
474 ast::ExprCall(ref callee, ref args) => { // callee(args)
475 self.walk_callee(expr, &**callee);
476 self.consume_exprs(args);
479 ast::ExprMethodCall(_, _, ref args) => { // callee.m(args)
480 self.consume_exprs(args);
483 ast::ExprStruct(_, ref fields, ref opt_with) => {
484 self.walk_struct_expr(expr, fields, opt_with);
487 ast::ExprTup(ref exprs) => {
488 self.consume_exprs(exprs);
491 ast::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
492 self.consume_expr(&**cond_expr);
493 self.walk_block(&**then_blk);
494 for else_expr in opt_else_expr.iter() {
495 self.consume_expr(&**else_expr);
499 ast::ExprIfLet(..) => {
500 self.tcx().sess.span_bug(expr.span, "non-desugared ExprIfLet");
503 ast::ExprMatch(ref discr, ref arms, _) => {
504 let discr_cmt = return_if_err!(self.mc.cat_expr(&**discr));
505 self.borrow_expr(&**discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);
507 // treatment of the discriminant is handled while walking the arms.
508 for arm in arms.iter() {
509 let mode = self.arm_move_mode(discr_cmt.clone(), arm);
510 let mode = mode.match_mode();
511 self.walk_arm(discr_cmt.clone(), arm, mode);
515 ast::ExprVec(ref exprs) => {
516 self.consume_exprs(exprs);
519 ast::ExprAddrOf(m, ref base) => { // &base
520 // make sure that the thing we are pointing out stays valid
521 // for the lifetime `scope_r` of the resulting ptr:
522 let expr_ty = return_if_err!(self.typer.node_ty(expr.id));
523 let r = ty::ty_region(self.tcx(), expr.span, expr_ty);
524 let bk = ty::BorrowKind::from_mutbl(m);
525 self.borrow_expr(&**base, r, bk, AddrOf);
528 ast::ExprInlineAsm(ref ia) => {
529 for &(_, ref input) in ia.inputs.iter() {
530 self.consume_expr(&**input);
533 for &(_, ref output, is_rw) in ia.outputs.iter() {
534 self.mutate_expr(expr, &**output,
535 if is_rw { WriteAndRead } else { JustWrite });
541 ast::ExprLit(..) => {}
543 ast::ExprLoop(ref blk, _) => {
544 self.walk_block(&**blk);
547 ast::ExprWhile(ref cond_expr, ref blk, _) => {
548 self.consume_expr(&**cond_expr);
549 self.walk_block(&**blk);
552 ast::ExprWhileLet(..) => {
553 self.tcx().sess.span_bug(expr.span, "non-desugared ExprWhileLet");
556 ast::ExprForLoop(ref pat, ref head, ref blk, _) => {
557 // The pattern lives as long as the block.
558 debug!("walk_expr for loop case: blk id={}", blk.id);
559 self.consume_expr(&**head);
561 // Fetch the type of the value that the iteration yields to
562 // produce the pattern's categorized mutable type.
563 let pattern_type = return_if_err!(self.typer.node_ty(pat.id));
564 let blk_scope = region::CodeExtent::from_node_id(blk.id);
565 let pat_cmt = self.mc.cat_rvalue(pat.id,
567 ty::ReScope(blk_scope),
569 self.walk_irrefutable_pat(pat_cmt, &**pat);
571 self.walk_block(&**blk);
574 ast::ExprUnary(op, ref lhs) => {
575 let pass_args = if ast_util::is_by_value_unop(op) {
581 if !self.walk_overloaded_operator(expr, &**lhs, Vec::new(), pass_args) {
582 self.consume_expr(&**lhs);
586 ast::ExprBinary(op, ref lhs, ref rhs) => {
587 let pass_args = if ast_util::is_by_value_binop(op) {
593 if !self.walk_overloaded_operator(expr, &**lhs, vec![&**rhs], pass_args) {
594 self.consume_expr(&**lhs);
595 self.consume_expr(&**rhs);
599 ast::ExprBlock(ref blk) => {
600 self.walk_block(&**blk);
603 ast::ExprRet(ref opt_expr) => {
604 for expr in opt_expr.iter() {
605 self.consume_expr(&**expr);
609 ast::ExprAssign(ref lhs, ref rhs) => {
610 self.mutate_expr(expr, &**lhs, JustWrite);
611 self.consume_expr(&**rhs);
614 ast::ExprCast(ref base, _) => {
615 self.consume_expr(&**base);
618 ast::ExprAssignOp(_, ref lhs, ref rhs) => {
619 // This will have to change if/when we support
620 // overloaded operators for `+=` and so forth.
621 self.mutate_expr(expr, &**lhs, WriteAndRead);
622 self.consume_expr(&**rhs);
625 ast::ExprRepeat(ref base, ref count) => {
626 self.consume_expr(&**base);
627 self.consume_expr(&**count);
630 ast::ExprClosure(..) => {
631 self.walk_captures(expr)
634 ast::ExprBox(ref place, ref base) => {
636 Some(ref place) => self.consume_expr(&**place),
639 self.consume_expr(&**base);
642 ast::ExprMac(..) => {
643 self.tcx().sess.span_bug(
645 "macro expression remains after expansion");
// Decides how a call's callee is used: bare fns are consumed; closures are
// borrowed for the call's scope; overloaded calls dispatch on Fn/FnMut/FnOnce.
// NOTE(review): elided listing — several match arms and braces are missing.
650 fn walk_callee(&mut self, call: &ast::Expr, callee: &ast::Expr) {
651 let callee_ty = return_if_err!(self.typer.expr_ty_adjusted(callee));
652 debug!("walk_callee: callee={} callee_ty={}",
653 callee.repr(self.tcx()), callee_ty.repr(self.tcx()));
654 let call_scope = region::CodeExtent::from_node_id(call.id);
655 match callee_ty.sty {
656 ty::ty_bare_fn(..) => {
657 self.consume_expr(callee);
659 ty::ty_closure(ref f) => {
// Borrow kind/cause arguments for this borrow_expr call are elided.
662 self.borrow_expr(callee,
663 ty::ReScope(call_scope),
668 self.consume_expr(callee);
// Fallback: an overloaded call — resolve which fn-family trait was used.
674 let overloaded_call_type =
675 match self.typer.node_method_origin(MethodCall::expr(call.id)) {
676 Some(method_origin) => {
677 OverloadedCallType::from_method_origin(
682 self.tcx().sess.span_bug(
684 format!("unexpected callee type {}",
685 callee_ty.repr(self.tcx())).as_slice())
688 match overloaded_call_type {
689 FnMutOverloadedCall => {
690 self.borrow_expr(callee,
691 ty::ReScope(call_scope),
695 FnOverloadedCall => {
696 self.borrow_expr(callee,
697 ty::ReScope(call_scope),
701 FnOnceOverloadedCall => self.consume_expr(callee),
// Statement dispatch: local declarations are walked, nested items are skipped,
// expression statements are consumed, unexpanded macros are ICEs.
707 fn walk_stmt(&mut self, stmt: &ast::Stmt) {
709 ast::StmtDecl(ref decl, _) => {
711 ast::DeclLocal(ref local) => {
712 self.walk_local(&**local);
715 ast::DeclItem(_) => {
716 // we don't visit nested items in this visitor,
717 // only the fn body we were given.
722 ast::StmtExpr(ref expr, _) |
723 ast::StmtSemi(ref expr, _) => {
724 self.consume_expr(&**expr);
727 ast::StmtMac(..) => {
728 self.tcx().sess.span_bug(stmt.span, "unexpanded stmt macro");
// A `let`: uninitialized bindings get decl_without_init callbacks; initialized
// ones walk the initializer and the pattern.
// NOTE(review): the match on `local.init` is elided from this listing.
733 fn walk_local(&mut self, local: &ast::Local) {
736 let delegate = &mut self.delegate;
737 pat_util::pat_bindings(&self.typer.tcx().def_map, &*local.pat,
739 delegate.decl_without_init(id, span);
744 // Variable declarations with
745 // initializers are considered
746 // "assigns", which is handled by
748 self.walk_expr(&**expr);
749 let init_cmt = return_if_err!(self.mc.cat_expr(&**expr));
750 self.walk_irrefutable_pat(init_cmt, &*local.pat);
755 /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
756 /// depending on its type.
757 fn walk_block(&mut self, blk: &ast::Block) {
758 debug!("walk_block(blk.id={})", blk.id);
760 for stmt in blk.stmts.iter() {
761 self.walk_stmt(&**stmt);
764 for tail_expr in blk.expr.iter() {
765 self.consume_expr(&**tail_expr);
// Struct literal: consume each field initializer; for `..base` functional
// update, consume only the base's fields NOT overridden by the literal.
769 fn walk_struct_expr(&mut self,
771 fields: &Vec<ast::Field>,
772 opt_with: &Option<P<ast::Expr>>) {
773 // Consume the expressions supplying values for each field.
774 for field in fields.iter() {
775 self.consume_expr(&*field.expr);
// NOTE(review): the None-arm (early return when there is no `..base`) is
// elided from this listing.
778 let with_expr = match *opt_with {
783 let with_cmt = return_if_err!(self.mc.cat_expr(&*with_expr));
785 // Select just those fields of the `with`
786 // expression that will actually be used
787 let with_fields = match with_cmt.ty.sty {
788 ty::ty_struct(did, substs) => {
789 ty::struct_fields(self.tcx(), did, substs)
792 // the base expression should always evaluate to a
793 // struct; however, when EUV is run during typeck, it
794 // may not. This will generate an error earlier in typeck,
795 // so we can just ignore it.
796 if !self.tcx().sess.has_errors() {
797 self.tcx().sess.span_bug(
799 "with expression doesn't evaluate to a struct");
801 assert!(self.tcx().sess.has_errors());
806 // Consume those fields of the with expression that are needed.
807 for with_field in with_fields.iter() {
808 if !contains_field_named(with_field, fields) {
809 let cmt_field = self.mc.cat_field(&*with_expr,
813 self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
817 // walk the with expression so that complex expressions
818 // are properly handled.
819 self.walk_expr(with_expr);
// Helper: does the literal explicitly initialize a field with this name?
821 fn contains_field_named(field: &ty::field,
822 fields: &Vec<ast::Field>)
826 |f| f.ident.node.name == field.name)
830 // Invoke the appropriate delegate calls for anything that gets
831 // consumed or borrowed as part of the automatic adjustment
833 fn walk_adjustment(&mut self, expr: &ast::Expr) {
834 let typer = self.typer;
835 match typer.adjustments().borrow().get(&expr.id) {
837 Some(adjustment) => {
839 ty::AdjustAddEnv(..) |
840 ty::AdjustReifyFnPointer(..) => {
841 // Creating a closure/fn-pointer consumes the
842 // input and stores it into the resulting
844 debug!("walk_adjustment(AutoAddEnv|AdjustReifyFnPointer)");
// NOTE(review): the `let cmt_unadjusted =` binding line is elided here.
846 return_if_err!(self.mc.cat_expr_unadjusted(expr));
847 self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
849 ty::AdjustDerefRef(ty::AutoDerefRef {
850 autoref: ref opt_autoref,
853 self.walk_autoderefs(expr, n);
858 self.walk_autoref(expr, r, n);
867 /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
868 /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
869 /// `deref()` is declared with `&self`, this is an autoref of `x`.
870 fn walk_autoderefs(&mut self,
873 debug!("walk_autoderefs expr={} autoderefs={}", expr.repr(self.tcx()), autoderefs);
875 for i in range(0, autoderefs) {
876 let deref_id = ty::MethodCall::autoderef(expr.id, i);
877 match self.typer.node_method_ty(deref_id) {
// For an overloaded deref, report a borrow of the receiver with the
// mutability/region taken from the deref method's `self` type.
880 let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
881 let self_ty = ty::ty_fn_args(method_ty)[0];
882 let (m, r) = match self_ty.sty {
883 ty::ty_rptr(r, ref m) => (m.mutbl, r),
884 _ => self.tcx().sess.span_bug(expr.span,
885 format!("bad overloaded deref type {}",
886 method_ty.repr(self.tcx()))[])
888 let bk = ty::BorrowKind::from_mutbl(m);
889 self.delegate.borrow(expr.id, expr.span, cmt,
// Reports the consume/borrow implied by an autoref adjustment.
896 fn walk_autoref(&mut self,
898 autoref: &ty::AutoRef,
900 debug!("walk_autoref expr={}", expr.repr(self.tcx()));
902 // Match for unique trait coercions first, since we don't need the
903 // call to cat_expr_autoderefd.
905 ty::AutoUnsizeUniq(ty::UnsizeVtable(..)) |
906 ty::AutoUnsize(ty::UnsizeVtable(..)) => {
907 assert!(n == 1, format!("Expected exactly 1 deref with Uniq \
908 AutoRefs, found: {}", n));
// NOTE(review): the `let cmt_unadjusted =` binding line is elided here too.
910 return_if_err!(self.mc.cat_expr_unadjusted(expr));
911 self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
917 let cmt_derefd = return_if_err!(
918 self.mc.cat_expr_autoderefd(expr, n));
919 debug!("walk_adjustment: cmt_derefd={}",
920 cmt_derefd.repr(self.tcx()));
923 ty::AutoPtr(r, m, _) => {
924 self.delegate.borrow(expr.id,
928 ty::BorrowKind::from_mutbl(m),
931 ty::AutoUnsizeUniq(_) | ty::AutoUnsize(_) | ty::AutoUnsafe(..) => {}
// If `expr` resolves to an overloaded-operator method call, report the
// receiver and args appropriately (consume for by-value, borrow for by-ref)
// and return true; returns false when there is no method call, letting the
// caller handle the built-in operator case.
// NOTE(review): the return-type line and `return false`/`return true` lines
// are elided from this listing.
935 fn walk_overloaded_operator(&mut self,
937 receiver: &ast::Expr,
938 rhs: Vec<&ast::Expr>,
942 if !self.typer.is_method_call(expr.id) {
947 PassArgs::ByValue => {
948 self.consume_expr(receiver);
949 for &arg in rhs.iter() {
950 self.consume_expr(arg);
955 PassArgs::ByRef => {},
958 self.walk_expr(receiver);
960 // Arguments (but not receivers) to overloaded operator
961 // methods are implicitly autoref'd which sadly does not use
962 // adjustments, so we must hardcode the borrow here.
964 let r = ty::ReScope(region::CodeExtent::from_node_id(expr.id));
965 let bk = ty::ImmBorrow;
967 for &arg in rhs.iter() {
968 self.borrow_expr(arg, r, bk, OverloadedOperator);
// Folds the move-mode of every pattern in the arm into one TrackMatchMode.
973 fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm) -> TrackMatchMode<Span> {
974 let mut mode = Unknown;
975 for pat in arm.pats.iter() {
976 self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode);
// Walks an arm's patterns with the precomputed mode, then its guard and body.
981 fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm, mode: MatchMode) {
982 for pat in arm.pats.iter() {
983 self.walk_pat(discr_cmt.clone(), &**pat, mode);
986 for guard in arm.guard.iter() {
987 self.consume_expr(&**guard);
990 self.consume_expr(&*arm.body);
993 /// Walks an pat that occurs in isolation (i.e. top-level of fn
994 /// arg or let binding. *Not* a match arm or nested pat.)
995 fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &ast::Pat) {
996 let mut mode = Unknown;
997 self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
998 let mode = mode.match_mode();
999 self.walk_pat(cmt_discr, pat, mode);
1002 /// Identifies any bindings within `pat` and accumulates within
1003 /// `mode` whether the overall pattern/match structure is a move,
1004 /// copy, or borrow.
1005 fn determine_pat_move_mode(&mut self,
1006 cmt_discr: mc::cmt<'tcx>,
1008 mode: &mut TrackMatchMode<Span>) {
1009 debug!("determine_pat_move_mode cmt_discr={} pat={}", cmt_discr.repr(self.tcx()),
1010 pat.repr(self.tcx()));
1011 return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
1012 let tcx = self.tcx();
1013 let def_map = &self.tcx().def_map;
1014 if pat_util::pat_is_binding(def_map, pat) {
// by-ref bindings contribute Borrowing; by-value bindings contribute
// Copying or Moving depending on the bound value's type.
1016 ast::PatIdent(ast::BindByRef(_), _, _) =>
1017 mode.lub(BorrowingMatch),
1018 ast::PatIdent(ast::BindByValue(_), _, _) => {
1019 match copy_or_move(self.typer, &cmt_pat, PatBindingMove) {
1020 Copy => mode.lub(CopyingMatch),
1021 Move(_) => mode.lub(MovingMatch),
// Fallback arm (elided): a "binding" pattern that is not PatIdent is an ICE.
1027 "binding pattern not an identifier");
1034 /// The core driver for walking a pattern; `match_mode` must be
1035 /// established up front, e.g. via `determine_pat_move_mode` (see
1036 /// also `walk_irrefutable_pat` for patterns that stand alone).
// NOTE(review): heavily elided — the `pat` parameter line, several closing
// braces, and a number of arms are missing from this listing.
1037 fn walk_pat(&mut self,
1038 cmt_discr: mc::cmt<'tcx>,
1040 match_mode: MatchMode) {
1041 debug!("walk_pat cmt_discr={} pat={}", cmt_discr.repr(self.tcx()),
1042 pat.repr(self.tcx()));
// First pass: visit the pattern's bindings (the leaves), reporting
// mutate/borrow/consume_pat as appropriate.
1045 let typer = self.typer;
1046 let def_map = &self.tcx().def_map;
1047 let delegate = &mut self.delegate;
1048 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
1049 if pat_util::pat_is_binding(def_map, pat) {
1050 let tcx = typer.tcx();
1052 debug!("binding cmt_pat={} pat={} match_mode={}",
1057 // pat_ty: the type of the binding being produced.
1058 let pat_ty = return_if_err!(typer.node_ty(pat.id));
1060 // Each match binding is effectively an assignment to the
1061 // binding being produced.
1062 let def = def_map.borrow()[pat.id].clone();
1063 match mc.cat_def(pat.id, pat.span, pat_ty, def) {
1064 Ok(binding_cmt) => {
1065 delegate.mutate(pat.id, pat.span, binding_cmt, Init);
1070 // It is also a borrow or copy/move of the value being matched.
1072 ast::PatIdent(ast::BindByRef(m), _, _) => {
1074 (ty::ty_region(tcx, pat.span, pat_ty),
1075 ty::BorrowKind::from_mutbl(m))
1077 delegate.borrow(pat.id, pat.span, cmt_pat,
1080 ast::PatIdent(ast::BindByValue(_), _, _) => {
1081 let mode = copy_or_move(typer, &cmt_pat, PatBindingMove);
1082 debug!("walk_pat binding consuming pat");
1083 delegate.consume_pat(pat, cmt_pat, mode);
1088 "binding pattern not an identifier");
1093 ast::PatVec(_, Some(ref slice_pat), _) => {
1094 // The `slice_pat` here creates a slice into
1095 // the original vector. This is effectively a
1096 // borrow of the elements of the vector being
1099 let (slice_cmt, slice_mutbl, slice_r) =
1100 return_if_err!(mc.cat_slice_pattern(cmt_pat, &**slice_pat));
1102 // Note: We declare here that the borrow
1103 // occurs upon entering the `[...]`
1104 // pattern. This implies that something like
1105 // `[a; b]` where `a` is a move is illegal,
1106 // because the borrow is already in effect.
1107 // In fact such a move would be safe-ish, but
1108 // it effectively *requires* that we use the
1109 // nulling out semantics to indicate when a
1110 // value has been moved, which we are trying
1111 // to move away from. Otherwise, how can we
1112 // indicate that the first element in the
1113 // vector has been moved? Eventually, we
1114 // could perhaps modify this rule to permit
1115 // `[..a, b]` where `b` is a move, because in
1116 // that case we can adjust the length of the
1117 // original vec accordingly, but we'd have to
1118 // make trans do the right thing, and it would
1119 // only work for `~` vectors. It seems simpler
1120 // to just require that people call
1121 // `vec.pop()` or `vec.unshift()`.
1122 let slice_bk = ty::BorrowKind::from_mutbl(slice_mutbl);
1123 delegate.borrow(pat.id, pat.span,
1125 slice_bk, RefBinding);
1132 // Do a second pass over the pattern, calling `matched_pat` on
1133 // the interior nodes (enum variants and structs), as opposed
1134 // to the above loop's visit of than the bindings that form
1135 // the leaves of the pattern tree structure.
1136 return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
1137 let def_map = def_map.borrow();
1138 let tcx = typer.tcx();
1141 ast::PatEnum(_, _) | ast::PatIdent(_, _, None) | ast::PatStruct(..) => {
1142 match def_map.get(&pat.id) {
1144 // no definition found: pat is not a
1145 // struct or enum pattern.
1148 Some(&def::DefVariant(enum_did, variant_did, _is_struct)) => {
// Univariant enums need no downcast; otherwise build a downcast cmt
// so the delegate sees the variant-narrowed place.
1150 if ty::enum_is_univariant(tcx, enum_did) {
1153 let cmt_pat_ty = cmt_pat.ty;
1154 mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
1157 debug!("variant downcast_cmt={} pat={}",
1158 downcast_cmt.repr(tcx),
1161 delegate.matched_pat(pat, downcast_cmt, match_mode);
1164 Some(&def::DefStruct(..)) | Some(&def::DefTy(_, false)) => {
1165 // A struct (in either the value or type
1166 // namespace; we encounter the former on
1167 // e.g. patterns for unit structs).
1169 debug!("struct cmt_pat={} pat={}",
1173 delegate.matched_pat(pat, cmt_pat, match_mode);
1176 Some(&def::DefConst(..)) |
1177 Some(&def::DefLocal(..)) => {
1178 // This is a leaf (i.e. identifier binding
1179 // or constant value to match); thus no
1180 // `matched_pat` call.
1183 Some(def @ &def::DefTy(_, true)) => {
1184 // An enum's type -- should never be in a
1187 if !tcx.sess.has_errors() {
1188 let msg = format!("Pattern has unexpected type: {} and type {}",
1190 cmt_pat.ty.repr(tcx));
1191 tcx.sess.span_bug(pat.span, msg[])
1196 // Remaining cases are e.g. DefFn, to
1197 // which identifiers within patterns
1198 // should not resolve. However, we do
1199 // encouter this when using the
1200 // expr-use-visitor during typeck. So just
1201 // ignore it, an error should have been
1204 if !tcx.sess.has_errors() {
1205 let msg = format!("Pattern has unexpected def: {} and type {}",
1207 cmt_pat.ty.repr(tcx));
1208 tcx.sess.span_bug(pat.span, msg[])
1214 ast::PatIdent(_, _, Some(_)) => {
1215 // Do nothing; this is a binding (not a enum
1216 // variant or struct), and the cat_pattern call
1217 // will visit the substructure recursively.
1220 ast::PatWild(_) | ast::PatTup(..) | ast::PatBox(..) |
1221 ast::PatRegion(..) | ast::PatLit(..) | ast::PatRange(..) |
1222 ast::PatVec(..) | ast::PatMac(..) => {
1223 // Similarly, each of these cases does not
1224 // correspond to a enum variant or struct, so we
1225 // do not do any `matched_pat` calls for these
// Dispatches on the closure's capture mode: by-ref captures become borrows,
// by-value captures become consumes.
1232 fn walk_captures(&mut self, closure_expr: &ast::Expr) {
1233 debug!("walk_captures({})", closure_expr.repr(self.tcx()));
1235 ty::with_freevars(self.tcx(), closure_expr.id, |freevars| {
1236 match self.tcx().capture_mode(closure_expr.id) {
1237 ast::CaptureByRef => {
1238 self.walk_by_ref_captures(closure_expr, freevars);
1240 ast::CaptureByValue => {
1241 self.walk_by_value_captures(closure_expr, freevars);
// For each free variable: report a borrow with the kind/region that
// regionck inferred for the upvar.
1247 fn walk_by_ref_captures(&mut self,
1248 closure_expr: &ast::Expr,
1249 freevars: &[ty::Freevar]) {
1250 for freevar in freevars.iter() {
1251 let id_var = freevar.def.def_id().node;
1252 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1256 // Lookup the kind of borrow the callee requires, as
1257 // inferred by regionbk
1258 let upvar_id = ty::UpvarId { var_id: id_var,
1259 closure_expr_id: closure_expr.id };
1260 let upvar_borrow = self.typer.upvar_borrow(upvar_id).unwrap();
1262 self.delegate.borrow(closure_expr.id,
1265 upvar_borrow.region,
1267 ClosureCapture(freevar.span));
// For each free variable: report a copy-or-move consume at the capture site.
1271 fn walk_by_value_captures(&mut self,
1272 closure_expr: &ast::Expr,
1273 freevars: &[ty::Freevar]) {
1274 for freevar in freevars.iter() {
1275 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1278 let mode = copy_or_move(self.typer, &cmt_var, CaptureMove);
1279 self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
// Builds the cmt for a captured variable as seen from the closure's caller.
1283 fn cat_captured_var(&mut self,
1284 closure_id: ast::NodeId,
1286 upvar_def: def::Def)
1287 -> mc::McResult<mc::cmt<'tcx>> {
1288 // Create the cmt for the variable being borrowed, from the
1289 // caller's perspective
1290 let var_id = upvar_def.def_id().node;
1291 let var_ty = try!(self.typer.node_ty(var_id));
1292 self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
// Classifies a use of `cmt` as Copy or Move(move_reason) based on whether its
// type moves by default. NOTE(review): return type line and the Copy/Move
// result lines are elided from this listing.
1296 fn copy_or_move<'tcx>(typer: &mc::Typer<'tcx>,
1297 cmt: &mc::cmt<'tcx>,
1298 move_reason: MoveReason)
1301 if typer.type_moves_by_default(cmt.span, cmt.ty) {