1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 * A different sort of visitor for walking fn bodies. Unlike the
13 * normal visitor, which just walks the entire body in one shot, the
14 * `ExprUseVisitor` determines how expressions are being used.
17 pub use self::MutateMode::*;
18 pub use self::LoanCause::*;
19 pub use self::ConsumeMode::*;
20 pub use self::MoveReason::*;
21 pub use self::MatchMode::*;
22 use self::TrackMatchMode::*;
23 use self::OverloadedCallType::*;
25 use middle::{def, region, pat_util};
26 use middle::mem_categorization as mc;
27 use middle::mem_categorization::Typer;
28 use middle::ty::{mod, Ty};
29 use middle::typeck::{MethodCall, MethodObject, MethodTraitObject};
30 use middle::typeck::{MethodOrigin, MethodParam, MethodTypeParam};
31 use middle::typeck::{MethodStatic, MethodStaticUnboxedClosure};
33 use util::ppaux::Repr;
37 use syntax::codemap::Span;
39 ///////////////////////////////////////////////////////////////////////////
42 /// This trait defines the callbacks you can expect to receive when
43 /// employing the ExprUseVisitor.
44 pub trait Delegate<'tcx> {
45 // The value found at `cmt` is either copied or moved, depending
48 consume_id: ast::NodeId,
53 // The value found at `cmt` has been determined to match the
54 // pattern binding `matched_pat`, and its subparts are being
55 // copied or moved depending on `mode`. Note that `matched_pat`
56 // is called on all variant/structs in the pattern (i.e., the
57 // interior nodes of the pattern's tree structure) while
58 // consume_pat is called on the binding identifiers in the pattern
59 // (which are leaves of the pattern's tree structure).
61 // Note that variants/structs and identifiers are disjoint; thus
62 // `matched_pat` and `consume_pat` are never both called on the
63 // same input pattern structure (though `consume_pat` can be
64 // called on a subpart of an input passed to `matched_pat`).
65 fn matched_pat(&mut self,
66 matched_pat: &ast::Pat,
70 // The value found at `cmt` is either copied or moved via the
71 // pattern binding `consume_pat`, depending on mode.
72 fn consume_pat(&mut self,
73 consume_pat: &ast::Pat,
77 // The value found at `borrow` is being borrowed at the point
78 // `borrow_id` for the region `loan_region` with kind `bk`.
80 borrow_id: ast::NodeId,
83 loan_region: ty::Region,
85 loan_cause: LoanCause);
87 // The local variable `id` is declared but not initialized.
88 fn decl_without_init(&mut self,
92 // The path at `cmt` is being assigned to.
94 assignment_id: ast::NodeId,
95 assignment_span: Span,
96 assignee_cmt: mc::cmt<'tcx>,
100 #[deriving(PartialEq, Show)]
102 ClosureCapture(Span),
112 #[deriving(PartialEq, Show)]
113 pub enum ConsumeMode {
114 Copy, // reference to x where x has a type that copies
115 Move(MoveReason), // reference to x where x has a type that moves
118 #[deriving(PartialEq,Show)]
119 pub enum MoveReason {
125 #[deriving(PartialEq,Show)]
// Lattice-style accumulator used while inferring a pattern's overall
// MatchMode; see `lub` below. NOTE(review): the enum's closing brace is
// elided from this excerpt, and the type parameter `T` appears unused
// in the lines shown — confirm against the full file.
133 #[deriving(PartialEq,Show)]
134 enum TrackMatchMode<T> {
135 Unknown, Definite(MatchMode), Conflicting,
138 impl<T> TrackMatchMode<T> {
139 // Builds up the whole match mode for a pattern from its constituent
140 // parts. The lattice looks like this:
156 // * `(_, some_int)` pattern is Copying, since
157 // NonBinding + Copying => Copying
159 // * `(some_int, some_box)` pattern is Moving, since
160 // Copying + Moving => Moving
162 // * `(ref x, some_box)` pattern is Conflicting, since
163 // Borrowing + Moving => Conflicting
165 // Note that the `Unknown` and `Conflicting` states are
166 // represented separately from the other more interesting
167 // `Definite` states, which simplifies logic here somewhat.
// Folds `mode` into the current state: the first observation becomes
// Definite; NonBindingMatch and CopyingMatch are absorbed by any
// stronger existing mode; any other disagreement between two Definite
// modes collapses to Conflicting, which is sticky.
168 fn lub(&mut self, mode: MatchMode) {
169 *self = match (*self, mode) {
170 // Note that clause order below is very significant.
171 (Unknown, new) => Definite(new),
172 (Definite(old), new) if old == new => Definite(old),
174 (Definite(old), NonBindingMatch) => Definite(old),
175 (Definite(NonBindingMatch), new) => Definite(new),
177 (Definite(old), CopyingMatch) => Definite(old),
178 (Definite(CopyingMatch), new) => Definite(new),
180 (Definite(_), _) => Conflicting,
181 (Conflicting, _) => *self,
// Reduces the tracked state to a concrete MatchMode: Unknown (no
// bindings seen) maps to NonBindingMatch, Definite yields its mode.
// NOTE(review): the match scrutinee line and the Conflicting arm —
// which, per the comment below, conservatively yields MovingMatch —
// are elided from this excerpt (original lines 186, 192+).
185 fn match_mode(&self) -> MatchMode {
187 Unknown => NonBindingMatch,
188 Definite(mode) => mode,
190 // Conservatively return MovingMatch to let the
191 // compiler continue to make progress.
198 #[deriving(PartialEq,Show)]
199 pub enum MutateMode {
202 WriteAndRead, // x += y
// Which Fn-family call protocol an overloaded call uses; the
// `from_trait_id` constructor below maps the Fn/FnMut/FnOnce lang-item
// traits onto these variants. NOTE(review): the FnOverloadedCall and
// FnMutOverloadedCall variants referenced elsewhere in this file
// (original lines 206-207) are elided from this excerpt.
205 enum OverloadedCallType {
208 FnOnceOverloadedCall,
211 impl OverloadedCallType {
// Maps a trait DefId to its call type by comparing it against the
// FnOnce/FnMut/Fn lang items in turn; calls tcx.sess.bug (ICE) if the
// id matches none of the three.
212 fn from_trait_id(tcx: &ty::ctxt, trait_id: ast::DefId)
213 -> OverloadedCallType {
214 for &(maybe_function_trait, overloaded_call_type) in [
215 (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
216 (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
217 (tcx.lang_items.fn_trait(), FnOverloadedCall)
219 match maybe_function_trait {
220 Some(function_trait) if function_trait == trait_id => {
221 return overloaded_call_type
227 tcx.sess.bug("overloaded call didn't map to known function trait")
// Resolves a statically-dispatched method id to its call type: looks
// up the method descriptor, requires it to live in an impl (not a
// trait), then requires that impl to implement one of the Fn traits.
// Any other shape is an internal compiler error.
230 fn from_method_id(tcx: &ty::ctxt, method_id: ast::DefId)
231 -> OverloadedCallType {
232 let method_descriptor = match ty::impl_or_trait_item(tcx, method_id) {
233 ty::MethodTraitItem(ref method_descriptor) => {
234 (*method_descriptor).clone()
236 ty::TypeTraitItem(_) => {
237 tcx.sess.bug("overloaded call method wasn't in method map")
240 let impl_id = match method_descriptor.container {
241 ty::TraitContainer(_) => {
242 tcx.sess.bug("statically resolved overloaded call method \
243 belonged to a trait?!")
245 ty::ImplContainer(impl_id) => impl_id,
247 let trait_ref = match ty::impl_trait_ref(tcx, impl_id) {
249 tcx.sess.bug("statically resolved overloaded call impl \
250 didn't implement a trait?!")
252 Some(ref trait_ref) => (*trait_ref).clone(),
254 OverloadedCallType::from_trait_id(tcx, trait_ref.def_id)
// Determines the call type for a call through an unboxed closure.
// NOTE(review): the lookup producing `trait_did` (original lines
// 259-266) is elided from this excerpt.
257 fn from_unboxed_closure(tcx: &ty::ctxt, closure_did: ast::DefId)
258 -> OverloadedCallType {
263 .expect("OverloadedCallType::from_unboxed_closure: didn't \
267 OverloadedCallType::from_trait_id(tcx, trait_did)
// Dispatches on how the method was resolved — static item, unboxed
// closure, type-parameter bound, or trait object — to the appropriate
// constructor above.
270 fn from_method_origin(tcx: &ty::ctxt, origin: &MethodOrigin)
271 -> OverloadedCallType {
273 MethodStatic(def_id) => {
274 OverloadedCallType::from_method_id(tcx, def_id)
276 MethodStaticUnboxedClosure(def_id) => {
277 OverloadedCallType::from_unboxed_closure(tcx, def_id)
279 MethodTypeParam(MethodParam { ref trait_ref, .. }) |
280 MethodTraitObject(MethodObject { ref trait_ref, .. }) => {
281 OverloadedCallType::from_trait_id(tcx, trait_ref.def_id)
287 ///////////////////////////////////////////////////////////////////////////
288 // The ExprUseVisitor type
290 // This is the code that actually walks the tree. Like
291 // mem_categorization, it requires a TYPER, which is a type that
292 // supplies types from the tree. After type checking is complete, you
293 // can just use the tcx as the typer.
295 pub struct ExprUseVisitor<'d,'t,'tcx,TYPER:'t> {
297 mc: mc::MemCategorizationContext<'t,TYPER>,
298 delegate: &'d mut Delegate<'tcx>+'d,
301 // If the TYPER results in an error, it's because the type check
302 // failed (or will fail, when the error is uncovered and reported
303 // during writeback). In this case, we just ignore this part of the
306 // Note that this macro appears similar to try!(), but, unlike try!(),
307 // it does not propagate the error.
308 macro_rules! return_if_err(
317 impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
318 pub fn new(delegate: &'d mut Delegate<'tcx>,
320 -> ExprUseVisitor<'d,'t,'tcx,TYPER> {
321 ExprUseVisitor { typer: typer,
322 mc: mc::MemCategorizationContext::new(typer),
326 pub fn walk_fn(&mut self,
329 self.walk_arg_patterns(decl, body);
330 self.walk_block(body);
333 fn walk_arg_patterns(&mut self,
336 for arg in decl.inputs.iter() {
337 let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id));
339 let fn_body_scope = region::CodeExtent::from_node_id(body.id);
340 let arg_cmt = self.mc.cat_rvalue(
343 ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
346 self.walk_irrefutable_pat(arg_cmt, &*arg.pat);
350 fn tcx(&self) -> &'t ty::ctxt<'tcx> {
// Classifies `cmt` as copied or moved (via copy_or_move on its type)
// and forwards a consume event to the delegate. NOTE(review): the
// `consume_span` parameter declaration (original line 356) is elided
// from this excerpt.
354 fn delegate_consume(&mut self,
355 consume_id: ast::NodeId,
357 cmt: mc::cmt<'tcx>) {
358 let mode = copy_or_move(self.tcx(), cmt.ty, DirectRefMove);
359 self.delegate.consume(consume_id, consume_span, cmt, mode);
// Consumes each expression in the list in order (used for e.g. call
// arguments and tuple/vec element lists).
362 fn consume_exprs(&mut self, exprs: &Vec<P<ast::Expr>>) {
363 for expr in exprs.iter() {
364 self.consume_expr(&**expr);
// Categorizes `expr`, reports it to the delegate as consumed (copied
// or moved depending on its type), then walks its subexpressions.
// Bails out silently if categorization failed (return_if_err! does
// not propagate the error; see the note near its definition).
368 pub fn consume_expr(&mut self, expr: &ast::Expr) {
369 debug!("consume_expr(expr={})", expr.repr(self.tcx()));
371 let cmt = return_if_err!(self.mc.cat_expr(expr));
372 self.delegate_consume(expr.id, expr.span, cmt);
373 self.walk_expr(expr);
// Reports `expr` as mutated, attributing the mutation to the id/span
// of the enclosing assignment expression, then walks it.
// NOTE(review): the `expr` and `mode` parameter declarations (original
// lines 378-379) are elided from this excerpt.
376 fn mutate_expr(&mut self,
377 assignment_expr: &ast::Expr,
380 let cmt = return_if_err!(self.mc.cat_expr(expr));
381 self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
382 self.walk_expr(expr);
// Reports a borrow of `expr` for region `r` with kind `bk` and cause
// `cause`, then walks the expression. NOTE(review): the expr/r/bk/cause
// parameter declarations (original lines 386-389) are elided from this
// excerpt.
385 fn borrow_expr(&mut self,
390 debug!("borrow_expr(expr={}, r={}, bk={})",
391 expr.repr(self.tcx()), r.repr(self.tcx()), bk.repr(self.tcx()));
393 let cmt = return_if_err!(self.mc.cat_expr(expr));
394 self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
396 // Note: Unlike consume, we can ignore ExprParen. cat_expr
397 // already skips over them, and walk will uncover any
398 // attachments or whatever.
402 fn select_from_expr(&mut self, expr: &ast::Expr) {
406 pub fn walk_expr(&mut self, expr: &ast::Expr) {
407 debug!("walk_expr(expr={})", expr.repr(self.tcx()));
409 self.walk_adjustment(expr);
412 ast::ExprParen(ref subexpr) => {
413 self.walk_expr(&**subexpr)
416 ast::ExprPath(..) => { }
418 ast::ExprUnary(ast::UnDeref, ref base) => { // *base
419 if !self.walk_overloaded_operator(expr, &**base, Vec::new()) {
420 self.select_from_expr(&**base);
424 ast::ExprField(ref base, _) => { // base.f
425 self.select_from_expr(&**base);
428 ast::ExprTupField(ref base, _) => { // base.<n>
429 self.select_from_expr(&**base);
432 ast::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
433 if !self.walk_overloaded_operator(expr, &**lhs, vec![&**rhs]) {
434 self.select_from_expr(&**lhs);
435 self.consume_expr(&**rhs);
439 ast::ExprSlice(ref base, ref start, ref end, _) => { // base[start..end]
440 let args = match (start, end) {
441 (&Some(ref e1), &Some(ref e2)) => vec![&**e1, &**e2],
442 (&Some(ref e), &None) => vec![&**e],
443 (&None, &Some(ref e)) => vec![&**e],
444 (&None, &None) => Vec::new()
446 let overloaded = self.walk_overloaded_operator(expr, &**base, args);
450 ast::ExprCall(ref callee, ref args) => { // callee(args)
451 self.walk_callee(expr, &**callee);
452 self.consume_exprs(args);
455 ast::ExprMethodCall(_, _, ref args) => { // callee.m(args)
456 self.consume_exprs(args);
459 ast::ExprStruct(_, ref fields, ref opt_with) => {
460 self.walk_struct_expr(expr, fields, opt_with);
463 ast::ExprTup(ref exprs) => {
464 self.consume_exprs(exprs);
467 ast::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
468 self.consume_expr(&**cond_expr);
469 self.walk_block(&**then_blk);
470 for else_expr in opt_else_expr.iter() {
471 self.consume_expr(&**else_expr);
475 ast::ExprIfLet(..) => {
476 self.tcx().sess.span_bug(expr.span, "non-desugared ExprIfLet");
479 ast::ExprMatch(ref discr, ref arms, _) => {
480 let discr_cmt = return_if_err!(self.mc.cat_expr(&**discr));
481 self.borrow_expr(&**discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);
483 // treatment of the discriminant is handled while walking the arms.
484 for arm in arms.iter() {
485 let mode = self.arm_move_mode(discr_cmt.clone(), arm);
486 let mode = mode.match_mode();
487 self.walk_arm(discr_cmt.clone(), arm, mode);
491 ast::ExprVec(ref exprs) => {
492 self.consume_exprs(exprs);
495 ast::ExprAddrOf(m, ref base) => { // &base
496 // make sure that the thing we are pointing out stays valid
497 // for the lifetime `scope_r` of the resulting ptr:
498 let expr_ty = ty::expr_ty(self.tcx(), expr);
499 let r = ty::ty_region(self.tcx(), expr.span, expr_ty);
500 let bk = ty::BorrowKind::from_mutbl(m);
501 self.borrow_expr(&**base, r, bk, AddrOf);
504 ast::ExprInlineAsm(ref ia) => {
505 for &(_, ref input) in ia.inputs.iter() {
506 self.consume_expr(&**input);
509 for &(_, ref output, is_rw) in ia.outputs.iter() {
510 self.mutate_expr(expr, &**output,
511 if is_rw { WriteAndRead } else { JustWrite });
517 ast::ExprLit(..) => {}
519 ast::ExprLoop(ref blk, _) => {
520 self.walk_block(&**blk);
523 ast::ExprWhile(ref cond_expr, ref blk, _) => {
524 self.consume_expr(&**cond_expr);
525 self.walk_block(&**blk);
528 ast::ExprWhileLet(..) => {
529 self.tcx().sess.span_bug(expr.span, "non-desugared ExprWhileLet");
532 ast::ExprForLoop(ref pat, ref head, ref blk, _) => {
533 // The pattern lives as long as the block.
534 debug!("walk_expr for loop case: blk id={}", blk.id);
535 self.consume_expr(&**head);
537 // Fetch the type of the value that the iteration yields to
538 // produce the pattern's categorized mutable type.
539 let pattern_type = return_if_err!(self.typer.node_ty(pat.id));
540 let blk_scope = region::CodeExtent::from_node_id(blk.id);
541 let pat_cmt = self.mc.cat_rvalue(pat.id,
543 ty::ReScope(blk_scope),
545 self.walk_irrefutable_pat(pat_cmt, &**pat);
547 self.walk_block(&**blk);
550 ast::ExprUnary(_, ref lhs) => {
551 if !self.walk_overloaded_operator(expr, &**lhs, Vec::new()) {
552 self.consume_expr(&**lhs);
556 ast::ExprBinary(_, ref lhs, ref rhs) => {
557 if !self.walk_overloaded_operator(expr, &**lhs, vec![&**rhs]) {
558 self.consume_expr(&**lhs);
559 self.consume_expr(&**rhs);
563 ast::ExprBlock(ref blk) => {
564 self.walk_block(&**blk);
567 ast::ExprRet(ref opt_expr) => {
568 for expr in opt_expr.iter() {
569 self.consume_expr(&**expr);
573 ast::ExprAssign(ref lhs, ref rhs) => {
574 self.mutate_expr(expr, &**lhs, JustWrite);
575 self.consume_expr(&**rhs);
578 ast::ExprCast(ref base, _) => {
579 self.consume_expr(&**base);
582 ast::ExprAssignOp(_, ref lhs, ref rhs) => {
583 // This will have to change if/when we support
584 // overloaded operators for `+=` and so forth.
585 self.mutate_expr(expr, &**lhs, WriteAndRead);
586 self.consume_expr(&**rhs);
589 ast::ExprRepeat(ref base, ref count) => {
590 self.consume_expr(&**base);
591 self.consume_expr(&**count);
594 ast::ExprClosure(..) |
595 ast::ExprProc(..) => {
596 self.walk_captures(expr)
599 ast::ExprBox(ref place, ref base) => {
600 self.consume_expr(&**place);
601 self.consume_expr(&**base);
604 ast::ExprMac(..) => {
605 self.tcx().sess.span_bug(
607 "macro expression remains after expansion");
612 fn walk_callee(&mut self, call: &ast::Expr, callee: &ast::Expr) {
613 let callee_ty = ty::expr_ty_adjusted(self.tcx(), callee);
614 debug!("walk_callee: callee={} callee_ty={}",
615 callee.repr(self.tcx()), callee_ty.repr(self.tcx()));
616 let call_scope = region::CodeExtent::from_node_id(call.id);
617 match callee_ty.sty {
618 ty::ty_bare_fn(..) => {
619 self.consume_expr(callee);
621 ty::ty_closure(ref f) => {
624 self.borrow_expr(callee,
625 ty::ReScope(call_scope),
630 self.consume_expr(callee);
635 let overloaded_call_type =
639 .get(&MethodCall::expr(call.id)) {
640 Some(ref method_callee) => {
641 OverloadedCallType::from_method_origin(
643 &method_callee.origin)
646 self.tcx().sess.span_bug(
648 format!("unexpected callee type {}",
649 callee_ty.repr(self.tcx())).as_slice())
652 match overloaded_call_type {
653 FnMutOverloadedCall => {
654 self.borrow_expr(callee,
655 ty::ReScope(call_scope),
659 FnOverloadedCall => {
660 self.borrow_expr(callee,
661 ty::ReScope(call_scope),
665 FnOnceOverloadedCall => self.consume_expr(callee),
671 fn walk_stmt(&mut self, stmt: &ast::Stmt) {
673 ast::StmtDecl(ref decl, _) => {
675 ast::DeclLocal(ref local) => {
676 self.walk_local(&**local);
679 ast::DeclItem(_) => {
680 // we don't visit nested items in this visitor,
681 // only the fn body we were given.
686 ast::StmtExpr(ref expr, _) |
687 ast::StmtSemi(ref expr, _) => {
688 self.consume_expr(&**expr);
691 ast::StmtMac(..) => {
692 self.tcx().sess.span_bug(stmt.span, "unexpanded stmt macro");
// Walks a `let` statement: bindings with no initializer are reported
// via decl_without_init; initialized bindings walk the initializer
// expression and then treat the pattern as an irrefutable match
// against the initializer's cmt. NOTE(review): the match on
// local.init and several closure lines are elided from this excerpt.
697 fn walk_local(&mut self, local: &ast::Local) {
700 let delegate = &mut self.delegate;
701 pat_util::pat_bindings(&self.typer.tcx().def_map, &*local.pat,
703 delegate.decl_without_init(id, span);
708 // Variable declarations with
709 // initializers are considered
710 // "assigns", which is handled by
712 self.walk_expr(&**expr);
713 let init_cmt = return_if_err!(self.mc.cat_expr(&**expr));
714 self.walk_irrefutable_pat(init_cmt, &*local.pat);
// Walks a block: each statement in order, then the optional tail
// expression, which is consumed (copied or moved per its type).
719 fn walk_block(&mut self, blk: &ast::Block) {
721 * Indicates that the value of `blk` will be consumed,
722 * meaning either copied or moved depending on its type.
725 debug!("walk_block(blk.id={})", blk.id);
727 for stmt in blk.stmts.iter() {
728 self.walk_stmt(&**stmt);
731 for tail_expr in blk.expr.iter() {
732 self.consume_expr(&**tail_expr);
// Walks a struct literal `S { f: e, .., ..base }`: consumes each
// explicit field initializer, then — when a `..base` expression is
// present — consumes only those fields of `base` that are not
// overridden by an explicit field, and finally walks `base` itself.
// NOTE(review): the `expr` parameter line, the None arm of the
// opt_with match, and several argument lines (original lines 737,
// 743-749, 757-764, 769-775) are elided from this excerpt.
736 fn walk_struct_expr(&mut self,
738 fields: &Vec<ast::Field>,
739 opt_with: &Option<P<ast::Expr>>) {
740 // Consume the expressions supplying values for each field.
741 for field in fields.iter() {
742 self.consume_expr(&*field.expr);
745 let with_expr = match *opt_with {
750 let with_cmt = return_if_err!(self.mc.cat_expr(&*with_expr));
752 // Select just those fields of the `with`
753 // expression that will actually be used
754 let with_fields = match with_cmt.ty.sty {
755 ty::ty_struct(did, ref substs) => {
756 ty::struct_fields(self.tcx(), did, substs)
759 self.tcx().sess.span_bug(
761 "with expression doesn't evaluate to a struct");
765 // Consume those fields of the with expression that are needed.
766 for with_field in with_fields.iter() {
767 if !contains_field_named(with_field, fields) {
768 let cmt_field = self.mc.cat_field(&*with_expr,
772 self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
776 // walk the with expression so that complex expressions
777 // are properly handled.
778 self.walk_expr(with_expr);
// Returns whether `fields` contains an explicit initializer with the
// same name as the struct field `field` (used above to skip fields of
// a `..base` expression that the literal overrides). NOTE(review):
// the return type and the `fields.iter().any(` line (original lines
// 782-784) are elided from this excerpt.
780 fn contains_field_named(field: &ty::field,
781 fields: &Vec<ast::Field>)
785 |f| f.ident.node.name == field.name)
789 // Invoke the appropriate delegate calls for anything that gets
790 // consumed or borrowed as part of the automatic adjustment
792 fn walk_adjustment(&mut self, expr: &ast::Expr) {
793 let typer = self.typer;
794 match typer.adjustments().borrow().get(&expr.id) {
796 Some(adjustment) => {
798 ty::AdjustAddEnv(..) => {
799 // Creating a closure consumes the input and stores it
800 // into the resulting rvalue.
801 debug!("walk_adjustment(AutoAddEnv)");
803 return_if_err!(self.mc.cat_expr_unadjusted(expr));
804 self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
806 ty::AdjustDerefRef(ty::AutoDerefRef {
807 autoref: ref opt_autoref,
810 self.walk_autoderefs(expr, n);
815 self.walk_autoref(expr, r, n);
824 fn walk_autoderefs(&mut self,
828 * Autoderefs for overloaded Deref calls in fact reference
829 * their receiver. That is, if we have `(*x)` where `x` is of
830 * type `Rc<T>`, then this in fact is equivalent to
831 * `x.deref()`. Since `deref()` is declared with `&self`, this
832 * is an autoref of `x`.
834 debug!("walk_autoderefs expr={} autoderefs={}", expr.repr(self.tcx()), autoderefs);
836 for i in range(0, autoderefs) {
837 let deref_id = typeck::MethodCall::autoderef(expr.id, i);
838 match self.typer.node_method_ty(deref_id) {
841 let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
842 let self_ty = ty::ty_fn_args(method_ty)[0];
843 let (m, r) = match self_ty.sty {
844 ty::ty_rptr(r, ref m) => (m.mutbl, r),
845 _ => self.tcx().sess.span_bug(expr.span,
846 format!("bad overloaded deref type {}",
847 method_ty.repr(self.tcx())).as_slice())
849 let bk = ty::BorrowKind::from_mutbl(m);
850 self.delegate.borrow(expr.id, expr.span, cmt,
857 fn walk_autoref(&mut self,
859 autoref: &ty::AutoRef,
861 debug!("walk_autoref expr={}", expr.repr(self.tcx()));
863 // Match for unique trait coercions first, since we don't need the
864 // call to cat_expr_autoderefd.
866 ty::AutoUnsizeUniq(ty::UnsizeVtable(..)) |
867 ty::AutoUnsize(ty::UnsizeVtable(..)) => {
868 assert!(n == 1, format!("Expected exactly 1 deref with Uniq \
869 AutoRefs, found: {}", n));
871 return_if_err!(self.mc.cat_expr_unadjusted(expr));
872 self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
878 let cmt_derefd = return_if_err!(
879 self.mc.cat_expr_autoderefd(expr, n));
880 debug!("walk_adjustment: cmt_derefd={}",
881 cmt_derefd.repr(self.tcx()));
884 ty::AutoPtr(r, m, _) => {
885 self.delegate.borrow(expr.id,
889 ty::BorrowKind::from_mutbl(m),
892 ty::AutoUnsizeUniq(_) | ty::AutoUnsize(_) | ty::AutoUnsafe(..) => {}
896 fn walk_overloaded_operator(&mut self,
898 receiver: &ast::Expr,
899 rhs: Vec<&ast::Expr>)
902 if !self.typer.is_method_call(expr.id) {
906 self.walk_expr(receiver);
908 // Arguments (but not receivers) to overloaded operator
909 // methods are implicitly autoref'd which sadly does not use
910 // adjustments, so we must hardcode the borrow here.
912 let r = ty::ReScope(region::CodeExtent::from_node_id(expr.id));
913 let bk = ty::ImmBorrow;
915 for &arg in rhs.iter() {
916 self.borrow_expr(arg, r, bk, OverloadedOperator);
// Computes the aggregate match mode for a match arm by folding the
// move/copy/borrow mode of each of its alternative patterns into a
// TrackMatchMode accumulator (starting from Unknown).
921 fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm) -> TrackMatchMode<Span> {
922 let mut mode = Unknown;
923 for pat in arm.pats.iter() {
924 self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode);
// Walks one match arm using the precomputed `mode`: each alternative
// pattern, then the optional guard expression, then the arm body
// (which is consumed).
929 fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm, mode: MatchMode) {
930 for pat in arm.pats.iter() {
931 self.walk_pat(discr_cmt.clone(), &**pat, mode);
934 for guard in arm.guard.iter() {
935 self.consume_expr(&**guard);
938 self.consume_expr(&*arm.body);
941 /// Walks a pat that occurs in isolation (i.e. top-level of fn
942 /// arg or let binding). *Not* a match arm or nested pat.
943 fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &ast::Pat) {
944 let mut mode = Unknown;
// Two passes: first determine the pattern's overall mode, then walk
// the pattern issuing delegate callbacks with that mode.
945 self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
946 let mode = mode.match_mode();
947 self.walk_pat(cmt_discr, pat, mode);
950 /// Identifies any bindings within `pat` and accumulates within
951 /// `mode` whether the overall pattern/match structure is a move,
953 fn determine_pat_move_mode(&mut self,
954 cmt_discr: mc::cmt<'tcx>,
956 mode: &mut TrackMatchMode<Span>) {
957 debug!("determine_pat_move_mode cmt_discr={} pat={}", cmt_discr.repr(self.tcx()),
958 pat.repr(self.tcx()));
959 return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
960 let tcx = self.typer.tcx();
961 let def_map = &self.typer.tcx().def_map;
962 if pat_util::pat_is_binding(def_map, pat) {
964 ast::PatIdent(ast::BindByRef(_), _, _) =>
965 mode.lub(BorrowingMatch),
966 ast::PatIdent(ast::BindByValue(_), _, _) => {
967 match copy_or_move(tcx, cmt_pat.ty, PatBindingMove) {
968 Copy => mode.lub(CopyingMatch),
969 Move(_) => mode.lub(MovingMatch),
975 "binding pattern not an identifier");
982 /// The core driver for walking a pattern; `match_mode` must be
983 /// established up front, e.g. via `determine_pat_move_mode` (see
984 /// also `walk_irrefutable_pat` for patterns that stand alone).
985 fn walk_pat(&mut self,
986 cmt_discr: mc::cmt<'tcx>,
988 match_mode: MatchMode) {
989 debug!("walk_pat cmt_discr={} pat={}", cmt_discr.repr(self.tcx()),
990 pat.repr(self.tcx()));
993 let typer = self.typer;
994 let tcx = typer.tcx();
995 let def_map = &self.typer.tcx().def_map;
996 let delegate = &mut self.delegate;
998 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
999 if pat_util::pat_is_binding(def_map, pat) {
1000 let tcx = typer.tcx();
1002 debug!("binding cmt_pat={} pat={} match_mode={}",
1007 // pat_ty: the type of the binding being produced.
1008 let pat_ty = return_if_err!(typer.node_ty(pat.id));
1010 // Each match binding is effectively an assignment to the
1011 // binding being produced.
1012 let def = def_map.borrow()[pat.id].clone();
1013 match mc.cat_def(pat.id, pat.span, pat_ty, def) {
1014 Ok(binding_cmt) => {
1015 delegate.mutate(pat.id, pat.span, binding_cmt, Init);
1020 // It is also a borrow or copy/move of the value being matched.
1022 ast::PatIdent(ast::BindByRef(m), _, _) => {
1024 (ty::ty_region(tcx, pat.span, pat_ty),
1025 ty::BorrowKind::from_mutbl(m))
1027 delegate.borrow(pat.id, pat.span, cmt_pat,
1030 ast::PatIdent(ast::BindByValue(_), _, _) => {
1031 let mode = copy_or_move(typer.tcx(), cmt_pat.ty, PatBindingMove);
1032 debug!("walk_pat binding consuming pat");
1033 delegate.consume_pat(pat, cmt_pat, mode);
1036 typer.tcx().sess.span_bug(
1038 "binding pattern not an identifier");
1043 ast::PatVec(_, Some(ref slice_pat), _) => {
1044 // The `slice_pat` here creates a slice into
1045 // the original vector. This is effectively a
1046 // borrow of the elements of the vector being
1049 let (slice_cmt, slice_mutbl, slice_r) = {
1050 match mc.cat_slice_pattern(cmt_pat, &**slice_pat) {
1053 tcx.sess.span_bug(slice_pat.span,
1059 // Note: We declare here that the borrow
1060 // occurs upon entering the `[...]`
1061 // pattern. This implies that something like
1062 // `[a, ..b]` where `a` is a move is illegal,
1063 // because the borrow is already in effect.
1064 // In fact such a move would be safe-ish, but
1065 // it effectively *requires* that we use the
1066 // nulling out semantics to indicate when a
1067 // value has been moved, which we are trying
1068 // to move away from. Otherwise, how can we
1069 // indicate that the first element in the
1070 // vector has been moved? Eventually, we
1071 // could perhaps modify this rule to permit
1072 // `[..a, b]` where `b` is a move, because in
1073 // that case we can adjust the length of the
1074 // original vec accordingly, but we'd have to
1075 // make trans do the right thing, and it would
1076 // only work for `~` vectors. It seems simpler
1077 // to just require that people call
1078 // `vec.pop()` or `vec.unshift()`.
1079 let slice_bk = ty::BorrowKind::from_mutbl(slice_mutbl);
1080 delegate.borrow(pat.id, pat.span,
1082 slice_bk, RefBinding);
1089 // Do a second pass over the pattern, calling `matched_pat` on
1090 // the interior nodes (enum variants and structs), as opposed
1091 // to the above loop's visit of the bindings that form
1092 // the leaves of the pattern tree structure.
1093 return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
1094 let def_map = def_map.borrow();
1095 let tcx = typer.tcx();
1098 ast::PatEnum(_, _) | ast::PatIdent(_, _, None) | ast::PatStruct(..) => {
1099 match def_map.get(&pat.id) {
1101 // no definition found: pat is not a
1102 // struct or enum pattern.
1105 Some(&def::DefVariant(enum_did, variant_did, _is_struct)) => {
1107 if ty::enum_is_univariant(tcx, enum_did) {
1110 let cmt_pat_ty = cmt_pat.ty;
1111 mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
1114 debug!("variant downcast_cmt={} pat={}",
1115 downcast_cmt.repr(tcx),
1118 delegate.matched_pat(pat, downcast_cmt, match_mode);
1121 Some(&def::DefStruct(..)) | Some(&def::DefTy(_, false)) => {
1122 // A struct (in either the value or type
1123 // namespace; we encounter the former on
1124 // e.g. patterns for unit structs).
1126 debug!("struct cmt_pat={} pat={}",
1130 delegate.matched_pat(pat, cmt_pat, match_mode);
1133 Some(&def::DefConst(..)) |
1134 Some(&def::DefLocal(..)) => {
1135 // This is a leaf (i.e. identifier binding
1136 // or constant value to match); thus no
1137 // `matched_pat` call.
1140 Some(def @ &def::DefTy(_, true)) => {
1141 // An enum's type -- should never be in a
1144 let msg = format!("Pattern has unexpected type: {}", def);
1145 tcx.sess.span_bug(pat.span, msg.as_slice())
1149 // Remaining cases are e.g. DefFn, to
1150 // which identifiers within patterns
1151 // should not resolve.
1153 let msg = format!("Pattern has unexpected def: {}", def);
1154 tcx.sess.span_bug(pat.span, msg.as_slice())
1159 ast::PatIdent(_, _, Some(_)) => {
1160 // Do nothing; this is a binding (not a enum
1161 // variant or struct), and the cat_pattern call
1162 // will visit the substructure recursively.
1165 ast::PatWild(_) | ast::PatTup(..) | ast::PatBox(..) |
1166 ast::PatRegion(..) | ast::PatLit(..) | ast::PatRange(..) |
1167 ast::PatVec(..) | ast::PatMac(..) => {
1168 // Similarly, each of these cases does not
1169 // correspond to a enum variant or struct, so we
1170 // do not do any `matched_pat` calls for these
1177 fn walk_captures(&mut self, closure_expr: &ast::Expr) {
1178 debug!("walk_captures({})", closure_expr.repr(self.tcx()));
1180 let tcx = self.typer.tcx();
1181 ty::with_freevars(tcx, closure_expr.id, |freevars| {
1182 match self.tcx().capture_mode(closure_expr.id) {
1183 ast::CaptureByRef => {
1184 self.walk_by_ref_captures(closure_expr, freevars);
1186 ast::CaptureByValue => {
1187 self.walk_by_value_captures(closure_expr, freevars);
1193 fn walk_by_ref_captures(&mut self,
1194 closure_expr: &ast::Expr,
1195 freevars: &[ty::Freevar]) {
1196 for freevar in freevars.iter() {
1197 let id_var = freevar.def.def_id().node;
1198 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1202 // Lookup the kind of borrow the callee requires, as
1203 // inferred by regionck
1204 let upvar_id = ty::UpvarId { var_id: id_var,
1205 closure_expr_id: closure_expr.id };
1206 let upvar_borrow = self.tcx().upvar_borrow_map.borrow()[upvar_id].clone();
1208 self.delegate.borrow(closure_expr.id,
1211 upvar_borrow.region,
1213 ClosureCapture(freevar.span));
// For a by-value (moving) closure, each free variable is consumed —
// copied or moved per its type, with CaptureMove as the move reason —
// at the closure expression. NOTE(review): the remaining arguments to
// cat_captured_var (original lines 1222-1223) are elided from this
// excerpt.
1217 fn walk_by_value_captures(&mut self,
1218 closure_expr: &ast::Expr,
1219 freevars: &[ty::Freevar]) {
1220 for freevar in freevars.iter() {
1221 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1224 let mode = copy_or_move(self.tcx(), cmt_var.ty, CaptureMove);
1225 self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
// Builds the cmt for a captured variable as seen from the enclosing
// (caller's) scope, by looking up the variable's type and categorizing
// its definition. Errors from node_ty are propagated via try!.
// NOTE(review): the `closure_span` parameter declaration (original
// line 1231) is elided from this excerpt.
1229 fn cat_captured_var(&mut self,
1230 closure_id: ast::NodeId,
1232 upvar_def: def::Def)
1233 -> mc::McResult<mc::cmt<'tcx>> {
1234 // Create the cmt for the variable being borrowed, from the
1235 // caller's perspective
1236 let var_id = upvar_def.def_id().node;
1237 let var_ty = try!(self.typer.node_ty(var_id));
1238 self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
// Classifies a use of a value of type `ty`: Move (with the supplied
// reason) when the type moves by default, otherwise Copy.
// NOTE(review): the closing brace falls past the end of this excerpt.
1242 fn copy_or_move<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>,
1243 move_reason: MoveReason) -> ConsumeMode {
1244 if ty::type_moves_by_default(tcx, ty) { Move(move_reason) } else { Copy }