1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Data structures used for tracking moves. Please see the extensive
12 //! comments in the section "Moves and initialization" in `doc.rs`.
14 pub use self::MoveKind::*;
17 use rustc::middle::cfg;
18 use rustc::middle::dataflow::DataFlowContext;
19 use rustc::middle::dataflow::BitwiseOperator;
20 use rustc::middle::dataflow::DataFlowOperator;
21 use rustc::middle::expr_use_visitor as euv;
22 use rustc::middle::mem_categorization as mc;
23 use rustc::middle::ty;
24 use rustc::util::nodemap::{FnvHashMap, NodeSet};
25 use rustc::util::ppaux::Repr;
26 use std::cell::RefCell;
31 use syntax::codemap::Span;
33 #[path="fragments.rs"]
// Central repository of move/assignment facts gathered for one fn body.
// Every field is wrapped in RefCell so the tables can be grown through a
// shared `&MoveData` (see the `&self` mutator methods further below).
// NOTE(review): the struct's closing brace is on a line not visible in this
// excerpt.
36 pub struct MoveData<'tcx> {
37 /// Move paths. See section "Move paths" in `doc.rs`.
38 pub paths: RefCell<Vec<MovePath<'tcx>>>,
40 /// Cache of loan path to move path index, for easy lookup.
41 pub path_map: RefCell<FnvHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,
43 /// Each move or uninitialized variable gets an entry here.
44 pub moves: RefCell<Vec<Move>>,
46 /// Assignments to a variable, like `x = foo`. These are assigned
47 /// bits for dataflow, since we must track them to ensure that
48 /// immutable variables are assigned at most once along each path.
49 pub var_assignments: RefCell<Vec<Assignment>>,
51 /// Assignments to a path, like `x.f = foo`. These are not
52 /// assigned dataflow bits, but we track them because they still
54 pub path_assignments: RefCell<Vec<Assignment>>,
56 /// Enum variant matched within a pattern on some match arm, like
57 /// `SomeStruct{ f: Variant1(x, y) } => ...`
58 pub variant_matches: RefCell<Vec<VariantMatch>>,
60 /// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
61 pub assignee_ids: RefCell<NodeSet>,
63 /// Path-fragments from moves in to or out of parts of structured data.
64 pub fragments: RefCell<fragments::FragmentSets>,
// The raw `MoveData` tables plus the two dataflow results computed from
// them in `FlowedMoveData::new` (see below): one flow for moves, one for
// variable assignments.
67 pub struct FlowedMoveData<'a, 'tcx: 'a> {
68 pub move_data: MoveData<'tcx>,
70 pub dfcx_moves: MoveDataFlow<'a, 'tcx>,
72 // We could (and maybe should, for efficiency) combine both move
73 // and assign data flow into one, but this way it's easier to
74 // distinguish the bits that correspond to moves and assignments.
75 pub dfcx_assign: AssignDataFlow<'a, 'tcx>
78 /// Index into `MoveData.paths`, used like a pointer
79 #[deriving(Copy, PartialEq, Eq, PartialOrd, Ord, Show)]
80 pub struct MovePathIndex(uint);
// Unwraps the newtype to the raw vector index.
// NOTE(review): the enclosing `impl MovePathIndex {` header is on a line
// not visible in this excerpt.
83 fn get(&self) -> uint {
84 let MovePathIndex(v) = *self; v
// Manual Clone impl (delegates to `get`), rather than a derive.
88 impl Clone for MovePathIndex {
89 fn clone(&self) -> MovePathIndex {
90 MovePathIndex(self.get())
// Sentinel index: `uint::MAX` marks "no entry" in the parent/child/sibling
// and first-move linked-list fields of `MovePath` (see that struct's docs).
94 #[allow(non_upper_case_globals)]
95 static InvalidMovePathIndex: MovePathIndex =
96 MovePathIndex(uint::MAX);
98 /// Index into `MoveData.moves`, used like a pointer
99 #[deriving(Copy, PartialEq)]
100 pub struct MoveIndex(uint);
// Unwraps the newtype to the raw vector index (impl header not visible here).
103 fn get(&self) -> uint {
104 let MoveIndex(v) = *self; v
// Sentinel move index: marks the end of a path's linked list of moves.
108 #[allow(non_upper_case_globals)]
109 static InvalidMoveIndex: MoveIndex =
110 MoveIndex(uint::MAX);
// One node in the forest of move paths. Parent/child/sibling links are
// indices into `MoveData.paths`, forming intrusive linked lists; the
// sentinels defined above mean "none".
112 pub struct MovePath<'tcx> {
113 /// Loan path corresponding to this move path
114 pub loan_path: Rc<LoanPath<'tcx>>,
116 /// Parent pointer, `InvalidMovePathIndex` if root
117 pub parent: MovePathIndex,
119 /// Head of linked list of moves to this path,
120 /// `InvalidMoveIndex` if not moved
121 pub first_move: MoveIndex,
123 /// First node in linked list of children, `InvalidMovePathIndex` if leaf
124 pub first_child: MovePathIndex,
126 /// Next node in linked list of parent's children (siblings),
127 /// `InvalidMovePathIndex` if none.
128 pub next_sibling: MovePathIndex,
// NOTE(review): the `pub enum MoveKind {` header, the `pub struct Move {`
// header, and several field declarations (the `id`, `kind`, and span fields
// whose doc comments appear below without a following field) are on lines
// not visible in this excerpt — confirm against the full source.
131 #[deriving(Copy, PartialEq, Show)]
133 Declared, // When declared, variables start out "moved".
134 MoveExpr, // Expression or binding that moves a variable
135 MovePat, // By-move binding
136 Captured // Closure creation that moves a value
141 /// Path being moved.
142 pub path: MovePathIndex,
144 /// id of node that is doing the move.
147 /// Kind of move, for error messages.
150 /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
151 pub next_move: MoveIndex
// Record of a single assignment; stored in `var_assignments` or
// `path_assignments` depending on whether the target is a whole variable.
155 pub struct Assignment {
156 /// Path being assigned.
157 pub path: MovePathIndex,
159 /// id where assignment occurs
162 /// span of node where assignment occurs
// Record of a pattern-match downcast of `base_path` to variant path `path`.
167 pub struct VariantMatch {
168 /// downcast to the variant.
169 pub path: MovePathIndex,
171 /// path being downcast to the variant.
172 pub base_path: MovePathIndex,
174 /// id where variant's pattern occurs
177 /// says if variant established by move (and why), by copy, or by borrow.
178 pub mode: euv::MatchMode
// Zero-sized operator types: they parameterize the shared DataFlowContext
// with the join/initial-value behavior implemented at the bottom of the file.
181 #[deriving(Clone, Copy)]
182 pub struct MoveDataFlowOperator;
184 pub type MoveDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, MoveDataFlowOperator>;
186 #[deriving(Clone, Copy)]
187 pub struct AssignDataFlowOperator;
189 pub type AssignDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, AssignDataFlowOperator>;
// Whether `loan_path` denotes a single, unique memory location; used by
// `kill_moves` to decide if an assignment may kill prior moves.
// NOTE(review): the bodies of the first two arms are on lines not visible in
// this excerpt — presumably `true` for variables and `false` for element
// accesses (per the comment on the element arm); confirm against full source.
191 fn loan_path_is_precise(loan_path: &LoanPath) -> bool {
192 match loan_path.kind {
193 LpVar(_) | LpUpvar(_) => {
196 LpExtend(_, _, LpInterior(mc::InteriorElement(_))) => {
197 // Paths involving element accesses do not refer to a unique
198 // location, as there is no accurate tracking of the indices.
// Downcasts/extensions are as precise as their base path.
201 LpDowncast(ref lp_base, _) |
202 LpExtend(ref lp_base, _, _) => {
203 loan_path_is_precise(&**lp_base)
208 impl<'tcx> MoveData<'tcx> {
// Constructs an empty MoveData with all tables empty.
209 pub fn new() -> MoveData<'tcx> {
211 paths: RefCell::new(Vec::new()),
212 path_map: RefCell::new(FnvHashMap::new()),
213 moves: RefCell::new(Vec::new()),
214 path_assignments: RefCell::new(Vec::new()),
215 var_assignments: RefCell::new(Vec::new()),
216 variant_matches: RefCell::new(Vec::new()),
217 assignee_ids: RefCell::new(NodeSet::new()),
218 fragments: RefCell::new(fragments::FragmentSets::new()),
// Type-safe accessors into `paths`; each borrows the RefCell only for the
// duration of the call. Indexing assumes `index` was produced by this
// MoveData, so it is in bounds.
222 pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
223 (*self.paths.borrow())[index.get()].loan_path.clone()
226 fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
227 (*self.paths.borrow())[index.get()].parent
230 fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
231 (*self.paths.borrow())[index.get()].first_move
234 /// Returns the index of first child, or `InvalidMovePathIndex` if
236 fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
237 (*self.paths.borrow())[index.get()].first_child
240 fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
241 (*self.paths.borrow())[index.get()].next_sibling
// Mutators for the intrusive linked-list heads; used when threading a new
// move/child onto the front of a path's list (see `move_path`/`add_move`).
244 fn set_path_first_move(&self,
245 index: MovePathIndex,
246 first_move: MoveIndex) {
247 (*self.paths.borrow_mut())[index.get()].first_move = first_move
250 fn set_path_first_child(&self,
251 index: MovePathIndex,
252 first_child: MovePathIndex) {
253 (*self.paths.borrow_mut())[index.get()].first_child = first_child
256 fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
257 //! Type safe indexing operator
258 (*self.moves.borrow())[index.get()].next_move
261 fn is_var_path(&self, index: MovePathIndex) -> bool {
262 //! True if `index` refers to a variable
263 self.path_parent(index) == InvalidMovePathIndex
266 /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
267 /// `lp` and any of its base paths that do not yet have an index.
268 pub fn move_path(&self,
269 tcx: &ty::ctxt<'tcx>,
270 lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
// Fast path: return the cached index if one exists. NOTE(review): the
// match arms of this lookup are on lines not visible in this excerpt.
271 match self.path_map.borrow().get(&lp) {
278 let index = match lp.kind {
// Roots (variables and upvars) get a fresh node with no parent.
279 LpVar(..) | LpUpvar(..) => {
280 let index = MovePathIndex(self.paths.borrow().len());
282 self.paths.borrow_mut().push(MovePath {
283 loan_path: lp.clone(),
284 parent: InvalidMovePathIndex,
285 first_move: InvalidMoveIndex,
286 first_child: InvalidMovePathIndex,
287 next_sibling: InvalidMovePathIndex,
// Extensions/downcasts: recursively intern the base path first, then
// splice the new node onto the front of the parent's child list.
293 LpDowncast(ref base, _) |
294 LpExtend(ref base, _, _) => {
295 let parent_index = self.move_path(tcx, base.clone());
297 let index = MovePathIndex(self.paths.borrow().len());
299 let next_sibling = self.path_first_child(parent_index);
300 self.set_path_first_child(parent_index, index);
302 self.paths.borrow_mut().push(MovePath {
303 loan_path: lp.clone(),
304 parent: parent_index,
305 first_move: InvalidMoveIndex,
306 first_child: InvalidMovePathIndex,
307 next_sibling: next_sibling,
314 debug!("move_path(lp={}, index={})",
// The new node must be the last one pushed, then cache it for reuse.
318 assert_eq!(index.get(), self.paths.borrow().len() - 1);
319 self.path_map.borrow_mut().insert(lp, index);
// Read-only lookup of `lp` in the cache; never creates a new path.
323 fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
324 -> Option<MovePathIndex> {
325 self.path_map.borrow().get(lp).cloned()
// Collects the indices of `lp` and all of its existing base paths.
328 fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
329 -> Vec<MovePathIndex> {
330 let mut result = vec!();
331 self.add_existing_base_paths(lp, &mut result);
335 /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
336 /// does not add new move paths
337 fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
338 result: &mut Vec<MovePathIndex>) {
// If `lp` itself is interned, walk its base-path chain via the index links;
// otherwise recurse structurally on the loan path. NOTE(review): the
// closure body and the None arm's match header are not visible here.
339 match self.path_map.borrow().get(lp).cloned() {
341 self.each_base_path(index, |p| {
348 LpVar(..) | LpUpvar(..) => { }
349 LpDowncast(ref b, _) |
350 LpExtend(ref b, _, _) => {
351 self.add_existing_base_paths(b, result);
359 /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
360 pub fn add_move(&self,
361 tcx: &ty::ctxt<'tcx>,
362 lp: Rc<LoanPath<'tcx>>,
365 debug!("add_move(lp={}, id={}, kind={})",
// Intern the path (creating it if needed); the new Move will occupy the
// next slot in `moves`.
370 let path_index = self.move_path(tcx, lp.clone());
371 let move_index = MoveIndex(self.moves.borrow().len());
373 self.fragments.borrow_mut().add_move(path_index);
// Thread the new move onto the front of the path's move list.
375 let next_move = self.path_first_move(path_index);
376 self.set_path_first_move(path_index, move_index);
378 self.moves.borrow_mut().push(Move {
386 /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
388 pub fn add_assignment(&self,
389 tcx: &ty::ctxt<'tcx>,
390 lp: Rc<LoanPath<'tcx>>,
391 assign_id: ast::NodeId,
393 assignee_id: ast::NodeId,
394 mode: euv::MutateMode) {
395 debug!("add_assignment(lp={}, assign_id={}, assignee_id={}",
396 lp.repr(tcx), assign_id, assignee_id);
398 let path_index = self.move_path(tcx, lp.clone());
400 self.fragments.borrow_mut().add_assignment(path_index);
// Pure writes (`x = ...`) record the assignee; read-modify-writes
// (`x += ...`) do not, matching the `assignee_ids` field docs above.
403 euv::Init | euv::JustWrite => {
404 self.assignee_ids.borrow_mut().insert(assignee_id);
406 euv::WriteAndRead => { }
409 let assignment = Assignment {
// Whole-variable assignments get dataflow bits (var_assignments); partial
// path assignments are only tracked (path_assignments).
415 if self.is_var_path(path_index) {
416 debug!("add_assignment[var](lp={}, assignment={}, path_index={})",
417 lp.repr(tcx), self.var_assignments.borrow().len(), path_index);
419 self.var_assignments.borrow_mut().push(assignment);
421 debug!("add_assignment[path](lp={}, path_index={})",
422 lp.repr(tcx), path_index);
424 self.path_assignments.borrow_mut().push(assignment);
428 /// Adds a new record for a match of `base_lp`, downcast to
429 /// variant `lp`, that occurs at location `pattern_id`. (One
430 /// should be able to recover the span info from the
431 /// `pattern_id` and the ast_map, I think.)
432 pub fn add_variant_match(&self,
433 tcx: &ty::ctxt<'tcx>,
434 lp: Rc<LoanPath<'tcx>>,
435 pattern_id: ast::NodeId,
436 base_lp: Rc<LoanPath<'tcx>>,
437 mode: euv::MatchMode) {
438 debug!("add_variant_match(lp={}, pattern_id={})",
439 lp.repr(tcx), pattern_id);
// Intern both the downcast path and its base path.
441 let path_index = self.move_path(tcx, lp.clone());
442 let base_path_index = self.move_path(tcx, base_lp.clone());
// A variant match is treated as an assignment for fragment tracking.
444 self.fragments.borrow_mut().add_assignment(path_index);
446 let variant_match = VariantMatch {
448 base_path: base_path_index,
453 self.variant_matches.borrow_mut().push(variant_match);
// Delegates to the fragments module to normalize the fragment sets.
456 fn fixup_fragment_sets(&self, tcx: &ty::ctxt<'tcx>) {
457 fragments::fixup_fragment_sets(self, tcx)
460 /// Adds the gen/kills for the various moves and
461 /// assignments into the provided data flow contexts.
462 /// Moves are generated by moves and killed by assignments and
463 /// scoping. Assignments are generated by assignment to variables and
464 /// killed by scoping. See `doc.rs` for more details.
465 fn add_gen_kills(&self,
466 tcx: &ty::ctxt<'tcx>,
467 dfcx_moves: &mut MoveDataFlow,
468 dfcx_assign: &mut AssignDataFlow) {
// Each move generates its own dataflow bit at the node that performs it.
469 for (i, the_move) in self.moves.borrow().iter().enumerate() {
470 dfcx_moves.add_gen(the_move.id, i);
// Variable assignments both generate an assign bit and kill prior moves.
473 for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
474 dfcx_assign.add_gen(assignment.id, i);
475 self.kill_moves(assignment.path, assignment.id, dfcx_moves);
478 for assignment in self.path_assignments.borrow().iter() {
479 self.kill_moves(assignment.path, assignment.id, dfcx_moves);
482 // Kill all moves related to a variable `x` when
483 // it goes out of scope:
484 for path in self.paths.borrow().iter() {
485 match path.loan_path.kind {
486 LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
487 let kill_scope = path.loan_path.kill_scope(tcx);
// Map indexing panics if absent; every entry in `paths` was inserted via
// `move_path`, which also populates `path_map`, so the key must exist.
488 let path = self.path_map.borrow()[path.loan_path];
489 self.kill_moves(path, kill_scope.node_id(), dfcx_moves);
495 // Kill all assignments when the variable goes out of scope:
496 for (assignment_index, assignment) in
497 self.var_assignments.borrow().iter().enumerate() {
498 let lp = self.path_loan_path(assignment.path);
500 LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
501 let kill_scope = lp.kill_scope(tcx);
502 dfcx_assign.add_kill(kill_scope.node_id(), assignment_index);
// Invariant violation: var_assignments should only ever hold var paths.
505 tcx.sess.bug("var assignment for non var path");
// Walks `index` and then each ancestor via the parent links, invoking `f`
// until it returns false or the root is passed. NOTE(review): the loop body
// that calls `f` is on lines not visible in this excerpt.
511 fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
512 F: FnMut(MovePathIndex) -> bool,
515 while p != InvalidMovePathIndex {
519 p = self.path_parent(p);
524 // FIXME(#19596) This is a workaround, but there should be better way to do this
// Recursive worker: visits `index` and every descendant path (children and
// their siblings), short-circuiting when `f` returns false.
525 fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
526 F: FnMut(MovePathIndex) -> bool,
532 let mut p = self.path_first_child(index);
533 while p != InvalidMovePathIndex {
534 if !self.each_extending_path_(p, f) {
537 p = self.path_next_sibling(p);
// Public wrapper over the worker, taking `f` by value (see FIXME above).
543 fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
544 F: FnMut(MovePathIndex) -> bool,
546 self.each_extending_path_(index, &mut f)
// Visits every move of `index0` or of any path extending it, by chasing
// each extending path's linked list of moves.
549 fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
550 F: FnMut(MoveIndex) -> bool,
553 self.each_extending_path(index0, |index| {
554 let mut p = self.path_first_move(index);
555 while p != InvalidMoveIndex {
560 p = self.move_next_move(p);
// NOTE(review): the `fn kill_moves(...)` header and its `path` parameter
// are on lines not visible in this excerpt; only the trailing parameters
// and body appear below.
569 kill_id: ast::NodeId,
570 dfcx_moves: &mut MoveDataFlow) {
571 // We can only perform kills for paths that refer to a unique location,
572 // since otherwise we may kill a move from one location with an
573 // assignment referring to another location.
575 let loan_path = self.path_loan_path(path);
576 if loan_path_is_precise(&*loan_path) {
577 self.each_applicable_move(path, |move_index| {
578 dfcx_moves.add_kill(kill_id, move_index.get());
585 impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> {
// Builds the two dataflow contexts (one bit per move, one per variable
// assignment), seeds them with gen/kill sets from `move_data`, and
// propagates to a fixed point over the CFG. NOTE(review): several
// parameters (e.g. the `cfg` and `body` used below) and some constructor
// arguments are on lines not visible in this excerpt.
586 pub fn new(move_data: MoveData<'tcx>,
587 tcx: &'a ty::ctxt<'tcx>,
589 id_range: ast_util::IdRange,
592 -> FlowedMoveData<'a, 'tcx> {
594 DataFlowContext::new(tcx,
595 "flowed_move_data_moves",
598 MoveDataFlowOperator,
// One dataflow bit per recorded move.
600 move_data.moves.borrow().len());
601 let mut dfcx_assign =
602 DataFlowContext::new(tcx,
603 "flowed_move_data_assigns",
606 AssignDataFlowOperator,
// One dataflow bit per variable assignment (path assignments get no bits).
608 move_data.var_assignments.borrow().len());
// Normalize fragment sets before computing gen/kill information.
610 move_data.fixup_fragment_sets(tcx);
612 move_data.add_gen_kills(tcx,
616 dfcx_moves.add_kills_from_flow_exits(cfg);
617 dfcx_assign.add_kills_from_flow_exits(cfg);
619 dfcx_moves.propagate(cfg, body);
620 dfcx_assign.propagate(cfg, body);
623 move_data: move_data,
624 dfcx_moves: dfcx_moves,
625 dfcx_assign: dfcx_assign,
629 pub fn kind_of_move_of_path(&self,
631 loan_path: &Rc<LoanPath<'tcx>>)
632 -> Option<MoveKind> {
633 //! Returns the kind of a move of `loan_path` by `id`, if one exists.
// Iterating over the Option from path_map: runs at most once, only when
// `loan_path` has an interned move path index.
636 for loan_path_index in self.move_data.path_map.borrow().get(&*loan_path).iter() {
// Scan the gen bits at `id` for a move whose path matches.
637 self.dfcx_moves.each_gen_bit(id, |move_index| {
638 let the_move = self.move_data.moves.borrow();
639 let the_move = (*the_move)[move_index];
640 if the_move.path == **loan_path_index {
641 ret = Some(the_move.kind);
651 /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
652 /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
653 /// that would invalidate a reference to `loan_path` at location `id`.
654 pub fn each_move_of<F>(&self,
656 loan_path: &Rc<LoanPath<'tcx>>,
659 F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
// Scenarios considered (1-3 must invoke `f`; 4 must not):
663 // 1. Move of `a.b.c`, use of `a.b.c`
664 // 2. Move of `a.b.c`, use of `a.b.c.d`
665 // 3. Move of `a.b.c`, use of `a` or `a.b`
669 // 4. move of `a.b.c`, use of `a.b.d`
671 let base_indices = self.move_data.existing_base_paths(loan_path);
// No interned base paths means nothing can have been moved.
672 if base_indices.is_empty() {
676 let opt_loan_path_index = self.move_data.existing_move_path(loan_path);
// Walk every move bit live on entry to `id`.
680 self.dfcx_moves.each_bit_on_entry(id, |index| {
681 let the_move = self.move_data.moves.borrow();
682 let the_move = &(*the_move)[index];
683 let moved_path = the_move.path;
684 if base_indices.iter().any(|x| x == &moved_path) {
685 // Scenario 1 or 2: `loan_path` or some base path of
686 // `loan_path` was moved.
687 if !f(the_move, &*self.move_data.path_loan_path(moved_path)) {
// Otherwise check whether the moved path extends `loan_path`
// (the Option iterates at most once).
691 for &loan_path_index in opt_loan_path_index.iter() {
692 let cont = self.move_data.each_base_path(moved_path, |p| {
693 if p == loan_path_index {
694 // Scenario 3: some extension of `loan_path`
697 &*self.move_data.path_loan_path(moved_path))
702 if !cont { ret = false; break }
709 /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
710 /// `loan_path` must be a single variable.
711 pub fn each_assignment_of<F>(&self,
713 loan_path: &Rc<LoanPath<'tcx>>,
716 F: FnMut(&Assignment) -> bool,
717 // NOTE(review): the early-return taken when `loan_path` has no move path
718 // index is on lines not visible in this excerpt.
718 let loan_path_index = {
719 match self.move_data.existing_move_path(loan_path) {
722 // if there were any assignments, it'd have an index
// Walk every assignment bit live on entry to `id` and report those
// targeting this exact path; stop early when `f` returns false.
728 self.dfcx_assign.each_bit_on_entry(id, |index| {
729 let assignment = self.move_data.var_assignments.borrow();
730 let assignment = &(*assignment)[index];
731 if assignment.path == loan_path_index && !f(assignment) {
// Both dataflows are "may" analyses: a bit is set if it is set on ANY
// predecessor (union join), and nothing is live initially.
740 impl BitwiseOperator for MoveDataFlowOperator {
742 fn join(&self, succ: uint, pred: uint) -> uint {
743 succ | pred // moves from both preds are in scope
747 impl DataFlowOperator for MoveDataFlowOperator {
749 fn initial_value(&self) -> bool {
750 false // no loans in scope by default
754 impl BitwiseOperator for AssignDataFlowOperator {
756 fn join(&self, succ: uint, pred: uint) -> uint {
757 succ | pred // assignments from both preds are in scope
761 impl DataFlowOperator for AssignDataFlowOperator {
763 fn initial_value(&self) -> bool {
764 false // no assignments in scope by default