1 //! Data structures used for tracking moves. Please see the extensive
2 //! comments in the section "Moves and initialization" in `README.md`.
6 use crate::dataflow::{DataFlowContext, BitwiseOperator, DataFlowOperator, KillFrom};
8 use crate::borrowck::*;
10 use rustc::ty::{self, TyCtxt};
11 use rustc::util::nodemap::FxHashMap;
13 use std::cell::RefCell;
// NOTE(review): every field is wrapped in `RefCell` so that query methods
// taking `&self` (e.g. `move_path`, `add_move`) can still append entries.
21 pub struct MoveData<'tcx> {
22 /// Move paths. See section "Move paths" in `README.md`.
23 pub paths: RefCell<Vec<MovePath<'tcx>>>,
25 /// Cache of loan path to move path index, for easy lookup.
26 pub path_map: RefCell<FxHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,
28 /// Each move or uninitialized variable gets an entry here.
29 pub moves: RefCell<Vec<Move>>,
31 /// Assignments to a variable, like `x = foo`. These are assigned
32 /// bits for dataflow, since we must track them to ensure that
33 /// immutable variables are assigned at most once along each path.
34 pub var_assignments: RefCell<Vec<Assignment>>,
36 /// Assignments to a path, like `x.f = foo`. These are not
37 /// assigned dataflow bits, but we track them because they still
// (doc comment continues in elided lines; `add_gen_kills` below uses these
// entries only to kill earlier moves of the same path)
39 pub path_assignments: RefCell<Vec<Assignment>>,
// Pairs the raw `MoveData` with the two dataflow results computed from it
// (see `FlowedMoveData::new` near the bottom of the file).
42 pub struct FlowedMoveData<'tcx> {
43 pub move_data: MoveData<'tcx>,
45 pub dfcx_moves: MoveDataFlow<'tcx>,
47 // We could (and maybe should, for efficiency) combine both move
48 // and assign data flow into one, but this way it's easier to
49 // distinguish the bits that correspond to moves and assignments.
50 pub dfcx_assign: AssignDataFlow<'tcx>,
53 /// Index into `MoveData.paths`, used like a pointer
54 #[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
55 pub struct MovePathIndex(usize);
// Extracts the raw vector index.
58 fn get(&self) -> usize {
59 let MovePathIndex(v) = *self; v
// NOTE(review): `Clone` is implemented by hand rather than derived here;
// deriving it would be equivalent since the type is `Copy`.
63 impl Clone for MovePathIndex {
64 fn clone(&self) -> MovePathIndex {
65 MovePathIndex(self.get())
// Sentinel: `usize::MAX` marks "no path" (root's parent, a leaf's first
// child, or the end of a sibling list).
69 #[allow(non_upper_case_globals)]
70 const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX);
72 /// Index into `MoveData.moves`, used like a pointer
73 #[derive(Copy, Clone, PartialEq)]
74 pub struct MoveIndex(usize);
// Extracts the raw vector index.
77 fn get(&self) -> usize {
78 let MoveIndex(v) = *self; v
// Sentinel: `usize::MAX` marks "no move" (end of a path's move list).
82 #[allow(non_upper_case_globals)]
83 const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX);
// A node in the forest of move paths. Parent/child/sibling links are
// indices into `MoveData.paths`; `first_move` indexes `MoveData.moves`.
// The links form intrusive linked lists, with the sentinel constants
// above standing in for "null".
85 pub struct MovePath<'tcx> {
86 /// Loan path corresponding to this move path
87 pub loan_path: Rc<LoanPath<'tcx>>,
89 /// Parent pointer, `InvalidMovePathIndex` if root
90 pub parent: MovePathIndex,
92 /// Head of linked list of moves to this path,
93 /// `InvalidMoveIndex` if not moved
94 pub first_move: MoveIndex,
96 /// First node in linked list of children, `InvalidMovePathIndex` if leaf
97 pub first_child: MovePathIndex,
99 /// Next node in linked list of parent's children (siblings),
100 /// `InvalidMovePathIndex` if none.
101 pub next_sibling: MovePathIndex,
104 #[derive(Copy, Clone, PartialEq, Debug)]
// (enum header elided in this excerpt; presumably these are the `MoveKind`
// variants stored in `Move::kind` for error reporting -- TODO confirm)
106 Declared, // When declared, variables start out "moved".
107 MoveExpr, // Expression or binding that moves a variable
108 MovePat, // By-move binding
109 Captured // Closure creation that moves a value
112 #[derive(Copy, Clone)]
// (struct header elided in this excerpt; these are the fields of `Move`)
114 /// Path being moved.
115 pub path: MovePathIndex,
117 /// ID of node that is doing the move.
118 pub id: hir::ItemLocalId,
120 /// Kind of move, for error messages.
// (the `kind: MoveKind` field declaration itself is elided here)
123 /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
124 pub next_move: MoveIndex
127 #[derive(Copy, Clone)]
128 pub struct Assignment {
129 /// Path being assigned.
130 pub path: MovePathIndex,
132 /// ID where assignment occurs
133 pub id: hir::ItemLocalId,
135 /// span of node where assignment occurs
// (the field declaration itself is elided in this excerpt)
// Zero-sized operator types supplying the bitwise join and initial value
// for `DataFlowContext` (trait impls are at the bottom of the file).
139 #[derive(Clone, Copy)]
140 pub struct MoveDataFlowOperator;
142 pub type MoveDataFlow<'tcx> = DataFlowContext<'tcx, MoveDataFlowOperator>;
144 #[derive(Clone, Copy)]
145 pub struct AssignDataFlowOperator;
147 pub type AssignDataFlow<'tcx> = DataFlowContext<'tcx, AssignDataFlowOperator>;
// Returns whether `loan_path` always denotes one unique memory location,
// so that a kill registered against it is sound (see `kill_moves`).
149 fn loan_path_is_precise(loan_path: &LoanPath<'_>) -> bool {
150 match loan_path.kind {
151 LpVar(_) | LpUpvar(_) => {
154 LpExtend(.., LpInterior(_, InteriorKind::InteriorElement)) => {
155 // Paths involving element accesses a[i] do not refer to a unique
156 // location, as there is no accurate tracking of the indices.
158 // (Paths involving element accesses via slice pattern bindings
159 // can in principle be tracked precisely, but that is future
160 // work. For now, continue claiming that they are imprecise.)
163 LpDowncast(ref lp_base, _) |
164 LpExtend(ref lp_base, ..) => {
// Precision of any other projection is inherited from its base path.
165 loan_path_is_precise(&lp_base)
170 impl MoveData<'tcx> {
171 /// Returns `true` if there are no trackable assignments or moves
172 /// in this move data -- that means that there is nothing that
173 /// could cause a borrow error.
174 pub fn is_empty(&self) -> bool {
175 self.moves.borrow().is_empty() &&
176 self.path_assignments.borrow().is_empty() &&
177 self.var_assignments.borrow().is_empty()
// The helpers below are type-safe indexing operators over the interior
// `RefCell` vectors; each borrows the cell only for the single lookup.
180 pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
181 (*self.paths.borrow())[index.get()].loan_path.clone()
184 fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
185 (*self.paths.borrow())[index.get()].parent
188 fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
189 (*self.paths.borrow())[index.get()].first_move
192 /// Returns the index of first child, or `InvalidMovePathIndex` if
/// the path is a leaf.
194 fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
195 (*self.paths.borrow())[index.get()].first_child
198 fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
199 (*self.paths.borrow())[index.get()].next_sibling
202 fn set_path_first_move(&self,
203 index: MovePathIndex,
204 first_move: MoveIndex) {
205 (*self.paths.borrow_mut())[index.get()].first_move = first_move
208 fn set_path_first_child(&self,
209 index: MovePathIndex,
210 first_child: MovePathIndex) {
211 (*self.paths.borrow_mut())[index.get()].first_child = first_child
214 fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
215 //! Type safe indexing operator
216 (*self.moves.borrow())[index.get()].next_move
219 fn is_var_path(&self, index: MovePathIndex) -> bool {
220 //! True if `index` refers to a variable
// Variables are the roots of the move-path forest, so their parent is
// the invalid sentinel.
221 self.path_parent(index) == InvalidMovePathIndex
224 /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
225 /// `lp` and any of its base paths that do not yet have an index.
226 pub fn move_path(&self, tcx: TyCtxt<'tcx, 'tcx>, lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
// Fast path: the loan path is already interned.
227 if let Some(&index) = self.path_map.borrow().get(&lp) {
231 let index = match lp.kind {
// Variables and upvars become roots: no parent, no children yet.
232 LpVar(..) | LpUpvar(..) => {
233 let index = MovePathIndex(self.paths.borrow().len());
235 self.paths.borrow_mut().push(MovePath {
236 loan_path: lp.clone(),
237 parent: InvalidMovePathIndex,
238 first_move: InvalidMoveIndex,
239 first_child: InvalidMovePathIndex,
240 next_sibling: InvalidMovePathIndex,
// Projections: intern the base path first (recursively), then splice
// the new node in at the head of the parent's child list.
246 LpDowncast(ref base, _) |
247 LpExtend(ref base, ..) => {
248 let parent_index = self.move_path(tcx, base.clone());
250 let index = MovePathIndex(self.paths.borrow().len());
252 let next_sibling = self.path_first_child(parent_index);
253 self.set_path_first_child(parent_index, index);
255 self.paths.borrow_mut().push(MovePath {
256 loan_path: lp.clone(),
257 parent: parent_index,
258 first_move: InvalidMoveIndex,
259 first_child: InvalidMovePathIndex,
267 debug!("move_path(lp={:?}, index={:?})",
// The entry we just created must be the last one pushed.
271 assert_eq!(index.get(), self.paths.borrow().len() - 1);
272 self.path_map.borrow_mut().insert(lp, index);
// Looks up `lp` in the intern map without creating a new entry.
276 fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
277 -> Option<MovePathIndex> {
278 self.path_map.borrow().get(lp).cloned()
// Collects the indices of `lp` and all of its base paths that already
// exist; the result is empty if none are interned.
281 fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
282 -> Vec<MovePathIndex> {
283 let mut result = vec![];
284 self.add_existing_base_paths(lp, &mut result);
288 /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
289 /// does not add new move paths
290 fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
291 result: &mut Vec<MovePathIndex>) {
292 match self.path_map.borrow().get(lp).cloned() {
// If `lp` itself is interned, walk its parent chain via index links.
294 self.each_base_path(index, |p| {
// Otherwise recurse structurally on the loan path to find interned bases.
301 LpVar(..) | LpUpvar(..) => { }
302 LpDowncast(ref b, _) |
303 LpExtend(ref b, ..) => {
304 self.add_existing_base_paths(b, result);
312 /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
315 tcx: TyCtxt<'tcx, 'tcx>,
316 orig_lp: Rc<LoanPath<'tcx>>,
317 id: hir::ItemLocalId,
320 // Moving one union field automatically moves all its fields. Also move siblings of
321 // all parent union fields, moves do not propagate upwards automatically.
322 let mut lp = orig_lp.clone();
// Walk up the chain of projections; at every union base, record a move
// of each *other* field of that union as well.
323 while let LpExtend(ref base_lp, mutbl, lp_elem) = lp.clone().kind {
324 if let (&ty::Adt(adt_def, _), LpInterior(opt_variant_id, interior))
325 = (&base_lp.ty.sty, lp_elem) {
326 if adt_def.is_union() {
327 for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
329 InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
330 if field != interior {
331 let sibling_lp_kind =
332 LpExtend(base_lp.clone(), mutbl, LpInterior(opt_variant_id, field));
// The sibling path's type uses `tcx.types.err` -- presumably the exact
// type does not matter for move tracking (cf. the same comment in
// `add_assignment`).
333 let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, tcx.types.err));
334 self.add_move_helper(tcx, sibling_lp, id, kind);
339 lp = base_lp.clone();
// Finally record the originally-requested move itself.
342 self.add_move_helper(tcx, orig_lp, id, kind);
347 tcx: TyCtxt<'tcx, 'tcx>,
348 lp: Rc<LoanPath<'tcx>>,
349 id: hir::ItemLocalId,
352 debug!("add_move(lp={:?}, id={:?}, kind={:?})",
// Intern the path, then push the new move at the head of the path's
// intrusive move list.
357 let path_index = self.move_path(tcx, lp);
358 let move_index = MoveIndex(self.moves.borrow().len());
360 let next_move = self.path_first_move(path_index);
361 self.set_path_first_move(path_index, move_index);
363 self.moves.borrow_mut().push(Move {
371 /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
373 pub fn add_assignment(
375 tcx: TyCtxt<'tcx, 'tcx>,
376 lp: Rc<LoanPath<'tcx>>,
377 assign_id: hir::ItemLocalId,
380 // Assigning to one union field automatically assigns to all its fields.
381 if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind {
382 if let ty::Adt(adt_def, _) = base_lp.ty.sty {
383 if adt_def.is_union() {
// Record an assignment for every *other* field of the union too.
384 for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
386 InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
387 let field_ty = if field == interior {
390 tcx.types.err // Doesn't matter
392 let sibling_lp_kind = LpExtend(base_lp.clone(), mutbl,
393 LpInterior(opt_variant_id, field));
394 let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty));
395 self.add_assignment_helper(tcx, sibling_lp, assign_id,
// Record the assignment for `lp` itself (union field or not).
403 self.add_assignment_helper(tcx, lp, assign_id, span);
406 fn add_assignment_helper(
408 tcx: TyCtxt<'tcx, 'tcx>,
409 lp: Rc<LoanPath<'tcx>>,
410 assign_id: hir::ItemLocalId,
413 debug!("add_assignment(lp={:?}, assign_id={:?}", lp, assign_id);
415 let path_index = self.move_path(tcx, lp.clone());
417 let assignment = Assignment {
// Variable assignments get dataflow bits (`var_assignments`); assignments
// to projections are only recorded in `path_assignments`.
423 if self.is_var_path(path_index) {
424 debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
425 lp, self.var_assignments.borrow().len(), path_index);
427 self.var_assignments.borrow_mut().push(assignment);
429 debug!("add_assignment[path](lp={:?}, path_index={:?})",
432 self.path_assignments.borrow_mut().push(assignment);
436 /// Adds the gen/kills for the various moves and
437 /// assignments into the provided data flow contexts.
438 /// Moves are generated by moves and killed by assignments and
439 /// scoping. Assignments are generated by assignment to variables and
440 /// killed by scoping. See `README.md` for more details.
443 bccx: &BorrowckCtxt<'_, 'tcx>,
444 dfcx_moves: &mut MoveDataFlow<'_>,
445 dfcx_assign: &mut AssignDataFlow<'_>,
// Each move gens its own bit at the move site.
447 for (i, the_move) in self.moves.borrow().iter().enumerate() {
448 dfcx_moves.add_gen(the_move.id, i);
// A variable assignment gens its assignment bit and kills any earlier
// moves of the assigned path.
451 for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
452 dfcx_assign.add_gen(assignment.id, i);
453 self.kill_moves(assignment.path, assignment.id,
454 KillFrom::Execution, dfcx_moves);
// Path assignments have no gen bit but still kill prior moves.
457 for assignment in self.path_assignments.borrow().iter() {
458 self.kill_moves(assignment.path, assignment.id,
459 KillFrom::Execution, dfcx_moves);
462 // Kill all moves related to a variable `x` when
463 // it goes out of scope:
464 for path in self.paths.borrow().iter() {
465 match path.loan_path.kind {
466 LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
467 let kill_scope = path.loan_path.kill_scope(bccx);
468 let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
469 self.kill_moves(path, kill_scope.item_local_id(),
470 KillFrom::ScopeEnd, dfcx_moves);
476 // Kill all assignments when the variable goes out of scope:
477 for (assignment_index, assignment) in
478 self.var_assignments.borrow().iter().enumerate() {
479 let lp = self.path_loan_path(assignment.path);
481 LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
482 let kill_scope = lp.kill_scope(bccx);
483 dfcx_assign.add_kill(KillFrom::ScopeEnd,
484 kill_scope.item_local_id(),
// `var_assignments` entries must refer to variable paths; anything else
// is a compiler bug.
488 bug!("var assignment for non var path");
// Invokes `f` on `index` and each of its ancestors in turn, stopping
// early (and returning false) if `f` returns false.
494 fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
495 F: FnMut(MovePathIndex) -> bool,
498 while p != InvalidMovePathIndex {
502 p = self.path_parent(p);
507 // FIXME(#19596) This is a workaround, but there should be better way to do this
// Recursive worker: visits `index` and every path extending it,
// depth-first over the first-child/next-sibling links.
508 fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
509 F: FnMut(MovePathIndex) -> bool,
515 let mut p = self.path_first_child(index);
516 while p != InvalidMovePathIndex {
517 if !self.each_extending_path_(p, f) {
520 p = self.path_next_sibling(p);
// Public wrapper over the recursive worker above.
526 fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
527 F: FnMut(MovePathIndex) -> bool,
529 self.each_extending_path_(index, &mut f)
// Invokes `f` on every move of `index0` or of any path extending it.
532 fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
533 F: FnMut(MoveIndex) -> bool,
536 self.each_extending_path(index0, |index| {
537 let mut p = self.path_first_move(index);
538 while p != InvalidMoveIndex {
543 p = self.move_next_move(p);
// Registers a kill at `kill_id` for every applicable move of `path`.
553 kill_id: hir::ItemLocalId,
555 dfcx_moves: &mut MoveDataFlow<'_>,
557 // We can only perform kills for paths that refer to a unique location,
558 // since otherwise we may kill a move from one location with an
559 // assignment referring to another location.
561 let loan_path = self.path_loan_path(path);
562 if loan_path_is_precise(&loan_path) {
563 self.each_applicable_move(path, |move_index| {
564 debug!("kill_moves add_kill {:?} kill_id={:?} move_index={}",
565 kill_kind, kill_id, move_index.get());
566 dfcx_moves.add_kill(kill_kind, kill_id, move_index.get());
573 impl<'tcx> FlowedMoveData<'tcx> {
// Constructor: builds the move and assignment dataflow contexts, sizes
// them by the number of recorded moves / variable assignments, seeds
// them via `add_gen_kills`, and propagates both to a fixed point.
575 move_data: MoveData<'tcx>,
576 bccx: &BorrowckCtxt<'_, 'tcx>,
579 ) -> FlowedMoveData<'tcx> {
583 DataFlowContext::new(tcx,
584 "flowed_move_data_moves",
587 MoveDataFlowOperator,
// One dataflow bit per recorded move.
588 move_data.moves.borrow().len());
589 let mut dfcx_assign =
590 DataFlowContext::new(tcx,
591 "flowed_move_data_assigns",
594 AssignDataFlowOperator,
// One dataflow bit per variable assignment (path assignments get none).
595 move_data.var_assignments.borrow().len());
597 move_data.add_gen_kills(bccx,
601 dfcx_moves.add_kills_from_flow_exits(cfg);
602 dfcx_assign.add_kills_from_flow_exits(cfg);
604 dfcx_moves.propagate(cfg, body);
605 dfcx_assign.propagate(cfg, body);
614 pub fn kind_of_move_of_path(&self,
615 id: hir::ItemLocalId,
616 loan_path: &Rc<LoanPath<'tcx>>)
617 -> Option<MoveKind> {
618 //! Returns the kind of a move of `loan_path` by `id`, if one exists.
// Scan the gen bits at `id` for a move whose path matches exactly.
621 if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
622 self.dfcx_moves.each_gen_bit(id, |move_index| {
623 let the_move = self.move_data.moves.borrow();
624 let the_move = (*the_move)[move_index];
625 if the_move.path == *loan_path_index {
626 ret = Some(the_move.kind);
636 /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
637 /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
638 /// that would invalidate a reference to `loan_path` at location `id`.
639 pub fn each_move_of<F>(&self,
640 id: hir::ItemLocalId,
641 loan_path: &Rc<LoanPath<'tcx>>,
644 F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
// Scenarios in which the move invalidates the use:
648 // 1. Move of `a.b.c`, use of `a.b.c`
649 // 2. Move of `a.b.c`, use of `a.b.c.d`
650 // 3. Move of `a.b.c`, use of `a` or `a.b`
// Scenario that is NOT reported (disjoint sibling paths):
654 // 4. move of `a.b.c`, use of `a.b.d`
656 let base_indices = self.move_data.existing_base_paths(loan_path);
// If neither the path nor any base is interned, nothing was moved.
657 if base_indices.is_empty() {
661 let opt_loan_path_index = self.move_data.existing_move_path(loan_path);
665 self.dfcx_moves.each_bit_on_entry(id, |index| {
666 let the_move = self.move_data.moves.borrow();
667 let the_move = &(*the_move)[index];
668 let moved_path = the_move.path;
669 if base_indices.iter().any(|x| x == &moved_path) {
670 // Scenario 1 or 2: `loan_path` or some base path of
671 // `loan_path` was moved.
672 if !f(the_move, &self.move_data.path_loan_path(moved_path)) {
676 if let Some(loan_path_index) = opt_loan_path_index {
677 let cont = self.move_data.each_base_path(moved_path, |p| {
678 if p == loan_path_index {
679 // Scenario 3: some extension of `loan_path`
682 &self.move_data.path_loan_path(moved_path))
687 if !cont { ret = false; }
694 /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
695 /// `loan_path` must be a single variable.
696 pub fn each_assignment_of<F>(&self,
697 id: hir::ItemLocalId,
698 loan_path: &Rc<LoanPath<'tcx>>,
701 F: FnMut(&Assignment) -> bool,
703 let loan_path_index = {
704 match self.move_data.existing_move_path(loan_path) {
707 // if there were any assignments, it'd have an index
// Scan the assignment bits live on entry to `id`, stopping early if
// `f` returns false.
713 self.dfcx_assign.each_bit_on_entry(id, |index| {
714 let assignment = self.move_data.var_assignments.borrow();
715 let assignment = &(*assignment)[index];
716 if assignment.path == loan_path_index && !f(assignment) {
725 impl BitwiseOperator for MoveDataFlowOperator {
// Union join: a move is live if it may have happened on any predecessor.
727 fn join(&self, succ: usize, pred: usize) -> usize {
728 succ | pred // moves from both preds are in scope
732 impl DataFlowOperator for MoveDataFlowOperator {
734 fn initial_value(&self) -> bool {
735 false // no moves in scope by default
739 impl BitwiseOperator for AssignDataFlowOperator {
741 fn join(&self, succ: usize, pred: usize) -> usize {
742 succ | pred // assignments from both preds are in scope
746 impl DataFlowOperator for AssignDataFlowOperator {
748 fn initial_value(&self) -> bool {
749 false // no assignments in scope by default