1 //! Data structures used for tracking moves. Please see the extensive
2 //! comments in the section "Moves and initialization" in `README.md`.
4 use crate::dataflow::{DataFlowContext, BitwiseOperator, DataFlowOperator, KillFrom};
6 use crate::borrowck::*;
8 use rustc::ty::{self, TyCtxt};
9 use rustc::util::nodemap::FxHashMap;
11 use std::cell::RefCell;
/// Tracks every move, variable assignment, and path assignment in a
/// function body. Fields use `RefCell` because the tables are grown
/// incrementally while the structure is shared immutably.
pub struct MoveData<'tcx> {
    /// Move paths. See section "Move paths" in `README.md`.
    pub paths: RefCell<Vec<MovePath<'tcx>>>,

    /// Cache of loan path to move path index, for easy lookup.
    pub path_map: RefCell<FxHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,

    /// Each move or uninitialized variable gets an entry here.
    pub moves: RefCell<Vec<Move>>,

    /// Assignments to a variable, like `x = foo`. These are assigned
    /// bits for dataflow, since we must track them to ensure that
    /// immutable variables are assigned at most once along each path.
    pub var_assignments: RefCell<Vec<Assignment>>,

    /// Assignments to a path, like `x.f = foo`. These are not
    /// assigned dataflow bits, but we track them because they still
    /// kill move bits (see `add_gen_kills`).
    pub path_assignments: RefCell<Vec<Assignment>>,
/// A `MoveData` bundled with the two dataflow results computed from it.
pub struct FlowedMoveData<'tcx> {
    pub move_data: MoveData<'tcx>,

    /// Dataflow over the move bits (one bit per entry in `moves`).
    pub dfcx_moves: MoveDataFlow<'tcx>,

    // We could (and maybe should, for efficiency) combine both move
    // and assign data flow into one, but this way it's easier to
    // distinguish the bits that correspond to moves and assignments.
    pub dfcx_assign: AssignDataFlow<'tcx>,
/// Index into `MoveData.paths`, used like a pointer
#[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct MovePathIndex(usize);

// NOTE(review): the enclosing `impl MovePathIndex {` header and closing
// braces are elided in this excerpt.
    /// Extracts the raw `usize` from the newtype.
    fn get(&self) -> usize {
        let MovePathIndex(v) = *self; v

// `Clone` is implemented manually rather than derived (the derive list
// above has `Copy` but not `Clone`); this still satisfies `Copy: Clone`.
impl Clone for MovePathIndex {
    fn clone(&self) -> MovePathIndex {
        MovePathIndex(self.get())

/// Sentinel meaning "no move path" — the null value for the intrusive
/// parent/child/sibling lists in `MovePath`. The `allow` silences the
/// naming lint for this historically lower-camel-cased const.
#[allow(non_upper_case_globals)]
const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX);

/// Index into `MoveData.moves`, used like a pointer
#[derive(Copy, Clone, PartialEq)]
pub struct MoveIndex(usize);

// NOTE(review): the enclosing `impl MoveIndex {` header is elided here.
    /// Extracts the raw `usize` from the newtype.
    fn get(&self) -> usize {
        let MoveIndex(v) = *self; v

/// Sentinel meaning "no move" — terminates the per-path move lists.
#[allow(non_upper_case_globals)]
const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX);
/// A node in the move-path forest. Parent/child/sibling links are stored
/// as indices into `MoveData.paths`, with the `Invalid*Index` sentinels
/// acting as null pointers in the intrusive linked lists.
pub struct MovePath<'tcx> {
    /// Loan path corresponding to this move path
    pub loan_path: Rc<LoanPath<'tcx>>,

    /// Parent pointer, `InvalidMovePathIndex` if root
    pub parent: MovePathIndex,

    /// Head of linked list of moves to this path,
    /// `InvalidMoveIndex` if not moved
    pub first_move: MoveIndex,

    /// First node in linked list of children, `InvalidMovePathIndex` if leaf
    pub first_child: MovePathIndex,

    /// Next node in linked list of parent's children (siblings),
    /// `InvalidMovePathIndex` if none.
    pub next_sibling: MovePathIndex,
#[derive(Copy, Clone)]
// NOTE(review): the `pub struct Move {` header line is elided in this
// excerpt; the fields below belong to the `Move` record.
    /// Path being moved.
    pub path: MovePathIndex,

    /// ID of node that is doing the move.
    pub id: hir::ItemLocalId,

    /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
    pub next_move: MoveIndex

/// One recorded assignment (variable or path); see `MoveData::add_assignment`.
#[derive(Copy, Clone)]
pub struct Assignment {
    /// Path being assigned.
    pub path: MovePathIndex,

    /// ID where assignment occurs
    pub id: hir::ItemLocalId,

    /// span of node where assignment occurs
    // NOTE(review): the span field declaration itself is elided here.
/// Dataflow operator for move bits; joins predecessor sets with union
/// (see the `BitwiseOperator` impl at the bottom of this file).
#[derive(Clone, Copy)]
pub struct MoveDataFlowOperator;

pub type MoveDataFlow<'tcx> = DataFlowContext<'tcx, MoveDataFlowOperator>;

/// Dataflow operator for variable-assignment bits; also a union join.
#[derive(Clone, Copy)]
pub struct AssignDataFlowOperator;

pub type AssignDataFlow<'tcx> = DataFlowContext<'tcx, AssignDataFlowOperator>;
/// Returns whether `loan_path` denotes a single, statically-known memory
/// location. Kills are only sound for precise paths (see `kill_moves`).
fn loan_path_is_precise(loan_path: &LoanPath<'_>) -> bool {
    match loan_path.kind {
        LpVar(_) | LpUpvar(_) => {
            // (arm body elided in this excerpt)
        LpExtend(.., LpInterior(_, InteriorKind::InteriorElement)) => {
            // Paths involving element accesses a[i] do not refer to a unique
            // location, as there is no accurate tracking of the indices.
            //
            // (Paths involving element accesses via slice pattern bindings
            // can in principle be tracked precisely, but that is future
            // work. For now, continue claiming that they are imprecise.)
        LpDowncast(ref lp_base, _) |
        LpExtend(ref lp_base, ..) => {
            // Precision is inherited from the base path.
            loan_path_is_precise(&lp_base)
impl MoveData<'tcx> {
    /// Returns `true` if there are no trackable assignments or moves
    /// in this move data -- that means that there is nothing that
    /// could cause a borrow error.
    pub fn is_empty(&self) -> bool {
        self.moves.borrow().is_empty() &&
        self.path_assignments.borrow().is_empty() &&
        self.var_assignments.borrow().is_empty()

    /// Type-safe accessor: clones the `Rc` loan path stored at `index`.
    pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
        (*self.paths.borrow())[index.get()].loan_path.clone()

    /// Parent of `index`, or `InvalidMovePathIndex` for a root path.
    fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].parent

    /// Head of `index`'s move list, or `InvalidMoveIndex` if never moved.
    fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
        (*self.paths.borrow())[index.get()].first_move

    /// Returns the index of first child, or `InvalidMovePathIndex` if
    /// `index` is a leaf.
    fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].first_child

    /// Next sibling in the parent's child list, or `InvalidMovePathIndex`.
    fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].next_sibling

    /// Overwrites the head of `index`'s move list.
    fn set_path_first_move(&self,
                           index: MovePathIndex,
                           first_move: MoveIndex) {
        (*self.paths.borrow_mut())[index.get()].first_move = first_move

    /// Overwrites the head of `index`'s child list.
    fn set_path_first_child(&self,
                            index: MovePathIndex,
                            first_child: MovePathIndex) {
        (*self.paths.borrow_mut())[index.get()].first_child = first_child

    fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
        //! Type safe indexing operator
        (*self.moves.borrow())[index.get()].next_move

    fn is_var_path(&self, index: MovePathIndex) -> bool {
        //! True if `index` refers to a variable
        // Root paths (no parent) are exactly the plain-variable paths.
        self.path_parent(index) == InvalidMovePathIndex
    /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
    /// `lp` and any of its base paths that do not yet have an index.
    pub fn move_path(&self, tcx: TyCtxt<'tcx>, lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
        // Fast path: `lp` is already interned in `path_map`.
        if let Some(&index) = self.path_map.borrow().get(&lp) {
            // (early return elided in this excerpt)

        let index = match lp.kind {
            LpVar(..) | LpUpvar(..) => {
                // Variables and upvars are roots: no parent, empty lists.
                let index = MovePathIndex(self.paths.borrow().len());

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: InvalidMovePathIndex,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling: InvalidMovePathIndex,

            LpDowncast(ref base, _) |
            LpExtend(ref base, ..) => {
                // Recursively intern the base path first, then splice the
                // new node in as the head of the parent's child list.
                let parent_index = self.move_path(tcx, base.clone());

                let index = MovePathIndex(self.paths.borrow().len());

                let next_sibling = self.path_first_child(parent_index);
                self.set_path_first_child(parent_index, index);

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: parent_index,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,

        debug!("move_path(lp={:?}, index={:?})",

        // The new entry must be the last one pushed; memoize it for reuse.
        assert_eq!(index.get(), self.paths.borrow().len() - 1);
        self.path_map.borrow_mut().insert(lp, index);
    /// Looks up the move path for `lp` without creating one.
    fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
                          -> Option<MovePathIndex> {
        self.path_map.borrow().get(lp).cloned()

    /// Collects the indices of `lp` and all of its already-interned base
    /// paths (no new paths are created).
    fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
                           -> Vec<MovePathIndex> {
        let mut result = vec![];
        self.add_existing_base_paths(lp, &mut result);

    /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
    /// does not add new move paths
    fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
                               result: &mut Vec<MovePathIndex>) {
        match self.path_map.borrow().get(lp).cloned() {
            // If `lp` is interned, walk up via the index links...
                self.each_base_path(index, |p| {
            // ...otherwise recurse structurally on the loan path itself.
            LpVar(..) | LpUpvar(..) => { }
            LpDowncast(ref b, _) |
            LpExtend(ref b, ..) => {
                self.add_existing_base_paths(b, result);
    /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
    // NOTE(review): the `pub fn add_move(` header and leading parameters
    // are elided in this excerpt.
        orig_lp: Rc<LoanPath<'tcx>>,
        id: hir::ItemLocalId,
        // Moving one union field automatically moves all its fields. Also move siblings of
        // all parent union fields, moves do not propagate upwards automatically.
        let mut lp = orig_lp.clone();
        while let LpExtend(ref base_lp, mutbl, lp_elem) = lp.clone().kind {
            if let (&ty::Adt(adt_def, _), LpInterior(opt_variant_id, interior))
                    = (&base_lp.ty.sty, lp_elem) {
                if adt_def.is_union() {
                    for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                        // NOTE(review): the `let field =` binding line is
                        // elided here.
                        InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
                        if field != interior {
                            let sibling_lp_kind =
                                LpExtend(base_lp.clone(), mutbl, LpInterior(opt_variant_id, field));
                            // `tcx.types.err`: the exact type is irrelevant
                            // for move tracking of the sibling field.
                            let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, tcx.types.err));
                            self.add_move_helper(tcx, sibling_lp, id);
            // Walk upward so enclosing union fields are handled too.
            lp = base_lp.clone();

        self.add_move_helper(tcx, orig_lp, id);

    /// Records a single move of `lp` at `id` in the dataflow tables.
    // NOTE(review): the `fn add_move_helper(` header is elided here.
        lp: Rc<LoanPath<'tcx>>,
        id: hir::ItemLocalId,
        debug!("add_move(lp={:?}, id={:?})", lp, id);

        let path_index = self.move_path(tcx, lp);
        let move_index = MoveIndex(self.moves.borrow().len());

        // Push the new move onto the front of the path's move list.
        let next_move = self.path_first_move(path_index);
        self.set_path_first_move(path_index, move_index);

        self.moves.borrow_mut().push(Move {
    /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
    pub fn add_assignment(
        // NOTE(review): the `&self`/`tcx` parameter lines are elided here.
        lp: Rc<LoanPath<'tcx>>,
        assign_id: hir::ItemLocalId,
        // Assigning to one union field automatically assigns to all its fields.
        if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind {
            if let ty::Adt(adt_def, _) = base_lp.ty.sty {
                if adt_def.is_union() {
                    for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                        // NOTE(review): the `let field =` binding line is
                        // elided here.
                        InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
                        let field_ty = if field == interior {
                            // (the `field == interior` branch is elided)
                            tcx.types.err // Doesn't matter
                        let sibling_lp_kind = LpExtend(base_lp.clone(), mutbl,
                                                       LpInterior(opt_variant_id, field));
                        let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty));
                        self.add_assignment_helper(tcx, sibling_lp, assign_id,

        self.add_assignment_helper(tcx, lp, assign_id, span);
388 fn add_assignment_helper(
391 lp: Rc<LoanPath<'tcx>>,
392 assign_id: hir::ItemLocalId,
395 debug!("add_assignment(lp={:?}, assign_id={:?}", lp, assign_id);
397 let path_index = self.move_path(tcx, lp.clone());
399 let assignment = Assignment {
405 if self.is_var_path(path_index) {
406 debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
407 lp, self.var_assignments.borrow().len(), path_index);
409 self.var_assignments.borrow_mut().push(assignment);
411 debug!("add_assignment[path](lp={:?}, path_index={:?})",
414 self.path_assignments.borrow_mut().push(assignment);
    /// Adds the gen/kills for the various moves and
    /// assignments into the provided data flow contexts.
    /// Moves are generated by moves and killed by assignments and
    /// scoping. Assignments are generated by assignment to variables and
    /// killed by scoping. See `README.md` for more details.
    // NOTE(review): the fn header line is elided in this excerpt.
        bccx: &BorrowckCtxt<'_, 'tcx>,
        dfcx_moves: &mut MoveDataFlow<'_>,
        dfcx_assign: &mut AssignDataFlow<'_>,
        // Every recorded move generates its own move bit.
        for (i, the_move) in self.moves.borrow().iter().enumerate() {
            dfcx_moves.add_gen(the_move.id, i);

        // Variable assignments generate assign bits and kill prior moves
        // of the assigned path.
        for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
            dfcx_assign.add_gen(assignment.id, i);
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);

        // Path assignments carry no dataflow bits but still kill moves.
        for assignment in self.path_assignments.borrow().iter() {
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);

        // Kill all moves related to a variable `x` when
        // it goes out of scope:
        for path in self.paths.borrow().iter() {
            match path.loan_path.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = path.loan_path.kill_scope(bccx);
                    // Shadow `path` with its interned index for kill_moves.
                    let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
                    self.kill_moves(path, kill_scope.item_local_id(),
                                    KillFrom::ScopeEnd, dfcx_moves);

        // Kill all assignments when the variable goes out of scope:
        for (assignment_index, assignment) in
                self.var_assignments.borrow().iter().enumerate() {
            let lp = self.path_loan_path(assignment.path);
            // NOTE(review): the `match lp.kind {` line is elided here.
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = lp.kill_scope(bccx);
                    dfcx_assign.add_kill(KillFrom::ScopeEnd,
                                         kill_scope.item_local_id(),
                    // Var-assignment entries must be rooted at a variable.
                    bug!("var assignment for non var path");
    /// Invokes `f` on `index` and each ancestor in turn, stopping early if
    /// `f` returns `false`; the return value reports whether the walk
    /// ran to completion.
    fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        // NOTE(review): initialization of `p` and the call to `f` are
        // elided in this excerpt.
        while p != InvalidMovePathIndex {
            p = self.path_parent(p);

    // FIXME(#19596) This is a workaround, but there should be better way to do this
    fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        // Recursive walk over `index` and every path extending it,
        // following the first-child / next-sibling links.
        let mut p = self.path_first_child(index);
        while p != InvalidMovePathIndex {
            if !self.each_extending_path_(p, f) {
                // (early return elided)
            p = self.path_next_sibling(p);

    /// By-value wrapper over `each_extending_path_` (see FIXME above).
    fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        self.each_extending_path_(index, &mut f)

    /// Invokes `f` on every move of `index0` or of any path extending it.
    fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MoveIndex) -> bool,
        self.each_extending_path(index0, |index| {
            // Walk this path's intrusive move list.
            let mut p = self.path_first_move(index);
            while p != InvalidMoveIndex {
                p = self.move_next_move(p);

    /// Registers kills for all moves applicable to `path`, but only when
    /// the path is precise (see below).
    // NOTE(review): the `fn kill_moves(` header and leading parameters
    // are elided in this excerpt.
        kill_id: hir::ItemLocalId,
        dfcx_moves: &mut MoveDataFlow<'_>,
        // We can only perform kills for paths that refer to a unique location,
        // since otherwise we may kill a move from one location with an
        // assignment referring to another location.
        let loan_path = self.path_loan_path(path);
        if loan_path_is_precise(&loan_path) {
            self.each_applicable_move(path, |move_index| {
                debug!("kill_moves add_kill {:?} kill_id={:?} move_index={}",
                       kill_kind, kill_id, move_index.get());
                dfcx_moves.add_kill(kill_kind, kill_id, move_index.get());
impl<'tcx> FlowedMoveData<'tcx> {
    /// Builds the two dataflow contexts from `move_data`, seeds them with
    /// gen/kill bits, and propagates to a fixed point.
    // NOTE(review): the `pub fn new(` header and some parameters are
    // elided in this excerpt.
        move_data: MoveData<'tcx>,
        bccx: &BorrowckCtxt<'_, 'tcx>,
    ) -> FlowedMoveData<'tcx> {
        // One dataflow bit per recorded move...
            DataFlowContext::new(tcx,
                                 "flowed_move_data_moves",
                                 MoveDataFlowOperator,
                                 move_data.moves.borrow().len());
        // ...and one per variable assignment.
        let mut dfcx_assign =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_assigns",
                                 AssignDataFlowOperator,
                                 move_data.var_assignments.borrow().len());

        move_data.add_gen_kills(bccx,

        dfcx_moves.add_kills_from_flow_exits(cfg);
        dfcx_assign.add_kills_from_flow_exits(cfg);

        dfcx_moves.propagate(cfg, body);
        dfcx_assign.propagate(cfg, body);
    pub fn is_move_path(&self, id: hir::ItemLocalId, loan_path: &Rc<LoanPath<'tcx>>) -> bool {
        //! Returns `true` if a move of `loan_path` is generated at `id`.
        //! (The old comment claimed this returned the *kind* of a move;
        //! the signature returns a plain `bool`.)
        if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
            self.dfcx_moves.each_gen_bit(id, |move_index| {
                let the_move = self.move_data.moves.borrow();
                let the_move = (*the_move)[move_index];
                if the_move.path == *loan_path_index {
    /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
    /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
    /// that would invalidate a reference to `loan_path` at location `id`.
    pub fn each_move_of<F>(&self,
                           id: hir::ItemLocalId,
                           loan_path: &Rc<LoanPath<'tcx>>,
        F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
        // Conflicting scenarios handled below:
        // 1. Move of `a.b.c`, use of `a.b.c`
        // 2. Move of `a.b.c`, use of `a.b.c.d`
        // 3. Move of `a.b.c`, use of `a` or `a.b`
        // Non-conflicting (must NOT be reported):
        // 4. move of `a.b.c`, use of `a.b.d`

        let base_indices = self.move_data.existing_base_paths(loan_path);
        if base_indices.is_empty() {
            // `loan_path` is not tracked at all, so no moves can apply.

        let opt_loan_path_index = self.move_data.existing_move_path(loan_path);

        self.dfcx_moves.each_bit_on_entry(id, |index| {
            let the_move = self.move_data.moves.borrow();
            let the_move = &(*the_move)[index];
            let moved_path = the_move.path;
            if base_indices.iter().any(|x| x == &moved_path) {
                // Scenario 1 or 2: `loan_path` or some base path of
                // `loan_path` was moved.
                if !f(the_move, &self.move_data.path_loan_path(moved_path)) {
                    // (early-exit handling elided in this excerpt)
            if let Some(loan_path_index) = opt_loan_path_index {
                let cont = self.move_data.each_base_path(moved_path, |p| {
                    if p == loan_path_index {
                        // Scenario 3: some extension of `loan_path`
                        // was moved; report the moved path itself.
                            &self.move_data.path_loan_path(moved_path))
                if !cont { ret = false; }
    /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
    /// `loan_path` must be a single variable.
    pub fn each_assignment_of<F>(&self,
                                 id: hir::ItemLocalId,
                                 loan_path: &Rc<LoanPath<'tcx>>,
        F: FnMut(&Assignment) -> bool,
        let loan_path_index = {
            match self.move_data.existing_move_path(loan_path) {
                // No move path registered:
                // if there were any assignments, it'd have an index

        // Report every live assignment bit that targets `loan_path`.
        self.dfcx_assign.each_bit_on_entry(id, |index| {
            let assignment = self.move_data.var_assignments.borrow();
            let assignment = &(*assignment)[index];
            if assignment.path == loan_path_index && !f(assignment) {
impl BitwiseOperator for MoveDataFlowOperator {
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // moves from both preds are in scope

impl DataFlowOperator for MoveDataFlowOperator {
    fn initial_value(&self) -> bool {
        false // no moves in scope by default

impl BitwiseOperator for AssignDataFlowOperator {
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // assignments from both preds are in scope

impl DataFlowOperator for AssignDataFlowOperator {
    fn initial_value(&self) -> bool {
        false // no assignments in scope by default