1 //! Data structures used for tracking moves. Please see the extensive
2 //! comments in the section "Moves and initialization" in `README.md`.
6 use crate::dataflow::{DataFlowContext, BitwiseOperator, DataFlowOperator, KillFrom};
8 use crate::borrowck::*;
10 use rustc::ty::{self, TyCtxt};
11 use rustc::util::nodemap::FxHashMap;
13 use std::cell::RefCell;
/// Collects all moves, assignments, and move paths recorded while walking
/// a function body, before dataflow is run over them.
pub struct MoveData<'tcx> {
    /// Move paths. See section "Move paths" in `README.md`.
    pub paths: RefCell<Vec<MovePath<'tcx>>>,

    /// Cache of loan path to move path index, for easy lookup.
    pub path_map: RefCell<FxHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,

    /// Each move or uninitialized variable gets an entry here.
    pub moves: RefCell<Vec<Move>>,

    /// Assignments to a variable, like `x = foo`. These are assigned
    /// bits for dataflow, since we must track them to ensure that
    /// immutable variables are assigned at most once along each path.
    pub var_assignments: RefCell<Vec<Assignment>>,

    /// Assignments to a path, like `x.f = foo`. These are not
    /// assigned dataflow bits, but we track them because they still
    /// kill move bits.
    pub path_assignments: RefCell<Vec<Assignment>>,
/// A `MoveData` bundled with the propagated results of move and
/// assignment dataflow, ready to answer queries at program points.
pub struct FlowedMoveData<'a, 'tcx: 'a> {
    pub move_data: MoveData<'tcx>,

    /// Dataflow over the move bits (one bit per entry in `moves`).
    pub dfcx_moves: MoveDataFlow<'a, 'tcx>,

    // We could (and maybe should, for efficiency) combine both move
    // and assign data flow into one, but this way it's easier to
    // distinguish the bits that correspond to moves and assignments.
    pub dfcx_assign: AssignDataFlow<'a, 'tcx>
/// Index into `MoveData.paths`, used like a pointer
// NOTE: `Clone` is intentionally absent from this derive list; a manual
// `impl Clone` exists further down in this file.
#[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct MovePathIndex(usize);
    /// Extracts the raw `usize` index (type-safe indexing helper).
    fn get(&self) -> usize {
        let MovePathIndex(v) = *self; v
// Written by hand rather than derived — the derive list on
// `MovePathIndex` deliberately omits `Clone`.
impl Clone for MovePathIndex {
    fn clone(&self) -> MovePathIndex {
        MovePathIndex(self.get())
/// Sentinel meaning "no move path": `usize::MAX` can never be a real
/// index into `MoveData.paths`.
#[allow(non_upper_case_globals)]
const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX);
/// Index into `MoveData.moves`, used like a pointer
// `Eq` and `Debug` are derived for parity with `MovePathIndex` above
// (deriving `PartialEq` without `Eq` was an inconsistency).
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct MoveIndex(usize);
    /// Extracts the raw `usize` index (type-safe indexing helper).
    fn get(&self) -> usize {
        let MoveIndex(v) = *self; v
/// Sentinel meaning "no move": `usize::MAX` can never be a real index
/// into `MoveData.moves`.
#[allow(non_upper_case_globals)]
const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX);
/// One node in the forest of move paths. The parent/child/sibling fields
/// form intrusive singly-linked lists threaded through `MoveData.paths`,
/// and `first_move` heads a list threaded through `MoveData.moves`.
pub struct MovePath<'tcx> {
    /// Loan path corresponding to this move path
    pub loan_path: Rc<LoanPath<'tcx>>,

    /// Parent pointer, `InvalidMovePathIndex` if root
    pub parent: MovePathIndex,

    /// Head of linked list of moves to this path,
    /// `InvalidMoveIndex` if not moved
    pub first_move: MoveIndex,

    /// First node in linked list of children, `InvalidMovePathIndex` if leaf
    pub first_child: MovePathIndex,

    /// Next node in linked list of parent's children (siblings),
    /// `InvalidMovePathIndex` if none.
    pub next_sibling: MovePathIndex,
// Variants of `MoveKind`: the different ways a value can come to be
// moved (used when building error messages).
#[derive(Copy, Clone, PartialEq, Debug)]
    Declared, // When declared, variables start out "moved".
    MoveExpr, // Expression or binding that moves a variable
    MovePat, // By-move binding
    Captured // Closure creation that moves a value
// Record of one move (one entry per element of `MoveData.moves`).
#[derive(Copy, Clone)]
    /// Path being moved.
    pub path: MovePathIndex,

    /// id of node that is doing the move.
    pub id: hir::ItemLocalId,

    /// Kind of move, for error messages.

    /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
    pub next_move: MoveIndex
/// Record of a single assignment, either to a variable
/// (`var_assignments`) or to a path (`path_assignments`).
#[derive(Copy, Clone)]
pub struct Assignment {
    /// Path being assigned.
    pub path: MovePathIndex,

    /// id where assignment occurs
    pub id: hir::ItemLocalId,

    /// span of node where assignment occurs
/// Dataflow operator for move bits: a move is in scope at a point if it
/// may have occurred on any path reaching that point (union join).
#[derive(Clone, Copy)]
pub struct MoveDataFlowOperator;

/// Dataflow context tracking which moves may have reached each point.
pub type MoveDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, MoveDataFlowOperator>;
/// Dataflow operator for assignment bits: an assignment is in scope at a
/// point if it may have occurred on any path reaching that point.
#[derive(Clone, Copy)]
pub struct AssignDataFlowOperator;

/// Dataflow context tracking which variable assignments may have reached
/// each point.
pub type AssignDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, AssignDataFlowOperator>;
/// Returns true if `loan_path` always refers to the same, statically
/// identifiable memory location — i.e. killing its move bits on
/// assignment is sound (see `kill_moves`).
fn loan_path_is_precise(loan_path: &LoanPath<'_>) -> bool {
    match loan_path.kind {
        LpVar(_) | LpUpvar(_) => {
        LpExtend(.., LpInterior(_, InteriorKind::InteriorElement)) => {
            // Paths involving element accesses a[i] do not refer to a unique
            // location, as there is no accurate tracking of the indices.
            // (Paths involving element accesses via slice pattern bindings
            // can in principle be tracked precisely, but that is future
            // work. For now, continue claiming that they are imprecise.)
        LpDowncast(ref lp_base, _) |
        LpExtend(ref lp_base, ..) => {
            // Precision is inherited from the base path.
            loan_path_is_precise(&lp_base)
170 impl<'a, 'tcx> MoveData<'tcx> {
    /// Returns true if there are no trackable assignments or moves
    /// in this move data - that means that there is nothing that
    /// could cause a borrow error.
    pub fn is_empty(&self) -> bool {
        self.moves.borrow().is_empty() &&
        self.path_assignments.borrow().is_empty() &&
        self.var_assignments.borrow().is_empty()
    /// Returns (a clone of) the loan path stored at `index`.
    pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
        (*self.paths.borrow())[index.get()].loan_path.clone()

    /// Returns the parent of `index`, or `InvalidMovePathIndex` if root.
    fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].parent

    /// Returns the head of the linked list of moves from `index`.
    fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
        (*self.paths.borrow())[index.get()].first_move

    /// Returns the index of first child, or `InvalidMovePathIndex` if
    fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].first_child

    /// Returns the next sibling of `index`, or `InvalidMovePathIndex`.
    fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].next_sibling
    /// Overwrites the head of the move list for path `index`.
    fn set_path_first_move(&self,
                           index: MovePathIndex,
                           first_move: MoveIndex) {
        (*self.paths.borrow_mut())[index.get()].first_move = first_move

    /// Overwrites the head of the child list for path `index`.
    fn set_path_first_child(&self,
                            index: MovePathIndex,
                            first_child: MovePathIndex) {
        (*self.paths.borrow_mut())[index.get()].first_child = first_child
    fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
        //! Type safe indexing operator
        (*self.moves.borrow())[index.get()].next_move

    fn is_var_path(&self, index: MovePathIndex) -> bool {
        //! True if `index` refers to a variable
        // Roots of the path forest are exactly the variables/upvars.
        self.path_parent(index) == InvalidMovePathIndex
    /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
    /// `lp` and any of its base paths that do not yet have an index.
    pub fn move_path(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                     lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
        // Fast path: already interned.
        if let Some(&index) = self.path_map.borrow().get(&lp) {

        let index = match lp.kind {
            LpVar(..) | LpUpvar(..) => {
                // Roots (variables/upvars) have no parent.
                let index = MovePathIndex(self.paths.borrow().len());

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: InvalidMovePathIndex,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling: InvalidMovePathIndex,

            LpDowncast(ref base, _) |
            LpExtend(ref base, ..) => {
                // Intern the base path first (recursively), then link the
                // new node onto the front of the parent's child list.
                let parent_index = self.move_path(tcx, base.clone());

                let index = MovePathIndex(self.paths.borrow().len());

                let next_sibling = self.path_first_child(parent_index);
                self.set_path_first_child(parent_index, index);

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: parent_index,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,

        debug!("move_path(lp={:?}, index={:?})",

        // The freshly created entry must be the last one pushed.
        assert_eq!(index.get(), self.paths.borrow().len() - 1);
        self.path_map.borrow_mut().insert(lp, index);
    /// Returns the move path index of `lp`, if one has been added.
    fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
                          -> Option<MovePathIndex> {
        self.path_map.borrow().get(lp).cloned()

    /// Returns the indices of `lp` and all of its existing base paths,
    /// without adding any new paths.
    fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
                           -> Vec<MovePathIndex> {
        let mut result = vec![];
        self.add_existing_base_paths(lp, &mut result);
    /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
    /// does not add new move paths
    fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
                               result: &mut Vec<MovePathIndex>) {
        match self.path_map.borrow().get(lp).cloned() {
                // `lp` is interned: walk its base chain via parent links.
                self.each_base_path(index, |p| {
                LpVar(..) | LpUpvar(..) => { }
                LpDowncast(ref b, _) |
                LpExtend(ref b, ..) => {
                    // `lp` is not interned; recurse on its base path.
                    self.add_existing_base_paths(b, result);
    /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
    pub fn add_move(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                    orig_lp: Rc<LoanPath<'tcx>>,
                    id: hir::ItemLocalId,
        // Moving one union field automatically moves all its fields. Also move siblings of
        // all parent union fields, moves do not propagate upwards automatically.
        let mut lp = orig_lp.clone();
        while let LpExtend(ref base_lp, mutbl, lp_elem) = lp.clone().kind {
            if let (&ty::Adt(adt_def, _), LpInterior(opt_variant_id, interior))
                    = (&base_lp.ty.sty, lp_elem) {
                if adt_def.is_union() {
                    for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                            InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
                        if field != interior {
                            let sibling_lp_kind =
                                LpExtend(base_lp.clone(), mutbl, LpInterior(opt_variant_id, field));
                            // The sibling's type is irrelevant for move
                            // tracking, so `err` is used as a placeholder.
                            let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, tcx.types.err));
                            self.add_move_helper(tcx, sibling_lp, id, kind);
            // Walk upward so unions enclosing `lp` are handled too.
            lp = base_lp.clone();

        self.add_move_helper(tcx, orig_lp, id, kind);
    /// Records a single move of `lp`: interns the path, pushes a `Move`
    /// record, and links it onto the front of the path's move list.
    fn add_move_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                       lp: Rc<LoanPath<'tcx>>,
                       id: hir::ItemLocalId,
        debug!("add_move(lp={:?}, id={:?}, kind={:?})",

        let path_index = self.move_path(tcx, lp);
        let move_index = MoveIndex(self.moves.borrow().len());

        // Push onto the front of the path's intrusive move list.
        let next_move = self.path_first_move(path_index);
        self.set_path_first_move(path_index, move_index);

        self.moves.borrow_mut().push(Move {
    /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
    pub fn add_assignment(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          lp: Rc<LoanPath<'tcx>>,
                          assign_id: hir::ItemLocalId,
        // Assigning to one union field automatically assigns to all its fields.
        if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind {
            if let ty::Adt(adt_def, _) = base_lp.ty.sty {
                if adt_def.is_union() {
                    for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                            InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
                        let field_ty = if field == interior {
                            tcx.types.err // Doesn't matter
                        let sibling_lp_kind = LpExtend(base_lp.clone(), mutbl,
                                                       LpInterior(opt_variant_id, field));
                        let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty));
                        self.add_assignment_helper(tcx, sibling_lp, assign_id,

        self.add_assignment_helper(tcx, lp, assign_id, span);
    /// Records a single assignment to `lp`: interns the path and pushes
    /// the record onto `var_assignments` (variables, which get dataflow
    /// bits) or `path_assignments` (everything else).
    fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             lp: Rc<LoanPath<'tcx>>,
                             assign_id: hir::ItemLocalId,
        // NOTE(review): this format string is missing its closing `)`.
        debug!("add_assignment(lp={:?}, assign_id={:?}", lp, assign_id);

        let path_index = self.move_path(tcx, lp.clone());

        let assignment = Assignment {

        if self.is_var_path(path_index) {
            debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
                   lp, self.var_assignments.borrow().len(), path_index);

            self.var_assignments.borrow_mut().push(assignment);
            debug!("add_assignment[path](lp={:?}, path_index={:?})",

            self.path_assignments.borrow_mut().push(assignment);
    /// Adds the gen/kills for the various moves and
    /// assignments into the provided data flow contexts.
    /// Moves are generated by moves and killed by assignments and
    /// scoping. Assignments are generated by assignment to variables and
    /// killed by scoping. See `README.md` for more details.
    fn add_gen_kills(&self,
                     bccx: &BorrowckCtxt<'a, 'tcx>,
                     dfcx_moves: &mut MoveDataFlow<'_, '_>,
                     dfcx_assign: &mut AssignDataFlow<'_, '_>) {
        for (i, the_move) in self.moves.borrow().iter().enumerate() {
            dfcx_moves.add_gen(the_move.id, i);

        for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
            dfcx_assign.add_gen(assignment.id, i);
            // An assignment overwrites the old value, killing prior moves.
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);

        for assignment in self.path_assignments.borrow().iter() {
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);

        // Kill all moves related to a variable `x` when
        // it goes out of scope:
        for path in self.paths.borrow().iter() {
            match path.loan_path.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = path.loan_path.kill_scope(bccx);
                    let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
                    self.kill_moves(path, kill_scope.item_local_id(),
                                    KillFrom::ScopeEnd, dfcx_moves);

        // Kill all assignments when the variable goes out of scope:
        for (assignment_index, assignment) in
                self.var_assignments.borrow().iter().enumerate() {
            let lp = self.path_loan_path(assignment.path);
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = lp.kill_scope(bccx);
                    dfcx_assign.add_kill(KillFrom::ScopeEnd,
                                         kill_scope.item_local_id(),
                    // var_assignments entries must index variable paths.
                    bug!("var assignment for non var path");
    /// Applies `f` to `index` and each of its base paths (walking the
    /// parent links); stops early if `f` returns false.
    fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        while p != InvalidMovePathIndex {
            p = self.path_parent(p);

    // FIXME(#19596) This is a workaround, but there should be better way to do this
    fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        // Depth-first walk over the child/sibling lists rooted at `index`.
        let mut p = self.path_first_child(index);
        while p != InvalidMovePathIndex {
            if !self.each_extending_path_(p, f) {
            p = self.path_next_sibling(p);

    /// Applies `f` to `index` and every path extending it.
    fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        self.each_extending_path_(index, &mut f)
    /// Applies `f` to every move that applies to `index0` — moves of
    /// `index0` itself or of any path extending it.
    fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MoveIndex) -> bool,
        self.each_extending_path(index0, |index| {
            // Walk the intrusive move list of each extending path.
            let mut p = self.path_first_move(index);
            while p != InvalidMoveIndex {
                p = self.move_next_move(p);
                  kill_id: hir::ItemLocalId,
                  dfcx_moves: &mut MoveDataFlow<'_, '_>) {
        // We can only perform kills for paths that refer to a unique location,
        // since otherwise we may kill a move from one location with an
        // assignment referring to another location.
        let loan_path = self.path_loan_path(path);
        if loan_path_is_precise(&loan_path) {
            // Kill every move of `path` or of any path extending it.
            self.each_applicable_move(path, |move_index| {
                debug!("kill_moves add_kill {:?} kill_id={:?} move_index={}",
                       kill_kind, kill_id, move_index.get());
                dfcx_moves.add_kill(kill_kind, kill_id, move_index.get());
558 impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> {
    /// Runs move and assignment dataflow over `move_data` and packages
    /// the propagated results.
    pub fn new(move_data: MoveData<'tcx>,
               bccx: &BorrowckCtxt<'a, 'tcx>,
               -> FlowedMoveData<'a, 'tcx> {
            DataFlowContext::new(tcx,
                                 "flowed_move_data_moves",
                                 MoveDataFlowOperator,
                                 move_data.moves.borrow().len());
        let mut dfcx_assign =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_assigns",
                                 AssignDataFlowOperator,
                                 move_data.var_assignments.borrow().len());

        // Seed the gen/kill sets from the recorded moves/assignments...
        move_data.add_gen_kills(bccx,

        dfcx_moves.add_kills_from_flow_exits(cfg);
        dfcx_assign.add_kills_from_flow_exits(cfg);

        // ...then propagate to a fixed point over the CFG.
        dfcx_moves.propagate(cfg, body);
        dfcx_assign.propagate(cfg, body);
    pub fn kind_of_move_of_path(&self,
                                id: hir::ItemLocalId,
                                loan_path: &Rc<LoanPath<'tcx>>)
                                -> Option<MoveKind> {
        //! Returns the kind of a move of `loan_path` by `id`, if one exists.

        if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
            // Gen bits at `id` are exactly the moves performed there.
            self.dfcx_moves.each_gen_bit(id, |move_index| {
                let the_move = self.move_data.moves.borrow();
                let the_move = (*the_move)[move_index];
                if the_move.path == *loan_path_index {
                    ret = Some(the_move.kind);
    /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
    /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
    /// that would invalidate a reference to `loan_path` at location `id`.
    pub fn each_move_of<F>(&self,
                           id: hir::ItemLocalId,
                           loan_path: &Rc<LoanPath<'tcx>>,
        F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
        // Relevant scenarios:
        // 1. Move of `a.b.c`, use of `a.b.c`
        // 2. Move of `a.b.c`, use of `a.b.c.d`
        // 3. Move of `a.b.c`, use of `a` or `a.b`
        // Not relevant:
        // 4. move of `a.b.c`, use of `a.b.d`

        let base_indices = self.move_data.existing_base_paths(loan_path);
        if base_indices.is_empty() {

        let opt_loan_path_index = self.move_data.existing_move_path(loan_path);

        self.dfcx_moves.each_bit_on_entry(id, |index| {
            let the_move = self.move_data.moves.borrow();
            let the_move = &(*the_move)[index];
            let moved_path = the_move.path;
            if base_indices.iter().any(|x| x == &moved_path) {
                // Scenario 1 or 2: `loan_path` or some base path of
                // `loan_path` was moved.
                if !f(the_move, &self.move_data.path_loan_path(moved_path)) {

            if let Some(loan_path_index) = opt_loan_path_index {
                let cont = self.move_data.each_base_path(moved_path, |p| {
                    if p == loan_path_index {
                        // Scenario 3: some extension of `loan_path`
                        &self.move_data.path_loan_path(moved_path))

                if !cont { ret = false; }
    /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
    /// `loan_path` must be a single variable.
    pub fn each_assignment_of<F>(&self,
                                 id: hir::ItemLocalId,
                                 loan_path: &Rc<LoanPath<'tcx>>,
        F: FnMut(&Assignment) -> bool,
        let loan_path_index = {
            match self.move_data.existing_move_path(loan_path) {
                // if there were any assignments, it'd have an index

        self.dfcx_assign.each_bit_on_entry(id, |index| {
            // Bits on entry are the assignments that may reach `id`.
            let assignment = self.move_data.var_assignments.borrow();
            let assignment = &(*assignment)[index];
            if assignment.path == loan_path_index && !f(assignment) {
impl BitwiseOperator for MoveDataFlowOperator {
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // moves from both preds are in scope

impl DataFlowOperator for MoveDataFlowOperator {
    fn initial_value(&self) -> bool {
        false // no moves in scope by default

impl BitwiseOperator for AssignDataFlowOperator {
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // assignments from both preds are in scope

impl DataFlowOperator for AssignDataFlowOperator {
    fn initial_value(&self) -> bool {
        false // no assignments in scope by default