1 //! Data structures used for tracking moves. Please see the extensive
2 //! comments in the section "Moves and initialization" in `README.md`.
4 pub use self::MoveKind::*;
6 use dataflow::{DataFlowContext, BitwiseOperator, DataFlowOperator, KillFrom};
10 use rustc::ty::{self, TyCtxt};
11 use rustc::util::nodemap::FxHashMap;
13 use std::cell::RefCell;
/// Tables of moves and assignments gathered for one body. All tables use
/// `RefCell` so they can be grown in place behind `&self` during gathering
/// (see `add_move` / `add_assignment`).
pub struct MoveData<'tcx> {
    /// Move paths. See section "Move paths" in `README.md`.
    pub paths: RefCell<Vec<MovePath<'tcx>>>,

    /// Cache of loan path to move path index, for easy lookup.
    pub path_map: RefCell<FxHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,

    /// Each move or uninitialized variable gets an entry here.
    pub moves: RefCell<Vec<Move>>,

    /// Assignments to a variable, like `x = foo`. These are assigned
    /// bits for dataflow, since we must track them to ensure that
    /// immutable variables are assigned at most once along each path.
    pub var_assignments: RefCell<Vec<Assignment>>,

    /// Assignments to a path, like `x.f = foo`. These are not
    /// assigned dataflow bits, but we track them because they still
    /// kill move bits (see `add_gen_kills`).
    pub path_assignments: RefCell<Vec<Assignment>>,
/// `MoveData` together with the results of running dataflow over it:
/// one flow context for move bits and one for assignment bits.
pub struct FlowedMoveData<'a, 'tcx: 'a> {
    pub move_data: MoveData<'tcx>,

    pub dfcx_moves: MoveDataFlow<'a, 'tcx>,

    // We could (and maybe should, for efficiency) combine both move
    // and assign data flow into one, but this way it's easier to
    // distinguish the bits that correspond to moves and assignments.
    pub dfcx_assign: AssignDataFlow<'a, 'tcx>
/// Index into `MoveData.paths`, used like a pointer
/// (`InvalidMovePathIndex` serves as the null value).
#[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct MovePathIndex(usize);
    /// Unwraps the raw `usize`, for indexing into `MoveData.paths`.
    fn get(&self) -> usize {
        let MovePathIndex(v) = *self; v
// Hand-written `Clone`: note that `Clone` is absent from the derive list
// on `MovePathIndex` above.
impl Clone for MovePathIndex {
    fn clone(&self) -> MovePathIndex {
        MovePathIndex(self.get())
/// Sentinel value: stands in for "no move path" in the parent /
/// first_child / next_sibling links of `MovePath`.
#[allow(non_upper_case_globals)]
const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX);
/// Index into `MoveData.moves`, used like a pointer
/// (`InvalidMoveIndex` serves as the null value).
#[derive(Copy, Clone, PartialEq)]
pub struct MoveIndex(usize);
    /// Unwraps the raw `usize`, for indexing into `MoveData.moves`.
    fn get(&self) -> usize {
        let MoveIndex(v) = *self; v
/// Sentinel value: terminates the linked list of moves threaded through
/// `MovePath::first_move` / `Move::next_move`.
#[allow(non_upper_case_globals)]
const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX);
/// A node in the move-path forest. Paths are linked into a tree via the
/// `parent` / `first_child` / `next_sibling` indices, and the moves out of
/// each path form an intrusive list headed at `first_move`.
pub struct MovePath<'tcx> {
    /// Loan path corresponding to this move path
    pub loan_path: Rc<LoanPath<'tcx>>,

    /// Parent pointer, `InvalidMovePathIndex` if root
    pub parent: MovePathIndex,

    /// Head of linked list of moves to this path,
    /// `InvalidMoveIndex` if not moved
    pub first_move: MoveIndex,

    /// First node in linked list of children, `InvalidMovePathIndex` if leaf
    pub first_child: MovePathIndex,

    /// Next node in linked list of parent's children (siblings),
    /// `InvalidMovePathIndex` if none.
    pub next_sibling: MovePathIndex,
// Variants of `MoveKind`: the reason a path is considered moved
// (used for error messages, per `Move::kind`).
#[derive(Copy, Clone, PartialEq, Debug)]
    Declared, // When declared, variables start out "moved".
    MoveExpr, // Expression or binding that moves a variable
    MovePat,  // By-move binding
    Captured  // Closure creation that moves a value
// A single recorded move; entries live in `MoveData.moves` and are chained
// per-path through `next_move`.
#[derive(Copy, Clone)]
    /// Path being moved.
    pub path: MovePathIndex,

    /// id of node that is doing the move.
    pub id: hir::ItemLocalId,

    /// Kind of move, for error messages.

    /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
    pub next_move: MoveIndex
// A single recorded assignment; entries live in `MoveData.var_assignments`
// or `MoveData.path_assignments` depending on the path kind.
#[derive(Copy, Clone)]
pub struct Assignment {
    /// Path being assigned.
    pub path: MovePathIndex,

    /// id where assignment occurs
    pub id: hir::ItemLocalId,

    /// span of node where assignment occurs
/// Dataflow operator for move bits: bitwise-or join, all-false start
/// (see the `BitwiseOperator` / `DataFlowOperator` impls below).
#[derive(Clone, Copy)]
pub struct MoveDataFlowOperator;

/// Dataflow context tracking which moves may have reached each point.
pub type MoveDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, MoveDataFlowOperator>;
/// Dataflow operator for assignment bits: bitwise-or join, all-false start
/// (see the `BitwiseOperator` / `DataFlowOperator` impls below).
#[derive(Clone, Copy)]
pub struct AssignDataFlowOperator;

/// Dataflow context tracking which variable assignments may have reached
/// each point (sized from `MoveData.var_assignments`).
pub type AssignDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, AssignDataFlowOperator>;
/// Whether `loan_path` names a single, statically-known location. Kills are
/// only issued for precise paths (see `kill_moves`): indexed element
/// accesses `a[i]` are imprecise, and other extended paths inherit the
/// precision of their base path.
fn loan_path_is_precise(loan_path: &LoanPath) -> bool {
    match loan_path.kind {
        LpVar(_) | LpUpvar(_) => {
        LpExtend(.., LpInterior(_, InteriorKind::InteriorElement)) => {
            // Paths involving element accesses a[i] do not refer to a unique
            // location, as there is no accurate tracking of the indices.
            //
            // (Paths involving element accesses via slice pattern bindings
            // can in principle be tracked precisely, but that is future
            // work. For now, continue claiming that they are imprecise.)
        LpDowncast(ref lp_base, _) |
        LpExtend(ref lp_base, ..) => {
            loan_path_is_precise(&lp_base)
impl<'a, 'tcx> MoveData<'tcx> {
    /// Returns true if there are no trackable assignments or moves
    /// in this move data - that means that there is nothing that
    /// could cause a borrow error.
    pub fn is_empty(&self) -> bool {
        self.moves.borrow().is_empty() &&
        self.path_assignments.borrow().is_empty() &&
        self.var_assignments.borrow().is_empty()
    /// Returns (a clone of) the loan path stored for `index`.
    pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
        (*self.paths.borrow())[index.get()].loan_path.clone()

    /// Parent of `index`, or `InvalidMovePathIndex` for a root path.
    fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].parent

    /// Head of `index`'s linked list of moves, or `InvalidMoveIndex`.
    fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
        (*self.paths.borrow())[index.get()].first_move

    /// Returns the index of first child, or `InvalidMovePathIndex` if none.
    fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].first_child

    /// Next sibling in the parent's child list, or `InvalidMovePathIndex`.
    fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].next_sibling
    /// Overwrites the head of `index`'s move list (used to prepend a move).
    fn set_path_first_move(&self,
                           index: MovePathIndex,
                           first_move: MoveIndex) {
        (*self.paths.borrow_mut())[index.get()].first_move = first_move

    /// Overwrites the head of `index`'s child list (used to prepend a child).
    fn set_path_first_child(&self,
                            index: MovePathIndex,
                            first_child: MovePathIndex) {
        (*self.paths.borrow_mut())[index.get()].first_child = first_child
    /// Next entry in the linked list of moves headed at some path's
    /// `first_move`.
    fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
        //! Type safe indexing operator
        (*self.moves.borrow())[index.get()].next_move

    /// True if `index` is a root path (has no parent), i.e. a variable
    /// or upvar rather than a field/element extension.
    fn is_var_path(&self, index: MovePathIndex) -> bool {
        //! True if `index` refers to a variable
        self.path_parent(index) == InvalidMovePathIndex
    /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
    /// `lp` and any of its base paths that do not yet have an index.
    pub fn move_path(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                     lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
        // Fast path: `lp` has already been interned.
        if let Some(&index) = self.path_map.borrow().get(&lp) {

        let index = match lp.kind {
            LpVar(..) | LpUpvar(..) => {
                // Root path: no parent, and empty move/child lists.
                let index = MovePathIndex(self.paths.borrow().len());

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: InvalidMovePathIndex,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling: InvalidMovePathIndex,

            LpDowncast(ref base, _) |
            LpExtend(ref base, ..) => {
                // Extension path: intern the base first, then splice this
                // path onto the front of the parent's child list.
                let parent_index = self.move_path(tcx, base.clone());

                let index = MovePathIndex(self.paths.borrow().len());

                let next_sibling = self.path_first_child(parent_index);
                self.set_path_first_child(parent_index, index);

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: parent_index,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,

        debug!("move_path(lp={:?}, index={:?})",

        // New paths are appended, so the fresh index must be the last slot.
        assert_eq!(index.get(), self.paths.borrow().len() - 1);
        self.path_map.borrow_mut().insert(lp, index);
    /// Looks up `lp` in the interning map without adding a new path.
    fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
                          -> Option<MovePathIndex> {
        self.path_map.borrow().get(lp).cloned()
    /// Collects the already-interned move path indices for `lp` and its
    /// base paths; see `add_existing_base_paths`.
    fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
                           -> Vec<MovePathIndex> {
        let mut result = vec![];
        self.add_existing_base_paths(lp, &mut result);
    /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
    /// does not add new move paths
    fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
                               result: &mut Vec<MovePathIndex>) {
        match self.path_map.borrow().get(lp).cloned() {
                // `lp` itself is interned: walk its parent chain.
                self.each_base_path(index, |p| {
                // Otherwise recurse on `lp`'s base path, if it has one.
                LpVar(..) | LpUpvar(..) => { }
                LpDowncast(ref b, _) |
                LpExtend(ref b, ..) => {
                    self.add_existing_base_paths(b, result);
    /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
    pub fn add_move(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                    orig_lp: Rc<LoanPath<'tcx>>,
                    id: hir::ItemLocalId,
        // Moving one union field automatically moves all its fields. Also move siblings of
        // all parent union fields, moves do not propagate upwards automatically.
        let mut lp = orig_lp.clone();
        while let LpExtend(ref base_lp, mutbl, lp_elem) = lp.clone().kind {
            if let (&ty::Adt(adt_def, _), LpInterior(opt_variant_id, interior))
                    = (&base_lp.ty.sty, lp_elem) {
                if adt_def.is_union() {
                    for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                        InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
                        if field != interior {
                            let sibling_lp_kind =
                                LpExtend(base_lp.clone(), mutbl, LpInterior(opt_variant_id, field));
                            // `err` type: the sibling path's type is not used here
                            // (mirrors the "Doesn't matter" case in `add_assignment`).
                            let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, tcx.types.err));
                            self.add_move_helper(tcx, sibling_lp, id, kind);
            // Walk upward so parent union fields are handled too.
            lp = base_lp.clone();
        self.add_move_helper(tcx, orig_lp, id, kind);
    /// Records one `Move` of `lp` and links it into `lp`'s move list.
    fn add_move_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                       lp: Rc<LoanPath<'tcx>>,
                       id: hir::ItemLocalId,
        debug!("add_move(lp={:?}, id={:?}, kind={:?})",

        let path_index = self.move_path(tcx, lp);
        let move_index = MoveIndex(self.moves.borrow().len());

        // Prepend the new move onto the path's linked list of moves.
        let next_move = self.path_first_move(path_index);
        self.set_path_first_move(path_index, move_index);

        self.moves.borrow_mut().push(Move {
    /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
    /// span.
    pub fn add_assignment(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          lp: Rc<LoanPath<'tcx>>,
                          assign_id: hir::ItemLocalId,
        // Assigning to one union field automatically assigns to all its fields.
        if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind {
            if let ty::Adt(adt_def, _) = base_lp.ty.sty {
                if adt_def.is_union() {
                    for (i, field) in adt_def.non_enum_variant().fields.iter().enumerate() {
                        InteriorKind::InteriorField(mc::FieldIndex(i, field.ident.name));
                        let field_ty = if field == interior {
                            tcx.types.err // Doesn't matter
                        let sibling_lp_kind = LpExtend(base_lp.clone(), mutbl,
                                                       LpInterior(opt_variant_id, field));
                        let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty));
                        self.add_assignment_helper(tcx, sibling_lp, assign_id,
        self.add_assignment_helper(tcx, lp, assign_id, span);
    /// Records one `Assignment`, routing it to `var_assignments` (which get
    /// dataflow bits) or `path_assignments` (kills only) depending on
    /// whether the path is a plain variable.
    fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             lp: Rc<LoanPath<'tcx>>,
                             assign_id: hir::ItemLocalId,
        debug!("add_assignment(lp={:?}, assign_id={:?}", lp, assign_id);

        let path_index = self.move_path(tcx, lp.clone());

        let assignment = Assignment {

        if self.is_var_path(path_index) {
            debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
                   lp, self.var_assignments.borrow().len(), path_index);

            self.var_assignments.borrow_mut().push(assignment);
            debug!("add_assignment[path](lp={:?}, path_index={:?})",

            self.path_assignments.borrow_mut().push(assignment);
    /// Adds the gen/kills for the various moves and
    /// assignments into the provided data flow contexts.
    /// Moves are generated by moves and killed by assignments and
    /// scoping. Assignments are generated by assignment to variables and
    /// killed by scoping. See `README.md` for more details.
    fn add_gen_kills(&self,
                     bccx: &BorrowckCtxt<'a, 'tcx>,
                     dfcx_moves: &mut MoveDataFlow,
                     dfcx_assign: &mut AssignDataFlow) {
        // Every move generates its own dataflow bit at its location.
        for (i, the_move) in self.moves.borrow().iter().enumerate() {
            dfcx_moves.add_gen(the_move.id, i);

        // Variable assignments generate assignment bits and kill move bits.
        for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
            dfcx_assign.add_gen(assignment.id, i);
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);

        // Path assignments have no gen bit of their own, but still kill moves.
        for assignment in self.path_assignments.borrow().iter() {
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);

        // Kill all moves related to a variable `x` when
        // it goes out of scope:
        for path in self.paths.borrow().iter() {
            match path.loan_path.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = path.loan_path.kill_scope(bccx);
                    let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
                    self.kill_moves(path, kill_scope.item_local_id(),
                                    KillFrom::ScopeEnd, dfcx_moves);

        // Kill all assignments when the variable goes out of scope:
        for (assignment_index, assignment) in
                self.var_assignments.borrow().iter().enumerate() {
            let lp = self.path_loan_path(assignment.path);
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = lp.kill_scope(bccx);
                    dfcx_assign.add_kill(KillFrom::ScopeEnd,
                                         kill_scope.item_local_id(),
                    bug!("var assignment for non var path");
    /// Walks from `index` up its chain of parent paths, invoking `f` on each;
    /// returns false if `f` requested an early stop.
    fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        while p != InvalidMovePathIndex {
            p = self.path_parent(p);
    // FIXME(#19596) This is a workaround, but there should be better way to do this
    /// Recursive worker for `each_extending_path`: visits the subtree of
    /// paths extending `index` via the first_child/next_sibling links,
    /// stopping early if `f` returns false.
    fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        let mut p = self.path_first_child(index);
        while p != InvalidMovePathIndex {
            if !self.each_extending_path_(p, f) {
            p = self.path_next_sibling(p);
    /// By-value wrapper around `each_extending_path_` (the `&mut f`
    /// indirection is the FIXME(#19596) workaround).
    fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
        self.each_extending_path_(index, &mut f)
    /// Invokes `f` on every move out of `index0` or out of any path that
    /// extends it, stopping early if `f` returns false.
    fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MoveIndex) -> bool,
        self.each_extending_path(index0, |index| {
            // Walk this path's linked list of moves.
            let mut p = self.path_first_move(index);
            while p != InvalidMoveIndex {
                p = self.move_next_move(p);
                  kill_id: hir::ItemLocalId,
                  dfcx_moves: &mut MoveDataFlow) {
        // We can only perform kills for paths that refer to a unique location,
        // since otherwise we may kill a move from one location with an
        // assignment referring to another location.
        let loan_path = self.path_loan_path(path);
        if loan_path_is_precise(&loan_path) {
            // Kill every move of this path and of any path extending it.
            self.each_applicable_move(path, |move_index| {
                debug!("kill_moves add_kill {:?} kill_id={:?} move_index={}",
                       kill_kind, kill_id, move_index.get());
                dfcx_moves.add_kill(kill_kind, kill_id, move_index.get());
impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> {
    /// Runs the move and assignment dataflow analyses over `move_data`
    /// and packages the propagated results.
    pub fn new(move_data: MoveData<'tcx>,
               bccx: &BorrowckCtxt<'a, 'tcx>,
               -> FlowedMoveData<'a, 'tcx> {
        // One dataflow context per bit kind: moves, then variable assignments.
        DataFlowContext::new(tcx,
                             "flowed_move_data_moves",
                             MoveDataFlowOperator,
                             move_data.moves.borrow().len());
        let mut dfcx_assign =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_assigns",
                                 AssignDataFlowOperator,
                                 move_data.var_assignments.borrow().len());

        move_data.add_gen_kills(bccx,
        // Kill bits leaving the graph at flow exits, then iterate both
        // analyses to a fixed point over the CFG.
        dfcx_moves.add_kills_from_flow_exits(cfg);
        dfcx_assign.add_kills_from_flow_exits(cfg);

        dfcx_moves.propagate(cfg, body);
        dfcx_assign.propagate(cfg, body);
    pub fn kind_of_move_of_path(&self,
                                id: hir::ItemLocalId,
                                loan_path: &Rc<LoanPath<'tcx>>)
                                -> Option<MoveKind> {
        //! Returns the kind of a move of `loan_path` by `id`, if one exists.

        if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
            // Scan the moves generated at `id` for one whose path matches.
            self.dfcx_moves.each_gen_bit(id, |move_index| {
                let the_move = self.move_data.moves.borrow();
                let the_move = (*the_move)[move_index];
                if the_move.path == *loan_path_index {
                    ret = Some(the_move.kind);
    /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
    /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
    /// that would invalidate a reference to `loan_path` at location `id`.
    pub fn each_move_of<F>(&self,
                           id: hir::ItemLocalId,
                           loan_path: &Rc<LoanPath<'tcx>>,
        F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
        // Possible relationships between the moved path and the used path:
        // 1. Move of `a.b.c`, use of `a.b.c`
        // 2. Move of `a.b.c`, use of `a.b.c.d`
        // 3. Move of `a.b.c`, use of `a` or `a.b`
        // 4. move of `a.b.c`, use of `a.b.d`
        let base_indices = self.move_data.existing_base_paths(loan_path);
        if base_indices.is_empty() {

        let opt_loan_path_index = self.move_data.existing_move_path(loan_path);

        self.dfcx_moves.each_bit_on_entry(id, |index| {
            let the_move = self.move_data.moves.borrow();
            let the_move = &(*the_move)[index];
            let moved_path = the_move.path;
            if base_indices.iter().any(|x| x == &moved_path) {
                // Scenario 1 or 2: `loan_path` or some base path of
                // `loan_path` was moved.
                if !f(the_move, &self.move_data.path_loan_path(moved_path)) {

            if let Some(loan_path_index) = opt_loan_path_index {
                let cont = self.move_data.each_base_path(moved_path, |p| {
                    if p == loan_path_index {
                        // Scenario 3: some extension of `loan_path`
                        &self.move_data.path_loan_path(moved_path))

                if !cont { ret = false; }
    /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
    /// `loan_path` must be a single variable.
    pub fn each_assignment_of<F>(&self,
                                 id: hir::ItemLocalId,
                                 loan_path: &Rc<LoanPath<'tcx>>,
        F: FnMut(&Assignment) -> bool,
        let loan_path_index = {
            match self.move_data.existing_move_path(loan_path) {
                // if there were any assignments, it'd have an index

        // Scan the assignment bits live on entry to `id` for ones that
        // target this path.
        self.dfcx_assign.each_bit_on_entry(id, |index| {
            let assignment = self.move_data.var_assignments.borrow();
            let assignment = &(*assignment)[index];
            if assignment.path == loan_path_index && !f(assignment) {
impl BitwiseOperator for MoveDataFlowOperator {
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // moves from both preds are in scope
impl DataFlowOperator for MoveDataFlowOperator {
    fn initial_value(&self) -> bool {
        false // no moves in scope by default
impl BitwiseOperator for AssignDataFlowOperator {
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // assignments from both preds are in scope
impl DataFlowOperator for AssignDataFlowOperator {
    fn initial_value(&self) -> bool {
        false // no assignments in scope by default