1 // Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 use rustc::ty::TyCtxt;
13 use rustc::mir::repr::*;
14 use rustc::util::nodemap::FnvHashMap;
15 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
17 use std::cell::{Cell};
18 use std::collections::hash_map::Entry;
23 use super::abs_domain::{AbstractElem, Lift};
25 // This submodule holds some newtype'd Index wrappers that are using
26 // NonZero to ensure that Option<Index> occupies only a single word.
27 // They are in a submodule to impose privacy restrictions; namely, to
28 // ensure that other code does not accidentally access `index.0`
29 // (which is likely to yield a subtle off-by-one error).
31 use core::nonzero::NonZero;
32 use rustc_data_structures::indexed_vec::Idx;
// Generates a newtype index wrapper backed by NonZero<usize>.
// The stored value is idx + 1, so 0 is never stored and
// Option<$Index> can use 0 as its None niche (single word).
// NOTE(review): macro body is only partially visible here.
34 macro_rules! new_index {
36 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
37 pub struct $Index(NonZero<usize>);
43 fn new(idx: usize) -> Self {
// SAFETY (original): idx + 1 is always non-zero (assuming no overflow).
44 unsafe { $Index(NonZero::new(idx + 1)) }
// index() must undo the +1 shift applied in new().
46 fn index(self) -> usize {
53 /// Index into MovePathData.move_paths
54 new_index!(MovePathIndex);
56 /// Index into MoveData.moves.
57 new_index!(MoveOutIndex);
// Re-export from the private `indexes` submodule so callers can use the
// types without being able to reach the raw NonZero field inside.
60 pub use self::indexes::MovePathIndex;
61 pub use self::indexes::MoveOutIndex;
63 impl self::indexes::MoveOutIndex {
// Convenience accessor: given the MoveData this index refers into,
// return the MovePathIndex of the path this MoveOut moves out of.
64 pub fn move_path_index(&self, move_data: &MoveData) -> MovePathIndex {
65 move_data.moves[self.index()].path
69 /// `MovePath` is a canonicalized representation of a path that is
70 /// moved or assigned to.
72 /// It follows a tree structure.
74 /// Given `struct X { m: M, n: N }` and `x: X`, moves like `drop x.m;`
75 /// move *out* of the l-value `x.m`.
77 /// The MovePaths representing `x.m` and `x.n` are siblings (that is,
78 /// one of them will link to the other via the `next_sibling` field,
79 /// and the other will have no entry in its `next_sibling` field), and
80 /// they both have the MovePath representing `x` as their parent.
82 pub struct MovePath<'tcx> {
// Link to the next sibling in the parent's child list; None if last.
83 pub next_sibling: Option<MovePathIndex>,
// Head of this path's child list (e.g. `x.m` is a child of `x`).
84 pub first_child: Option<MovePathIndex>,
// None for base paths (vars/args/temps/statics/return pointer).
85 pub parent: Option<MovePathIndex>,
// The l-value this path denotes, or the Static marker (see below).
86 pub content: MovePathContent<'tcx>,
89 /// MovePaths usually represent a single l-value. The exceptions are
90 /// forms that arise due to erroneous input code: static data holds
91 /// l-values that we cannot actually move out of. Therefore we map
92 /// statics to a special marker value (`MovePathContent::Static`)
93 /// representing an invalid origin.
// Variants elided in this view; per the doc comment above, there is at
// least an `Lvalue(...)` variant and a `Static` marker variant
// (see their uses in `move_path_for` below).
94 #[derive(Clone, Debug)]
95 pub enum MovePathContent<'tcx> {
100 /// During construction of the MovePath's, we use PreMovePath to
101 /// represent accumulated state while we are gathering up all the
102 /// children of each path.
104 struct PreMovePath<'tcx> {
105 pub next_sibling: Option<MovePathIndex>,
// Cell so that a parent already pushed into `pre_move_paths` can have
// its first_child patched when a new child is created later.
106 pub first_child: Cell<Option<MovePathIndex>>,
107 pub parent: Option<MovePathIndex>,
108 pub content: MovePathContent<'tcx>,
111 impl<'tcx> PreMovePath<'tcx> {
// Freeze the accumulated construction state into the final MovePath,
// unwrapping the Cell now that no more children will be attached.
112 fn into_move_path(self) -> MovePath<'tcx> {
114 next_sibling: self.next_sibling,
116 content: self.content,
117 first_child: self.first_child.get(),
// Compact Debug form: the three optional links are printed only when
// present, then the content always follows.
122 impl<'tcx> fmt::Debug for MovePath<'tcx> {
123 fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
124 write!(w, "MovePath {{")?;
125 if let Some(parent) = self.parent {
126 write!(w, " parent: {:?},", parent)?;
128 if let Some(first_child) = self.first_child {
129 write!(w, " first_child: {:?},", first_child)?;
131 if let Some(next_sibling) = self.next_sibling {
132 write!(w, " next_sibling: {:?}", next_sibling)?;
134 write!(w, " content: {:?} }}", self.content)
// Top-level result of move gathering for one MIR body.
139 pub struct MoveData<'tcx> {
140 pub move_paths: MovePathData<'tcx>,
141 pub moves: Vec<MoveOut>,
// Maps each MovePathIndex to the MoveOuts that move out of it.
143 pub path_map: PathMap,
// Reverse lookup: l-value -> MovePathIndex.
144 pub rev_lookup: MovePathLookup<'tcx>,
149 /// Location-indexed (BasicBlock for outer index, index within BB
150 /// for inner index) map to list of MoveOutIndex's.
152 /// Each Location `l` is mapped to the MoveOut's that are effects
153 /// of executing the code at `l`. (There can be multiple MoveOut's
154 /// for a given `l` because each MoveOut is associated with one
155 /// particular path being moved.)
// NOTE(review): the `struct LocMap` header line is elided in this view;
// this field belongs to it (see `LocMap { map: loc_map }` below).
156 map: Vec<Vec<Vec<MoveOutIndex>>>,
// Index a LocMap directly by Location, yielding the slice of
// MoveOutIndex's for that block/statement pair.
159 impl Index<Location> for LocMap {
160 type Output = [MoveOutIndex];
161 fn index(&self, index: Location) -> &Self::Output {
// Explicit asserts give clearer panics than the raw slice indexing.
162 assert!(index.block.index() < self.map.len());
163 assert!(index.statement_index < self.map[index.block.index()].len());
164 &self.map[index.block.index()][index.statement_index]
170 /// Path-indexed map to list of MoveOutIndex's.
172 /// Each Path `p` is mapped to the MoveOut's that move out of `p`.
// NOTE(review): the `struct PathMap` header line is elided in this view.
173 map: Vec<Vec<MoveOutIndex>>,
// Index a PathMap by MovePathIndex, yielding the MoveOuts for that path.
176 impl Index<MovePathIndex> for PathMap {
177 type Output = [MoveOutIndex];
178 fn index(&self, index: MovePathIndex) -> &Self::Output {
179 &self.map[index.index()]
183 /// `MoveOut` represents a point in a program that moves out of some
184 /// L-value; i.e., "creates" uninitialized memory.
186 /// With respect to dataflow analysis:
187 /// - Generated by moves and declaration of uninitialized variables.
188 /// - Killed by assignments to the memory.
// NOTE(review): the `pub struct MoveOut` header line is elided here.
189 #[derive(Copy, Clone)]
// The path being moved out of.
192 pub path: MovePathIndex,
// Where in the MIR the move occurs.
194 pub source: Location,
// Debug form `p<path>@<location>`, e.g. "p3@bb2[1]".
197 impl fmt::Debug for MoveOut {
198 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
199 write!(fmt, "p{}@{:?}", self.path.index(), self.source)
// Owns the finalized MovePath tree, addressed by MovePathIndex.
204 pub struct MovePathData<'tcx> {
205 move_paths: Vec<MovePath<'tcx>>,
208 impl<'tcx> MovePathData<'tcx> {
// Number of move paths (also the exclusive upper bound on indices).
209 pub fn len(&self) -> usize { self.move_paths.len() }
// Indexing by MovePathIndex rather than usize keeps index spaces distinct.
212 impl<'tcx> Index<MovePathIndex> for MovePathData<'tcx> {
213 type Output = MovePath<'tcx>;
214 fn index(&self, i: MovePathIndex) -> &MovePath<'tcx> {
215 &self.move_paths[i.index()]
// Construction-time state: accumulates PreMovePaths plus the reverse
// lookup tables; converted into MovePathData at the end of gather_moves.
219 struct MovePathDataBuilder<'tcx> {
220 pre_move_paths: Vec<PreMovePath<'tcx>>,
221 rev_lookup: MovePathLookup<'tcx>,
224 /// Tables mapping from an l-value to its MovePathIndex.
226 pub struct MovePathLookup<'tcx> {
// Dense per-kind tables; None until the l-value is first encountered.
227 vars: IndexVec<Var, Option<MovePathIndex>>,
228 temps: IndexVec<Temp, Option<MovePathIndex>>,
229 args: IndexVec<Arg, Option<MovePathIndex>>,
231 /// The move path representing the return value is constructed
232 /// lazily when we first encounter it in the input MIR.
233 return_ptr: Option<MovePathIndex>,
235 /// A single move path (representing any static data referenced)
236 /// is constructed lazily when we first encounter statics in the
238 statics: Option<MovePathIndex>,
240 /// projections are made from a base-lvalue and a projection
241 /// elem. The base-lvalue will have a unique MovePathIndex; we use
242 /// the latter as the index into the outer vector (narrowing
243 /// subsequent search so that it is solely relative to that
244 /// base-lvalue). For the remaining lookup, we map the projection
245 /// elem to the associated MovePathIndex.
246 projections: Vec<FnvHashMap<AbstractElem<'tcx>, MovePathIndex>>,
248 /// Tracks the next index to allocate during construction of the
249 /// MovePathData. Unused after MovePathData is fully constructed.
250 next_index: MovePathIndex,
// Trait for growing a collection so that `idx` becomes a valid index.
// NOTE(review): the `trait FillTo` header (with its `type T`) is elided
// in this view.
// Extend with copies of `x` until index `idx` is in bounds.
255 fn fill_to_with(&mut self, idx: usize, x: Self::T);
// Same, using the element type's Default value as filler.
256 fn fill_to(&mut self, idx: usize) where Self::T: Default {
257 self.fill_to_with(idx, Default::default())
260 impl<T:Clone> FillTo for Vec<T> {
262 fn fill_to_with(&mut self, idx: usize, x: T) {
// No-op when idx is already in bounds.
263 if idx >= self.len() {
264 let delta = idx + 1 - self.len();
// Sanity check on the arithmetic above (guards against overflow bugs).
265 assert_eq!(idx + 1, self.len() + delta);
266 self.extend(iter::repeat(x).take(delta))
268 debug_assert!(idx < self.len());
// Result of a lookup: records whether the index was freshly allocated
// (Generate) or an existing one was returned (Reuse).
272 #[derive(Clone, Debug)]
273 enum LookupKind { Generate, Reuse }
274 #[derive(Clone, Debug)]
275 struct Lookup<T>(LookupKind, T);
277 impl Lookup<MovePathIndex> {
// Shorthand for the underlying usize of the wrapped index.
278 fn index(&self) -> usize { (self.1).index() }
// Builder-side lookups: each `lookup_*` returns the existing index for
// an l-value or allocates a fresh one (advancing `next_index`).
281 impl<'tcx> MovePathLookup<'tcx> {
282 fn new(mir: &Mir) -> Self {
// Tables pre-sized to the MIR's declarations, all initially None.
284 vars: IndexVec::from_elem(None, &mir.var_decls),
285 temps: IndexVec::from_elem(None, &mir.temp_decls),
286 args: IndexVec::from_elem(None, &mir.arg_decls),
290 next_index: MovePathIndex::new(0),
// Post-increment: returns the current index and bumps the counter.
294 fn next_index(next: &mut MovePathIndex) -> MovePathIndex {
296 *next = MovePathIndex::new(i.index() + 1);
// Shared helper for the vars/temps/args tables: reuse the cached index
// if present, otherwise allocate and record a new one.
300 fn lookup_or_generate<I: Idx>(vec: &mut IndexVec<I, Option<MovePathIndex>>,
302 next_index: &mut MovePathIndex)
303 -> Lookup<MovePathIndex> {
304 let entry = &mut vec[idx];
307 let i = Self::next_index(next_index);
309 Lookup(LookupKind::Generate, i)
312 Lookup(LookupKind::Reuse, entry_idx)
317 fn lookup_var(&mut self, var_idx: Var) -> Lookup<MovePathIndex> {
318 Self::lookup_or_generate(&mut self.vars,
320 &mut self.next_index)
323 fn lookup_temp(&mut self, temp_idx: Temp) -> Lookup<MovePathIndex> {
324 Self::lookup_or_generate(&mut self.temps,
326 &mut self.next_index)
329 fn lookup_arg(&mut self, arg_idx: Arg) -> Lookup<MovePathIndex> {
330 Self::lookup_or_generate(&mut self.args,
332 &mut self.next_index)
// All statics share one lazily-created move path (see struct docs).
335 fn lookup_static(&mut self) -> Lookup<MovePathIndex> {
338 Lookup(LookupKind::Reuse, mpi)
340 ref mut ret @ None => {
341 let mpi = Self::next_index(&mut self.next_index);
343 Lookup(LookupKind::Generate, mpi)
// The return pointer's path is likewise created on first use.
348 fn lookup_return_pointer(&mut self) -> Lookup<MovePathIndex> {
349 match self.return_ptr {
351 Lookup(LookupKind::Reuse, mpi)
353 ref mut ret @ None => {
354 let mpi = Self::next_index(&mut self.next_index);
356 Lookup(LookupKind::Generate, mpi)
// Projection lookup: outer vec indexed by the base path, inner map
// keyed by the abstracted projection elem.
361 fn lookup_proj(&mut self,
362 proj: &LvalueProjection<'tcx>,
363 base: MovePathIndex) -> Lookup<MovePathIndex> {
// Destructure to borrow two fields mutably at once.
364 let MovePathLookup { ref mut projections,
365 ref mut next_index, .. } = *self;
// Ensure the outer vec reaches the base index before indexing it.
366 projections.fill_to(base.index());
367 match projections[base.index()].entry(proj.elem.lift()) {
368 Entry::Occupied(ent) => {
369 Lookup(LookupKind::Reuse, *ent.get())
371 Entry::Vacant(ent) => {
372 let mpi = Self::next_index(next_index);
374 Lookup(LookupKind::Generate, mpi)
380 impl<'tcx> MovePathLookup<'tcx> {
381 // Unlike the builder `fn move_path_for` below, this lookup
382 // alternative will *not* create a MovePath on the fly for an
383 // unknown l-value; it will simply panic.
384 pub fn find(&self, lval: &Lvalue<'tcx>) -> MovePathIndex {
// The unwraps are the deliberate panic-on-unknown behavior noted above.
386 Lvalue::Var(var) => self.vars[var].unwrap(),
387 Lvalue::Temp(temp) => self.temps[temp].unwrap(),
388 Lvalue::Arg(arg) => self.args[arg].unwrap(),
389 Lvalue::Static(ref _def_id) => self.statics.unwrap(),
390 Lvalue::ReturnPointer => self.return_ptr.unwrap(),
391 Lvalue::Projection(ref proj) => {
// Recurse on the base, then resolve the elem in that base's table.
392 let base_index = self.find(&proj.base);
393 self.projections[base_index.index()][&proj.elem.lift()]
399 impl<'tcx> MovePathDataBuilder<'tcx> {
// Dispatch to the appropriate rev_lookup table for `lval`; for
// projections, first ensure the base has a path (recursively).
400 fn lookup(&mut self, lval: &Lvalue<'tcx>) -> Lookup<MovePathIndex> {
401 let proj = match *lval {
402 Lvalue::Var(var_idx) =>
403 return self.rev_lookup.lookup_var(var_idx),
404 Lvalue::Temp(temp_idx) =>
405 return self.rev_lookup.lookup_temp(temp_idx),
406 Lvalue::Arg(arg_idx) =>
407 return self.rev_lookup.lookup_arg(arg_idx),
408 Lvalue::Static(_def_id) =>
409 return self.rev_lookup.lookup_static(),
410 Lvalue::ReturnPointer =>
411 return self.rev_lookup.lookup_return_pointer(),
412 Lvalue::Projection(ref proj) => {
// Recursion creates the base's MovePath before looking up the elem.
417 let base_index = self.move_path_for(&proj.base);
418 self.rev_lookup.lookup_proj(proj, base_index)
421 fn create_move_path(&mut self, lval: &Lvalue<'tcx>) {
422 // Create MovePath for `lval`, discarding returned index.
423 self.move_path_for(lval);
// Core builder entry: look up `lval`, and if it was freshly generated,
// construct and push the corresponding PreMovePath (linking it into
// its parent's child list for projections).
426 fn move_path_for(&mut self, lval: &Lvalue<'tcx>) -> MovePathIndex {
427 debug!("move_path_for({:?})", lval);
429 let lookup: Lookup<MovePathIndex> = self.lookup(lval);
431 // `lookup` is either the previously assigned index or a
432 // newly-allocated one.
433 debug_assert!(lookup.index() <= self.pre_move_paths.len());
// Only Generate requires building a new PreMovePath; Reuse falls
// through (elided here) to just return the existing index.
435 if let Lookup(LookupKind::Generate, mpi) = lookup {
438 // tracks whether content is Some non-static; statics map to None.
439 let content: Option<&Lvalue<'tcx>>;
442 Lvalue::Static(_) => {
448 Lvalue::Var(_) | Lvalue::Temp(_) | Lvalue::Arg(_) |
449 Lvalue::ReturnPointer => {
450 content = Some(lval);
454 Lvalue::Projection(ref proj) => {
455 content = Some(lval);
457 // Here, install new MovePath as new first_child.
459 // Note: `parent` previously allocated (Projection
460 // case of match above established this).
461 let idx = self.move_path_for(&proj.base);
464 let parent_move_path = &mut self.pre_move_paths[idx.index()];
466 // At last: Swap in the new first_child.
// The old first_child becomes this node's next_sibling.
467 sibling = parent_move_path.first_child.get();
468 parent_move_path.first_child.set(Some(mpi));
472 let content = match content {
473 Some(lval) => MovePathContent::Lvalue(lval.clone()),
474 None => MovePathContent::Static,
477 let move_path = PreMovePath {
478 next_sibling: sibling,
// first_child starts empty; children will patch it via the Cell.
481 first_child: Cell::new(None),
484 self.pre_move_paths.push(move_path);
491 impl<'a, 'tcx> MoveData<'tcx> {
// Public entry point; delegates to the free function below.
492 pub fn gather_moves(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
493 gather_moves(mir, tcx)
// Variants of an enum (header elided in this view) classifying where a
// move originates; used only for debug logging in BlockContext below.
499 Use, Repeat, Cast, BinaryOp, UnaryOp, Box,
500 Aggregate, Drop, CallFn, CallArg, Return, If,
// Walks every statement and terminator of `mir`, building the MovePath
// tree, the MoveOut list, and the loc/path maps, then assembles them
// into a MoveData. NOTE(review): a number of source lines are elided in
// this view (match headers, closing braces, some struct-literal fields).
503 fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveData<'tcx> {
504 use self::StmtKind as SK;
506 let bb_count = mir.basic_blocks().len();
507 let mut moves = vec![];
// One inner Vec per basic block; filled per-statement below.
508 let mut loc_map: Vec<_> = iter::repeat(Vec::new()).take(bb_count).collect();
509 let mut path_map = Vec::new();
511 // this is mutable only because we will move it to and fro' the
512 // BlockContexts constructed on each iteration. (Moving is more
513 // straight-forward than mutable borrows in this instance.)
514 let mut builder = MovePathDataBuilder {
515 pre_move_paths: Vec::new(),
516 rev_lookup: MovePathLookup::new(mir),
519 // Before we analyze the program text, we create the MovePath's
520 // for all of the vars, args, and temps. (This enforces a basic
521 // property that even if the MIR body doesn't contain any
522 // references to a var/arg/temp, it will still be a valid
523 // operation to lookup the MovePath associated with it.)
524 assert!(mir.var_decls.len() <= ::std::u32::MAX as usize);
525 assert!(mir.arg_decls.len() <= ::std::u32::MAX as usize);
526 assert!(mir.temp_decls.len() <= ::std::u32::MAX as usize);
527 for var in mir.var_decls.indices() {
528 let path_idx = builder.move_path_for(&Lvalue::Var(var));
// Keep path_map's length in sync with every path we create.
529 path_map.fill_to(path_idx.index());
531 for arg in mir.arg_decls.indices() {
532 let path_idx = builder.move_path_for(&Lvalue::Arg(arg));
533 path_map.fill_to(path_idx.index());
535 for temp in mir.temp_decls.indices() {
536 let path_idx = builder.move_path_for(&Lvalue::Temp(temp));
537 path_map.fill_to(path_idx.index());
// Main per-block walk.
540 for (bb, bb_data) in mir.basic_blocks().iter_enumerated() {
541 let loc_map_bb = &mut loc_map[bb.index()];
543 debug_assert!(loc_map_bb.len() == 0);
544 let len = bb_data.statements.len();
// len + 1 slots: one per statement plus one for the terminator.
545 loc_map_bb.fill_to(len);
546 debug_assert!(loc_map_bb.len() == len + 1);
// BlockContext takes ownership of `builder` for this block; it is
// moved back out at the bottom of the loop.
548 let mut bb_ctxt = BlockContext {
552 path_map: &mut path_map,
553 loc_map_bb: loc_map_bb,
556 for (i, stmt) in bb_data.statements.iter().enumerate() {
557 let source = Location { block: bb, statement_index: i };
559 StatementKind::Assign(ref lval, ref rval) => {
560 bb_ctxt.builder.create_move_path(lval);
562 // Ensure that the path_map contains entries even
563 // if the lvalue is assigned and never read.
564 let assigned_path = bb_ctxt.builder.move_path_for(lval);
565 bb_ctxt.path_map.fill_to(assigned_path.index());
// Classify the rvalue: each consumed operand may be a move.
568 Rvalue::Use(ref operand) => {
569 bb_ctxt.on_operand(SK::Use, operand, source)
571 Rvalue::Repeat(ref operand, ref _const) =>
572 bb_ctxt.on_operand(SK::Repeat, operand, source),
573 Rvalue::Cast(ref _kind, ref operand, ref _ty) =>
574 bb_ctxt.on_operand(SK::Cast, operand, source),
575 Rvalue::BinaryOp(ref _binop, ref operand1, ref operand2) |
576 Rvalue::CheckedBinaryOp(ref _binop, ref operand1, ref operand2) => {
577 bb_ctxt.on_operand(SK::BinaryOp, operand1, source);
578 bb_ctxt.on_operand(SK::BinaryOp, operand2, source);
580 Rvalue::UnaryOp(ref _unop, ref operand) => {
581 bb_ctxt.on_operand(SK::UnaryOp, operand, source);
583 Rvalue::Box(ref _ty) => {
584 // this is creating uninitialized
585 // memory that needs to be initialized.
// Record a move-out of `*lval` so dataflow sees the box's
// contents as uninitialized until assigned.
586 let deref_lval = Lvalue::Projection(Box::new(Projection {
588 elem: ProjectionElem::Deref,
590 bb_ctxt.on_move_out_lval(SK::Box, &deref_lval, source);
592 Rvalue::Aggregate(ref _kind, ref operands) => {
593 for operand in operands {
594 bb_ctxt.on_operand(SK::Aggregate, operand, source);
// InlineAsm operands are not treated as moves here.
599 Rvalue::InlineAsm { .. } => {}
602 StatementKind::StorageLive(_) |
603 StatementKind::StorageDead(_) => {}
604 StatementKind::SetDiscriminant{ .. } => {
605 span_bug!(stmt.source_info.span,
606 "SetDiscriminant should not exist during borrowck");
// Now the block terminator; its Location is index == statements.len().
611 debug!("gather_moves({:?})", bb_data.terminator());
612 match bb_data.terminator().kind {
613 TerminatorKind::Goto { target: _ } |
614 TerminatorKind::Resume |
615 TerminatorKind::Unreachable => { }
617 TerminatorKind::Return => {
618 let source = Location { block: bb,
619 statement_index: bb_data.statements.len() };
620 debug!("gather_moves Return on_move_out_lval return {:?}", source);
// Returning moves out of the return pointer slot.
621 bb_ctxt.on_move_out_lval(SK::Return, &Lvalue::ReturnPointer, source);
624 TerminatorKind::If { ref cond, targets: _ } => {
625 let source = Location { block: bb,
626 statement_index: bb_data.statements.len() };
627 bb_ctxt.on_operand(SK::If, cond, source);
630 TerminatorKind::Assert {
631 ref cond, expected: _,
632 ref msg, target: _, cleanup: _
634 // The `cond` is always of (copyable) type `bool`,
635 // so there will never be anything to move.
638 AssertMessage:: BoundsCheck { ref len, ref index } => {
639 // Same for the usize length and index in bounds-checking.
640 let _ = (len, index);
642 AssertMessage::Math(_) => {}
646 TerminatorKind::SwitchInt { switch_ty: _, values: _, targets: _, ref discr } |
647 TerminatorKind::Switch { adt_def: _, targets: _, ref discr } => {
648 // The `discr` is not consumed; that is instead
649 // encoded on specific match arms (and for
650 // SwitchInt`, it is always a copyable integer
655 TerminatorKind::Drop { ref location, target: _, unwind: _ } => {
656 let source = Location { block: bb,
657 statement_index: bb_data.statements.len() };
658 bb_ctxt.on_move_out_lval(SK::Drop, location, source);
660 TerminatorKind::DropAndReplace { ref location, ref value, .. } => {
// The dropped-and-reassigned location needs a path entry even
// if never otherwise read.
661 let assigned_path = bb_ctxt.builder.move_path_for(location);
662 bb_ctxt.path_map.fill_to(assigned_path.index());
664 let source = Location { block: bb,
665 statement_index: bb_data.statements.len() };
666 bb_ctxt.on_operand(SK::Use, value, source);
668 TerminatorKind::Call { ref func, ref args, ref destination, cleanup: _ } => {
669 let source = Location { block: bb,
670 statement_index: bb_data.statements.len() };
671 bb_ctxt.on_operand(SK::CallFn, func, source);
673 debug!("gather_moves Call on_operand {:?} {:?}", arg, source);
674 bb_ctxt.on_operand(SK::CallArg, arg, source);
676 if let Some((ref destination, _bb)) = *destination {
677 debug!("gather_moves Call create_move_path {:?} {:?}", destination, source);
679 // Ensure that the path_map contains entries even
680 // if the lvalue is assigned and never read.
681 let assigned_path = bb_ctxt.builder.move_path_for(destination);
682 bb_ctxt.path_map.fill_to(assigned_path.index());
684 bb_ctxt.builder.create_move_path(destination);
// Recover the builder moved into bb_ctxt for the next iteration.
689 builder = bb_ctxt.builder;
692 // At this point, we may have created some MovePaths that do not
693 // have corresponding entries in the path map.
695 // (For example, creating the path `a.b.c` may, as a side-effect,
696 // create a path for the parent path `a.b`.)
698 // All such paths were not referenced ...
700 // well you know, lets actually try just asserting that the path map *is* complete.
701 assert_eq!(path_map.len(), builder.pre_move_paths.len());
// Finalize: PreMovePath -> MovePath.
703 let pre_move_paths = builder.pre_move_paths;
704 let move_paths: Vec<_> = pre_move_paths.into_iter()
705 .map(|p| p.into_move_path())
// Debug-dump section: log every move and path (no semantic effect).
709 let mut seen: Vec<_> = move_paths.iter().map(|_| false).collect();
710 for (j, &MoveOut { ref path, ref source }) in moves.iter().enumerate() {
711 debug!("MovePathData moves[{}]: MoveOut {{ path: {:?} = {:?}, source: {:?} }}",
712 j, path, move_paths[path.index()], source);
713 seen[path.index()] = true;
715 for (j, path) in move_paths.iter().enumerate() {
717 debug!("MovePathData move_paths[{}]: {:?}", j, path);
720 "done dumping MovePathData"
// Assemble the final MoveData from the accumulated pieces.
724 move_paths: MovePathData { move_paths: move_paths, },
726 loc_map: LocMap { map: loc_map },
727 path_map: PathMap { map: path_map },
728 rev_lookup: builder.rev_lookup,
// Per-basic-block working state for gather_moves: owns the builder for
// the duration of one block and borrows the shared maps.
732 struct BlockContext<'b, 'tcx: 'b> {
733 _tcx: TyCtxt<'b, 'tcx, 'tcx>,
734 moves: &'b mut Vec<MoveOut>,
// Owned (not borrowed) so it can be moved in/out each iteration.
735 builder: MovePathDataBuilder<'tcx>,
736 path_map: &'b mut Vec<Vec<MoveOutIndex>>,
// The loc_map entry for just this block (indexed by statement).
737 loc_map_bb: &'b mut Vec<Vec<MoveOutIndex>>,
740 impl<'b, 'tcx: 'b> BlockContext<'b, 'tcx> {
// Record a move out of `lval` at `source`: allocate a MoveOutIndex,
// and register it in both path_map and this block's loc_map entry.
741 fn on_move_out_lval(&mut self,
745 let i = source.statement_index;
746 let index = MoveOutIndex::new(self.moves.len());
748 let path = self.builder.move_path_for(lval);
749 self.moves.push(MoveOut { path: path, source: source.clone() });
750 self.path_map.fill_to(path.index());
752 debug!("ctxt: {:?} add consume of lval: {:?} \
754 to path_map for path: {:?} and \
755 to loc_map for loc: {:?}",
756 stmt_kind, lval, index, path, source);
758 debug_assert!(path.index() < self.path_map.len());
759 // this is actually a questionable assert; at the very
760 // least, incorrect input code can probably cause it to
// Guard against registering the same move twice for one path.
762 assert!(self.path_map[path.index()].iter().find(|idx| **idx == index).is_none());
763 self.path_map[path.index()].push(index);
765 debug_assert!(i < self.loc_map_bb.len());
766 debug_assert!(self.loc_map_bb[i].iter().find(|idx| **idx == index).is_none());
767 self.loc_map_bb[i].push(index);
// Operands: constants are never moves; consumed l-values are.
770 fn on_operand(&mut self, stmt_kind: StmtKind, operand: &Operand<'tcx>, source: Location) {
772 Operand::Constant(..) => {} // not-a-move
773 Operand::Consume(ref lval) => { // a move
774 self.on_move_out_lval(stmt_kind, lval, source);