log graphviz rustc_llvm rustc_back rustc_data_structures\
rustc_const_eval
DEPS_rustc_back := std syntax rustc_llvm rustc_front flate log libc
-DEPS_rustc_borrowck := rustc rustc_front log graphviz syntax
+DEPS_rustc_borrowck := rustc rustc_front rustc_mir log graphviz syntax
DEPS_rustc_data_structures := std log serialize
DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \
rustc_typeck rustc_mir rustc_resolve log syntax serialize rustc_llvm \
let word = bit / usize_bits;
let bit_in_word = bit % usize_bits;
let bit_mask = 1 << bit_in_word;
- debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, word);
+ debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, bit_mask);
let oldv = words[word];
let newv = oldv | bit_mask;
words[word] = newv;
/// or `*B` or `B[index]`. Note that it is parameterized because it is
/// shared between `Constant` and `Lvalue`. See the aliases
/// `LvalueProjection` etc below.
-#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct Projection<'tcx, B, V> {
pub base: B,
pub elem: ProjectionElem<'tcx, V>,
}
-#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum ProjectionElem<'tcx, V> {
Deref,
Field(Field, Ty<'tcx>),
/// this does not necessarily mean that they are "==" in Rust -- in
/// particular one must be wary of `NaN`!
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)]
+#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct Constant<'tcx> {
pub span: Span,
pub ty: Ty<'tcx>,
}
}
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)]
+#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum Literal<'tcx> {
Item {
def_id: DefId,
--- /dev/null
+// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::mem;
+
/// `BitSlice` provides helper methods for treating a `[usize]`
/// as a bitvector.
pub trait BitSlice {
    /// Sets the bit at `idx`; returns true iff the bit was previously clear.
    fn set_bit(&mut self, idx: usize) -> bool;
    /// Returns true iff the bit at `idx` is set.
    fn get_bit(&self, idx: usize) -> bool;
}
+
impl BitSlice for [usize] {
    /// Sets the `idx`-th bit of the word vector; the return value
    /// reports whether anything actually changed (i.e. the bit was
    /// previously clear).
    fn set_bit(&mut self, idx: usize) -> bool {
        let words = self;
        debug!("set_bit: words={} idx={}",
               bits_to_string(words, words.len() * mem::size_of::<usize>()), bit_str(idx));
        let BitLookup { word, bit_in_word, bit_mask } = bit_lookup(idx);
        debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, bit_mask);
        let oldv = words[word];
        let newv = oldv | bit_mask;
        words[word] = newv;
        // If or-ing in the mask changed nothing, the bit was already set.
        oldv != newv
    }

    /// Reads the `idx`-th bit of the word vector.
    fn get_bit(&self, idx: usize) -> bool {
        let words = self;
        let BitLookup { word, bit_mask, .. } = bit_lookup(idx);
        (words[word] & bit_mask) != 0
    }
}
+
/// Decomposed address of a single bit within a `[usize]` bitvector:
/// the containing word, the bit's offset inside that word, and the
/// single-bit mask selecting it.
struct BitLookup { word: usize, bit_in_word: usize, bit_mask: usize }

#[inline]
fn bit_lookup(bit: usize) -> BitLookup {
    // A word holds `size_of::<usize>() * 8` bits.
    let bits_per_word = mem::size_of::<usize>() * 8;
    let within = bit % bits_per_word;
    BitLookup {
        word: bit / bits_per_word,
        bit_in_word: within,
        bit_mask: 1 << within,
    }
}
+
+
/// Renders a bit index for debug output as `[bit:byte-mask]`, where
/// `byte` is the index of the byte containing the bit and `mask` is
/// the bit's position within that byte — matching the byte-oriented
/// little-endian layout printed by `bits_to_string`.
fn bit_str(bit: usize) -> String {
    // A byte holds 8 bits, so the containing byte is `bit / 8` and the
    // in-byte position is `bit % 8`. The previous `bit >> 8` /
    // `1 << (bit & 0xFF)` version treated a "byte" as 256 bits and
    // overflowed the shift (a panic in debug builds) for any bit
    // index >= the word size.
    let byte = bit >> 3;
    let lobits = 1 << (bit & 0b111);
    format!("[{}:{}-{:02x}]", bit, byte, lobits)
}
+
/// Formats the first `bytes` bytes of the bitvector `words` as a
/// little-endian, `-`-separated hex byte dump wrapped in brackets,
/// e.g. `[01-02]`. Bytes beyond `bytes` must be zero (asserted).
pub fn bits_to_string(words: &[usize], bytes: usize) -> String {
    let mut result = String::new();
    let mut sep = '[';

    // Note: this is a little endian printout of bytes.

    let mut i = 0;
    for &word in words {
        for shift in 0..mem::size_of::<usize>() {
            let byte = (word >> (8 * shift)) & 0xFF;
            if i < bytes {
                result.push(sep);
                result.push_str(&format!("{:02x}", byte));
            } else {
                // Trailing storage past the declared byte count must
                // not carry set bits.
                assert!(byte == 0);
            }
            i += 1;
            sep = '-';
        }
    }
    result.push(']');
    result
}
--- /dev/null
+// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! The move-analysis portion of borrowck needs to work in an abstract
+//! domain of lifted Lvalues. Most of the Lvalue variants fall into a
+//! one-to-one mapping between the concrete and abstract (e.g. a
+//! field-deref on a local-variable, `x.field`, has the same meaning
+//! in both domains). Indexed-Projections are the exception: `a[x]`
+//! needs to be treated as mapping to the same move path as `a[y]` as
+//! well as `a[13]`, et cetera.
+//!
+//! (In theory the analysis could be extended to work with sets of
+//! paths, so that `a[0]` and `a[13]` could be kept distinct, while
+//! `a[x]` would still overlap them both. But that is not what this
+//! representation does today.)
+
+use rustc::mir::repr::{Lvalue, LvalueElem};
+use rustc::mir::repr::{Operand, Projection, ProjectionElem};
+
/// The single representative for every index operand in the abstract
/// domain: all concrete indices collapse to this value, so `a[x]`,
/// `a[y]`, and `a[13]` share one move path.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct AbstractOperand;
/// A projection whose index operands have been abstracted away.
pub type AbstractProjection<'tcx> =
    Projection<'tcx, Lvalue<'tcx>, AbstractOperand>;
/// A projection element over `AbstractOperand` instead of a concrete operand.
pub type AbstractElem<'tcx> =
    ProjectionElem<'tcx, AbstractOperand>;
+
/// Lifts a concrete MIR entity into its abstract-domain counterpart.
pub trait Lift {
    type Abstract;
    fn lift(&self) -> Self::Abstract;
}
impl<'tcx> Lift for Operand<'tcx> {
    type Abstract = AbstractOperand;
    // Every operand maps to the one abstract point — this is what
    // merges `a[x]` and `a[y]` into the same path.
    fn lift(&self) -> Self::Abstract { AbstractOperand }
}
impl<'tcx> Lift for LvalueElem<'tcx> {
    type Abstract = AbstractElem<'tcx>;
    fn lift(&self) -> Self::Abstract {
        // Structural copy of every variant; only `Index` actually
        // loses information, via `Operand::lift` above.
        match *self {
            ProjectionElem::Deref =>
                ProjectionElem::Deref,
            ProjectionElem::Field(ref f, ty) =>
                ProjectionElem::Field(f.clone(), ty.clone()),
            ProjectionElem::Index(ref i) =>
                ProjectionElem::Index(i.lift()),
            ProjectionElem::ConstantIndex {offset,min_length,from_end} =>
                ProjectionElem::ConstantIndex {
                    offset: offset,
                    min_length: min_length,
                    from_end: from_end
                },
            ProjectionElem::Downcast(a, u) =>
                ProjectionElem::Downcast(a.clone(), u.clone()),
        }
    }
}
--- /dev/null
+// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use syntax::attr::AttrMetaMethods;
+
+use rustc::middle::ty;
+use rustc::mir::repr::{self, Mir};
+
+use std::io;
+use std::mem;
+use std::usize;
+
+use super::MirBorrowckCtxt;
+use super::gather_moves::{Location, MoveData, MovePathData, MovePathIndex, PathMap};
+use super::graphviz;
+use bitslice::BitSlice; // adds set_bit/get_bit to &[usize] bitvector rep.
+
/// Entry point for the analysis: builds the transfer-function sets,
/// runs propagation to a fixed point, and emits the (attribute-gated)
/// graphviz instrumentation before and after.
pub trait Dataflow {
    fn dataflow(&mut self);
}

impl<'b, 'a: 'b, 'tcx: 'a> Dataflow for MirBorrowckCtxt<'b, 'a, 'tcx> {
    fn dataflow(&mut self) {
        self.build_gen_and_kill_sets();
        // The instrumentation hooks only act when the corresponding
        // `#[rustc_mir(...)]` attribute is present; see
        // `if_attr_meta_name_found` below.
        self.pre_dataflow_instrumentation().unwrap();
        self.propagate();
        self.post_dataflow_instrumentation().unwrap();
    }
}

/// Pairs the mutable borrowck context with the `changed` flag that
/// drives the iterate-until-fixed-point loop in `propagate`.
struct PropagationContext<'c, 'b: 'c, 'a: 'b, 'tcx: 'a> {
    mbcx: &'c mut MirBorrowckCtxt<'b, 'a, 'tcx>,
    changed: bool,
}
+
impl<'b, 'a: 'b, 'tcx: 'a> MirBorrowckCtxt<'b, 'a, 'tcx> {
    /// Repeatedly walks the whole CFG until no block's entry set
    /// changes (a worklist-free fixed-point iteration).
    fn propagate(&mut self) {
        // Scratch buffer reused across sweeps; sized to hold one
        // block's worth of bits.
        let mut temp = vec![0; self.flow_state.sets.words_per_block];
        let mut propcx = PropagationContext { mbcx: &mut *self, changed: true, };
        while propcx.changed {
            propcx.changed = false;
            propcx.reset(&mut temp);
            propcx.walk_cfg(&mut temp);
        }
    }

    /// Populates the per-block gen- and kill-sets from the gathered
    /// move data: each MoveOut gen's its bit at its location; each
    /// assignment (or call destination) kills the MoveOuts of the
    /// assigned path and all of its children.
    fn build_gen_and_kill_sets(&mut self) {
        // First we need to build the gen- and kill-sets. The
        // gather_moves information provides a high-level mapping from
        // mir-locations to the MoveOuts (and those correspond
        // directly to gen-sets here). But we still need to figure out
        // the kill-sets.

        let move_data = &self.flow_state.operator;
        let move_paths = &move_data.move_paths;
        let loc_map = &move_data.loc_map;
        let path_map = &move_data.path_map;
        let rev_lookup = &move_data.rev_lookup;

        for bb in self.mir.all_basic_blocks() {
            let &repr::BasicBlockData { ref statements,
                                        ref terminator,
                                        is_cleanup: _ } =
                self.mir.basic_block_data(bb);

            let mut sets = self.flow_state.sets.for_block(bb.index());
            for (j, stmt) in statements.iter().enumerate() {
                let loc = Location { block: bb, index: j };
                debug!("stmt {:?} at loc {:?} moves out of move_indexes {:?}",
                       stmt, loc, &loc_map[loc]);
                for move_index in &loc_map[loc] {
                    // Every path deinitialized by a *particular move*
                    // has corresponding bit, "gen'ed" (i.e. set)
                    // here, in dataflow vector
                    let retval = sets.gen_set.set_bit(move_index.idx().unwrap());
                    assert!(retval);
                }
                match stmt.kind {
                    repr::StatementKind::Assign(ref lvalue, _) => {
                        // assigning into this `lvalue` kills all
                        // MoveOuts from it, and *also* all MoveOuts
                        // for children and associated fragment sets.
                        let move_path_index = rev_lookup.find(lvalue);
                        set_children_kill_bits(sets.kill_set,
                                               move_path_index,
                                               path_map,
                                               move_paths);
                    }
                }
            }

            // The terminator lives at index `statements.len()` in the
            // location map.
            let loc = Location { block: bb, index: statements.len() };
            debug!("terminator {:?} at loc {:?} moves out of move_indexes {:?}",
                   terminator, loc, &loc_map[loc]);
            for move_index in &loc_map[loc] {
                let retval = sets.gen_set.set_bit(move_index.idx().unwrap());
                assert!(retval);
            }

            // Note: while below as originally authored could be
            // written as an `if let`, it is more future-proof (to MIR
            // changes) to use an explicit `match` here.
            match *terminator {
                None => {}
                Some(repr::Terminator::Goto { target: _ }) => {}
                Some(repr::Terminator::If { cond: _, targets: _ }) => {}
                Some(repr::Terminator::Switch { discr: _, adt_def: _, targets: _ }) => {}
                Some(repr::Terminator::SwitchInt { discr: _, switch_ty: _, values: _, targets: _ }) => {}
                Some(repr::Terminator::Resume) => {}
                Some(repr::Terminator::Return) => {}
                Some(repr::Terminator::Drop { value: _, target: _, unwind: _ }) => {
                    // either kind of Drop completely invalidates the
                    // state of the referenced memory, effectively
                    // acting like a MoveOut. Such gen-set additions
                    // were added by the loop above over the loc_map.
                }
                Some(repr::Terminator::Call { func: _, args: _, cleanup: _,
                                              ref destination }) => {
                    // Note: a followup commit refines this to reflect
                    // that the destination will be initialized if the
                    // call succeeds (thus killing any MoveOuts for
                    // that destination).
                    //
                    // That is, this code just does the kills
                    // unconditionally (which I believe this matches
                    // the behavior of the old borrowck dataflow
                    // analysis), but this code is also removed
                    // and replaced with something flow-dependent in a
                    // followup commit.

                    if let Some((ref destination, _)) = *destination {
                        let move_path_index = rev_lookup.find(destination);
                        set_children_kill_bits(sets.kill_set,
                                               move_path_index,
                                               path_map,
                                               move_paths);
                    }
                }
            }
        }

        // Nested helper (items are visible to the whole enclosing
        // body, so the calls above resolve to it): recursively sets
        // the kill bits for every MoveOut of `move_path_index` and of
        // each of its named descendants.
        fn set_children_kill_bits(kill_set: &mut [usize],
                                  move_path_index: MovePathIndex,
                                  path_map: &PathMap,
                                  move_paths: &MovePathData) {
            assert!(move_path_index.idx().is_some());

            // 1. set kill bits for all moves that directly
            // influence path for `move_path_index`
            for move_index in &path_map[move_path_index] {
                kill_set.set_bit(move_index.idx().unwrap());
            }

            // 2. for each child of the path (that is named in this
            // function), recur.
            //
            // (Unnamed children are irrelevant to dataflow; by
            // definition they have no associated moves.)
            let mut child_index = move_paths[move_path_index].first_child;
            while let Some(_) = child_index.idx() {
                set_children_kill_bits(kill_set, child_index, path_map, move_paths);
                child_index = move_paths[child_index].next_sibling;
            }
        }
    }
}
+
impl<'c, 'b: 'c, 'a: 'b, 'tcx: 'a> PropagationContext<'c, 'b, 'a, 'tcx> {
    /// Reinitializes the scratch buffer to the analysis' start value
    /// (all ones or all zeroes, per `DataflowOperator::initial_value`).
    fn reset(&mut self, bits: &mut [usize]) {
        let e = if self.mbcx.flow_state.operator.initial_value() {usize::MAX} else {0};
        for b in bits {
            *b = e;
        }
    }

    /// One sweep over every block: computes `on_entry | gen - kill`
    /// into `in_out`, then joins that state into each successor's
    /// entry set, recording in `self.changed` whether anything moved.
    fn walk_cfg(&mut self, in_out: &mut [usize]) {
        // Destructure so `mir` and `flow_state` can be borrowed
        // independently in the loop below.
        let &mut MirBorrowckCtxt { ref mir, ref mut flow_state, .. } = self.mbcx;
        for (idx, bb) in mir.basic_blocks.iter().enumerate() {
            {
                let sets = flow_state.sets.for_block(idx);
                debug_assert!(in_out.len() == sets.on_entry.len());
                in_out.clone_from_slice(sets.on_entry);
                bitwise(in_out, sets.gen_set, &Union);
                bitwise(in_out, sets.kill_set, &Subtract);
            }
            flow_state.propagate_bits_into_graph_successors_of(in_out, &mut self.changed, bb);
        }
    }
}
+
impl<'b, 'a: 'b, 'tcx: 'a> MirBorrowckCtxt<'b, 'a, 'tcx> {
    /// Dumps a graphviz rendering before propagation when the
    /// function carries `#[rustc_mir(borrowck_graphviz_preflow = "path")]`.
    fn pre_dataflow_instrumentation(&self) -> io::Result<()> {
        self.if_attr_meta_name_found(
            "borrowck_graphviz_preflow",
            |this, path: &str| {
                graphviz::print_borrowck_graph_to(this, "preflow", path)
            })
    }

    /// Dumps a graphviz rendering after propagation when the function
    /// carries `#[rustc_mir(borrowck_graphviz_postflow = "path")]`.
    fn post_dataflow_instrumentation(&self) -> io::Result<()> {
        self.if_attr_meta_name_found(
            "borrowck_graphviz_postflow",
            |this, path: &str| {
                graphviz::print_borrowck_graph_to(this, "postflow", path)
            })
    }

    /// Scans the item's `#[rustc_mir(...)]` attributes for a sub-item
    /// named `name`; on the first match with a string value, invokes
    /// `callback` with that value and returns its result. A matching
    /// item *without* a string value gets a span error and the scan
    /// continues; with no match at all, returns `Ok(())`.
    fn if_attr_meta_name_found<F>(&self,
                                  name: &str,
                                  callback: F) -> io::Result<()>
        where F: for <'aa, 'bb> FnOnce(&'aa Self, &'bb str) -> io::Result<()>
    {
        for attr in self.attributes {
            if attr.check_name("rustc_mir") {
                let items = attr.meta_item_list();
                for item in items.iter().flat_map(|l| l.iter()) {
                    if item.check_name(name) {
                        if let Some(s) = item.value_str() {
                            return callback(self, &s);
                        } else {
                            self.bcx.tcx.sess.span_err(
                                item.span,
                                &format!("{} attribute requires a path", item.name()));
                        }
                    }
                }
            }
        }

        Ok(())
    }
}
+
/// Maps each block to a set of bits
#[derive(Clone, Debug)]
struct Bits {
    bits: Vec<usize>,
}

impl Bits {
    /// Allocates `num_words` words, each initialized to `init_word`.
    fn new(init_word: usize, num_words: usize) -> Self {
        let mut bits = Vec::with_capacity(num_words);
        bits.resize(num_words, init_word);
        Bits { bits: bits }
    }
}
+
/// Per-analysis dataflow state: the bit sets for every block plus the
/// operator that gives the bits their meaning.
pub struct DataflowState<O: BitDenotation>
{
    /// All the sets for the analysis. (Factored into its
    /// own structure so that we can borrow it mutably
    /// on its own separate from other fields.)
    pub sets: AllSets,

    /// operator used to initialize, combine, and interpret bits.
    operator: O,
}

/// Gen/kill/on-entry bit sets for every block, each stored as one
/// flat word vector with `words_per_block` words per block.
pub struct AllSets {
    /// Analysis bitwidth for each block.
    bits_per_block: usize,

    /// Number of words associated with each block entry
    /// equal to bits_per_block / usize::BITS, rounded up.
    words_per_block: usize,

    /// For each block, bits generated by executing the statements in
    /// the block. (For comparison, the Terminator for each block is
    /// handled in a flow-specific manner during propagation.)
    gen_sets: Bits,

    /// For each block, bits killed by executing the statements in the
    /// block. (For comparison, the Terminator for each block is
    /// handled in a flow-specific manner during propagation.)
    kill_sets: Bits,

    /// For each block, bits valid on entry to the block.
    on_entry_sets: Bits,
}

/// Mutable views of one block's three bit sets, borrowed together
/// (they are disjoint fields) from `AllSets::for_block`.
pub struct BlockSets<'a> {
    on_entry: &'a mut [usize],
    gen_set: &'a mut [usize],
    kill_set: &'a mut [usize],
}
+
+impl AllSets {
+ pub fn bits_per_block(&self) -> usize { self.bits_per_block }
+ pub fn bytes_per_block(&self) -> usize { (self.bits_per_block + 7) / 8 }
+ pub fn for_block(&mut self, block_idx: usize) -> BlockSets {
+ let offset = self.words_per_block * block_idx;
+ let range = offset..(offset + self.words_per_block);
+ BlockSets {
+ on_entry: &mut self.on_entry_sets.bits[range.clone()],
+ gen_set: &mut self.gen_sets.bits[range.clone()],
+ kill_set: &mut self.kill_sets.bits[range],
+ }
+ }
+
+ fn lookup_set_for<'a>(&self, sets: &'a Bits, block_idx: usize) -> &'a [usize] {
+ let offset = self.words_per_block * block_idx;
+ &sets.bits[offset..(offset + self.words_per_block)]
+ }
+ pub fn gen_set_for(&self, block_idx: usize) -> &[usize] {
+ self.lookup_set_for(&self.gen_sets, block_idx)
+ }
+ pub fn kill_set_for(&self, block_idx: usize) -> &[usize] {
+ self.lookup_set_for(&self.kill_sets, block_idx)
+ }
+ pub fn on_entry_set_for(&self, block_idx: usize) -> &[usize] {
+ self.lookup_set_for(&self.on_entry_sets, block_idx)
+ }
+}
+
+impl<O: BitDenotation> DataflowState<O> {
+ fn each_bit<F>(&self, words: &[usize], mut f: F)
+ where F: FnMut(usize) {
+ //! Helper for iterating over the bits in a bitvector.
+
+ for (word_index, &word) in words.iter().enumerate() {
+ if word != 0 {
+ let usize_bits: usize = mem::size_of::<usize>();
+ let base_index = word_index * usize_bits;
+ for offset in 0..usize_bits {
+ let bit = 1 << offset;
+ if (word & bit) != 0 {
+ // NB: we round up the total number of bits
+ // that we store in any given bit set so that
+ // it is an even multiple of usize::BITS. This
+ // means that there may be some stray bits at
+ // the end that do not correspond to any
+ // actual value; that's why we first check
+ // that we are in range of bits_per_block.
+ let bit_index = base_index + offset as usize;
+ if bit_index >= self.sets.bits_per_block() {
+ return;
+ } else {
+ f(bit_index);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ pub fn interpret_set(&self, words: &[usize]) -> Vec<&O::Bit> {
+ let mut v = Vec::new();
+ self.each_bit(words, |i| {
+ v.push(self.operator.interpret(i));
+ });
+ v
+ }
+}
+
/// Abstracts the word-level join applied when merging two
/// predecessor states during propagation.
pub trait BitwiseOperator {
    /// Joins two predecessor bits together, typically either `|` or `&`
    fn join(&self, pred1: usize, pred2: usize) -> usize;
}

/// Parameterization for the precise form of data flow that is used.
pub trait DataflowOperator : BitwiseOperator {
    /// Specifies the initial value for each bit in the `on_entry` set
    fn initial_value(&self) -> bool;
}

pub trait BitDenotation: DataflowOperator {
    /// Specifies what is represented by each bit in the dataflow bitvector.
    type Bit;
    /// Size of each bitvector allocated for each block in the analysis.
    fn bits_per_block(&self) -> usize;
    /// Provides the meaning of each entry in the dataflow bitvector.
    /// (Mostly intended for use for better debug instrumentation.)
    fn interpret(&self, idx: usize) -> &Self::Bit;
}
+
impl<D: BitDenotation> DataflowState<D> {
    /// Allocates zeroed gen/kill sets and operator-initialized
    /// on-entry sets for every basic block of `mir`.
    pub fn new(mir: &Mir, denotation: D) -> Self {
        let bits_per_block = denotation.bits_per_block();
        let usize_bits = mem::size_of::<usize>() * 8;
        // Round up: a partial trailing word still needs storage.
        let words_per_block = (bits_per_block + usize_bits - 1) / usize_bits;
        let num_blocks = mir.basic_blocks.len();
        let num_words = num_blocks * words_per_block;

        let entry = if denotation.initial_value() { usize::MAX } else {0};

        let zeroes = Bits::new(0, num_words);
        let on_entry = Bits::new(entry, num_words);

        DataflowState {
            sets: AllSets {
                bits_per_block: bits_per_block,
                words_per_block: words_per_block,
                gen_sets: zeroes.clone(),
                kill_sets: zeroes,
                on_entry_sets: on_entry,
            },
            operator: denotation,
        }
    }
}
+
impl<D: BitDenotation> DataflowState<D> {
    /// Pushes `in_out` (the state at the end of `bb`) into the entry
    /// set of every CFG successor named by `bb`'s terminator.
    fn propagate_bits_into_graph_successors_of(&mut self,
                                               in_out: &mut [usize],
                                               changed: &mut bool,
                                               bb: &repr::BasicBlockData) {
        // A block without a terminator has no successors to feed.
        let term = if let Some(ref term) = bb.terminator { term } else { return };
        match *term {
            repr::Terminator::Return |
            repr::Terminator::Resume => {}
            repr::Terminator::Goto { ref target } |
            repr::Terminator::Drop { ref target, value: _, unwind: None } => {
                self.propagate_bits_into_entry_set_for(in_out, changed, target);
            }
            repr::Terminator::Drop { ref target, value: _, unwind: Some(ref unwind) } => {
                self.propagate_bits_into_entry_set_for(in_out, changed, target);
                self.propagate_bits_into_entry_set_for(in_out, changed, unwind);
            }
            repr::Terminator::If { ref targets, .. } => {
                self.propagate_bits_into_entry_set_for(in_out, changed, &targets.0);
                self.propagate_bits_into_entry_set_for(in_out, changed, &targets.1);
            }
            repr::Terminator::Switch { ref targets, .. } |
            repr::Terminator::SwitchInt { ref targets, .. } => {
                for target in targets {
                    self.propagate_bits_into_entry_set_for(in_out, changed, target);
                }
            }
            repr::Terminator::Call { ref cleanup, ref destination, func: _, args: _ } => {
                if let Some(ref unwind) = *cleanup {
                    self.propagate_bits_into_entry_set_for(in_out, changed, unwind);
                }
                if let Some((_, ref destination)) = *destination {
                    self.propagate_bits_into_entry_set_for(in_out, changed, destination);
                }
            }
        }
    }

    /// Joins `in_out` into `bb`'s on-entry set using the analysis
    /// operator; sets `*changed` if any word actually changed.
    fn propagate_bits_into_entry_set_for(&mut self,
                                         in_out: &mut [usize],
                                         changed: &mut bool,
                                         bb: &repr::BasicBlock) {
        let entry_set = self.sets.for_block(bb.index()).on_entry;
        let set_changed = bitwise(entry_set, in_out, &self.operator);
        if set_changed {
            *changed = true;
        }
    }
}
+
+
impl<'tcx> DataflowState<MoveData<'tcx>> {
    /// Convenience constructor: gathers the move data for `mir` and
    /// builds the corresponding dataflow state over it.
    pub fn new_move_analysis(mir: &Mir<'tcx>, tcx: &ty::TyCtxt<'tcx>) -> Self {
        let move_data = MoveData::gather_moves(mir, tcx);
        DataflowState::new(mir, move_data)
    }
}

// The move analysis is a forward "may" analysis: union at joins,
// all-clear at entry.
impl<'tcx> BitwiseOperator for MoveData<'tcx> {
    #[inline]
    fn join(&self, pred1: usize, pred2: usize) -> usize {
        pred1 | pred2 // moves from both preds are in scope
    }
}

impl<'tcx> DataflowOperator for MoveData<'tcx> {
    #[inline]
    fn initial_value(&self) -> bool {
        false // no loans in scope by default
    }
}
+
+#[inline]
+fn bitwise<Op:BitwiseOperator>(out_vec: &mut [usize],
+ in_vec: &[usize],
+ op: &Op) -> bool {
+ assert_eq!(out_vec.len(), in_vec.len());
+ let mut changed = false;
+ for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec) {
+ let old_val = *out_elt;
+ let new_val = op.join(old_val, *in_elt);
+ *out_elt = new_val;
+ changed |= old_val != new_val;
+ }
+ changed
+}
+
+struct Union;
+impl BitwiseOperator for Union {
+ fn join(&self, a: usize, b: usize) -> usize { a | b }
+}
+struct Subtract;
+impl BitwiseOperator for Subtract {
+ fn join(&self, a: usize, b: usize) -> usize { a & !b }
+}
--- /dev/null
+// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+use rustc::middle::ty;
+use rustc::middle::def_id::DefId;
+use rustc::mir::repr::{self, Mir, BasicBlock, Lvalue, Rvalue};
+use rustc::mir::repr::{StatementKind, Terminator};
+use rustc::util::nodemap::FnvHashMap;
+
+use std::cell::{Cell, RefCell};
+use std::collections::hash_map::Entry;
+use std::fmt;
+use std::iter;
+use std::ops::Index;
+use std::usize;
+
+use super::dataflow::BitDenotation;
+use super::abs_domain::{AbstractElem, Lift};
+
/// Index into MovePathData.move_paths
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct MovePathIndex(usize);

/// Sentinel: `usize::MAX` stands for "no path" (absent parent,
/// child, or sibling link).
const INVALID_MOVE_PATH_INDEX: MovePathIndex = MovePathIndex(usize::MAX);

impl MovePathIndex {
    /// The underlying index, or `None` for the invalid sentinel.
    pub fn idx(&self) -> Option<usize> {
        if self.0 == usize::MAX {
            None
        } else {
            Some(self.0)
        }
    }
}
+
/// `MovePath` is a canonicalized representation of a path that is
/// moved or assigned to.
///
/// It follows a tree structure.
///
/// Given `struct X { m: M, n: N }` and `x: X`, moves like `drop x.m;`
/// move *out* of the l-value `x.m`.
///
/// The MovePaths representing `x.m` and `x.n` are siblings (that is,
/// one of them will link to the other via the `next_sibling` field,
/// and the other will have no entry in its `next_sibling` field), and
/// they both have the MovePath representing `x` as their parent.
#[derive(Clone)]
pub struct MovePath<'tcx> {
    // The three links below use INVALID_MOVE_PATH_INDEX for "absent"
    // (see the Debug impl, which suppresses sentinel-valued links).
    pub next_sibling: MovePathIndex,
    pub first_child: MovePathIndex,
    pub parent: MovePathIndex,
    pub lvalue: Lvalue<'tcx>,
}

/// During construction of the MovePath's, we use PreMovePath to
/// represent accumulated state while we are gathering up all the
/// children of each path.
#[derive(Clone)]
struct PreMovePath<'tcx> {
    pub next_sibling: MovePathIndex,
    // `Cell` so a later-discovered child can be linked in while the
    // PreMovePath is only held by shared reference.
    pub first_child: Cell<MovePathIndex>,
    pub parent: MovePathIndex,
    pub lvalue: Lvalue<'tcx>,
}

impl<'tcx> PreMovePath<'tcx> {
    /// Freezes the accumulated state into an immutable MovePath.
    fn into_move_path(self) -> MovePath<'tcx> {
        MovePath {
            next_sibling: self.next_sibling,
            parent: self.parent,
            lvalue: self.lvalue,
            first_child: self.first_child.get(),
        }
    }
}
+
impl<'tcx> fmt::Debug for MovePath<'tcx> {
    fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
        try!(write!(w, "MovePath {{"));
        // Sentinel-valued links are omitted to keep the output compact.
        if self.parent != INVALID_MOVE_PATH_INDEX {
            try!(write!(w, " parent: {:?},", self.parent));
        }
        if self.first_child != INVALID_MOVE_PATH_INDEX {
            try!(write!(w, " first_child: {:?},", self.first_child));
        }
        if self.next_sibling != INVALID_MOVE_PATH_INDEX {
            try!(write!(w, " next_sibling: {:?}", self.next_sibling));
        }
        write!(w, " lvalue: {:?} }}", self.lvalue)
    }
}
+
/// Index into MoveData.moves.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct MoveOutIndex(usize);

impl MoveOutIndex {
    /// The underlying index, or `None` for the invalid sentinel.
    pub fn idx(&self) -> Option<usize> {
        if self.0 == usize::MAX {
            None
        } else {
            Some(self.0)
        }
    }
}

/// Sentinel: `usize::MAX` stands for "no move out".
const INVALID_MOVE_OUT_INDEX: MoveOutIndex = MoveOutIndex(usize::MAX);
+
/// Aggregated results of the move-gathering pass over one MIR body.
pub struct MoveData<'tcx> {
    /// The tree of canonicalized move paths.
    pub move_paths: MovePathData<'tcx>,
    /// Every move-out, in gathering order (indexed by MoveOutIndex).
    pub moves: Vec<MoveOut>,
    /// Location -> moves performed there.
    pub loc_map: LocMap,
    /// Move path -> moves out of that path.
    pub path_map: PathMap,
    /// Reverse lookup: l-value -> MovePathIndex.
    pub rev_lookup: MovePathLookup<'tcx>,
}

pub struct LocMap {
    /// Location-indexed (BasicBlock for outer index, index within BB
    /// for inner index) map to list of MoveOutIndex's.
    ///
    /// Each Location `l` is mapped to the MoveOut's that are effects
    /// of executing the code at `l`. (There can be multiple MoveOut's
    /// for a given `l` because each MoveOut is associated with one
    /// particular path being moved.)
    map: Vec<Vec<Vec<MoveOutIndex>>>,
}

impl Index<Location> for LocMap {
    type Output = [MoveOutIndex];
    fn index(&self, index: Location) -> &Self::Output {
        // Out-of-range locations are a caller bug; fail loudly rather
        // than yielding an empty slice.
        assert!(index.block.index() < self.map.len());
        assert!(index.index < self.map[index.block.index()].len());
        &self.map[index.block.index()][index.index]
    }
}

pub struct PathMap {
    /// Path-indexed map to list of MoveOutIndex's.
    ///
    /// Each Path `p` is mapped to the MoveOut's that move out of `p`.
    map: Vec<Vec<MoveOutIndex>>,
}

impl Index<MovePathIndex> for PathMap {
    type Output = [MoveOutIndex];
    fn index(&self, index: MovePathIndex) -> &Self::Output {
        // The invalid sentinel has no entry in the map.
        assert!(index != INVALID_MOVE_PATH_INDEX);
        &self.map[index.0]
    }
}
+
/// `MoveOut` represents a point in a program that moves out of some
/// L-value; i.e., "creates" uninitialized memory.
///
/// With respect to dataflow analysis:
/// - Generated by moves and declaration of uninitialized variables.
/// - Killed by assignments to the memory.
#[derive(Copy, Clone)]
pub struct MoveOut {
    /// path being moved
    pub path: MovePathIndex,
    /// location of move
    pub source: Location,
}

impl fmt::Debug for MoveOut {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Compact form: "p<path>@<block>[<index>]".
        write!(fmt, "p{}@{:?}", self.path.0, self.source)
    }
}

#[derive(Copy, Clone)]
pub struct Location {
    /// block where action is located
    pub block: BasicBlock,
    /// index within above block; a statement when `index <
    /// statements.len()`, or the terminator when `index ==
    /// statements.len()`.
    pub index: usize,
}

impl fmt::Debug for Location {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{:?}[{}]", self.block, self.index)
    }
}
+
/// Flat storage for the move-path tree, indexed by `MovePathIndex`.
pub struct MovePathData<'tcx> {
    move_paths: Vec<MovePath<'tcx>>,
}

impl<'tcx> Index<MovePathIndex> for MovePathData<'tcx> {
    type Output = MovePath<'tcx>;
    fn index(&self, i: MovePathIndex) -> &MovePath<'tcx> {
        // Panics (via the unwrap) if `i` is the invalid sentinel.
        &self.move_paths[i.idx().unwrap()]
    }
}

/// MovePathRevIndex maps from a usize in an lvalue-category to the
/// MovePathIndex for the MovePath for that lvalue.
type MovePathRevIndex = Vec<MovePathIndex>;

/// Accumulates move-path state while walking a MIR body. The
/// RefCells allow mutation through a shared `&self` (see the comment
/// on `move_path_for` below).
struct MovePathDataBuilder<'a, 'tcx: 'a> {
    mir: &'a Mir<'tcx>,
    pre_move_paths: RefCell<Vec<PreMovePath<'tcx>>>,
    rev_lookup: RefCell<MovePathLookup<'tcx>>,
}
+
/// Tables mapping from an l-value to its MovePathIndex.
pub struct MovePathLookup<'tcx> {
    // One dense table per l-value category, indexed by that
    // category's own integer index space; statics and the return
    // pointer get their own dedicated storage.
    vars: MovePathRevIndex,
    temps: MovePathRevIndex,
    args: MovePathRevIndex,
    statics: FnvHashMap<DefId, MovePathIndex>,
    return_ptr: Option<MovePathIndex>,

    /// This is the only non-trivial lookup to explain: projections
    /// are made from a base-lvalue and a projection elem. The
    /// base-lvalue will have a unique MovePathIndex; we use the
    /// latter as the index into the outer vector (narrowing
    /// subsequent search so that it is solely relative to that
    /// base-lvalue). For the remaining lookup, we map the projection
    /// elem to the associated MovePathIndex.
    projections: Vec<FnvHashMap<AbstractElem<'tcx>, MovePathIndex>>,

    /// Tracks the next index to allocate during construction of the
    /// MovePathData. Unused after MovePathData is fully constructed.
    next_index: MovePathIndex,
}
+
/// Grows a vector on demand so that a given index becomes valid,
/// padding any newly-created slots with a supplied element.
trait FillTo {
    type T;
    /// Ensures `self.len() > idx`, padding with clones of `x`.
    fn fill_to_with(&mut self, idx: usize, x: Self::T);
    /// As `fill_to_with`, padding with the type's default value.
    fn fill_to(&mut self, idx: usize) where Self::T: Default {
        self.fill_to_with(idx, Default::default())
    }
}
impl<T:Clone> FillTo for Vec<T> {
    type T = T;
    fn fill_to_with(&mut self, idx: usize, x: T) {
        if idx >= self.len() {
            // Only ever grows here (idx + 1 > len), appending clones
            // of `x` until `idx` is in bounds.
            self.resize(idx + 1, x);
        }
        debug_assert!(idx < self.len());
    }
}
+
/// Whether a lookup allocated a fresh index (`Generate`) or found an
/// existing one (`Reuse`).
#[derive(Clone, Debug)]
enum LookupKind { Generate, Reuse }
/// A lookup result: the kind paired with the resolved index.
struct Lookup<T>(LookupKind, T);

impl Lookup<MovePathIndex> {
    // Raw index; callers only invoke this on valid (non-sentinel) results.
    fn idx(&self) -> usize { (self.1).0 }
}
+
impl<'tcx> MovePathLookup<'tcx> {
    /// Creates an empty table set; indices are handed out from 0.
    fn new() -> Self {
        MovePathLookup {
            vars: vec![],
            temps: vec![],
            args: vec![],
            statics: Default::default(),
            return_ptr: None,
            projections: vec![],
            next_index: MovePathIndex(0),
        }
    }

    /// Returns the current value of `next` and post-increments it.
    fn next_index(next: &mut MovePathIndex) -> MovePathIndex {
        let i = *next;
        *next = MovePathIndex(i.0 + 1);
        i
    }

    /// Looks up `idx` in a dense per-category table, growing the
    /// table (padded with the invalid sentinel) as needed and
    /// allocating a fresh MovePathIndex on first sight.
    fn lookup_or_generate(vec: &mut Vec<MovePathIndex>,
                          idx: u32,
                          next_index: &mut MovePathIndex) -> Lookup<MovePathIndex> {
        let idx = idx as usize;
        vec.fill_to_with(idx, INVALID_MOVE_PATH_INDEX);
        let entry = &mut vec[idx];
        if *entry == INVALID_MOVE_PATH_INDEX {
            let i = Self::next_index(next_index);
            *entry = i;
            Lookup(LookupKind::Generate, i)
        } else {
            Lookup(LookupKind::Reuse, *entry)
        }
    }

    fn lookup_var(&mut self, var_idx: u32) -> Lookup<MovePathIndex> {
        Self::lookup_or_generate(&mut self.vars,
                                 var_idx,
                                 &mut self.next_index)
    }

    fn lookup_temp(&mut self, temp_idx: u32) -> Lookup<MovePathIndex> {
        Self::lookup_or_generate(&mut self.temps,
                                 temp_idx,
                                 &mut self.next_index)
    }

    fn lookup_arg(&mut self, arg_idx: u32) -> Lookup<MovePathIndex> {
        Self::lookup_or_generate(&mut self.args,
                                 arg_idx,
                                 &mut self.next_index)
    }

    /// Statics are keyed by DefId rather than a dense index, so they
    /// use the hash-map entry API instead of `lookup_or_generate`.
    fn lookup_static(&mut self, static_id: DefId) -> Lookup<MovePathIndex> {
        let &mut MovePathLookup { ref mut statics,
                                  ref mut next_index, .. } = self;
        match statics.entry(static_id.clone()) {
            Entry::Occupied(ent) => {
                Lookup(LookupKind::Reuse, *ent.get())
            }
            Entry::Vacant(ent) => {
                let mpi = Self::next_index(next_index);
                ent.insert(mpi);
                Lookup(LookupKind::Generate, mpi)
            }
        }
    }

    /// The return pointer is a singleton; its index is allocated at
    /// most once and cached in `return_ptr`.
    fn lookup_return_pointer(&mut self) -> Lookup<MovePathIndex> {
        match self.return_ptr {
            Some(mpi) => {
                Lookup(LookupKind::Reuse, mpi)
            }
            ref mut ret @ None => {
                let mpi = Self::next_index(&mut self.next_index);
                *ret = Some(mpi);
                Lookup(LookupKind::Generate, mpi)
            }
        }
    }

    /// Projections are resolved per base path: index the outer vector
    /// by the (already-interned) base, then match the lifted
    /// projection elem within that base's table.
    fn lookup_proj(&mut self,
                   proj: &repr::LvalueProjection<'tcx>,
                   base: MovePathIndex) -> Lookup<MovePathIndex> {
        let MovePathLookup { ref mut projections,
                             ref mut next_index, .. } = *self;
        projections.fill_to(base.0);
        match projections[base.0].entry(proj.elem.lift()) {
            Entry::Occupied(ent) => {
                Lookup(LookupKind::Reuse, *ent.get())
            }
            Entry::Vacant(ent) => {
                let mpi = Self::next_index(next_index);
                ent.insert(mpi);
                Lookup(LookupKind::Generate, mpi)
            }
        }
    }
}
+
+impl<'tcx> MovePathLookup<'tcx> {
+    // Unlike the builder `fn move_path_for` below, this lookup
+    // alternative will *not* create a MovePath on the fly for an
+    // unknown l-value; it will simply panic.
+    //
+    // (Projections recurse on their base first, then index into the
+    // per-base projection map.)
+    pub fn find(&self, lval: &Lvalue<'tcx>) -> MovePathIndex {
+        match *lval {
+            Lvalue::Var(var_idx) => self.vars[var_idx as usize],
+            Lvalue::Temp(temp_idx) => self.temps[temp_idx as usize],
+            Lvalue::Arg(arg_idx) => self.args[arg_idx as usize],
+            Lvalue::Static(ref def_id) => self.statics[def_id],
+            Lvalue::ReturnPointer => self.return_ptr.unwrap(),
+            Lvalue::Projection(ref proj) => {
+                let base_index = self.find(&proj.base);
+                self.projections[base_index.0 as usize][&proj.elem.lift()]
+            }
+        }
+    }
+}
+
+impl<'a, 'tcx> MovePathDataBuilder<'a, 'tcx> {
+    // (use of `&self` here is going to necessitate use of e.g. RefCell
+    // or some other &-safe data accumulator)
+    //
+    // Caller must ensure self's RefCells (i.e. `self.pre_move_paths`
+    // and `self.rev_lookup`) are not mutably borrowed.
+    //
+    // Returns the MovePathIndex for `lval`, creating a new PreMovePath
+    // (and, recursively, entries for all of its base l-values) if one
+    // does not exist yet. Note the careful drop/reborrow choreography:
+    // the recursive calls below need exclusive access to the same
+    // RefCells this frame holds.
+    fn move_path_for(&self, lval: &Lvalue<'tcx>) -> MovePathIndex {
+        let lookup = {
+            let mut rev_lookup = self.rev_lookup.borrow_mut();
+            match *lval {
+                Lvalue::Var(var_idx) => rev_lookup.lookup_var(var_idx),
+                Lvalue::Temp(temp_idx) => rev_lookup.lookup_temp(temp_idx),
+                Lvalue::Arg(arg_idx) => rev_lookup.lookup_arg(arg_idx),
+                Lvalue::Static(def_id) => rev_lookup.lookup_static(def_id),
+                Lvalue::ReturnPointer => rev_lookup.lookup_return_pointer(),
+                Lvalue::Projection(ref proj) => {
+                    // Manually drop the rev_lookup ...
+                    drop(rev_lookup);
+
+                    // ... so that we can reborrow it here (which may
+                    // well be building new move path) ...
+                    let base_index = self.move_path_for(&proj.base);
+
+                    // ... and re-establish exclusive access here.
+                    let mut rev_lookup = self.rev_lookup.borrow_mut();
+                    rev_lookup.lookup_proj(proj, base_index)
+                }
+            }
+        };
+
+        let mut pre_move_paths = self.pre_move_paths.borrow_mut();
+
+        // At this point, `lookup` is either the previously assigned
+        // index or a newly-allocated one.
+        debug_assert!(lookup.idx() <= pre_move_paths.len());
+
+        // Only a freshly-generated index needs a PreMovePath record built.
+        if let Lookup(LookupKind::Generate, mpi) = lookup {
+            let parent;
+            let sibling;
+
+            match *lval {
+                Lvalue::Var(_) | Lvalue::Temp(_) | Lvalue::Arg(_) |
+                Lvalue::Static(_) | Lvalue::ReturnPointer => {
+                    // Roots of the forest: no parent and no sibling.
+                    sibling = INVALID_MOVE_PATH_INDEX;
+                    parent = INVALID_MOVE_PATH_INDEX;
+                }
+                Lvalue::Projection(ref proj) => {
+                    // Here, install new MovePath as new first_child.
+
+                    // Release our borrow before recursing (same dance
+                    // as with rev_lookup above).
+                    drop(pre_move_paths);
+
+                    // Note: `parent` previously allocated (Projection
+                    // case of match above established this).
+                    parent = self.move_path_for(&proj.base);
+
+                    pre_move_paths = self.pre_move_paths.borrow_mut();
+                    let parent_move_path = &mut pre_move_paths[parent.0];
+
+                    // At last: Swap in the new first_child.
+                    sibling = parent_move_path.first_child.get();
+                    parent_move_path.first_child.set(mpi);
+                }
+            };
+
+            let move_path = PreMovePath {
+                next_sibling: sibling,
+                parent: parent,
+                lvalue: lval.clone(),
+                first_child: Cell::new(INVALID_MOVE_PATH_INDEX),
+            };
+
+            pre_move_paths.push(move_path);
+        }
+
+        return lookup.1;
+    }
+}
+
+impl<'tcx> MoveData<'tcx> {
+    /// Public entry point; delegates to the free function below.
+    pub fn gather_moves(mir: &Mir<'tcx>, tcx: &ty::TyCtxt<'tcx>) -> Self {
+        gather_moves(mir, tcx)
+    }
+}
+
+// Tag describing which syntactic position produced a MoveOut; used only
+// in debug! logging below.
+#[derive(Debug)]
+enum StmtKind {
+    Use, Repeat, Cast, BinaryOp, UnaryOp, Box,
+    Aggregate, Drop, CallFn, CallArg, Return,
+}
+
+// Walks every statement and terminator in `mir`, building the move-path
+// forest and recording a MoveOut for each consuming use of an l-value.
+fn gather_moves<'tcx>(mir: &Mir<'tcx>, tcx: &ty::TyCtxt<'tcx>) -> MoveData<'tcx> {
+    use self::StmtKind as SK;
+
+    let bbs = mir.all_basic_blocks();
+    let mut moves = Vec::with_capacity(bbs.len());
+    // loc_map: per-block, per-location list of MoveOutIndexes.
+    let mut loc_map: Vec<_> = iter::repeat(Vec::new()).take(bbs.len()).collect();
+    // path_map: per-MovePath list of MoveOutIndexes.
+    let mut path_map = Vec::new();
+
+    let builder = MovePathDataBuilder {
+        mir: mir,
+        pre_move_paths: RefCell::new(Vec::new()),
+        rev_lookup: RefCell::new(MovePathLookup::new()),
+    };
+
+    for bb in bbs {
+        let loc_map_bb = &mut loc_map[bb.index()];
+        let bb_data = mir.basic_block_data(bb);
+
+        debug_assert!(loc_map_bb.len() == 0);
+        let len = bb_data.statements.len();
+        // One slot per statement plus one for the terminator.
+        loc_map_bb.fill_to(len);
+        debug_assert!(loc_map_bb.len() == len + 1);
+
+        let mut bb_ctxt = BlockContext {
+            tcx: tcx,
+            moves: &mut moves,
+            builder: &builder,
+            path_map: &mut path_map,
+            loc_map_bb: loc_map_bb,
+        };
+
+        for (i, stmt) in bb_data.statements.iter().enumerate() {
+            let source = Location { block: bb, index: i };
+            match stmt.kind {
+                StatementKind::Assign(ref lval, ref rval) => {
+                    // ensure MovePath created for `lval`.
+                    builder.move_path_for(lval);
+
+                    // Record consumption of any operands on the rhs.
+                    match *rval {
+                        Rvalue::Use(ref operand) => {
+                            bb_ctxt.on_operand(SK::Use, operand, source)
+                        }
+                        Rvalue::Repeat(ref operand, ref _const) =>
+                            bb_ctxt.on_operand(SK::Repeat, operand, source),
+                        Rvalue::Cast(ref _kind, ref operand, ref _ty) =>
+                            bb_ctxt.on_operand(SK::Cast, operand, source),
+                        Rvalue::BinaryOp(ref _binop, ref operand1, ref operand2) => {
+                            bb_ctxt.on_operand(SK::BinaryOp, operand1, source);
+                            bb_ctxt.on_operand(SK::BinaryOp, operand2, source);
+                        }
+                        Rvalue::UnaryOp(ref _unop, ref operand) => {
+                            bb_ctxt.on_operand(SK::UnaryOp, operand, source);
+                        }
+                        Rvalue::Box(ref _ty) => {
+                            // this is creating uninitialized
+                            // memory that needs to be initialized.
+                            bb_ctxt.on_move_out_lval(SK::Box, lval, source);
+                        }
+                        Rvalue::Aggregate(ref _kind, ref operands) => {
+                            for operand in operands {
+                                bb_ctxt.on_operand(SK::Aggregate, operand, source);
+                            }
+                        }
+                        // These rvalues only borrow or inspect; no move.
+                        Rvalue::Ref(..) |
+                        Rvalue::Len(..) |
+                        Rvalue::InlineAsm { .. } => {}
+
+                        Rvalue::Slice {..} => {
+                            bb_ctxt.tcx.sess.bug("cannot move out of slice");
+                        }
+                    }
+                }
+            }
+        }
+
+        // The terminator occupies location index == statements.len().
+        if let Some(ref term) = bb_data.terminator {
+            match *term {
+                Terminator::Goto { target: _ } | Terminator::Resume => { }
+
+                Terminator::Return => {
+                    let source = Location { block: bb,
+                                            index: bb_data.statements.len() };
+                    let lval = &Lvalue::ReturnPointer.deref();
+                    bb_ctxt.on_move_out_lval(SK::Return, lval, source);
+                }
+
+                Terminator::If { ref cond, targets: _ } => {
+                    // The `cond` is always of (copyable) type `bool`,
+                    // so there will never be anything to move.
+                    let _ = cond;
+                }
+
+                Terminator::SwitchInt { switch_ty: _, values: _, targets: _, ref discr } |
+                Terminator::Switch { adt_def: _, targets: _, ref discr } => {
+                    // The `discr` is not consumed; that is instead
+                    // encoded on specific match arms (and for
+                    // SwitchInt`, it is always a copyable integer
+                    // type anyway).
+                    let _ = discr;
+                }
+
+                Terminator::Drop { value: ref lval, target: _, unwind: _ } => {
+                    let source = Location { block: bb,
+                                            index: bb_data.statements.len() };
+                    bb_ctxt.on_move_out_lval(SK::Drop, lval, source);
+                }
+
+                Terminator::Call { ref func, ref args, ref destination, cleanup: _ } => {
+                    let source = Location { block: bb,
+                                            index: bb_data.statements.len() };
+                    bb_ctxt.on_operand(SK::CallFn, func, source);
+                    for arg in args {
+                        bb_ctxt.on_operand(SK::CallArg, arg, source);
+                    }
+                    if let Some((ref destination, _bb)) = *destination {
+                        // Create MovePath for `destination`, then
+                        // discard returned index.
+                        builder.move_path_for(destination);
+                    }
+                }
+            }
+        }
+    }
+
+    // At this point, we may have created some MovePaths that do not
+    // have corresponding entries in the path map.
+    //
+    // (For example, creating the path `a.b.c` may, as a side-effect,
+    // create a path for the parent path `a.b`.)
+    //
+    // Experimentally assert the path map is already complete; if the
+    // assert holds, the fill_to below is redundant (kept for safety).
+    assert_eq!(path_map.len(), builder.pre_move_paths.borrow().len());
+    path_map.fill_to(builder.pre_move_paths.borrow().len() - 1);
+
+    let pre_move_paths = builder.pre_move_paths.into_inner();
+    let move_paths: Vec<_> = pre_move_paths.into_iter()
+                                           .map(|p| p.into_move_path())
+                                           .collect();
+
+    // Debug dump: every move, then any paths with no recorded move.
+    debug!("{}", {
+        let mut seen: Vec<_> = move_paths.iter().map(|_| false).collect();
+        for (j, &MoveOut { ref path, ref source }) in moves.iter().enumerate() {
+            debug!("MovePathData moves[{}]: MoveOut {{ path: {:?} = {:?}, source: {:?} }}",
+                   j, path, move_paths[path.0], source);
+            seen[path.0] = true;
+        }
+        for (j, path) in move_paths.iter().enumerate() {
+            if !seen[j] {
+                debug!("MovePathData move_paths[{}]: {:?}", j, path);
+            }
+        }
+        "done dumping MovePathData"
+    });
+
+    MoveData {
+        move_paths: MovePathData { move_paths: move_paths, },
+        moves: moves,
+        loc_map: LocMap { map: loc_map },
+        path_map: PathMap { map: path_map },
+        rev_lookup: builder.rev_lookup.into_inner(),
+    }
+}
+
+// Per-basic-block bundle of the mutable state gather_moves threads
+// through its statement/terminator walk.
+struct BlockContext<'b, 'a: 'b, 'tcx: 'a> {
+    tcx: &'b ty::TyCtxt<'tcx>,
+    moves: &'b mut Vec<MoveOut>,
+    builder: &'b MovePathDataBuilder<'a, 'tcx>,
+    path_map: &'b mut Vec<Vec<MoveOutIndex>>,
+    loc_map_bb: &'b mut Vec<Vec<MoveOutIndex>>,
+}
+
+impl<'b, 'a: 'b, 'tcx: 'a> BlockContext<'b, 'a, 'tcx> {
+    // Records a MoveOut for `lval` at `source`, unless its type does
+    // not require move semantics (approximated via needs_drop).
+    fn on_move_out_lval(&mut self,
+                        stmt_kind: StmtKind,
+                        lval: &repr::Lvalue<'tcx>,
+                        source: Location) {
+        let builder = self.builder;
+        let tcx = self.tcx;
+        let lval_ty = builder.mir.lvalue_ty(tcx, lval);
+
+        // FIXME: does lvalue_ty ever return TyError, or is it
+        // guaranteed to always return non-Infer/non-Error values?
+
+        // This code is just trying to avoid creating a MoveOut
+        // entry for values that do not need move semantics.
+        //
+        // type_contents is imprecise (may claim needs drop for
+        // types that in fact have no destructor). But that is
+        // still usable for our purposes here.
+        let consumed = lval_ty.to_ty(tcx).type_contents(tcx).needs_drop(tcx);
+
+        if !consumed {
+            debug!("ctxt: {:?} no consume of lval: {:?} of type {:?}",
+                   stmt_kind, lval, lval_ty);
+            return;
+        }
+        let i = source.index;
+        let index = MoveOutIndex(self.moves.len());
+
+        let path = builder.move_path_for(lval);
+        self.moves.push(MoveOut { path: path, source: source.clone() });
+        // Grow path_map so the indexing below is in bounds.
+        self.path_map.fill_to(path.0);
+
+        debug!("ctxt: {:?} add consume of lval: {:?} \
+                at index: {:?} \
+                to path_map for path: {:?} and \
+                to loc_map for loc: {:?}",
+               stmt_kind, lval, index, path, source);
+
+        debug_assert!(path.0 < self.path_map.len());
+        // this is actually a questionable assert; at the very
+        // least, incorrect input code can probably cause it to
+        // fire.
+        assert!(self.path_map[path.0].iter().find(|idx| **idx == index).is_none());
+        self.path_map[path.0].push(index);
+
+        debug_assert!(i < self.loc_map_bb.len());
+        debug_assert!(self.loc_map_bb[i].iter().find(|idx| **idx == index).is_none());
+        self.loc_map_bb[i].push(index);
+    }
+
+    // Constants are never moves; `Consume` operands delegate to
+    // on_move_out_lval above.
+    fn on_operand(&mut self, stmt_kind: StmtKind, operand: &repr::Operand<'tcx>, source: Location) {
+        match *operand {
+            repr::Operand::Constant(..) => {} // not-a-move
+            repr::Operand::Consume(ref lval) => { // a move
+                self.on_move_out_lval(stmt_kind, lval, source);
+            }
+        }
+    }
+}
+
+// Dataflow interface: one bit per MoveOut; `interpret` maps a bit index
+// back to the MoveOut it denotes.
+impl<'tcx> BitDenotation for MoveData<'tcx>{
+    type Bit = MoveOut;
+    fn bits_per_block(&self) -> usize {
+        self.moves.len()
+    }
+    fn interpret(&self, idx: usize) -> &Self::Bit {
+        &self.moves[idx]
+    }
+}
--- /dev/null
+// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Hook into libgraphviz for rendering dataflow graphs for MIR.
+
+use rustc::mir::repr::{BasicBlock, Mir};
+
+use dot;
+use dot::IntoCow;
+
+use std::fs::File;
+use std::io;
+use std::io::prelude::*;
+
+use super::MirBorrowckCtxt;
+use bitslice::bits_to_string;
+use super::gather_moves::MoveOut;
+
+// Adapter tying a MirBorrowckCtxt to libgraphviz's Labeller/GraphWalk
+// traits; `context` is a string distinguishing multiple renderings.
+struct Graph<'c, 'b:'c, 'a:'b, 'tcx:'a> { mbcx: &'c MirBorrowckCtxt<'b, 'a, 'tcx>,
+                                          context: &'b str }
+
+/// Renders the dataflow-annotated CFG for `mbcx` as graphviz DOT and
+/// writes it to the file at `path`.
+pub fn print_borrowck_graph_to(mbcx: &MirBorrowckCtxt,
+                               context: &str,
+                               path: &str) -> io::Result<()> {
+    let g = Graph { mbcx: mbcx, context: context };
+    // Render into memory first so a render error leaves no partial file.
+    let mut v = Vec::new();
+    try!(dot::render(&g, &mut v));
+    println!("print_borrowck_graph_to path: {} context: {} node_id: {}",
+             path, context, mbcx.node_id);
+    File::create(path).and_then(|mut f| f.write_all(&v))
+}
+
+pub type Node = BasicBlock;
+
+// A CFG edge is identified by its source block plus the position of the
+// target within that block's successor list.
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub struct Edge { source: BasicBlock, index: usize }
+
+// All outgoing edges of `bb`, one per successor of its terminator.
+fn outgoing(mir: &Mir, bb: BasicBlock) -> Vec<Edge> {
+    let succ_len = mir.basic_block_data(bb).terminator().successors().len();
+    (0..succ_len).map(|index| Edge { source: bb, index: index}).collect()
+}
+
+impl<'c, 'b:'c, 'a:'b, 'tcx:'a> dot::Labeller<'c> for Graph<'c,'b,'a,'tcx> {
+    type Node = Node;
+    type Edge = Edge;
+    // Graph id includes node_id and context so multiple graphs from the
+    // same compilation don't collide.
+    fn graph_id(&self) -> dot::Id {
+        dot::Id::new(format!("graph_for_node_{}_{}",
+                             self.mbcx.node_id,
+                             self.context))
+            .unwrap()
+    }
+
+    fn node_id(&self, n: &Node) -> dot::Id {
+        dot::Id::new(format!("bb_{}", n.index()))
+            .unwrap()
+    }
+
+    fn node_label(&self, n: &Node) -> dot::LabelText {
+        // A standard MIR label, as generated by write_node_label, is
+        // presented in a single column in a table.
+        //
+        // The code below does a bunch of formatting work to format a
+        // node (i.e. MIR basic-block) label with extra
+        // dataflow-enriched information.  In particular, the goal is
+        // to add extra columns that present the three dataflow
+        // bitvectors, and the data those bitvectors represent.
+        //
+        // It presents it in the following format (where I am
+        // presenting the table rendering via ASCII art, one line per
+        // row of the table, and a chunk size of 3 rather than 5):
+        //
+        // ------  -----------------------  ------------  --------------------
+        //                    [e1, e3, e4]
+        //                         [e8, e9] "= ENTRY:"    <ENTRY-BITS>
+        // ------  -----------------------  ------------  --------------------
+        // Left
+        // Most
+        // Column
+        // Is
+        // Just
+        // Normal
+        // Series
+        // Of
+        // MIR
+        // Stmts
+        // ------  -----------------------  ------------  --------------------
+        //           [g1, g4, g5]           "= GEN:"      <GEN-BITS>
+        // ------  -----------------------  ------------  --------------------
+        //                                  "KILL:"       <KILL-BITS>   "="    [k1, k3, k8]
+        //                                                                     [k9]
+        // ------  -----------------------  ------------  --------------------
+        //
+        // (In addition, the added dataflow is rendered with a colored
+        // background just so it will stand out compared to the
+        // statements.)
+        let mut v = Vec::new();
+        let i = n.index();
+        let chunk_size = 5;
+        const BG_FLOWCONTENT: &'static str = r#"bgcolor="pink""#;
+        const ALIGN_RIGHT: &'static str = r#"align="right""#;
+        const FACE_MONOSPACE: &'static str = r#"FACE="Courier""#;
+        // Emits the chunked left-hand listing of interpreted bits.
+        fn chunked_present_left<W:io::Write>(w: &mut W,
+                                             interpreted: &[&MoveOut],
+                                             chunk_size: usize)
+                                             -> io::Result<()>
+        {
+            // This function may emit a sequence of <tr>'s, but it
+            // always finishes with an (unfinished)
+            // <tr><td></td><td>
+            //
+            // Thus, after being called, one should finish both the
+            // pending <td> as well as the <tr> itself.
+            let mut seen_one = false;
+            for c in interpreted.chunks(chunk_size) {
+                if seen_one {
+                    // if not the first row, finish off the previous row
+                    try!(write!(w, "</td><td></td><td></td></tr>"));
+                }
+                try!(write!(w, "<tr><td></td><td {bg} {align}>{objs:?}",
+                            bg = BG_FLOWCONTENT,
+                            align = ALIGN_RIGHT,
+                            objs = c));
+                seen_one = true;
+            }
+            if !seen_one {
+                try!(write!(w, "<tr><td></td><td {bg} {align}>[]",
+                            bg = BG_FLOWCONTENT,
+                            align = ALIGN_RIGHT));
+            }
+            Ok(())
+        }
+        ::rustc_mir::graphviz::write_node_label(
+            *n, self.mbcx.mir, &mut v, 4,
+            // `init` row: the ENTRY set, above the statements.
+            |w| {
+                let flow = &self.mbcx.flow_state;
+                let entry = flow.interpret_set(flow.sets.on_entry_set_for(i));
+                try!(chunked_present_left(w, &entry[..], chunk_size));
+                write!(w, "= ENTRY:</td><td {bg}><FONT {face}>{entrybits:?}</FONT></td>\
+                           <td></td></tr>",
+                       bg = BG_FLOWCONTENT,
+                       face = FACE_MONOSPACE,
+                       entrybits=bits_to_string(flow.sets.on_entry_set_for(i),
+                                                flow.sets.bytes_per_block()))
+            },
+            // `fini` rows: the GEN and KILL sets, below the statements.
+            |w| {
+                let flow = &self.mbcx.flow_state;
+                let gen = flow.interpret_set( flow.sets.gen_set_for(i));
+                let kill = flow.interpret_set(flow.sets.kill_set_for(i));
+                try!(chunked_present_left(w, &gen[..], chunk_size));
+                try!(write!(w, " = GEN:</td><td {bg}><FONT {face}>{genbits:?}</FONT></td>\
+                                <td></td></tr>",
+                            bg = BG_FLOWCONTENT,
+                            face = FACE_MONOSPACE,
+                            genbits=bits_to_string( flow.sets.gen_set_for(i),
+                                                    flow.sets.bytes_per_block())));
+                try!(write!(w, "<tr><td></td><td {bg} {align}>KILL:</td>\
+                                <td {bg}><FONT {face}>{killbits:?}</FONT></td>",
+                            bg = BG_FLOWCONTENT,
+                            align = ALIGN_RIGHT,
+                            face = FACE_MONOSPACE,
+                            killbits=bits_to_string(flow.sets.kill_set_for(i),
+                                                    flow.sets.bytes_per_block())));
+
+                // (chunked_present_right)
+                let mut seen_one = false;
+                for k in kill.chunks(chunk_size) {
+                    if !seen_one {
+                        // continuation of row; this is fourth <td>
+                        try!(write!(w, "<td {bg}>= {kill:?}</td></tr>",
+                                    bg = BG_FLOWCONTENT,
+                                    kill=k));
+                    } else {
+                        // new row, with indent of three <td>'s
+                        try!(write!(w, "<tr><td></td><td></td><td></td><td {bg}>{kill:?}</td></tr>",
+                                    bg = BG_FLOWCONTENT,
+                                    kill=k));
+                    }
+                    seen_one = true;
+                }
+                if !seen_one {
+                    try!(write!(w, "<td {bg}>= []</td></tr>",
+                                bg = BG_FLOWCONTENT));
+                }
+
+                Ok(())
+            })
+            .unwrap();
+        dot::LabelText::html(String::from_utf8(v).unwrap())
+    }
+
+    // "none" shape: the HTML table in the label provides the border.
+    fn node_shape(&self, _n: &Node) -> Option<dot::LabelText> {
+        Some(dot::LabelText::label("none"))
+    }
+}
+
+impl<'c, 'b:'c, 'a:'b, 'tcx:'a> dot::GraphWalk<'c> for Graph<'c,'b,'a,'tcx> {
+    type Node = Node;
+    type Edge = Edge;
+    fn nodes(&self) -> dot::Nodes<Node> {
+        self.mbcx.mir.all_basic_blocks().into_cow()
+    }
+
+    fn edges(&self) -> dot::Edges<Edge> {
+        let mir = self.mbcx.mir;
+        let blocks = self.mbcx.mir.all_basic_blocks();
+        // base initial capacity on assumption every block has at
+        // least one outgoing edge (Which should be true for all
+        // blocks but one, the exit-block).
+        let mut edges = Vec::with_capacity(blocks.len());
+        for bb in blocks {
+            let outgoing = outgoing(mir, bb);
+            edges.extend(outgoing.into_iter());
+        }
+        edges.into_cow()
+    }
+
+    fn source(&self, edge: &Edge) -> Node {
+        edge.source
+    }
+
+    // Resolve the edge's successor-list position back to a block.
+    fn target(&self, edge: &Edge) -> Node {
+        let mir = self.mbcx.mir;
+        mir.basic_block_data(edge.source).terminator().successors()[edge.index]
+    }
+}
--- /dev/null
+// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use borrowck::BorrowckCtxt;
+
+use syntax::ast;
+use syntax::codemap::Span;
+
+use rustc_front::hir;
+use rustc_front::intravisit::{FnKind};
+
+use rustc::mir::repr::{BasicBlock, BasicBlockData, Mir, Statement, Terminator};
+
+mod abs_domain;
+mod dataflow;
+mod gather_moves;
+mod graphviz;
+
+use self::dataflow::{Dataflow, DataflowState};
+use self::gather_moves::{MoveData};
+
+/// Entry point for MIR-based borrow checking of one fn/method/closure.
+/// Currently scaffolding: logs, walks each block via the (stub)
+/// process_* methods, then runs the move-data dataflow.
+pub fn borrowck_mir<'b, 'a: 'b, 'tcx: 'a>(
+    bcx: &'b mut BorrowckCtxt<'a, 'tcx>,
+    fk: FnKind,
+    _decl: &hir::FnDecl,
+    mir: &'a Mir<'tcx>,
+    body: &hir::Block,
+    _sp: Span,
+    id: ast::NodeId,
+    attributes: &[ast::Attribute]) {
+    match fk {
+        FnKind::ItemFn(name, _, _, _, _, _, _) |
+        FnKind::Method(name, _, _, _) => {
+            debug!("borrowck_mir({}) UNIMPLEMENTED", name);
+        }
+        FnKind::Closure(_) => {
+            debug!("borrowck_mir closure (body.id={}) UNIMPLEMENTED", body.id);
+        }
+    }
+
+    let mut mbcx = MirBorrowckCtxt {
+        bcx: bcx,
+        mir: mir,
+        node_id: id,
+        attributes: attributes,
+        flow_state: DataflowState::new_move_analysis(mir, bcx.tcx),
+    };
+
+    for bb in mir.all_basic_blocks() {
+        mbcx.process_basic_block(bb);
+    }
+
+    // Propagate the gen/kill sets computed above across the CFG.
+    mbcx.dataflow();
+
+    debug!("borrowck_mir done");
+}
+
+// State for one MIR borrowck run: the enclosing BorrowckCtxt, the MIR
+// being checked, and the move-analysis dataflow state.
+pub struct MirBorrowckCtxt<'b, 'a: 'b, 'tcx: 'a> {
+    bcx: &'b mut BorrowckCtxt<'a, 'tcx>,
+    mir: &'b Mir<'tcx>,
+    node_id: ast::NodeId,
+    attributes: &'b [ast::Attribute],
+    flow_state: DataflowState<MoveData<'tcx>>,
+}
+
+impl<'b, 'a: 'b, 'tcx: 'a> MirBorrowckCtxt<'b, 'a, 'tcx> {
+    /// Walks one basic block, dispatching each statement and then the
+    /// terminator to the `process_*` helpers below.
+    fn process_basic_block(&mut self, bb: BasicBlock) {
+        let &BasicBlockData { ref statements, ref terminator, is_cleanup: _ } =
+            self.mir.basic_block_data(bb);
+        for stmt in statements {
+            self.process_statement(bb, stmt);
+        }
+
+        self.process_terminator(bb, terminator);
+    }
+
+    /// Currently a stub: just logs the statement being visited.
+    fn process_statement(&mut self, bb: BasicBlock, stmt: &Statement<'tcx>) {
+        // Fix: the format string was missing its closing `)`, producing
+        // unbalanced log output like `process_statement(bb0, <stmt>`.
+        debug!("MirBorrowckCtxt::process_statement({:?}, {:?})", bb, stmt);
+    }
+
+    /// Currently a stub: just logs the terminator being visited.
+    fn process_terminator(&mut self, bb: BasicBlock, term: &Option<Terminator<'tcx>>) {
+        debug!("MirBorrowckCtxt::process_terminator({:?}, {:?})", bb, term);
+    }
+}
use std::mem;
use std::rc::Rc;
use syntax::ast;
+use syntax::attr::AttrMetaMethods;
use syntax::codemap::Span;
use syntax::errors::DiagnosticBuilder;
use rustc_front::intravisit::{Visitor, FnKind};
use rustc_front::util as hir_util;
+use rustc::mir::mir_map::MirMap;
+
pub mod check_loans;
pub mod gather_loans;
pub mod move_data;
+mod mir;
+
#[derive(Clone, Copy)]
pub struct LoanDataFlowOperator;
match fk {
FnKind::ItemFn(..) |
FnKind::Method(..) => {
- let new_free_region_map = self.tcx.free_region_map(id);
- let old_free_region_map =
- mem::replace(&mut self.free_region_map, new_free_region_map);
- borrowck_fn(self, fk, fd, b, s, id);
- self.free_region_map = old_free_region_map;
+ self.with_temp_region_map(id, |this| {
+ borrowck_fn(this, fk, fd, b, s, id, fk.attrs())
+ });
}
FnKind::Closure(..) => {
- borrowck_fn(self, fk, fd, b, s, id);
+ borrowck_fn(self, fk, fd, b, s, id, fk.attrs());
}
}
}
}
}
-pub fn check_crate(tcx: &TyCtxt) {
+pub fn check_crate<'tcx>(tcx: &TyCtxt<'tcx>, mir_map: &MirMap<'tcx>) {
let mut bccx = BorrowckCtxt {
tcx: tcx,
+ mir_map: Some(mir_map),
free_region_map: FreeRegionMap::new(),
stats: BorrowStats {
loaned_paths_same: 0,
decl: &hir::FnDecl,
body: &hir::Block,
sp: Span,
- id: ast::NodeId) {
+ id: ast::NodeId,
+ attributes: &[ast::Attribute]) {
debug!("borrowck_fn(id={})", id);
+
+ if attributes.iter().any(|item| item.check_name("rustc_mir_borrowck")) {
+ let mir = this.mir_map.unwrap().map.get(&id).unwrap();
+ this.with_temp_region_map(id, |this| {
+ mir::borrowck_mir(this, fk, decl, mir, body, sp, id, attributes)
+ });
+ }
+
let cfg = cfg::CFG::new(this.tcx, body);
let AnalysisData { all_loans,
loans: loan_dfcx,
/// the `BorrowckCtxt` itself , e.g. the flowgraph visualizer.
pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>(
tcx: &'a TyCtxt<'tcx>,
+ mir_map: Option<&'a MirMap<'tcx>>,
fn_parts: FnParts<'a>,
cfg: &cfg::CFG)
-> (BorrowckCtxt<'a, 'tcx>, AnalysisData<'a, 'tcx>)
let mut bccx = BorrowckCtxt {
tcx: tcx,
+ mir_map: mir_map,
free_region_map: FreeRegionMap::new(),
stats: BorrowStats {
loaned_paths_same: 0,
free_region_map: FreeRegionMap,
// Statistics:
- stats: BorrowStats
+ stats: BorrowStats,
+
+ // NodeId to MIR mapping (for methods that carry the #[rustc_mir] attribute).
+ mir_map: Option<&'a MirMap<'tcx>>,
}
+#[derive(Clone)]
struct BorrowStats {
loaned_paths_same: usize,
loaned_paths_imm: usize,
// Misc
impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
+    // Runs `f` with `self.free_region_map` temporarily swapped for the
+    // free-region map of item `id`, restoring the old map afterwards.
+    // Note: if `f` panics, the old map is not restored.
+    fn with_temp_region_map<F>(&mut self, id: ast::NodeId, f: F)
+        where F: for <'b> FnOnce(&'b mut BorrowckCtxt<'a, 'tcx>)
+    {
+        let new_free_region_map = self.tcx.free_region_map(id);
+        let old_free_region_map = mem::replace(&mut self.free_region_map, new_free_region_map);
+        f(self);
+        self.free_region_map = old_free_region_map;
+    }
+
pub fn is_subregion_of(&self, r_sub: ty::Region, r_sup: ty::Region)
-> bool
{
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
-
+#![feature(associated_consts)]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
extern crate graphviz as dot;
extern crate rustc;
extern crate rustc_front;
+extern crate rustc_mir;
pub use borrowck::check_crate;
pub use borrowck::build_borrowck_dataflow_data_for_fn;
pub mod diagnostics;
mod borrowck;
+mod bitslice;
pub mod graphviz;
time(time_passes,
"borrow checking",
- || borrowck::check_crate(tcx));
+ || borrowck::check_crate(tcx, &mir_map));
// Avoid overwhelming user with errors if type checking failed.
// I'm not sure how helpful this is, to be honest, but it avoids
use rustc_front::lowering::{lower_crate, LoweringContext};
use rustc_front::print::pprust as pprust_hir;
+use rustc::mir::mir_map::MirMap;
+
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum PpSourceMode {
PpmNormal,
&arenas,
&id,
resolve::MakeGlobMap::No,
- |tcx, _, _, _| {
+ |tcx, mir_map, _, _| {
print_flowgraph(variants,
tcx,
+ mir_map.as_ref(),
code,
mode,
out)
}
}
-fn print_flowgraph<W: Write>(variants: Vec<borrowck_dot::Variant>,
- tcx: &TyCtxt,
- code: blocks::Code,
- mode: PpFlowGraphMode,
- mut out: W)
- -> io::Result<()> {
+fn print_flowgraph<'tcx, W: Write>(variants: Vec<borrowck_dot::Variant>,
+ tcx: &TyCtxt<'tcx>,
+ mir_map: Option<&MirMap<'tcx>>,
+ code: blocks::Code,
+ mode: PpFlowGraphMode,
+ mut out: W)
+ -> io::Result<()> {
let cfg = match code {
blocks::BlockCode(block) => cfg::CFG::new(tcx, &block),
blocks::FnLikeCode(fn_like) => cfg::CFG::new(tcx, &fn_like.body()),
blocks::FnLikeCode(fn_like) => {
let (bccx, analysis_data) =
borrowck::build_borrowck_dataflow_data_for_fn(tcx,
+ mir_map,
fn_like.to_fn_parts(),
&cfg);
Ok(())
}
-/// Write a graphviz DOT node for the given basic block.
-fn write_node<W: Write>(block: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {
+/// Write a graphviz HTML-styled label for the given basic block, with
+/// all necessary escaping already performed. (This is suitable for
+/// emitting directly, as is done in this module, or for use with the
+/// LabelText::HtmlStr from libgraphviz.)
+///
+/// `init` and `fini` are callbacks for emitting additional rows of
+/// data (using HTML enclosed with `<tr>` in the emitted text).
+pub fn write_node_label<W: Write, INIT, FINI>(block: BasicBlock,
+ mir: &Mir,
+ w: &mut W,
+ num_cols: u32,
+ init: INIT,
+ fini: FINI) -> io::Result<()>
+ where INIT: Fn(&mut W) -> io::Result<()>,
+ FINI: Fn(&mut W) -> io::Result<()>
+{
let data = mir.basic_block_data(block);
- // Start a new node with the label to follow, in one of DOT's pseudo-HTML tables.
- try!(write!(w, r#" {} [shape="none", label=<"#, node(block)));
try!(write!(w, r#"<table border="0" cellborder="1" cellspacing="0">"#));
// Basic block number at the top.
- try!(write!(w, r#"<tr><td bgcolor="gray" align="center">{}</td></tr>"#, block.index()));
+ try!(write!(w, r#"<tr><td {attrs} colspan="{colspan}">{blk}</td></tr>"#,
+ attrs=r#"bgcolor="gray" align="center""#,
+ colspan=num_cols,
+ blk=block.index()));
+
+ try!(init(w));
// List of statements in the middle.
if !data.statements.is_empty() {
data.terminator().fmt_head(&mut terminator_head).unwrap();
try!(write!(w, r#"<tr><td align="left">{}</td></tr>"#, dot::escape_html(&terminator_head)));
- // Close the table, node label, and the node itself.
- writeln!(w, "</table>>];")
+ try!(fini(w));
+
+ // Close the table
+ writeln!(w, "</table>")
+}
+
+/// Write a graphviz DOT node for the given basic block.
+/// Delegates the label body to `write_node_label` with a single column
+/// and no extra init/fini rows.
+fn write_node<W: Write>(block: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {
+    // Start a new node with the label to follow, in one of DOT's pseudo-HTML tables.
+    try!(write!(w, r#"    {} [shape="none", label=<"#, node(block)));
+    try!(write_node_label(block, mir, w, 1, |_| Ok(()), |_| Ok(())));
+    // Close the node label and the node itself.
+    writeln!(w, ">];")
+}
/// Write graphviz DOT edges with labels between the given basic block and all of its successors.