name = "clippy-mini-macro-test"
version = "0.2.0"
-[[package]]
-name = "clippy_lints"
-version = "0.0.211"
-source = "git+https://github.com/rust-lang-nursery/rust-clippy?rev=6c70013f93a18c1ca7990efa8b1464acc6e18ce7#6c70013f93a18c1ca7990efa8b1464acc6e18ce7"
-dependencies = [
- "cargo_metadata 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "if_chain 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 1.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-normalization 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "clippy_lints"
version = "0.0.211"
dependencies = [
"cargo 0.30.0",
"cargo_metadata 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "clippy_lints 0.0.211 (git+https://github.com/rust-lang-nursery/rust-clippy?rev=6c70013f93a18c1ca7990efa8b1464acc6e18ce7)",
+ "clippy_lints 0.0.211",
"env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
"checksum chalk-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "295635afd6853aa9f20baeb7f0204862440c0fe994c5a253d5f479dac41d047e"
"checksum chrono 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6962c635d530328acc53ac6a955e83093fedc91c5809dfac1fa60fa470830a37"
"checksum clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b957d88f4b6a63b9d70d5f454ac8011819c6efa7727858f458ab71c756ce2d3e"
-"checksum clippy_lints 0.0.211 (git+https://github.com/rust-lang-nursery/rust-clippy?rev=6c70013f93a18c1ca7990efa8b1464acc6e18ce7)" = "<none>"
"checksum cmake 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "95470235c31c726d72bf2e1f421adc1e65b9d561bf5529612cbe1a72da1467b3"
"checksum colored 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b0aa3473e85a3161b59845d6096b289bb577874cafeaf75ea1b1beaa6572c7fc"
"checksum commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007"
[patch."https://github.com/rust-lang-nursery/rust-clippy"]
clippy = { path = "tools/clippy" }
+clippy_lints = { path = "tools/clippy/clippy_lints" }
return "{}-{}".format(cputype, ostype)
+@contextlib.contextmanager
+def output(filepath):
+ tmp = filepath + '.tmp'
+ with open(tmp, 'w') as f:
+ yield f
+ try:
+ os.remove(filepath) # PermissionError/OSError on Win32 if in use
+ os.rename(tmp, filepath)
+ except OSError:
+ shutil.copy2(tmp, filepath)
+ os.remove(tmp)
+
+
class RustBuild(object):
"""Provide all the methods required to build Rust"""
def __init__(self):
self._download_stage0_helper(filename, "rustc")
self.fix_executable("{}/bin/rustc".format(self.bin_root()))
self.fix_executable("{}/bin/rustdoc".format(self.bin_root()))
- with open(self.rustc_stamp(), 'w') as rust_stamp:
+ with output(self.rustc_stamp()) as rust_stamp:
rust_stamp.write(self.date)
# This is required so that we don't mix incompatible MinGW
filename = "cargo-{}-{}.tar.gz".format(cargo_channel, self.build)
self._download_stage0_helper(filename, "cargo")
self.fix_executable("{}/bin/cargo".format(self.bin_root()))
- with open(self.cargo_stamp(), 'w') as cargo_stamp:
+ with output(self.cargo_stamp()) as cargo_stamp:
cargo_stamp.write(self.date)
def _download_stage0_helper(self, filename, pattern):
if build.use_vendored_sources:
if not os.path.exists('.cargo'):
os.makedirs('.cargo')
- with open('.cargo/config', 'w') as cargo_config:
+ with output('.cargo/config') as cargo_config:
cargo_config.write("""
[source.crates-io]
replace-with = 'vendored-sources'
pub top_stage: u32,
pub kind: Kind,
cache: Cache,
- stack: RefCell<Vec<Box<Any>>>,
+ stack: RefCell<Vec<Box<dyn Any>>>,
time_spent_on_dependencies: Cell<Duration>,
pub paths: Vec<PathBuf>,
graph_nodes: RefCell<HashMap<String, NodeIndex>>,
pub struct Cache(
RefCell<HashMap<
TypeId,
- Box<Any>, // actually a HashMap<Step, Interned<Step::Output>>
+ Box<dyn Any>, // actually a HashMap<Step, Interned<Step::Output>>
>>
);
pub fn stream_cargo(
builder: &Builder,
cargo: &mut Command,
- cb: &mut FnMut(CargoMessage),
+ cb: &mut dyn FnMut(CargoMessage),
) -> bool {
if builder.config.dry_run {
return true;
# order that we read it in.
p("")
p("writing `config.toml` in current directory")
-with open('config.toml', 'w') as f:
+with bootstrap.output('config.toml') as f:
for section in section_order:
if section == 'target':
for target in targets:
for line in sections[section]:
f.write(line + "\n")
-with open('Makefile', 'w') as f:
+with bootstrap.output('Makefile') as f:
contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
contents = open(contents).read()
contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/')
//! More documentation can be found in each respective module below, and you can
//! also check out the `src/bootstrap/README.md` file for more information.
+#![deny(bare_trait_objects)]
#![deny(warnings)]
#![feature(core_intrinsics)]
#![feature(drain_filter)]
"llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume
"llvm-objdump", // used to disassemble programs
"llvm-profdata", // used to inspect and merge files generated by profiles
- "llvm-size", // prints the size of the linker sections of a program
+    "llvm-size", // used to print the size of the linker sections of a program
+ "llvm-strip", // used to discard symbols from binary files to reduce their size
];
/// A structure representing a Rust compiler.
/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
/// when this function is called. Unwanted files or directories can be skipped
/// by returning `false` from the filter function.
- pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &Fn(&Path) -> bool) {
+ pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) {
// Immediately recurse with an empty relative path
self.recurse_(src, dst, Path::new(""), filter)
}
// Inner function does the actual work
- fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &Fn(&Path) -> bool) {
+ fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &dyn Fn(&Path) -> bool) {
for f in self.read_dir(src) {
let path = f.path();
let name = path.file_name().unwrap();
#[inline]
fn spec_next(&mut self) -> Option<Self::Item> {
self.first_take = false;
- if !(self.iter.start <= self.iter.end) {
+ self.iter.compute_is_empty();
+ if self.iter.is_empty.unwrap_or_default() {
return None;
}
// add 1 to self.step to get original step size back
// it was decremented for the general case on construction
if let Some(n) = self.iter.start.add_usize(self.step+1) {
+ self.iter.is_empty = Some(!(n <= self.iter.end));
let next = mem::replace(&mut self.iter.start, n);
Some(next)
} else {
- let last = self.iter.start.replace_one();
- self.iter.end.replace_zero();
+ let last = self.iter.start.clone();
+ self.iter.is_empty = Some(true);
Some(last)
}
}
use convert::TryFrom;
use mem;
-use ops::{self, Add, Sub, Try};
+use ops::{self, Add, Sub};
use usize;
use super::{FusedIterator, TrustedLen};
#[inline]
fn next(&mut self) -> Option<A> {
- if self.start <= self.end {
- if self.start < self.end {
- let n = self.start.add_one();
- Some(mem::replace(&mut self.start, n))
- } else {
- let last = self.start.replace_one();
- self.end.replace_zero();
- Some(last)
- }
- } else {
- None
+ self.compute_is_empty();
+ if self.is_empty.unwrap_or_default() {
+ return None;
}
+ let is_iterating = self.start < self.end;
+ self.is_empty = Some(!is_iterating);
+ Some(if is_iterating {
+ let n = self.start.add_one();
+ mem::replace(&mut self.start, n)
+ } else {
+ self.start.clone()
+ })
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- if !(self.start <= self.end) {
+ if self.is_empty() {
return (0, Some(0));
}
#[inline]
fn nth(&mut self, n: usize) -> Option<A> {
+ self.compute_is_empty();
+ if self.is_empty.unwrap_or_default() {
+ return None;
+ }
+
if let Some(plus_n) = self.start.add_usize(n) {
use cmp::Ordering::*;
match plus_n.partial_cmp(&self.end) {
Some(Less) => {
+ self.is_empty = Some(false);
self.start = plus_n.add_one();
return Some(plus_n)
}
Some(Equal) => {
- self.start.replace_one();
- self.end.replace_zero();
+ self.is_empty = Some(true);
return Some(plus_n)
}
_ => {}
}
}
- self.start.replace_one();
- self.end.replace_zero();
+ self.is_empty = Some(true);
None
}
fn max(mut self) -> Option<A> {
self.next_back()
}
-
- #[inline]
- fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
- Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
- {
- let mut accum = init;
- if self.start <= self.end {
- loop {
- let (x, done) =
- if self.start < self.end {
- let n = self.start.add_one();
- (mem::replace(&mut self.start, n), false)
- } else {
- self.end.replace_zero();
- (self.start.replace_one(), true)
- };
- accum = f(accum, x)?;
- if done { break }
- }
- }
- Try::from_ok(accum)
- }
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
#[inline]
fn next_back(&mut self) -> Option<A> {
- if self.start <= self.end {
- if self.start < self.end {
- let n = self.end.sub_one();
- Some(mem::replace(&mut self.end, n))
- } else {
- let last = self.end.replace_zero();
- self.start.replace_one();
- Some(last)
- }
- } else {
- None
+ self.compute_is_empty();
+ if self.is_empty.unwrap_or_default() {
+ return None;
}
- }
-
- #[inline]
- fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
- Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
- {
- let mut accum = init;
- if self.start <= self.end {
- loop {
- let (x, done) =
- if self.start < self.end {
- let n = self.end.sub_one();
- (mem::replace(&mut self.end, n), false)
- } else {
- self.start.replace_one();
- (self.end.replace_zero(), true)
- };
- accum = f(accum, x)?;
- if done { break }
- }
- }
- Try::from_ok(accum)
+ let is_iterating = self.start < self.end;
+ self.is_empty = Some(!is_iterating);
+ Some(if is_iterating {
+ let n = self.end.sub_one();
+ mem::replace(&mut self.end, n)
+ } else {
+ self.end.clone()
+ })
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/*!
-Almost direct (but slightly optimized) Rust translation of Figure 3 of \[1\].
-
-\[1\] Burger, R. G. and Dybvig, R. K. 1996. Printing floating-point numbers
- quickly and accurately. SIGPLAN Not. 31, 5 (May. 1996), 108-116.
-*/
+//! Almost direct (but slightly optimized) Rust translation of Figure 3 of "Printing
+//! Floating-Point Numbers Quickly and Accurately"[^1].
+//!
+//! [^1]: Burger, R. G. and Dybvig, R. K. 1996. Printing floating-point numbers
+//! quickly and accurately. SIGPLAN Not. 31, 5 (May. 1996), 108-116.
use cmp::Ordering;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/*!
-Rust adaptation of Grisu3 algorithm described in \[1\]. It uses about
-1KB of precomputed table, and in turn, it's very quick for most inputs.
-
-\[1\] Florian Loitsch. 2010. Printing floating-point numbers quickly and
- accurately with integers. SIGPLAN Not. 45, 6 (June 2010), 233-243.
-*/
+//! Rust adaptation of the Grisu3 algorithm described in "Printing Floating-Point Numbers Quickly
+//! and Accurately with Integers"[^1]. It uses about 1KB of precomputed table, and in turn, it's
+//! very quick for most inputs.
+//!
+//! [^1]: Florian Loitsch. 2010. Printing floating-point numbers quickly and
+//! accurately with integers. SIGPLAN Not. 45, 6 (June 2010), 233-243.
use num::diy_float::Fp;
use num::flt2dec::{Decoded, MAX_SIG_DIGITS, round_up};
// except according to those terms.
use fmt;
+use hash::{Hash, Hasher};
/// An unbounded range (`..`).
///
/// assert_eq!(arr[1..=2], [ 1,2 ]); // RangeInclusive
/// ```
#[doc(alias = "..=")]
-#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186
+#[derive(Clone)] // not Copy -- see #27186
#[stable(feature = "inclusive_range", since = "1.26.0")]
pub struct RangeInclusive<Idx> {
- // FIXME: The current representation follows RFC 1980,
- // but it is known that LLVM is not able to optimize loops following that RFC.
- // Consider adding an extra `bool` field to indicate emptiness of the range.
- // See #45222 for performance test cases.
pub(crate) start: Idx,
pub(crate) end: Idx,
+ pub(crate) is_empty: Option<bool>,
+ // This field is:
+ // - `None` when next() or next_back() was never called
+ // - `Some(false)` when `start <= end` assuming no overflow
+ // - `Some(true)` otherwise
+ // The field cannot be a simple `bool` because the `..=` constructor can
+ // accept non-PartialOrd types, also we want the constructor to be const.
+}
+
+trait RangeInclusiveEquality: Sized {
+ fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool;
+}
+impl<T> RangeInclusiveEquality for T {
+ #[inline]
+ default fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool {
+ range.is_empty.unwrap_or_default()
+ }
+}
+impl<T: PartialOrd> RangeInclusiveEquality for T {
+ #[inline]
+ fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool {
+ range.is_empty()
+ }
+}
+
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl<Idx: PartialEq> PartialEq for RangeInclusive<Idx> {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ self.start == other.start && self.end == other.end
+ && RangeInclusiveEquality::canonicalized_is_empty(self)
+ == RangeInclusiveEquality::canonicalized_is_empty(other)
+ }
+}
+
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl<Idx: Eq> Eq for RangeInclusive<Idx> {}
+
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl<Idx: Hash> Hash for RangeInclusive<Idx> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.start.hash(state);
+ self.end.hash(state);
+ RangeInclusiveEquality::canonicalized_is_empty(self).hash(state);
+ }
}
impl<Idx> RangeInclusive<Idx> {
#[stable(feature = "inclusive_range_methods", since = "1.27.0")]
#[inline]
pub const fn new(start: Idx, end: Idx) -> Self {
- Self { start, end }
+ Self { start, end, is_empty: None }
}
/// Returns the lower bound of the range (inclusive).
/// assert!(r.is_empty());
/// ```
#[unstable(feature = "range_is_empty", reason = "recently added", issue = "48111")]
+ #[inline]
pub fn is_empty(&self) -> bool {
- !(self.start <= self.end)
+ self.is_empty.unwrap_or_else(|| !(self.start <= self.end))
+ }
+
+    // If this range's `is_empty` field is unknown (`None`), update it to be a concrete value.
+ #[inline]
+ pub(crate) fn compute_is_empty(&mut self) {
+ if self.is_empty.is_none() {
+ self.is_empty = Some(!(self.start <= self.end));
+ }
}
}
pub fn take(&mut self) -> Option<T> {
mem::replace(self, None)
}
+
+ /// Replaces the actual value in the option by the value given in parameter,
+ /// returning the old value if present,
+ /// leaving a [`Some`] in its place without deinitializing either one.
+ ///
+ /// [`Some`]: #variant.Some
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(option_replace)]
+ ///
+ /// let mut x = Some(2);
+ /// let old = x.replace(5);
+ /// assert_eq!(x, Some(5));
+ /// assert_eq!(old, Some(2));
+ ///
+ /// let mut x = None;
+ /// let old = x.replace(3);
+ /// assert_eq!(x, Some(3));
+ /// assert_eq!(old, None);
+ /// ```
+ #[inline]
+ #[unstable(feature = "option_replace", issue = "51998")]
+ pub fn replace(&mut self, value: T) -> Option<T> {
+ mem::replace(self, Some(value))
+ }
}
impl<'a, T: Clone> Option<&'a T> {
/// Returns an iterator over `chunk_size` elements of the slice at a
/// time. The chunks are slices and do not overlap. If `chunk_size` does
/// not divide the length of the slice, then the last up to `chunk_size-1`
- /// elements will be omitted.
+ /// elements will be omitted and can be retrieved from the `remainder`
+ /// function of the iterator.
///
/// Due to each chunk having exactly `chunk_size` elements, the compiler
/// can often optimize the resulting code better than in the case of
assert!(chunk_size != 0);
let rem = self.len() % chunk_size;
let len = self.len() - rem;
- ExactChunks { v: &self[..len], chunk_size: chunk_size}
+ let (fst, snd) = self.split_at(len);
+ ExactChunks { v: fst, rem: snd, chunk_size: chunk_size}
}
/// Returns an iterator over `chunk_size` elements of the slice at a time.
/// The chunks are mutable slices, and do not overlap. If `chunk_size` does
/// not divide the length of the slice, then the last up to `chunk_size-1`
- /// elements will be omitted.
- ///
+ /// elements will be omitted and can be retrieved from the `into_remainder`
+ /// function of the iterator.
///
/// Due to each chunk having exactly `chunk_size` elements, the compiler
/// can often optimize the resulting code better than in the case of
assert!(chunk_size != 0);
let rem = self.len() % chunk_size;
let len = self.len() - rem;
- ExactChunksMut { v: &mut self[..len], chunk_size: chunk_size}
+ let (fst, snd) = self.split_at_mut(len);
+ ExactChunksMut { v: fst, rem: snd, chunk_size: chunk_size}
}
/// Divides one slice into two at an index.
#[inline]
fn get(self, slice: &[T]) -> Option<&[T]> {
- if self.end == usize::max_value() { None }
- else { (self.start..self.end + 1).get(slice) }
+ if *self.end() == usize::max_value() { None }
+ else { (*self.start()..self.end() + 1).get(slice) }
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
- if self.end == usize::max_value() { None }
- else { (self.start..self.end + 1).get_mut(slice) }
+ if *self.end() == usize::max_value() { None }
+ else { (*self.start()..self.end() + 1).get_mut(slice) }
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
- (self.start..self.end + 1).get_unchecked(slice)
+ (*self.start()..self.end() + 1).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
- (self.start..self.end + 1).get_unchecked_mut(slice)
+ (*self.start()..self.end() + 1).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
- if self.end == usize::max_value() { slice_index_overflow_fail(); }
- (self.start..self.end + 1).index(slice)
+ if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
+ (*self.start()..self.end() + 1).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
- if self.end == usize::max_value() { slice_index_overflow_fail(); }
- (self.start..self.end + 1).index_mut(slice)
+ if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
+ (*self.start()..self.end() + 1).index_mut(slice)
}
}
/// time).
///
/// When the slice len is not evenly divided by the chunk size, the last
-/// up to `chunk_size-1` elements will be omitted.
+/// up to `chunk_size-1` elements will be omitted but can be retrieved from
+/// the [`remainder`] function from the iterator.
///
/// This struct is created by the [`exact_chunks`] method on [slices].
///
/// [`exact_chunks`]: ../../std/primitive.slice.html#method.exact_chunks
+/// [`remainder`]: ../../std/slice/struct.ExactChunks.html#method.remainder
/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[unstable(feature = "exact_chunks", issue = "47115")]
pub struct ExactChunks<'a, T:'a> {
v: &'a [T],
+ rem: &'a [T],
chunk_size: usize
}
+#[unstable(feature = "exact_chunks", issue = "47115")]
+impl<'a, T> ExactChunks<'a, T> {
+    /// Returns the remainder of the original slice that is not going to be
+ /// returned by the iterator. The returned slice has at most `chunk_size-1`
+ /// elements.
+ pub fn remainder(&self) -> &'a [T] {
+ self.rem
+ }
+}
+
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[unstable(feature = "exact_chunks", issue = "47115")]
impl<'a, T> Clone for ExactChunks<'a, T> {
fn clone(&self) -> ExactChunks<'a, T> {
ExactChunks {
v: self.v,
+ rem: self.rem,
chunk_size: self.chunk_size,
}
}
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
-/// elements at a time). When the slice len is not evenly divided by the chunk
-/// size, the last up to `chunk_size-1` elements will be omitted.
+/// elements at a time).
+///
+/// When the slice len is not evenly divided by the chunk size, the last up to
+/// `chunk_size-1` elements will be omitted but can be retrieved from the
+/// [`into_remainder`] function from the iterator.
///
/// This struct is created by the [`exact_chunks_mut`] method on [slices].
///
/// [`exact_chunks_mut`]: ../../std/primitive.slice.html#method.exact_chunks_mut
+/// [`into_remainder`]: ../../std/slice/struct.ExactChunksMut.html#method.into_remainder
/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[unstable(feature = "exact_chunks", issue = "47115")]
pub struct ExactChunksMut<'a, T:'a> {
v: &'a mut [T],
+ rem: &'a mut [T],
chunk_size: usize
}
+#[unstable(feature = "exact_chunks", issue = "47115")]
+impl<'a, T> ExactChunksMut<'a, T> {
+    /// Returns the remainder of the original slice that is not going to be
+ /// returned by the iterator. The returned slice has at most `chunk_size-1`
+ /// elements.
+ pub fn into_remainder(self) -> &'a mut [T] {
+ self.rem
+ }
+}
+
#[unstable(feature = "exact_chunks", issue = "47115")]
impl<'a, T> Iterator for ExactChunksMut<'a, T> {
type Item = &'a mut [T];
type Output = str;
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
- if self.end == usize::max_value() { None }
- else { (self.start..self.end+1).get(slice) }
+ if *self.end() == usize::max_value() { None }
+ else { (*self.start()..self.end()+1).get(slice) }
}
#[inline]
fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> {
- if self.end == usize::max_value() { None }
- else { (self.start..self.end+1).get_mut(slice) }
+ if *self.end() == usize::max_value() { None }
+ else { (*self.start()..self.end()+1).get_mut(slice) }
}
#[inline]
unsafe fn get_unchecked(self, slice: &str) -> &Self::Output {
- (self.start..self.end+1).get_unchecked(slice)
+ (*self.start()..self.end()+1).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut str) -> &mut Self::Output {
- (self.start..self.end+1).get_unchecked_mut(slice)
+ (*self.start()..self.end()+1).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: &str) -> &Self::Output {
- if self.end == usize::max_value() { str_index_overflow_fail(); }
- (self.start..self.end+1).index(slice)
+ if *self.end() == usize::max_value() { str_index_overflow_fail(); }
+ (*self.start()..self.end()+1).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut str) -> &mut Self::Output {
- if self.end == usize::max_value() { str_index_overflow_fail(); }
- (self.start..self.end+1).index_mut(slice)
+ if *self.end() == usize::max_value() { str_index_overflow_fail(); }
+ (*self.start()..self.end()+1).index_mut(slice)
}
}
#![feature(reverse_bits)]
#![feature(iterator_find_map)]
#![feature(slice_internals)]
+#![feature(option_replace)]
extern crate core;
extern crate test;
}
assert_eq!(try_option_err(), Err(NoneError));
}
+
+#[test]
+fn test_replace() {
+ let mut x = Some(2);
+ let old = x.replace(5);
+
+ assert_eq!(x, Some(5));
+ assert_eq!(old, Some(2));
+
+ let mut x = None;
+ let old = x.replace(3);
+
+ assert_eq!(x, Some(3));
+ assert_eq!(old, None);
+}
assert_eq!(c2.last().unwrap(), &[2, 3]);
}
+#[test]
+fn test_exact_chunks_remainder() {
+ let v: &[i32] = &[0, 1, 2, 3, 4];
+ let c = v.exact_chunks(2);
+ assert_eq!(c.remainder(), &[4]);
+}
+
#[test]
fn test_exact_chunks_zip() {
let v1: &[i32] = &[0, 1, 2, 3, 4];
assert_eq!(c2.last().unwrap(), &[2, 3]);
}
+#[test]
+fn test_exact_chunks_mut_remainder() {
+ let v: &mut [i32] = &mut [0, 1, 2, 3, 4];
+ let c = v.exact_chunks_mut(2);
+ assert_eq!(c.into_remainder(), &[4]);
+}
+
#[test]
fn test_exact_chunks_mut_zip() {
let v1: &mut [i32] = &mut [0, 1, 2, 3, 4];
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc_data_structures::graph;
use cfg::*;
use middle::region;
-use ty::{self, TyCtxt};
+use rustc_data_structures::graph::implementation as graph;
use syntax::ptr::P;
+use ty::{self, TyCtxt};
use hir::{self, PatKind};
use hir::def_id::DefId;
//! Module that constructs a control-flow graph representing an item.
//! Uses `Graph` as the underlying representation.
-use rustc_data_structures::graph;
+use rustc_data_structures::graph::implementation as graph;
use ty::TyCtxt;
use hir;
use hir::def_id::DefId;
// except according to those terms.
use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::graph::{Direction, INCOMING, Graph, NodeIndex, OUTGOING};
+use rustc_data_structures::graph::implementation::{
+ Direction, INCOMING, Graph, NodeIndex, OUTGOING
+};
use super::DepNode;
use middle::free_region::RegionRelations;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING};
+use rustc_data_structures::graph::implementation::{Graph, Direction, NodeIndex, INCOMING, OUTGOING};
use std::fmt;
use std::u32;
use ty::{self, TyCtxt};
origin: SubregionOrigin<'tcx>,
}
-type RegionGraph<'tcx> = graph::Graph<(), Constraint<'tcx>>;
+type RegionGraph<'tcx> = Graph<(), Constraint<'tcx>>;
struct LexicalResolver<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
region_rels: &'cx RegionRelations<'cx, 'gcx, 'tcx>,
fn construct_graph(&self) -> RegionGraph<'tcx> {
let num_vars = self.num_vars();
- let mut graph = graph::Graph::new();
+ let mut graph = Graph::new();
for _ in 0..num_vars {
graph.add_node(());
// Errors in expanding nodes result from a lower-bound that is
// not contained by an upper-bound.
let (mut lower_bounds, lower_dup) =
- self.collect_concrete_regions(graph, node_idx, graph::INCOMING, dup_vec);
+ self.collect_concrete_regions(graph, node_idx, INCOMING, dup_vec);
let (mut upper_bounds, upper_dup) =
- self.collect_concrete_regions(graph, node_idx, graph::OUTGOING, dup_vec);
+ self.collect_concrete_regions(graph, node_idx, OUTGOING, dup_vec);
if lower_dup || upper_dup {
return;
"detects proc macro derives using inaccessible names from parent modules"
}
+declare_lint! {
+ pub MACRO_USE_EXTERN_CRATE,
+ Allow,
+ "the `#[macro_use]` attribute is now deprecated in favor of using macros \
+ via the module system"
+}
+
/// Does nothing as a lint pass, but registers some `Lint`s
/// which are used by other parts of the compiler.
#[derive(Copy, Clone)]
INTRA_DOC_LINK_RESOLUTION_FAILURE,
WHERE_CLAUSES_OBJECT_SAFETY,
PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
+ MACRO_USE_EXTERN_CRATE,
)
}
}
use std::usize;
use syntax::print::pprust::PrintState;
-use rustc_data_structures::graph::OUTGOING;
+use rustc_data_structures::graph::implementation::OUTGOING;
use util::nodemap::FxHashMap;
use hir;
use mir::visit::MirVisitable;
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float;
-use rustc_data_structures::control_flow_graph::dominators::{dominators, Dominators};
-use rustc_data_structures::control_flow_graph::ControlFlowGraph;
-use rustc_data_structures::control_flow_graph::{GraphPredecessors, GraphSuccessors};
+use rustc_data_structures::graph::dominators::{dominators, Dominators};
+use rustc_data_structures::graph::{self, GraphPredecessors, GraphSuccessors};
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc_data_structures::small_vec::SmallVec;
use rustc_data_structures::sync::Lrc;
ty::tls::with(|tcx| tcx.item_path_str(def_id))
}
-impl<'tcx> ControlFlowGraph for Mir<'tcx> {
+impl<'tcx> graph::DirectedGraph for Mir<'tcx> {
type Node = BasicBlock;
+}
+impl<'tcx> graph::WithNumNodes for Mir<'tcx> {
fn num_nodes(&self) -> usize {
self.basic_blocks.len()
}
+}
+impl<'tcx> graph::WithStartNode for Mir<'tcx> {
fn start_node(&self) -> Self::Node {
START_BLOCK
}
+}
+impl<'tcx> graph::WithPredecessors for Mir<'tcx> {
fn predecessors<'graph>(
&'graph self,
node: Self::Node,
) -> <Self as GraphPredecessors<'graph>>::Iter {
self.predecessors_for(node).clone().into_iter()
}
+}
+
+impl<'tcx> graph::WithSuccessors for Mir<'tcx> {
fn successors<'graph>(
&'graph self,
node: Self::Node,
}
}
-impl<'a, 'b> GraphPredecessors<'b> for Mir<'a> {
+impl<'a, 'b> graph::GraphPredecessors<'b> for Mir<'a> {
type Item = BasicBlock;
type Iter = IntoIter<BasicBlock>;
}
-impl<'a, 'b> GraphSuccessors<'b> for Mir<'a> {
+impl<'a, 'b> graph::GraphSuccessors<'b> for Mir<'a> {
type Item = BasicBlock;
type Iter = iter::Cloned<Successors<'b>>;
}
//! which do not.
use rustc_data_structures::bitvec::BitVector;
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::mir::{self, Location, TerminatorKind};
use rustc::mir::visit::{Visitor, PlaceContext};
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Algorithm citation:
-//! A Simple, Fast Dominance Algorithm.
-//! Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy
-//! Rice Computer Science TS-06-33870
-//! <https://www.cs.rice.edu/~keith/EMBED/dom.pdf>
-
-use super::ControlFlowGraph;
-use super::iterate::reverse_post_order;
-use super::super::indexed_vec::{IndexVec, Idx};
-
-use std::fmt;
-
-#[cfg(test)]
-mod test;
-
-pub fn dominators<G: ControlFlowGraph>(graph: &G) -> Dominators<G::Node> {
- let start_node = graph.start_node();
- let rpo = reverse_post_order(graph, start_node);
- dominators_given_rpo(graph, &rpo)
-}
-
-pub fn dominators_given_rpo<G: ControlFlowGraph>(graph: &G,
- rpo: &[G::Node])
- -> Dominators<G::Node> {
- let start_node = graph.start_node();
- assert_eq!(rpo[0], start_node);
-
- // compute the post order index (rank) for each node
- let mut post_order_rank: IndexVec<G::Node, usize> = IndexVec::from_elem_n(usize::default(),
- graph.num_nodes());
- for (index, node) in rpo.iter().rev().cloned().enumerate() {
- post_order_rank[node] = index;
- }
-
- let mut immediate_dominators: IndexVec<G::Node, Option<G::Node>> =
- IndexVec::from_elem_n(Option::default(), graph.num_nodes());
- immediate_dominators[start_node] = Some(start_node);
-
- let mut changed = true;
- while changed {
- changed = false;
-
- for &node in &rpo[1..] {
- let mut new_idom = None;
- for pred in graph.predecessors(node) {
- if immediate_dominators[pred].is_some() {
- // (*)
- // (*) dominators for `pred` have been calculated
- new_idom = intersect_opt(&post_order_rank,
- &immediate_dominators,
- new_idom,
- Some(pred));
- }
- }
-
- if new_idom != immediate_dominators[node] {
- immediate_dominators[node] = new_idom;
- changed = true;
- }
- }
- }
-
- Dominators {
- post_order_rank,
- immediate_dominators,
- }
-}
-
-fn intersect_opt<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
- immediate_dominators: &IndexVec<Node, Option<Node>>,
- node1: Option<Node>,
- node2: Option<Node>)
- -> Option<Node> {
- match (node1, node2) {
- (None, None) => None,
- (Some(n), None) | (None, Some(n)) => Some(n),
- (Some(n1), Some(n2)) => Some(intersect(post_order_rank, immediate_dominators, n1, n2)),
- }
-}
-
-fn intersect<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
- immediate_dominators: &IndexVec<Node, Option<Node>>,
- mut node1: Node,
- mut node2: Node)
- -> Node {
- while node1 != node2 {
- while post_order_rank[node1] < post_order_rank[node2] {
- node1 = immediate_dominators[node1].unwrap();
- }
-
- while post_order_rank[node2] < post_order_rank[node1] {
- node2 = immediate_dominators[node2].unwrap();
- }
- }
- return node1;
-}
-
-#[derive(Clone, Debug)]
-pub struct Dominators<N: Idx> {
- post_order_rank: IndexVec<N, usize>,
- immediate_dominators: IndexVec<N, Option<N>>,
-}
-
-impl<Node: Idx> Dominators<Node> {
- pub fn is_reachable(&self, node: Node) -> bool {
- self.immediate_dominators[node].is_some()
- }
-
- pub fn immediate_dominator(&self, node: Node) -> Node {
- assert!(self.is_reachable(node), "node {:?} is not reachable", node);
- self.immediate_dominators[node].unwrap()
- }
-
- pub fn dominators(&self, node: Node) -> Iter<Node> {
- assert!(self.is_reachable(node), "node {:?} is not reachable", node);
- Iter {
- dominators: self,
- node: Some(node),
- }
- }
-
- pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {
- // FIXME -- could be optimized by using post-order-rank
- self.dominators(node).any(|n| n == dom)
- }
-
- #[cfg(test)]
- fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
- &self.immediate_dominators
- }
-}
-
-pub struct Iter<'dom, Node: Idx + 'dom> {
- dominators: &'dom Dominators<Node>,
- node: Option<Node>,
-}
-
-impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {
- type Item = Node;
-
- fn next(&mut self) -> Option<Self::Item> {
- if let Some(node) = self.node {
- let dom = self.dominators.immediate_dominator(node);
- if dom == node {
- self.node = None; // reached the root
- } else {
- self.node = Some(dom);
- }
- return Some(node);
- } else {
- return None;
- }
- }
-}
-
-pub struct DominatorTree<N: Idx> {
- root: N,
- children: IndexVec<N, Vec<N>>,
-}
-
-impl<Node: Idx> DominatorTree<Node> {
- pub fn children(&self, node: Node) -> &[Node] {
- &self.children[node]
- }
-}
-
-impl<Node: Idx> fmt::Debug for DominatorTree<Node> {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- fmt::Debug::fmt(&DominatorTreeNode {
- tree: self,
- node: self.root,
- },
- fmt)
- }
-}
-
-struct DominatorTreeNode<'tree, Node: Idx> {
- tree: &'tree DominatorTree<Node>,
- node: Node,
-}
-
-impl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let subtrees: Vec<_> = self.tree
- .children(self.node)
- .iter()
- .map(|&child| {
- DominatorTreeNode {
- tree: self.tree,
- node: child,
- }
- })
- .collect();
- fmt.debug_tuple("")
- .field(&self.node)
- .field(&subtrees)
- .finish()
- }
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::super::test::TestGraph;
-
-use super::*;
-
-#[test]
-fn diamond() {
- let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
-
- let dominators = dominators(&graph);
- let immediate_dominators = dominators.all_immediate_dominators();
- assert_eq!(immediate_dominators[0], Some(0));
- assert_eq!(immediate_dominators[1], Some(0));
- assert_eq!(immediate_dominators[2], Some(0));
- assert_eq!(immediate_dominators[3], Some(0));
-}
-
-#[test]
-fn paper() {
- // example from the paper:
- let graph = TestGraph::new(6,
- &[(6, 5), (6, 4), (5, 1), (4, 2), (4, 3), (1, 2), (2, 3), (3, 2),
- (2, 1)]);
-
- let dominators = dominators(&graph);
- let immediate_dominators = dominators.all_immediate_dominators();
- assert_eq!(immediate_dominators[0], None); // <-- note that 0 is not in graph
- assert_eq!(immediate_dominators[1], Some(6));
- assert_eq!(immediate_dominators[2], Some(6));
- assert_eq!(immediate_dominators[3], Some(6));
- assert_eq!(immediate_dominators[4], Some(6));
- assert_eq!(immediate_dominators[5], Some(6));
- assert_eq!(immediate_dominators[6], Some(6));
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::ControlFlowGraph;
-use super::super::indexed_vec::IndexVec;
-
-#[cfg(test)]
-mod test;
-
-pub fn post_order_from<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
- post_order_from_to(graph, start_node, None)
-}
-
-pub fn post_order_from_to<G: ControlFlowGraph>(graph: &G,
- start_node: G::Node,
- end_node: Option<G::Node>)
- -> Vec<G::Node> {
- let mut visited: IndexVec<G::Node, bool> = IndexVec::from_elem_n(false, graph.num_nodes());
- let mut result: Vec<G::Node> = Vec::with_capacity(graph.num_nodes());
- if let Some(end_node) = end_node {
- visited[end_node] = true;
- }
- post_order_walk(graph, start_node, &mut result, &mut visited);
- result
-}
-
-fn post_order_walk<G: ControlFlowGraph>(graph: &G,
- node: G::Node,
- result: &mut Vec<G::Node>,
- visited: &mut IndexVec<G::Node, bool>) {
- if visited[node] {
- return;
- }
- visited[node] = true;
-
- for successor in graph.successors(node) {
- post_order_walk(graph, successor, result, visited);
- }
-
- result.push(node);
-}
-
-pub fn reverse_post_order<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
- let mut vec = post_order_from(graph, start_node);
- vec.reverse();
- vec
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::super::test::TestGraph;
-
-use super::*;
-
-#[test]
-fn diamond_post_order() {
- let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
-
- let result = post_order_from(&graph, 0);
- assert_eq!(result, vec![3, 1, 2, 0]);
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::indexed_vec::Idx;
-
-pub mod dominators;
-pub mod iterate;
-mod reference;
-
-#[cfg(test)]
-mod test;
-
-pub trait ControlFlowGraph
- where Self: for<'graph> GraphPredecessors<'graph, Item=<Self as ControlFlowGraph>::Node>,
- Self: for<'graph> GraphSuccessors<'graph, Item=<Self as ControlFlowGraph>::Node>
-{
- type Node: Idx;
-
- fn num_nodes(&self) -> usize;
- fn start_node(&self) -> Self::Node;
- fn predecessors<'graph>(&'graph self, node: Self::Node)
- -> <Self as GraphPredecessors<'graph>>::Iter;
- fn successors<'graph>(&'graph self, node: Self::Node)
- -> <Self as GraphSuccessors<'graph>>::Iter;
-}
-
-pub trait GraphPredecessors<'graph> {
- type Item;
- type Iter: Iterator<Item = Self::Item>;
-}
-
-pub trait GraphSuccessors<'graph> {
- type Item;
- type Iter: Iterator<Item = Self::Item>;
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::*;
-
-impl<'graph, G: ControlFlowGraph> ControlFlowGraph for &'graph G {
- type Node = G::Node;
-
- fn num_nodes(&self) -> usize {
- (**self).num_nodes()
- }
-
- fn start_node(&self) -> Self::Node {
- (**self).start_node()
- }
-
- fn predecessors<'iter>(&'iter self,
- node: Self::Node)
- -> <Self as GraphPredecessors<'iter>>::Iter {
- (**self).predecessors(node)
- }
-
- fn successors<'iter>(&'iter self, node: Self::Node) -> <Self as GraphSuccessors<'iter>>::Iter {
- (**self).successors(node)
- }
-}
-
-impl<'iter, 'graph, G: ControlFlowGraph> GraphPredecessors<'iter> for &'graph G {
- type Item = G::Node;
- type Iter = <G as GraphPredecessors<'iter>>::Iter;
-}
-
-impl<'iter, 'graph, G: ControlFlowGraph> GraphSuccessors<'iter> for &'graph G {
- type Item = G::Node;
- type Iter = <G as GraphSuccessors<'iter>>::Iter;
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::collections::HashMap;
-use std::cmp::max;
-use std::slice;
-use std::iter;
-
-use super::{ControlFlowGraph, GraphPredecessors, GraphSuccessors};
-
-pub struct TestGraph {
- num_nodes: usize,
- start_node: usize,
- successors: HashMap<usize, Vec<usize>>,
- predecessors: HashMap<usize, Vec<usize>>,
-}
-
-impl TestGraph {
- pub fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
- let mut graph = TestGraph {
- num_nodes: start_node + 1,
- start_node,
- successors: HashMap::new(),
- predecessors: HashMap::new(),
- };
- for &(source, target) in edges {
- graph.num_nodes = max(graph.num_nodes, source + 1);
- graph.num_nodes = max(graph.num_nodes, target + 1);
- graph.successors.entry(source).or_insert(vec![]).push(target);
- graph.predecessors.entry(target).or_insert(vec![]).push(source);
- }
- for node in 0..graph.num_nodes {
- graph.successors.entry(node).or_insert(vec![]);
- graph.predecessors.entry(node).or_insert(vec![]);
- }
- graph
- }
-}
-
-impl ControlFlowGraph for TestGraph {
- type Node = usize;
-
- fn start_node(&self) -> usize {
- self.start_node
- }
-
- fn num_nodes(&self) -> usize {
- self.num_nodes
- }
-
- fn predecessors<'graph>(&'graph self,
- node: usize)
- -> <Self as GraphPredecessors<'graph>>::Iter {
- self.predecessors[&node].iter().cloned()
- }
-
- fn successors<'graph>(&'graph self, node: usize) -> <Self as GraphSuccessors<'graph>>::Iter {
- self.successors[&node].iter().cloned()
- }
-}
-
-impl<'graph> GraphPredecessors<'graph> for TestGraph {
- type Item = usize;
- type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
-}
-
-impl<'graph> GraphSuccessors<'graph> for TestGraph {
- type Item = usize;
- type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
-}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Algorithm citation:
+//! A Simple, Fast Dominance Algorithm.
+//! Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy
+//! Rice Computer Science TS-06-33870
+//! <https://www.cs.rice.edu/~keith/EMBED/dom.pdf>
+
+use super::super::indexed_vec::{Idx, IndexVec};
+use super::iterate::reverse_post_order;
+use super::ControlFlowGraph;
+
+use std::fmt;
+
+#[cfg(test)]
+mod test;
+
+pub fn dominators<G: ControlFlowGraph>(graph: &G) -> Dominators<G::Node> {
+ let start_node = graph.start_node();
+ let rpo = reverse_post_order(graph, start_node);
+ dominators_given_rpo(graph, &rpo)
+}
+
+pub fn dominators_given_rpo<G: ControlFlowGraph>(
+ graph: &G,
+ rpo: &[G::Node],
+) -> Dominators<G::Node> {
+ let start_node = graph.start_node();
+ assert_eq!(rpo[0], start_node);
+
+ // compute the post order index (rank) for each node
+ let mut post_order_rank: IndexVec<G::Node, usize> =
+ IndexVec::from_elem_n(usize::default(), graph.num_nodes());
+ for (index, node) in rpo.iter().rev().cloned().enumerate() {
+ post_order_rank[node] = index;
+ }
+
+ let mut immediate_dominators: IndexVec<G::Node, Option<G::Node>> =
+ IndexVec::from_elem_n(Option::default(), graph.num_nodes());
+ immediate_dominators[start_node] = Some(start_node);
+
+ let mut changed = true;
+ while changed {
+ changed = false;
+
+ for &node in &rpo[1..] {
+ let mut new_idom = None;
+ for pred in graph.predecessors(node) {
+ if immediate_dominators[pred].is_some() {
+ // (*)
+ // (*) dominators for `pred` have been calculated
+ new_idom = intersect_opt(
+ &post_order_rank,
+ &immediate_dominators,
+ new_idom,
+ Some(pred),
+ );
+ }
+ }
+
+ if new_idom != immediate_dominators[node] {
+ immediate_dominators[node] = new_idom;
+ changed = true;
+ }
+ }
+ }
+
+ Dominators {
+ post_order_rank,
+ immediate_dominators,
+ }
+}
+
+fn intersect_opt<Node: Idx>(
+ post_order_rank: &IndexVec<Node, usize>,
+ immediate_dominators: &IndexVec<Node, Option<Node>>,
+ node1: Option<Node>,
+ node2: Option<Node>,
+) -> Option<Node> {
+ match (node1, node2) {
+ (None, None) => None,
+ (Some(n), None) | (None, Some(n)) => Some(n),
+ (Some(n1), Some(n2)) => Some(intersect(post_order_rank, immediate_dominators, n1, n2)),
+ }
+}
+
+fn intersect<Node: Idx>(
+ post_order_rank: &IndexVec<Node, usize>,
+ immediate_dominators: &IndexVec<Node, Option<Node>>,
+ mut node1: Node,
+ mut node2: Node,
+) -> Node {
+ while node1 != node2 {
+ while post_order_rank[node1] < post_order_rank[node2] {
+ node1 = immediate_dominators[node1].unwrap();
+ }
+
+ while post_order_rank[node2] < post_order_rank[node1] {
+ node2 = immediate_dominators[node2].unwrap();
+ }
+ }
+ return node1;
+}
+
+#[derive(Clone, Debug)]
+pub struct Dominators<N: Idx> {
+ post_order_rank: IndexVec<N, usize>,
+ immediate_dominators: IndexVec<N, Option<N>>,
+}
+
+impl<Node: Idx> Dominators<Node> {
+ pub fn is_reachable(&self, node: Node) -> bool {
+ self.immediate_dominators[node].is_some()
+ }
+
+ pub fn immediate_dominator(&self, node: Node) -> Node {
+ assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+ self.immediate_dominators[node].unwrap()
+ }
+
+ pub fn dominators(&self, node: Node) -> Iter<Node> {
+ assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+ Iter {
+ dominators: self,
+ node: Some(node),
+ }
+ }
+
+ pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {
+ // FIXME -- could be optimized by using post-order-rank
+ self.dominators(node).any(|n| n == dom)
+ }
+
+ #[cfg(test)]
+ fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
+ &self.immediate_dominators
+ }
+}
+
+pub struct Iter<'dom, Node: Idx + 'dom> {
+ dominators: &'dom Dominators<Node>,
+ node: Option<Node>,
+}
+
+impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {
+ type Item = Node;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(node) = self.node {
+ let dom = self.dominators.immediate_dominator(node);
+ if dom == node {
+ self.node = None; // reached the root
+ } else {
+ self.node = Some(dom);
+ }
+ return Some(node);
+ } else {
+ return None;
+ }
+ }
+}
+
+pub struct DominatorTree<N: Idx> {
+ root: N,
+ children: IndexVec<N, Vec<N>>,
+}
+
+impl<Node: Idx> DominatorTree<Node> {
+ pub fn children(&self, node: Node) -> &[Node] {
+ &self.children[node]
+ }
+}
+
+impl<Node: Idx> fmt::Debug for DominatorTree<Node> {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Debug::fmt(
+ &DominatorTreeNode {
+ tree: self,
+ node: self.root,
+ },
+ fmt,
+ )
+ }
+}
+
+struct DominatorTreeNode<'tree, Node: Idx> {
+ tree: &'tree DominatorTree<Node>,
+ node: Node,
+}
+
+impl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let subtrees: Vec<_> = self.tree
+ .children(self.node)
+ .iter()
+ .map(|&child| DominatorTreeNode {
+ tree: self.tree,
+ node: child,
+ })
+ .collect();
+ fmt.debug_tuple("")
+ .field(&self.node)
+ .field(&subtrees)
+ .finish()
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::test::TestGraph;
+
+use super::*;
+
+#[test]
+fn diamond() {
+ let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
+
+ let dominators = dominators(&graph);
+ let immediate_dominators = dominators.all_immediate_dominators();
+ assert_eq!(immediate_dominators[0], Some(0));
+ assert_eq!(immediate_dominators[1], Some(0));
+ assert_eq!(immediate_dominators[2], Some(0));
+ assert_eq!(immediate_dominators[3], Some(0));
+}
+
+#[test]
+fn paper() {
+ // example from the paper:
+ let graph = TestGraph::new(6,
+ &[(6, 5), (6, 4), (5, 1), (4, 2), (4, 3), (1, 2), (2, 3), (3, 2),
+ (2, 1)]);
+
+ let dominators = dominators(&graph);
+ let immediate_dominators = dominators.all_immediate_dominators();
+ assert_eq!(immediate_dominators[0], None); // <-- note that 0 is not in graph
+ assert_eq!(immediate_dominators[1], Some(6));
+ assert_eq!(immediate_dominators[2], Some(6));
+ assert_eq!(immediate_dominators[3], Some(6));
+ assert_eq!(immediate_dominators[4], Some(6));
+ assert_eq!(immediate_dominators[5], Some(6));
+ assert_eq!(immediate_dominators[6], Some(6));
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A graph module for use in dataflow, region resolution, and elsewhere.
+//!
+//! # Interface details
+//!
+//! You customize the graph by specifying a "node data" type `N` and an
+//! "edge data" type `E`. You can then later gain access (mutable or
+//! immutable) to these "user-data" bits. Currently, you can only add
+//! nodes or edges to the graph. You cannot remove or modify them once
+//! added. This could be changed if we have a need.
+//!
+//! # Implementation details
+//!
+//! The main tricky thing about this code is the way that edges are
+//! stored. The edges are stored in a central array, but they are also
+//! threaded onto two linked lists for each node, one for incoming edges
+//! and one for outgoing edges. Note that every edge is a member of some
+//! incoming list and some outgoing list. Basically you can load the
+//! first index of the linked list from the node data structures (the
+//! field `first_edge`) and then, for each edge, load the next index from
+//! the field `next_edge`). Each of those fields is an array that should
+//! be indexed by the direction (see the type `Direction`).
+
+use bitvec::BitVector;
+use std::fmt::Debug;
+use std::usize;
+use snapshot_vec::{SnapshotVec, SnapshotVecDelegate};
+
+#[cfg(test)]
+mod tests;
+
+pub struct Graph<N, E> {
+ nodes: SnapshotVec<Node<N>>,
+ edges: SnapshotVec<Edge<E>>,
+}
+
+pub struct Node<N> {
+ first_edge: [EdgeIndex; 2], // see module comment
+ pub data: N,
+}
+
+#[derive(Debug)]
+pub struct Edge<E> {
+ next_edge: [EdgeIndex; 2], // see module comment
+ source: NodeIndex,
+ target: NodeIndex,
+ pub data: E,
+}
+
+impl<N> SnapshotVecDelegate for Node<N> {
+ type Value = Node<N>;
+ type Undo = ();
+
+ fn reverse(_: &mut Vec<Node<N>>, _: ()) {}
+}
+
+impl<N> SnapshotVecDelegate for Edge<N> {
+ type Value = Edge<N>;
+ type Undo = ();
+
+ fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub struct NodeIndex(pub usize);
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub struct EdgeIndex(pub usize);
+
+pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);
+
+// Use a private field here to guarantee no more instances are created:
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub struct Direction {
+ repr: usize,
+}
+
+pub const OUTGOING: Direction = Direction { repr: 0 };
+
+pub const INCOMING: Direction = Direction { repr: 1 };
+
+impl NodeIndex {
+ /// Returns unique id (unique with respect to the graph holding associated node).
+ pub fn node_id(&self) -> usize {
+ self.0
+ }
+}
+
+impl<N: Debug, E: Debug> Graph<N, E> {
+ pub fn new() -> Graph<N, E> {
+ Graph {
+ nodes: SnapshotVec::new(),
+ edges: SnapshotVec::new(),
+ }
+ }
+
+ pub fn with_capacity(nodes: usize, edges: usize) -> Graph<N, E> {
+ Graph {
+ nodes: SnapshotVec::with_capacity(nodes),
+ edges: SnapshotVec::with_capacity(edges),
+ }
+ }
+
+ // # Simple accessors
+
+ #[inline]
+ pub fn all_nodes(&self) -> &[Node<N>] {
+ &self.nodes
+ }
+
+ #[inline]
+ pub fn len_nodes(&self) -> usize {
+ self.nodes.len()
+ }
+
+ #[inline]
+ pub fn all_edges(&self) -> &[Edge<E>] {
+ &self.edges
+ }
+
+ #[inline]
+ pub fn len_edges(&self) -> usize {
+ self.edges.len()
+ }
+
+ // # Node construction
+
+ pub fn next_node_index(&self) -> NodeIndex {
+ NodeIndex(self.nodes.len())
+ }
+
+ pub fn add_node(&mut self, data: N) -> NodeIndex {
+ let idx = self.next_node_index();
+ self.nodes.push(Node {
+ first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],
+ data,
+ });
+ idx
+ }
+
+ pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {
+ &mut self.nodes[idx.0].data
+ }
+
+ pub fn node_data(&self, idx: NodeIndex) -> &N {
+ &self.nodes[idx.0].data
+ }
+
+ pub fn node(&self, idx: NodeIndex) -> &Node<N> {
+ &self.nodes[idx.0]
+ }
+
+ // # Edge construction and queries
+
+ pub fn next_edge_index(&self) -> EdgeIndex {
+ EdgeIndex(self.edges.len())
+ }
+
+ pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {
+ debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data);
+
+ let idx = self.next_edge_index();
+
+ // read current first of the list of edges from each node
+ let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];
+ let target_first = self.nodes[target.0].first_edge[INCOMING.repr];
+
+ // create the new edge, with the previous firsts from each node
+ // as the next pointers
+ self.edges.push(Edge {
+ next_edge: [source_first, target_first],
+ source,
+ target,
+ data,
+ });
+
+ // adjust the firsts for each node target be the next object.
+ self.nodes[source.0].first_edge[OUTGOING.repr] = idx;
+ self.nodes[target.0].first_edge[INCOMING.repr] = idx;
+
+ return idx;
+ }
+
+ pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
+ &self.edges[idx.0]
+ }
+
+ // # Iterating over nodes, edges
+
+ pub fn enumerated_nodes(&self) -> impl Iterator<Item = (NodeIndex, &Node<N>)> {
+ self.nodes
+ .iter()
+ .enumerate()
+ .map(|(idx, n)| (NodeIndex(idx), n))
+ }
+
+ pub fn enumerated_edges(&self) -> impl Iterator<Item = (EdgeIndex, &Edge<E>)> {
+ self.edges
+ .iter()
+ .enumerate()
+ .map(|(idx, e)| (EdgeIndex(idx), e))
+ }
+
+ pub fn each_node<'a>(&'a self, mut f: impl FnMut(NodeIndex, &'a Node<N>) -> bool) -> bool {
+ //! Iterates over all edges defined in the graph.
+ self.enumerated_nodes()
+ .all(|(node_idx, node)| f(node_idx, node))
+ }
+
+ pub fn each_edge<'a>(&'a self, mut f: impl FnMut(EdgeIndex, &'a Edge<E>) -> bool) -> bool {
+ //! Iterates over all edges defined in the graph
+ self.enumerated_edges()
+ .all(|(edge_idx, edge)| f(edge_idx, edge))
+ }
+
+ pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
+ self.adjacent_edges(source, OUTGOING)
+ }
+
+ pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
+ self.adjacent_edges(source, INCOMING)
+ }
+
+ pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges<N, E> {
+ let first_edge = self.node(source).first_edge[direction.repr];
+ AdjacentEdges {
+ graph: self,
+ direction,
+ next: first_edge,
+ }
+ }
+
+ pub fn successor_nodes<'a>(
+ &'a self,
+ source: NodeIndex,
+ ) -> impl Iterator<Item = NodeIndex> + 'a {
+ self.outgoing_edges(source).targets()
+ }
+
+ pub fn predecessor_nodes<'a>(
+ &'a self,
+ target: NodeIndex,
+ ) -> impl Iterator<Item = NodeIndex> + 'a {
+ self.incoming_edges(target).sources()
+ }
+
+ pub fn depth_traverse<'a>(
+ &'a self,
+ start: NodeIndex,
+ direction: Direction,
+ ) -> DepthFirstTraversal<'a, N, E> {
+ DepthFirstTraversal::with_start_node(self, start, direction)
+ }
+
+ pub fn nodes_in_postorder<'a>(
+ &'a self,
+ direction: Direction,
+ entry_node: NodeIndex,
+ ) -> Vec<NodeIndex> {
+ let mut visited = BitVector::new(self.len_nodes());
+ let mut stack = vec![];
+ let mut result = Vec::with_capacity(self.len_nodes());
+ let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| {
+ if visited.insert(node.0) {
+ stack.push((node, self.adjacent_edges(node, direction)));
+ }
+ };
+
+ for node in Some(entry_node)
+ .into_iter()
+ .chain(self.enumerated_nodes().map(|(node, _)| node))
+ {
+ push_node(&mut stack, node);
+ while let Some((node, mut iter)) = stack.pop() {
+ if let Some((_, child)) = iter.next() {
+ let target = child.source_or_target(direction);
+ // the current node needs more processing, so
+ // add it back to the stack
+ stack.push((node, iter));
+ // and then push the new node
+ push_node(&mut stack, target);
+ } else {
+ result.push(node);
+ }
+ }
+ }
+
+ assert_eq!(result.len(), self.len_nodes());
+ result
+ }
+}
+
+// # Iterators
+
+pub struct AdjacentEdges<'g, N, E>
+where
+ N: 'g,
+ E: 'g,
+{
+ graph: &'g Graph<N, E>,
+ direction: Direction,
+ next: EdgeIndex,
+}
+
+impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> {
+ fn targets(self) -> impl Iterator<Item = NodeIndex> + 'g {
+ self.into_iter().map(|(_, edge)| edge.target)
+ }
+
+ fn sources(self) -> impl Iterator<Item = NodeIndex> + 'g {
+ self.into_iter().map(|(_, edge)| edge.source)
+ }
+}
+
+impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {
+ type Item = (EdgeIndex, &'g Edge<E>);
+
+ fn next(&mut self) -> Option<(EdgeIndex, &'g Edge<E>)> {
+ let edge_index = self.next;
+ if edge_index == INVALID_EDGE_INDEX {
+ return None;
+ }
+
+ let edge = self.graph.edge(edge_index);
+ self.next = edge.next_edge[self.direction.repr];
+ Some((edge_index, edge))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // At most, all the edges in the graph.
+ (0, Some(self.graph.len_edges()))
+ }
+}
+
+pub struct DepthFirstTraversal<'g, N, E>
+where
+ N: 'g,
+ E: 'g,
+{
+ graph: &'g Graph<N, E>,
+ stack: Vec<NodeIndex>,
+ visited: BitVector,
+ direction: Direction,
+}
+
+impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
+ pub fn with_start_node(
+ graph: &'g Graph<N, E>,
+ start_node: NodeIndex,
+ direction: Direction,
+ ) -> Self {
+ let mut visited = BitVector::new(graph.len_nodes());
+ visited.insert(start_node.node_id());
+ DepthFirstTraversal {
+ graph,
+ stack: vec![start_node],
+ visited,
+ direction,
+ }
+ }
+
+ fn visit(&mut self, node: NodeIndex) {
+ if self.visited.insert(node.node_id()) {
+ self.stack.push(node);
+ }
+ }
+}
+
+impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
+ type Item = NodeIndex;
+
+ fn next(&mut self) -> Option<NodeIndex> {
+ let next = self.stack.pop();
+ if let Some(idx) = next {
+ for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {
+ let target = edge.source_or_target(self.direction);
+ self.visit(target);
+ }
+ }
+ next
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // We will visit every node in the graph exactly once.
+ let remaining = self.graph.len_nodes() - self.visited.count();
+ (remaining, Some(remaining))
+ }
+}
+
+impl<'g, N: Debug, E: Debug> ExactSizeIterator for DepthFirstTraversal<'g, N, E> {}
+
+impl<E> Edge<E> {
+ pub fn source(&self) -> NodeIndex {
+ self.source
+ }
+
+ pub fn target(&self) -> NodeIndex {
+ self.target
+ }
+
+ pub fn source_or_target(&self, direction: Direction) -> NodeIndex {
+ if direction == OUTGOING {
+ self.target
+ } else {
+ self.source
+ }
+ }
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use graph::implementation::*;
+use std::fmt::Debug;
+
+type TestGraph = Graph<&'static str, &'static str>;
+
+fn create_graph() -> TestGraph {
+ let mut graph = Graph::new();
+
+ // Create a simple graph
+ //
+ // F
+ // |
+ // V
+ // A --> B --> C
+ // | ^
+ // v |
+ // D --> E
+
+ let a = graph.add_node("A");
+ let b = graph.add_node("B");
+ let c = graph.add_node("C");
+ let d = graph.add_node("D");
+ let e = graph.add_node("E");
+ let f = graph.add_node("F");
+
+ graph.add_edge(a, b, "AB");
+ graph.add_edge(b, c, "BC");
+ graph.add_edge(b, d, "BD");
+ graph.add_edge(d, e, "DE");
+ graph.add_edge(e, c, "EC");
+ graph.add_edge(f, b, "FB");
+
+ return graph;
+}
+
+#[test]
+fn each_node() {
+ let graph = create_graph();
+ let expected = ["A", "B", "C", "D", "E", "F"];
+ graph.each_node(|idx, node| {
+ assert_eq!(&expected[idx.0], graph.node_data(idx));
+ assert_eq!(expected[idx.0], node.data);
+ true
+ });
+}
+
+#[test]
+fn each_edge() {
+ let graph = create_graph();
+ let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
+ graph.each_edge(|idx, edge| {
+ assert_eq!(expected[idx.0], edge.data);
+ true
+ });
+}
+
+fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph<N, E>,
+ start_index: NodeIndex,
+ start_data: N,
+ expected_incoming: &[(E, N)],
+ expected_outgoing: &[(E, N)]) {
+ assert!(graph.node_data(start_index) == &start_data);
+
+ let mut counter = 0;
+ for (edge_index, edge) in graph.incoming_edges(start_index) {
+ assert!(counter < expected_incoming.len());
+ debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
+ counter,
+ expected_incoming[counter],
+ edge_index,
+ edge);
+ match expected_incoming[counter] {
+ (ref e, ref n) => {
+ assert!(e == &edge.data);
+ assert!(n == graph.node_data(edge.source()));
+ assert!(start_index == edge.target);
+ }
+ }
+ counter += 1;
+ }
+ assert_eq!(counter, expected_incoming.len());
+
+ let mut counter = 0;
+ for (edge_index, edge) in graph.outgoing_edges(start_index) {
+ assert!(counter < expected_outgoing.len());
+ debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
+ counter,
+ expected_outgoing[counter],
+ edge_index,
+ edge);
+ match expected_outgoing[counter] {
+ (ref e, ref n) => {
+ assert!(e == &edge.data);
+ assert!(start_index == edge.source);
+ assert!(n == graph.node_data(edge.target));
+ }
+ }
+ counter += 1;
+ }
+ assert_eq!(counter, expected_outgoing.len());
+}
+
+#[test]
+fn each_adjacent_from_a() {
+ let graph = create_graph();
+ test_adjacent_edges(&graph, NodeIndex(0), "A", &[], &[("AB", "B")]);
+}
+
+#[test]
+fn each_adjacent_from_b() {
+ let graph = create_graph();
+ test_adjacent_edges(&graph,
+ NodeIndex(1),
+ "B",
+ &[("FB", "F"), ("AB", "A")],
+ &[("BD", "D"), ("BC", "C")]);
+}
+
+#[test]
+fn each_adjacent_from_c() {
+ let graph = create_graph();
+ test_adjacent_edges(&graph, NodeIndex(2), "C", &[("EC", "E"), ("BC", "B")], &[]);
+}
+
+#[test]
+fn each_adjacent_from_d() {
+ let graph = create_graph();
+ test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]);
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::indexed_vec::IndexVec;
+use super::{DirectedGraph, WithSuccessors, WithNumNodes};
+
+#[cfg(test)]
+mod test;
+
+pub fn post_order_from<G: DirectedGraph + WithSuccessors + WithNumNodes>(
+ graph: &G,
+ start_node: G::Node,
+) -> Vec<G::Node> {
+ post_order_from_to(graph, start_node, None)
+}
+
+pub fn post_order_from_to<G: DirectedGraph + WithSuccessors + WithNumNodes>(
+ graph: &G,
+ start_node: G::Node,
+ end_node: Option<G::Node>,
+) -> Vec<G::Node> {
+ let mut visited: IndexVec<G::Node, bool> = IndexVec::from_elem_n(false, graph.num_nodes());
+ let mut result: Vec<G::Node> = Vec::with_capacity(graph.num_nodes());
+ if let Some(end_node) = end_node {
+ visited[end_node] = true;
+ }
+ post_order_walk(graph, start_node, &mut result, &mut visited);
+ result
+}
+
+fn post_order_walk<G: DirectedGraph + WithSuccessors + WithNumNodes>(
+ graph: &G,
+ node: G::Node,
+ result: &mut Vec<G::Node>,
+ visited: &mut IndexVec<G::Node, bool>,
+) {
+ if visited[node] {
+ return;
+ }
+ visited[node] = true;
+
+ for successor in graph.successors(node) {
+ post_order_walk(graph, successor, result, visited);
+ }
+
+ result.push(node);
+}
+
+pub fn reverse_post_order<G: DirectedGraph + WithSuccessors + WithNumNodes>(
+ graph: &G,
+ start_node: G::Node,
+) -> Vec<G::Node> {
+ let mut vec = post_order_from(graph, start_node);
+ vec.reverse();
+ vec
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::test::TestGraph;
+
+use super::*;
+
+#[test]
+fn diamond_post_order() {
+ let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
+
+ let result = post_order_from(&graph, 0);
+ assert_eq!(result, vec![3, 1, 2, 0]);
+}
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! A graph module for use in dataflow, region resolution, and elsewhere.
-//!
-//! # Interface details
-//!
-//! You customize the graph by specifying a "node data" type `N` and an
-//! "edge data" type `E`. You can then later gain access (mutable or
-//! immutable) to these "user-data" bits. Currently, you can only add
-//! nodes or edges to the graph. You cannot remove or modify them once
-//! added. This could be changed if we have a need.
-//!
-//! # Implementation details
-//!
-//! The main tricky thing about this code is the way that edges are
-//! stored. The edges are stored in a central array, but they are also
-//! threaded onto two linked lists for each node, one for incoming edges
-//! and one for outgoing edges. Note that every edge is a member of some
-//! incoming list and some outgoing list. Basically you can load the
-//! first index of the linked list from the node data structures (the
-//! field `first_edge`) and then, for each edge, load the next index from
-//! the field `next_edge`). Each of those fields is an array that should
-//! be indexed by the direction (see the type `Direction`).
+use super::indexed_vec::Idx;
-use bitvec::BitVector;
-use std::fmt::Debug;
-use std::usize;
-use snapshot_vec::{SnapshotVec, SnapshotVecDelegate};
+pub mod dominators;
+pub mod implementation;
+pub mod iterate;
+mod reference;
+pub mod scc;
#[cfg(test)]
-mod tests;
+mod test;
-pub struct Graph<N, E> {
- nodes: SnapshotVec<Node<N>>,
- edges: SnapshotVec<Edge<E>>,
+pub trait DirectedGraph {
+ type Node: Idx;
}
-pub struct Node<N> {
- first_edge: [EdgeIndex; 2], // see module comment
- pub data: N,
+pub trait WithNumNodes: DirectedGraph {
+ fn num_nodes(&self) -> usize;
}
-#[derive(Debug)]
-pub struct Edge<E> {
- next_edge: [EdgeIndex; 2], // see module comment
- source: NodeIndex,
- target: NodeIndex,
- pub data: E,
-}
-
-impl<N> SnapshotVecDelegate for Node<N> {
- type Value = Node<N>;
- type Undo = ();
-
- fn reverse(_: &mut Vec<Node<N>>, _: ()) {}
-}
-
-impl<N> SnapshotVecDelegate for Edge<N> {
- type Value = Edge<N>;
- type Undo = ();
-
- fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct NodeIndex(pub usize);
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct EdgeIndex(pub usize);
-
-pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);
-
-// Use a private field here to guarantee no more instances are created:
-#[derive(Copy, Clone, Debug, PartialEq)]
-pub struct Direction {
- repr: usize,
-}
-
-pub const OUTGOING: Direction = Direction { repr: 0 };
-
-pub const INCOMING: Direction = Direction { repr: 1 };
-
-impl NodeIndex {
- /// Returns unique id (unique with respect to the graph holding associated node).
- pub fn node_id(&self) -> usize {
- self.0
- }
-}
-
-impl<N: Debug, E: Debug> Graph<N, E> {
- pub fn new() -> Graph<N, E> {
- Graph {
- nodes: SnapshotVec::new(),
- edges: SnapshotVec::new(),
- }
- }
-
- pub fn with_capacity(nodes: usize, edges: usize) -> Graph<N, E> {
- Graph {
- nodes: SnapshotVec::with_capacity(nodes),
- edges: SnapshotVec::with_capacity(edges),
- }
- }
-
- // # Simple accessors
-
- #[inline]
- pub fn all_nodes(&self) -> &[Node<N>] {
- &self.nodes
- }
-
- #[inline]
- pub fn len_nodes(&self) -> usize {
- self.nodes.len()
- }
-
- #[inline]
- pub fn all_edges(&self) -> &[Edge<E>] {
- &self.edges
- }
-
- #[inline]
- pub fn len_edges(&self) -> usize {
- self.edges.len()
- }
-
- // # Node construction
-
- pub fn next_node_index(&self) -> NodeIndex {
- NodeIndex(self.nodes.len())
- }
-
- pub fn add_node(&mut self, data: N) -> NodeIndex {
- let idx = self.next_node_index();
- self.nodes.push(Node {
- first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],
- data,
- });
- idx
- }
-
- pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {
- &mut self.nodes[idx.0].data
- }
-
- pub fn node_data(&self, idx: NodeIndex) -> &N {
- &self.nodes[idx.0].data
- }
-
- pub fn node(&self, idx: NodeIndex) -> &Node<N> {
- &self.nodes[idx.0]
- }
-
- // # Edge construction and queries
-
- pub fn next_edge_index(&self) -> EdgeIndex {
- EdgeIndex(self.edges.len())
- }
-
- pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {
- debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data);
-
- let idx = self.next_edge_index();
-
- // read current first of the list of edges from each node
- let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];
- let target_first = self.nodes[target.0].first_edge[INCOMING.repr];
-
- // create the new edge, with the previous firsts from each node
- // as the next pointers
- self.edges.push(Edge {
- next_edge: [source_first, target_first],
- source,
- target,
- data,
- });
-
- // adjust the firsts for each node target be the next object.
- self.nodes[source.0].first_edge[OUTGOING.repr] = idx;
- self.nodes[target.0].first_edge[INCOMING.repr] = idx;
-
- return idx;
- }
-
- pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
- &self.edges[idx.0]
- }
-
- // # Iterating over nodes, edges
-
- pub fn enumerated_nodes(&self) -> impl Iterator<Item = (NodeIndex, &Node<N>)> {
- self.nodes
- .iter()
- .enumerate()
- .map(|(idx, n)| (NodeIndex(idx), n))
- }
-
- pub fn enumerated_edges(&self) -> impl Iterator<Item = (EdgeIndex, &Edge<E>)> {
- self.edges
- .iter()
- .enumerate()
- .map(|(idx, e)| (EdgeIndex(idx), e))
- }
-
- pub fn each_node<'a>(&'a self, mut f: impl FnMut(NodeIndex, &'a Node<N>) -> bool) -> bool {
- //! Iterates over all edges defined in the graph.
- self.enumerated_nodes()
- .all(|(node_idx, node)| f(node_idx, node))
- }
-
- pub fn each_edge<'a>(&'a self, mut f: impl FnMut(EdgeIndex, &'a Edge<E>) -> bool) -> bool {
- //! Iterates over all edges defined in the graph
- self.enumerated_edges()
- .all(|(edge_idx, edge)| f(edge_idx, edge))
- }
-
- pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
- self.adjacent_edges(source, OUTGOING)
- }
-
- pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
- self.adjacent_edges(source, INCOMING)
- }
-
- pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges<N, E> {
- let first_edge = self.node(source).first_edge[direction.repr];
- AdjacentEdges {
- graph: self,
- direction,
- next: first_edge,
- }
- }
-
- pub fn successor_nodes<'a>(
- &'a self,
- source: NodeIndex,
- ) -> impl Iterator<Item = NodeIndex> + 'a {
- self.outgoing_edges(source).targets()
- }
-
- pub fn predecessor_nodes<'a>(
- &'a self,
- target: NodeIndex,
- ) -> impl Iterator<Item = NodeIndex> + 'a {
- self.incoming_edges(target).sources()
- }
-
- pub fn depth_traverse<'a>(
- &'a self,
- start: NodeIndex,
- direction: Direction,
- ) -> DepthFirstTraversal<'a, N, E> {
- DepthFirstTraversal::with_start_node(self, start, direction)
- }
-
- pub fn nodes_in_postorder<'a>(
- &'a self,
- direction: Direction,
- entry_node: NodeIndex,
- ) -> Vec<NodeIndex> {
- let mut visited = BitVector::new(self.len_nodes());
- let mut stack = vec![];
- let mut result = Vec::with_capacity(self.len_nodes());
- let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| {
- if visited.insert(node.0) {
- stack.push((node, self.adjacent_edges(node, direction)));
- }
- };
-
- for node in Some(entry_node)
- .into_iter()
- .chain(self.enumerated_nodes().map(|(node, _)| node))
- {
- push_node(&mut stack, node);
- while let Some((node, mut iter)) = stack.pop() {
- if let Some((_, child)) = iter.next() {
- let target = child.source_or_target(direction);
- // the current node needs more processing, so
- // add it back to the stack
- stack.push((node, iter));
- // and then push the new node
- push_node(&mut stack, target);
- } else {
- result.push(node);
- }
- }
- }
-
- assert_eq!(result.len(), self.len_nodes());
- result
- }
-}
-
-// # Iterators
-
-pub struct AdjacentEdges<'g, N, E>
+pub trait WithSuccessors: DirectedGraph
where
- N: 'g,
- E: 'g,
+ Self: for<'graph> GraphSuccessors<'graph, Item = <Self as DirectedGraph>::Node>,
{
- graph: &'g Graph<N, E>,
- direction: Direction,
- next: EdgeIndex,
+ fn successors<'graph>(
+ &'graph self,
+ node: Self::Node,
+ ) -> <Self as GraphSuccessors<'graph>>::Iter;
}
-impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> {
- fn targets(self) -> impl Iterator<Item = NodeIndex> + 'g {
- self.into_iter().map(|(_, edge)| edge.target)
- }
-
- fn sources(self) -> impl Iterator<Item = NodeIndex> + 'g {
- self.into_iter().map(|(_, edge)| edge.source)
- }
+pub trait GraphSuccessors<'graph> {
+ type Item;
+ type Iter: Iterator<Item = Self::Item>;
}
-impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {
- type Item = (EdgeIndex, &'g Edge<E>);
-
- fn next(&mut self) -> Option<(EdgeIndex, &'g Edge<E>)> {
- let edge_index = self.next;
- if edge_index == INVALID_EDGE_INDEX {
- return None;
- }
-
- let edge = self.graph.edge(edge_index);
- self.next = edge.next_edge[self.direction.repr];
- Some((edge_index, edge))
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- // At most, all the edges in the graph.
- (0, Some(self.graph.len_edges()))
- }
-}
-
-pub struct DepthFirstTraversal<'g, N, E>
+pub trait WithPredecessors: DirectedGraph
where
- N: 'g,
- E: 'g,
+ Self: for<'graph> GraphPredecessors<'graph, Item = <Self as DirectedGraph>::Node>,
{
- graph: &'g Graph<N, E>,
- stack: Vec<NodeIndex>,
- visited: BitVector,
- direction: Direction,
+ fn predecessors<'graph>(
+ &'graph self,
+ node: Self::Node,
+ ) -> <Self as GraphPredecessors<'graph>>::Iter;
}
-impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
- pub fn with_start_node(
- graph: &'g Graph<N, E>,
- start_node: NodeIndex,
- direction: Direction,
- ) -> Self {
- let mut visited = BitVector::new(graph.len_nodes());
- visited.insert(start_node.node_id());
- DepthFirstTraversal {
- graph,
- stack: vec![start_node],
- visited,
- direction,
- }
- }
-
- fn visit(&mut self, node: NodeIndex) {
- if self.visited.insert(node.node_id()) {
- self.stack.push(node);
- }
- }
+pub trait GraphPredecessors<'graph> {
+ type Item;
+ type Iter: Iterator<Item = Self::Item>;
}
-impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
- type Item = NodeIndex;
-
- fn next(&mut self) -> Option<NodeIndex> {
- let next = self.stack.pop();
- if let Some(idx) = next {
- for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {
- let target = edge.source_or_target(self.direction);
- self.visit(target);
- }
- }
- next
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- // We will visit every node in the graph exactly once.
- let remaining = self.graph.len_nodes() - self.visited.count();
- (remaining, Some(remaining))
- }
+pub trait WithStartNode: DirectedGraph {
+ fn start_node(&self) -> Self::Node;
}
-impl<'g, N: Debug, E: Debug> ExactSizeIterator for DepthFirstTraversal<'g, N, E> {}
-
-impl<E> Edge<E> {
- pub fn source(&self) -> NodeIndex {
- self.source
- }
-
- pub fn target(&self) -> NodeIndex {
- self.target
- }
+pub trait ControlFlowGraph:
+ DirectedGraph + WithStartNode + WithPredecessors + WithStartNode + WithSuccessors + WithNumNodes
+{
+ // convenient trait
+}
- pub fn source_or_target(&self, direction: Direction) -> NodeIndex {
- if direction == OUTGOING {
- self.target
- } else {
- self.source
- }
- }
+impl<T> ControlFlowGraph for T
+where
+ T: DirectedGraph
+ + WithStartNode
+ + WithPredecessors
+ + WithStartNode
+ + WithSuccessors
+ + WithNumNodes,
+{
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::*;
+
+impl<'graph, G: DirectedGraph> DirectedGraph for &'graph G {
+ type Node = G::Node;
+}
+
+impl<'graph, G: WithNumNodes> WithNumNodes for &'graph G {
+ fn num_nodes(&self) -> usize {
+ (**self).num_nodes()
+ }
+}
+
+impl<'graph, G: WithStartNode> WithStartNode for &'graph G {
+ fn start_node(&self) -> Self::Node {
+ (**self).start_node()
+ }
+}
+
+impl<'graph, G: WithSuccessors> WithSuccessors for &'graph G {
+ fn successors<'iter>(&'iter self, node: Self::Node) -> <Self as GraphSuccessors<'iter>>::Iter {
+ (**self).successors(node)
+ }
+}
+
+impl<'graph, G: WithPredecessors> WithPredecessors for &'graph G {
+ fn predecessors<'iter>(&'iter self,
+ node: Self::Node)
+ -> <Self as GraphPredecessors<'iter>>::Iter {
+ (**self).predecessors(node)
+ }
+}
+
+impl<'iter, 'graph, G: WithPredecessors> GraphPredecessors<'iter> for &'graph G {
+ type Item = G::Node;
+ type Iter = <G as GraphPredecessors<'iter>>::Iter;
+}
+
+impl<'iter, 'graph, G: WithSuccessors> GraphSuccessors<'iter> for &'graph G {
+ type Item = G::Node;
+ type Iter = <G as GraphSuccessors<'iter>>::Iter;
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Routine to compute the strongly connected components (SCCs) of a
+//! graph, as well as the resulting DAG if each SCC is replaced with a
+//! node in the graph. This uses Tarjan's algorithm that completes in
+//! O(n) time.
+
+use fx::FxHashSet;
+use graph::{DirectedGraph, WithNumNodes, WithSuccessors};
+use indexed_vec::{Idx, IndexVec};
+use std::ops::Range;
+
+mod test;
+
+/// Strongly connected components (SCC) of a graph. The type `N` is
+/// the index type for the graph nodes and `S` is the index type for
+/// the SCCs. We can map from each node to the SCC that it
+/// participates in, and we also have the successors of each SCC.
+pub struct Sccs<N: Idx, S: Idx> {
+ /// For each node, what is the SCC index of the SCC to which it
+ /// belongs.
+ scc_indices: IndexVec<N, S>,
+
+ /// Data about each SCC.
+ scc_data: SccData<S>,
+}
+
+struct SccData<S: Idx> {
+ /// For each SCC, the range of `all_successors` where its
+ /// successors can be found.
+ ranges: IndexVec<S, Range<usize>>,
+
+    /// Contains the successors for all the Sccs, concatenated. The
+ /// range of indices corresponding to a given SCC is found in its
+ /// SccData.
+ all_successors: Vec<S>,
+}
+
+impl<N: Idx, S: Idx> Sccs<N, S> {
+ pub fn new(graph: &(impl DirectedGraph<Node = N> + WithNumNodes + WithSuccessors)) -> Self {
+ SccsConstruction::construct(graph)
+ }
+
+ /// Returns the number of SCCs in the graph.
+ pub fn num_sccs(&self) -> usize {
+ self.scc_data.len()
+ }
+
+ /// Returns an iterator over the SCCs in the graph.
+ pub fn all_sccs(&self) -> impl Iterator<Item = S> {
+ (0 .. self.scc_data.len()).map(S::new)
+ }
+
+ /// Returns the SCC to which a node `r` belongs.
+ pub fn scc(&self, r: N) -> S {
+ self.scc_indices[r]
+ }
+
+ /// Returns the successors of the given SCC.
+ pub fn successors(&self, scc: S) -> &[S] {
+ self.scc_data.successors(scc)
+ }
+}
+
+impl<S: Idx> SccData<S> {
+    /// Number of SCCs.
+ fn len(&self) -> usize {
+ self.ranges.len()
+ }
+
+ /// Returns the successors of the given SCC.
+ fn successors(&self, scc: S) -> &[S] {
+ // Annoyingly, `range` does not implement `Copy`, so we have
+ // to do `range.start..range.end`:
+ let range = &self.ranges[scc];
+ &self.all_successors[range.start..range.end]
+ }
+
+ /// Creates a new SCC with `successors` as its successors and
+ /// returns the resulting index.
+ fn create_scc(&mut self, successors: impl IntoIterator<Item = S>) -> S {
+ // Store the successors on `scc_successors_vec`, remembering
+ // the range of indices.
+ let all_successors_start = self.all_successors.len();
+ self.all_successors.extend(successors);
+ let all_successors_end = self.all_successors.len();
+
+ debug!(
+ "create_scc({:?}) successors={:?}",
+ self.ranges.len(),
+ &self.all_successors[all_successors_start..all_successors_end],
+ );
+
+ self.ranges.push(all_successors_start..all_successors_end)
+ }
+}
+
+struct SccsConstruction<'c, G: DirectedGraph + WithNumNodes + WithSuccessors + 'c, S: Idx> {
+ graph: &'c G,
+
+ /// The state of each node; used during walk to record the stack
+ /// and after walk to record what cycle each node ended up being
+ /// in.
+ node_states: IndexVec<G::Node, NodeState<G::Node, S>>,
+
+ /// The stack of nodes that we are visiting as part of the DFS.
+ node_stack: Vec<G::Node>,
+
+ /// The stack of successors: as we visit a node, we mark our
+ /// position in this stack, and when we encounter a successor SCC,
+ /// we push it on the stack. When we complete an SCC, we can pop
+ /// everything off the stack that was found along the way.
+ successors_stack: Vec<S>,
+
+ /// A set used to strip duplicates. As we accumulate successors
+ /// into the successors_stack, we sometimes get duplicate entries.
+ /// We use this set to remove those -- we also keep its storage
+ /// around between successors to amortize memory allocation costs.
+ duplicate_set: FxHashSet<S>,
+
+ scc_data: SccData<S>,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum NodeState<N, S> {
+ /// This node has not yet been visited as part of the DFS.
+ ///
+ /// After SCC construction is complete, this state ought to be
+ /// impossible.
+ NotVisited,
+
+    /// This node is currently being walked as part of our DFS. It is on
+ /// the stack at the depth `depth`.
+ ///
+ /// After SCC construction is complete, this state ought to be
+ /// impossible.
+ BeingVisited { depth: usize },
+
+ /// Indicates that this node is a member of the given cycle.
+ InCycle { scc_index: S },
+
+ /// Indicates that this node is a member of whatever cycle
+ /// `parent` is a member of. This state is transient: whenever we
+ /// see it, we try to overwrite it with the current state of
+ /// `parent` (this is the "path compression" step of a union-find
+ /// algorithm).
+ InCycleWith { parent: N },
+}
+
+#[derive(Copy, Clone, Debug)]
+enum WalkReturn<S> {
+ Cycle { min_depth: usize },
+ Complete { scc_index: S },
+}
+
+impl<'c, G, S> SccsConstruction<'c, G, S>
+where
+ G: DirectedGraph + WithNumNodes + WithSuccessors,
+ S: Idx,
+{
+ /// Identifies SCCs in the graph `G` and computes the resulting
+ /// DAG. This uses a variant of [Tarjan's
+ /// algorithm][wikipedia]. The high-level summary of the algorithm
+ /// is that we do a depth-first search. Along the way, we keep a
+ /// stack of each node whose successors are being visited. We
+ /// track the depth of each node on this stack (there is no depth
+ /// if the node is not on the stack). When we find that some node
+ /// N with depth D can reach some other node N' with lower depth
+ /// D' (i.e., D' < D), we know that N, N', and all nodes in
+ /// between them on the stack are part of an SCC.
+ ///
+ /// [wikipedia]: https://bit.ly/2EZIx84
+ fn construct(graph: &'c G) -> Sccs<G::Node, S> {
+ let num_nodes = graph.num_nodes();
+
+ let mut this = Self {
+ graph,
+ node_states: IndexVec::from_elem_n(NodeState::NotVisited, num_nodes),
+ node_stack: Vec::with_capacity(num_nodes),
+ successors_stack: Vec::new(),
+ scc_data: SccData {
+ ranges: IndexVec::new(),
+ all_successors: Vec::new(),
+ },
+ duplicate_set: FxHashSet::default(),
+ };
+
+ let scc_indices = (0..num_nodes)
+ .map(G::Node::new)
+ .map(|node| match this.walk_node(0, node) {
+ WalkReturn::Complete { scc_index } => scc_index,
+ WalkReturn::Cycle { min_depth } => panic!(
+ "`walk_node(0, {:?})` returned cycle with depth {:?}",
+ node, min_depth
+ ),
+ })
+ .collect();
+
+ Sccs {
+ scc_indices,
+ scc_data: this.scc_data,
+ }
+ }
+
+ /// Visit a node during the DFS. We first examine its current
+ /// state -- if it is not yet visited (`NotVisited`), we can push
+ /// it onto the stack and start walking its successors.
+ ///
+ /// If it is already on the DFS stack it will be in the state
+ /// `BeingVisited`. In that case, we have found a cycle and we
+ /// return the depth from the stack.
+ ///
+ /// Otherwise, we are looking at a node that has already been
+ /// completely visited. We therefore return `WalkReturn::Complete`
+ /// with its associated SCC index.
+ fn walk_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {
+ debug!("walk_node(depth = {:?}, node = {:?})", depth, node);
+ match self.find_state(node) {
+ NodeState::InCycle { scc_index } => WalkReturn::Complete { scc_index },
+
+ NodeState::BeingVisited { depth: min_depth } => WalkReturn::Cycle { min_depth },
+
+ NodeState::NotVisited => self.walk_unvisited_node(depth, node),
+
+ NodeState::InCycleWith { parent } => panic!(
+ "`find_state` returned `InCycleWith({:?})`, which ought to be impossible",
+ parent
+ ),
+ }
+ }
+
+ /// Fetches the state of the node `r`. If `r` is recorded as being
+ /// in a cycle with some other node `r2`, then fetches the state
+ /// of `r2` (and updates `r` to reflect current result). This is
+ /// basically the "find" part of a standard union-find algorithm
+ /// (with path compression).
+ fn find_state(&mut self, r: G::Node) -> NodeState<G::Node, S> {
+ debug!("find_state(r = {:?} in state {:?})", r, self.node_states[r]);
+ match self.node_states[r] {
+ NodeState::InCycle { scc_index } => NodeState::InCycle { scc_index },
+ NodeState::BeingVisited { depth } => NodeState::BeingVisited { depth },
+ NodeState::NotVisited => NodeState::NotVisited,
+ NodeState::InCycleWith { parent } => {
+ let parent_state = self.find_state(parent);
+ debug!("find_state: parent_state = {:?}", parent_state);
+ match parent_state {
+ NodeState::InCycle { .. } => {
+ self.node_states[r] = parent_state;
+ parent_state
+ }
+
+ NodeState::BeingVisited { depth } => {
+ self.node_states[r] = NodeState::InCycleWith {
+ parent: self.node_stack[depth],
+ };
+ parent_state
+ }
+
+ NodeState::NotVisited | NodeState::InCycleWith { .. } => {
+ panic!("invalid parent state: {:?}", parent_state)
+ }
+ }
+ }
+ }
+ }
+
+ /// Walks a node that has never been visited before.
+ fn walk_unvisited_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {
+ debug!(
+ "walk_unvisited_node(depth = {:?}, node = {:?})",
+ depth, node
+ );
+
+ debug_assert!(match self.node_states[node] {
+ NodeState::NotVisited => true,
+ _ => false,
+ });
+
+ // Push `node` onto the stack.
+ self.node_states[node] = NodeState::BeingVisited { depth };
+ self.node_stack.push(node);
+
+ // Walk each successor of the node, looking to see if any of
+ // them can reach a node that is presently on the stack. If
+ // so, that means they can also reach us.
+ let mut min_depth = depth;
+ let mut min_cycle_root = node;
+ let successors_len = self.successors_stack.len();
+ for successor_node in self.graph.successors(node) {
+ debug!(
+ "walk_unvisited_node: node = {:?} successor_ode = {:?}",
+ node, successor_node
+ );
+ match self.walk_node(depth + 1, successor_node) {
+ WalkReturn::Cycle {
+ min_depth: successor_min_depth,
+ } => {
+ // Track the minimum depth we can reach.
+ assert!(successor_min_depth <= depth);
+ if successor_min_depth < min_depth {
+ debug!(
+ "walk_unvisited_node: node = {:?} successor_min_depth = {:?}",
+ node, successor_min_depth
+ );
+ min_depth = successor_min_depth;
+ min_cycle_root = successor_node;
+ }
+ }
+
+ WalkReturn::Complete {
+ scc_index: successor_scc_index,
+ } => {
+ // Push the completed SCC indices onto
+ // the `successors_stack` for later.
+ debug!(
+ "walk_unvisited_node: node = {:?} successor_scc_index = {:?}",
+ node, successor_scc_index
+ );
+ self.successors_stack.push(successor_scc_index);
+ }
+ }
+ }
+
+ // Completed walk, remove `node` from the stack.
+ let r = self.node_stack.pop();
+ debug_assert_eq!(r, Some(node));
+
+ // If `min_depth == depth`, then we are the root of the
+ // cycle: we can't reach anyone further down the stack.
+ if min_depth == depth {
+ // Note that successor stack may have duplicates, so we
+ // want to remove those:
+ let deduplicated_successors = {
+ let duplicate_set = &mut self.duplicate_set;
+ duplicate_set.clear();
+ self.successors_stack
+ .drain(successors_len..)
+ .filter(move |&i| duplicate_set.insert(i))
+ };
+ let scc_index = self.scc_data.create_scc(deduplicated_successors);
+ self.node_states[node] = NodeState::InCycle { scc_index };
+ WalkReturn::Complete { scc_index }
+ } else {
+ // We are not the head of the cycle. Return back to our
+ // caller. They will take ownership of the
+ // `self.successors` data that we pushed.
+ self.node_states[node] = NodeState::InCycleWith {
+ parent: min_cycle_root,
+ };
+ WalkReturn::Cycle { min_depth }
+ }
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![cfg(test)]
+
+use graph::test::TestGraph;
+use super::*;
+
+#[test]
+fn diamond() {
+ let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
+ let sccs: Sccs<_, usize> = Sccs::new(&graph);
+ assert_eq!(sccs.num_sccs(), 4);
+ assert_eq!(sccs.num_sccs(), 4);
+}
+
+#[test]
+fn test_big_scc() {
+ // The order in which things will be visited is important to this
+ // test.
+ //
+ // We will visit:
+ //
+ // 0 -> 1 -> 2 -> 0
+ //
+ // and at this point detect a cycle. 2 will return back to 1 which
+ // will visit 3. 3 will visit 2 before the cycle is complete, and
+ // hence it too will return a cycle.
+
+ /*
++-> 0
+| |
+| v
+| 1 -> 3
+| | |
+| v |
++-- 2 <--+
+ */
+ let graph = TestGraph::new(0, &[
+ (0, 1),
+ (1, 2),
+ (1, 3),
+ (2, 0),
+ (3, 2),
+ ]);
+ let sccs: Sccs<_, usize> = Sccs::new(&graph);
+ assert_eq!(sccs.num_sccs(), 1);
+}
+
+#[test]
+fn test_three_sccs() {
+ /*
+ 0
+ |
+ v
++-> 1 3
+| | |
+| v |
++-- 2 <--+
+ */
+ let graph = TestGraph::new(0, &[
+ (0, 1),
+ (1, 2),
+ (2, 1),
+ (3, 2),
+ ]);
+ let sccs: Sccs<_, usize> = Sccs::new(&graph);
+ assert_eq!(sccs.num_sccs(), 3);
+ assert_eq!(sccs.scc(0), 1);
+ assert_eq!(sccs.scc(1), 0);
+ assert_eq!(sccs.scc(2), 0);
+ assert_eq!(sccs.scc(3), 2);
+ assert_eq!(sccs.successors(0), &[]);
+ assert_eq!(sccs.successors(1), &[0]);
+ assert_eq!(sccs.successors(2), &[0]);
+}
+
+#[test]
+fn test_find_state_2() {
+ // The order in which things will be visited is important to this
+ // test. It tests part of the `find_state` behavior. Here is the
+ // graph:
+ //
+ //
+ // /----+
+ // 0 <--+ |
+ // | | |
+ // v | |
+ // +-> 1 -> 3 4
+ // | | |
+ // | v |
+ // +-- 2 <----+
+
+ let graph = TestGraph::new(0, &[
+ (0, 1),
+ (0, 4),
+ (1, 2),
+ (1, 3),
+ (2, 1),
+ (3, 0),
+ (4, 2),
+ ]);
+
+ // For this graph, we will start in our DFS by visiting:
+ //
+ // 0 -> 1 -> 2 -> 1
+ //
+ // and at this point detect a cycle. The state of 2 will thus be
+ // `InCycleWith { 1 }`. We will then visit the 1 -> 3 edge, which
+ // will attempt to visit 0 as well, thus going to the state
+ // `InCycleWith { 0 }`. Finally, node 1 will complete; the lowest
+ // depth of any successor was 3 which had depth 0, and thus it
+ // will be in the state `InCycleWith { 3 }`.
+ //
+ // When we finally traverse the `0 -> 4` edge and then visit node 2,
+ // the states of the nodes are:
+ //
+ // 0 BeingVisited { 0 }
+ // 1 InCycleWith { 3 }
+ // 2 InCycleWith { 1 }
+ // 3 InCycleWith { 0 }
+ //
+ // and hence 4 will traverse the links, finding an ultimate depth of 0.
+    // It will also collapse the states to the following:
+ //
+ // 0 BeingVisited { 0 }
+ // 1 InCycleWith { 3 }
+ // 2 InCycleWith { 1 }
+ // 3 InCycleWith { 0 }
+
+ let sccs: Sccs<_, usize> = Sccs::new(&graph);
+ assert_eq!(sccs.num_sccs(), 1);
+ assert_eq!(sccs.scc(0), 0);
+ assert_eq!(sccs.scc(1), 0);
+ assert_eq!(sccs.scc(2), 0);
+ assert_eq!(sccs.scc(3), 0);
+ assert_eq!(sccs.scc(4), 0);
+ assert_eq!(sccs.successors(0), &[]);
+}
+
+#[test]
+fn test_find_state_3() {
+ /*
+ /----+
+ 0 <--+ |
+ | | |
+ v | |
++-> 1 -> 3 4 5
+| | | |
+| v | |
++-- 2 <----+-+
+ */
+ let graph = TestGraph::new(0, &[
+ (0, 1),
+ (0, 4),
+ (1, 2),
+ (1, 3),
+ (2, 1),
+ (3, 0),
+ (4, 2),
+ (5, 2),
+ ]);
+ let sccs: Sccs<_, usize> = Sccs::new(&graph);
+ assert_eq!(sccs.num_sccs(), 2);
+ assert_eq!(sccs.scc(0), 0);
+ assert_eq!(sccs.scc(1), 0);
+ assert_eq!(sccs.scc(2), 0);
+ assert_eq!(sccs.scc(3), 0);
+ assert_eq!(sccs.scc(4), 0);
+ assert_eq!(sccs.scc(5), 1);
+ assert_eq!(sccs.successors(0), &[]);
+ assert_eq!(sccs.successors(1), &[0]);
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::collections::HashMap;
+use std::cmp::max;
+use std::slice;
+use std::iter;
+
+use super::*;
+
+pub struct TestGraph {
+ num_nodes: usize,
+ start_node: usize,
+ successors: HashMap<usize, Vec<usize>>,
+ predecessors: HashMap<usize, Vec<usize>>,
+}
+
+impl TestGraph {
+ pub fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
+ let mut graph = TestGraph {
+ num_nodes: start_node + 1,
+ start_node,
+ successors: HashMap::new(),
+ predecessors: HashMap::new(),
+ };
+ for &(source, target) in edges {
+ graph.num_nodes = max(graph.num_nodes, source + 1);
+ graph.num_nodes = max(graph.num_nodes, target + 1);
+ graph.successors.entry(source).or_insert(vec![]).push(target);
+ graph.predecessors.entry(target).or_insert(vec![]).push(source);
+ }
+ for node in 0..graph.num_nodes {
+ graph.successors.entry(node).or_insert(vec![]);
+ graph.predecessors.entry(node).or_insert(vec![]);
+ }
+ graph
+ }
+}
+
+impl DirectedGraph for TestGraph {
+ type Node = usize;
+}
+
+impl WithStartNode for TestGraph {
+ fn start_node(&self) -> usize {
+ self.start_node
+ }
+}
+
+impl WithNumNodes for TestGraph {
+ fn num_nodes(&self) -> usize {
+ self.num_nodes
+ }
+}
+
+impl WithPredecessors for TestGraph {
+ fn predecessors<'graph>(&'graph self,
+ node: usize)
+ -> <Self as GraphPredecessors<'graph>>::Iter {
+ self.predecessors[&node].iter().cloned()
+ }
+}
+
+impl WithSuccessors for TestGraph {
+ fn successors<'graph>(&'graph self, node: usize) -> <Self as GraphSuccessors<'graph>>::Iter {
+ self.successors[&node].iter().cloned()
+ }
+}
+
+impl<'graph> GraphPredecessors<'graph> for TestGraph {
+ type Item = usize;
+ type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
+}
+
+impl<'graph> GraphSuccessors<'graph> for TestGraph {
+ type Item = usize;
+ type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
+}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use graph::*;
-use std::fmt::Debug;
-
-type TestGraph = Graph<&'static str, &'static str>;
-
-fn create_graph() -> TestGraph {
- let mut graph = Graph::new();
-
- // Create a simple graph
- //
- // F
- // |
- // V
- // A --> B --> C
- // | ^
- // v |
- // D --> E
-
- let a = graph.add_node("A");
- let b = graph.add_node("B");
- let c = graph.add_node("C");
- let d = graph.add_node("D");
- let e = graph.add_node("E");
- let f = graph.add_node("F");
-
- graph.add_edge(a, b, "AB");
- graph.add_edge(b, c, "BC");
- graph.add_edge(b, d, "BD");
- graph.add_edge(d, e, "DE");
- graph.add_edge(e, c, "EC");
- graph.add_edge(f, b, "FB");
-
- return graph;
-}
-
-#[test]
-fn each_node() {
- let graph = create_graph();
- let expected = ["A", "B", "C", "D", "E", "F"];
- graph.each_node(|idx, node| {
- assert_eq!(&expected[idx.0], graph.node_data(idx));
- assert_eq!(expected[idx.0], node.data);
- true
- });
-}
-
-#[test]
-fn each_edge() {
- let graph = create_graph();
- let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
- graph.each_edge(|idx, edge| {
- assert_eq!(expected[idx.0], edge.data);
- true
- });
-}
-
-fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph<N, E>,
- start_index: NodeIndex,
- start_data: N,
- expected_incoming: &[(E, N)],
- expected_outgoing: &[(E, N)]) {
- assert!(graph.node_data(start_index) == &start_data);
-
- let mut counter = 0;
- for (edge_index, edge) in graph.incoming_edges(start_index) {
- assert!(counter < expected_incoming.len());
- debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
- counter,
- expected_incoming[counter],
- edge_index,
- edge);
- match expected_incoming[counter] {
- (ref e, ref n) => {
- assert!(e == &edge.data);
- assert!(n == graph.node_data(edge.source()));
- assert!(start_index == edge.target);
- }
- }
- counter += 1;
- }
- assert_eq!(counter, expected_incoming.len());
-
- let mut counter = 0;
- for (edge_index, edge) in graph.outgoing_edges(start_index) {
- assert!(counter < expected_outgoing.len());
- debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
- counter,
- expected_outgoing[counter],
- edge_index,
- edge);
- match expected_outgoing[counter] {
- (ref e, ref n) => {
- assert!(e == &edge.data);
- assert!(start_index == edge.source);
- assert!(n == graph.node_data(edge.target));
- }
- }
- counter += 1;
- }
- assert_eq!(counter, expected_outgoing.len());
-}
-
-#[test]
-fn each_adjacent_from_a() {
- let graph = create_graph();
- test_adjacent_edges(&graph, NodeIndex(0), "A", &[], &[("AB", "B")]);
-}
-
-#[test]
-fn each_adjacent_from_b() {
- let graph = create_graph();
- test_adjacent_edges(&graph,
- NodeIndex(1),
- "B",
- &[("FB", "F"), ("AB", "A")],
- &[("BD", "D"), ("BC", "C")]);
-}
-
-#[test]
-fn each_adjacent_from_c() {
- let graph = create_graph();
- test_adjacent_edges(&graph, NodeIndex(2), "C", &[("EC", "E"), ("BC", "B")], &[]);
-}
-
-#[test]
-fn each_adjacent_from_d() {
- let graph = create_graph();
- test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]);
-}
use std::marker::PhantomData;
use std::ops::{Index, IndexMut, Range, RangeBounds};
use std::fmt;
+use std::hash::Hash;
use std::vec;
use std::u32;
/// Represents some newtyped `usize` wrapper.
///
/// (purpose: avoid mixing indexes for different bitvector domains.)
-pub trait Idx: Copy + 'static + Eq + Debug {
+pub trait Idx: Copy + 'static + Ord + Debug + Hash {
fn new(idx: usize) -> Self;
fn index(self) -> usize;
}
pub mod base_n;
pub mod bitslice;
pub mod bitvec;
-pub mod graph;
pub mod indexed_set;
pub mod indexed_vec;
pub mod obligation_forest;
pub use ena::unify;
pub mod fx;
pub mod tuple_slice;
-pub mod control_flow_graph;
+pub mod graph;
pub mod flock;
pub mod sync;
pub mod owning_ref;
use rustc::hir::def_id::DefId;
use rustc::ty::TyCtxt;
use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::graph::{Direction, INCOMING, OUTGOING, NodeIndex};
+use rustc_data_structures::graph::implementation::{
+ Direction, INCOMING, OUTGOING, NodeIndex
+};
use rustc::hir;
use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc::ich::{ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED};
use rustc::lint;
use rustc::lint::{LateContext, LateLintPass, LintPass, LintArray};
use rustc::lint::builtin::{BARE_TRAIT_OBJECTS, ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE};
+use rustc::lint::builtin::MACRO_USE_EXTERN_CRATE;
use rustc::session;
use rustc::util;
use rustc::hir;
BARE_TRAIT_OBJECTS,
UNREACHABLE_PUB,
UNUSED_EXTERN_CRATES,
+ MACRO_USE_EXTERN_CRATE,
ELLIPSIS_INCLUSIVE_RANGE_PATTERNS);
// Guidelines for creating a future incompatibility lint:
use rustc::ty::query::Providers;
use rustc::ty::{self, ParamEnv, TyCtxt};
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::indexed_set::IdxSetBuf;
use rustc_data_structures::indexed_vec::Idx;
use borrow_check::nll::ToRegionVid;
use borrow_check::nll::facts::AllFacts;
use borrow_check::nll::region_infer::RegionInferenceContext;
-use borrow_check::nll::type_check::AtLocation;
-use rustc::hir;
use rustc::infer::InferCtxt;
use rustc::mir::visit::TyContext;
use rustc::mir::visit::Visitor;
-use rustc::mir::Place::Projection;
use rustc::mir::{BasicBlock, BasicBlockData, Location, Mir, Place, Rvalue};
-use rustc::mir::{Local, PlaceProjection, ProjectionElem, Statement, Terminator};
+use rustc::mir::{Local, Statement, Terminator};
use rustc::ty::fold::TypeFoldable;
use rustc::ty::subst::Substs;
use rustc::ty::{self, CanonicalTy, ClosureSubsts, GeneratorSubsts};
regioncx,
location_table,
all_facts,
- mir,
};
cg.add_region_liveness_constraints_from_type_check(liveness_set_from_typeck);
all_facts: &'cg mut Option<AllFacts>,
location_table: &'cg LocationTable,
regioncx: &'cg mut RegionInferenceContext<'tcx>,
- mir: &'cg Mir<'tcx>,
borrow_set: &'cg BorrowSet<'tcx>,
}
self.super_terminator(block, terminator, location);
}
- fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
- debug!("visit_rvalue(rvalue={:?}, location={:?})", rvalue, location);
-
- match rvalue {
- Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
- // In some cases, e.g. when borrowing from an unsafe
- // place, we don't bother to create a loan, since
- // there are no conditions to validate.
- if let Some(all_facts) = self.all_facts {
- if let Some(borrow_index) = self.borrow_set.location_map.get(&location) {
- let region_vid = region.to_region_vid();
- all_facts.borrow_region.push((
- region_vid,
- *borrow_index,
- self.location_table.mid_index(location),
- ));
- }
- }
-
- // Look for an rvalue like:
- //
- // & L
- //
- // where L is the path that is borrowed. In that case, we have
- // to add the reborrow constraints (which don't fall out
- // naturally from the type-checker).
- self.add_reborrow_constraint(location, region, borrowed_place);
- }
-
- _ => {}
- }
-
- self.super_rvalue(rvalue, location);
- }
-
fn visit_user_assert_ty(
&mut self,
_c_ty: &CanonicalTy<'tcx>,
for (region, location) in liveness_set {
debug!("generate: {:#?} is live at {:#?}", region, location);
let region_vid = regioncx.to_region_vid(region);
- regioncx.add_live_point(region_vid, *location);
+ regioncx.add_live_element(region_vid, *location);
}
if let Some(all_facts) = all_facts {
.tcx
.for_each_free_region(&live_ty, |live_region| {
let vid = live_region.to_region_vid();
- self.regioncx.add_live_point(vid, location);
+ self.regioncx.add_live_element(vid, location);
});
}
-
- // Add the reborrow constraint at `location` so that `borrowed_place`
- // is valid for `borrow_region`.
- fn add_reborrow_constraint(
- &mut self,
- location: Location,
- borrow_region: ty::Region<'tcx>,
- borrowed_place: &Place<'tcx>,
- ) {
- let mut borrowed_place = borrowed_place;
-
- debug!(
- "add_reborrow_constraint({:?}, {:?}, {:?})",
- location, borrow_region, borrowed_place
- );
- while let Projection(box PlaceProjection { base, elem }) = borrowed_place {
- debug!("add_reborrow_constraint - iteration {:?}", borrowed_place);
-
- match *elem {
- ProjectionElem::Deref => {
- let tcx = self.infcx.tcx;
- let base_ty = base.ty(self.mir, tcx).to_ty(tcx);
-
- debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
- match base_ty.sty {
- ty::TyRef(ref_region, _, mutbl) => {
- self.regioncx.add_outlives(
- location.boring(),
- ref_region.to_region_vid(),
- borrow_region.to_region_vid(),
- );
-
- if let Some(all_facts) = self.all_facts {
- all_facts.outlives.push((
- ref_region.to_region_vid(),
- borrow_region.to_region_vid(),
- self.location_table.mid_index(location),
- ));
- }
-
- match mutbl {
- hir::Mutability::MutImmutable => {
- // Immutable reference. We don't need the base
- // to be valid for the entire lifetime of
- // the borrow.
- break;
- }
- hir::Mutability::MutMutable => {
- // Mutable reference. We *do* need the base
- // to be valid, because after the base becomes
- // invalid, someone else can use our mutable deref.
-
- // This is in order to make the following function
- // illegal:
- // ```
- // fn unsafe_deref<'a, 'b>(x: &'a &'b mut T) -> &'b mut T {
- // &mut *x
- // }
- // ```
- //
- // As otherwise you could clone `&mut T` using the
- // following function:
- // ```
- // fn bad(x: &mut T) -> (&mut T, &mut T) {
- // let my_clone = unsafe_deref(&'a x);
- // ENDREGION 'a;
- // (my_clone, x)
- // }
- // ```
- }
- }
- }
- ty::TyRawPtr(..) => {
- // deref of raw pointer, guaranteed to be valid
- break;
- }
- ty::TyAdt(def, _) if def.is_box() => {
- // deref of `Box`, need the base to be valid - propagate
- }
- _ => bug!("unexpected deref ty {:?} in {:?}", base_ty, borrowed_place),
- }
- }
- ProjectionElem::Field(..)
- | ProjectionElem::Downcast(..)
- | ProjectionElem::Index(..)
- | ProjectionElem::ConstantIndex { .. }
- | ProjectionElem::Subslice { .. } => {
- // other field access
- }
- }
-
- // The "propagate" case. We need to check that our base is valid
- // for the borrow's lifetime.
- borrowed_place = base;
- }
- }
}
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::ty::RegionVid;
-use rustc_data_structures::indexed_vec::{Idx, IndexVec};
-use borrow_check::nll::type_check::Locations;
-
-use std::fmt;
-use std::ops::Deref;
-
-#[derive(Clone, Default)]
-crate struct ConstraintSet {
- constraints: IndexVec<ConstraintIndex, OutlivesConstraint>,
-}
-
-impl ConstraintSet {
- pub fn push(&mut self, constraint: OutlivesConstraint) {
- debug!(
- "add_outlives({:?}: {:?} @ {:?})",
- constraint.sup, constraint.sub, constraint.locations
- );
- if constraint.sup == constraint.sub {
- // 'a: 'a is pretty uninteresting
- return;
- }
- self.constraints.push(constraint);
- }
-
- /// Once all constraints have been added, `link()` is used to thread together the constraints
- /// based on which would be affected when a particular region changes. See the next field of
- /// `OutlivesContraint` for more details.
- /// link returns a map that is needed later by `each_affected_by_dirty`.
- pub fn link(&mut self, len: usize) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
- let mut map = IndexVec::from_elem_n(None, len);
-
- for (idx, constraint) in self.constraints.iter_enumerated_mut().rev() {
- let mut head = &mut map[constraint.sub];
- debug_assert!(constraint.next.is_none());
- constraint.next = *head;
- *head = Some(idx);
- }
-
- map
- }
-
- /// When a region R1 changes, we need to reprocess all constraints R2: R1 to take into account
- /// any new elements that R1 now has. This method will quickly enumerate all such constraints
- /// (that is, constraints where R1 is in the "subregion" position).
- /// To use it, invoke with `map[R1]` where map is the map returned by `link`;
- /// the callback op will be invoked for each affected constraint.
- pub fn each_affected_by_dirty(
- &self,
- mut opt_dep_idx: Option<ConstraintIndex>,
- mut op: impl FnMut(ConstraintIndex),
- ) {
- while let Some(dep_idx) = opt_dep_idx {
- op(dep_idx);
- opt_dep_idx = self.constraints[dep_idx].next;
- }
- }
-}
-
-impl Deref for ConstraintSet {
- type Target = IndexVec<ConstraintIndex, OutlivesConstraint>;
-
- fn deref(&self) -> &Self::Target { &self.constraints }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct OutlivesConstraint {
- // NB. The ordering here is not significant for correctness, but
- // it is for convenience. Before we dump the constraints in the
- // debugging logs, we sort them, and we'd like the "super region"
- // to be first, etc. (In particular, span should remain last.)
- /// The region SUP must outlive SUB...
- pub sup: RegionVid,
-
- /// Region that must be outlived.
- pub sub: RegionVid,
-
- /// Later on, we thread the constraints onto a linked list
- /// grouped by their `sub` field. So if you had:
- ///
- /// Index | Constraint | Next Field
- /// ----- | ---------- | ----------
- /// 0 | `'a: 'b` | Some(2)
- /// 1 | `'b: 'c` | None
- /// 2 | `'c: 'b` | None
- pub next: Option<ConstraintIndex>,
-
- /// Where did this constraint arise?
- pub locations: Locations,
-}
-
-impl fmt::Debug for OutlivesConstraint {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- write!(
- formatter,
- "({:?}: {:?}) due to {:?}",
- self.sup, self.sub, self.locations
- )
- }
-}
-
-newtype_index!(ConstraintIndex { DEBUG_FORMAT = "ConstraintIndex({})" });
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use borrow_check::nll::constraints::{ConstraintIndex, ConstraintSet};
+use rustc::ty::RegionVid;
+use rustc_data_structures::graph;
+use rustc_data_structures::indexed_vec::IndexVec;
+
+crate struct ConstraintGraph {
+ first_constraints: IndexVec<RegionVid, Option<ConstraintIndex>>,
+ next_constraints: IndexVec<ConstraintIndex, Option<ConstraintIndex>>,
+}
+
+impl ConstraintGraph {
+ /// Create a "dependency graph" where each region constraint `R1:
+ /// R2` is treated as an edge `R1 -> R2`. We use this graph to
+ /// construct SCCs for region inference but also for error
+ /// reporting.
+ crate fn new(set: &ConstraintSet, num_region_vars: usize) -> Self {
+ let mut first_constraints = IndexVec::from_elem_n(None, num_region_vars);
+ let mut next_constraints = IndexVec::from_elem(None, &set.constraints);
+
+ for (idx, constraint) in set.constraints.iter_enumerated().rev() {
+ let mut head = &mut first_constraints[constraint.sup];
+ let mut next = &mut next_constraints[idx];
+ debug_assert!(next.is_none());
+ *next = *head;
+ *head = Some(idx);
+ }
+
+ Self {
+ first_constraints,
+ next_constraints,
+ }
+ }
+
+ /// Given a region `R`, iterate over all constraints `R: R1`.
+ crate fn outgoing_edges(&self, region_sup: RegionVid) -> Edges<'_> {
+ let first = self.first_constraints[region_sup];
+ Edges {
+ graph: self,
+ pointer: first,
+ }
+ }
+}
+
+crate struct Edges<'s> {
+ graph: &'s ConstraintGraph,
+ pointer: Option<ConstraintIndex>,
+}
+
+impl<'s> Iterator for Edges<'s> {
+ type Item = ConstraintIndex;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(p) = self.pointer {
+ self.pointer = self.graph.next_constraints[p];
+ Some(p)
+ } else {
+ None
+ }
+ }
+}
+
+crate struct RegionGraph<'s> {
+ set: &'s ConstraintSet,
+ constraint_graph: &'s ConstraintGraph,
+}
+
+impl<'s> RegionGraph<'s> {
+ /// Create a "dependency graph" where each region constraint `R1:
+ /// R2` is treated as an edge `R1 -> R2`. We use this graph to
+ /// construct SCCs for region inference but also for error
+ /// reporting.
+ crate fn new(set: &'s ConstraintSet, constraint_graph: &'s ConstraintGraph) -> Self {
+ Self {
+ set,
+ constraint_graph,
+ }
+ }
+
+ /// Given a region `R`, iterate over all regions `R1` such that
+ /// there exists a constraint `R: R1`.
+ crate fn sub_regions(&self, region_sup: RegionVid) -> Successors<'_> {
+ Successors {
+ set: self.set,
+ edges: self.constraint_graph.outgoing_edges(region_sup),
+ }
+ }
+}
+
+crate struct Successors<'s> {
+ set: &'s ConstraintSet,
+ edges: Edges<'s>,
+}
+
+impl<'s> Iterator for Successors<'s> {
+ type Item = RegionVid;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.edges.next().map(|c| self.set[c].sub)
+ }
+}
+
+impl<'s> graph::DirectedGraph for RegionGraph<'s> {
+ type Node = RegionVid;
+}
+
+impl<'s> graph::WithNumNodes for RegionGraph<'s> {
+ fn num_nodes(&self) -> usize {
+ self.constraint_graph.first_constraints.len()
+ }
+}
+
+impl<'s> graph::WithSuccessors for RegionGraph<'s> {
+ fn successors<'graph>(
+ &'graph self,
+ node: Self::Node,
+ ) -> <Self as graph::GraphSuccessors<'graph>>::Iter {
+ self.sub_regions(node)
+ }
+}
+
+impl<'s, 'graph> graph::GraphSuccessors<'graph> for RegionGraph<'s> {
+ type Item = RegionVid;
+ type Iter = Successors<'graph>;
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::ty::RegionVid;
+use rustc_data_structures::graph::scc::Sccs;
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use borrow_check::nll::type_check::Locations;
+
+use std::fmt;
+use std::ops::Deref;
+
+crate mod graph;
+
+#[derive(Clone, Default)]
+crate struct ConstraintSet {
+ constraints: IndexVec<ConstraintIndex, OutlivesConstraint>,
+}
+
+impl ConstraintSet {
+ crate fn push(&mut self, constraint: OutlivesConstraint) {
+ debug!(
+ "ConstraintSet::push({:?}: {:?} @ {:?}",
+ constraint.sup, constraint.sub, constraint.locations
+ );
+ if constraint.sup == constraint.sub {
+ // 'a: 'a is pretty uninteresting
+ return;
+ }
+ self.constraints.push(constraint);
+ }
+
+ /// Constructs a graph from the constraint set; the graph makes it
+ /// easy to find the constraints affecting a particular region
+ /// (you should not mutate the set once this graph is
+ /// constructed).
+ crate fn graph(&self, num_region_vars: usize) -> graph::ConstraintGraph {
+ graph::ConstraintGraph::new(self, num_region_vars)
+ }
+
+ /// Compute cycles (SCCs) in the graph of regions. In particular,
+ /// find all regions R1, R2 such that R1: R2 and R2: R1 and group
+ /// them into an SCC, and find the relationships between SCCs.
+ crate fn compute_sccs(
+ &self,
+ constraint_graph: &graph::ConstraintGraph,
+ ) -> Sccs<RegionVid, ConstraintSccIndex> {
+ let region_graph = &graph::RegionGraph::new(self, constraint_graph);
+ Sccs::new(region_graph)
+ }
+}
+
+impl Deref for ConstraintSet {
+ type Target = IndexVec<ConstraintIndex, OutlivesConstraint>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.constraints
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct OutlivesConstraint {
+ // NB. The ordering here is not significant for correctness, but
+ // it is for convenience. Before we dump the constraints in the
+ // debugging logs, we sort them, and we'd like the "super region"
+ // to be first, etc. (In particular, span should remain last.)
+ /// The region SUP must outlive SUB...
+ pub sup: RegionVid,
+
+ /// Region that must be outlived.
+ pub sub: RegionVid,
+
+ /// Where did this constraint arise?
+ pub locations: Locations,
+}
+
+impl fmt::Debug for OutlivesConstraint {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(
+ formatter,
+ "({:?}: {:?}) due to {:?}",
+ self.sup, self.sub, self.locations
+ )
+ }
+}
+
+newtype_index!(ConstraintIndex { DEBUG_FORMAT = "ConstraintIndex({})" });
+
+newtype_index!(ConstraintSccIndex { DEBUG_FORMAT = "ConstraintSccIndex({})" });
queue.push_back(self.start_point);
while let Some(p) = queue.pop_front() {
- if !self.regioncx.region_contains_point(self.region_vid, p) {
+ if !self.regioncx.region_contains(self.region_vid, p) {
continue;
}
use rustc::mir::{Field, Operand, BorrowKind};
use rustc::ty::{self, ParamEnv};
use rustc_data_structures::indexed_vec::Idx;
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::graph::dominators::Dominators;
pub(super) fn generate_invalidates<'cx, 'gcx, 'tcx>(
infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
crate mod type_check;
mod universal_regions;
-crate mod constraint_set;
+mod constraints;
use self::facts::AllFacts;
use self::region_infer::RegionInferenceContext;
def_id,
&universal_regions,
location_table,
+ borrow_set,
&liveness,
&mut all_facts,
flow_inits,
// Also dump the inference graph constraints as a graphviz file.
let _: io::Result<()> = do catch {
let mut file =
- pretty::create_dump_file(infcx.tcx, "regioncx.dot", None, "nll", &0, source)?;
- regioncx.dump_graphviz(&mut file)?;
+ pretty::create_dump_file(infcx.tcx, "regioncx.all.dot", None, "nll", &0, source)?;
+ regioncx.dump_graphviz_raw_constraints(&mut file)?;
+ };
+
+ // Also dump the inference graph constraints as a graphviz file.
+ let _: io::Result<()> = do catch {
+ let mut file =
+ pretty::create_dump_file(infcx.tcx, "regioncx.scc.dot", None, "nll", &0, source)?;
+ regioncx.dump_graphviz_scc_constraints(&mut file)?;
};
}
sup,
sub,
locations,
- next: _,
} = constraint;
with_msg(&format!(
"{:?}: {:?} due to {:?}",
impl<'tcx> RegionInferenceContext<'tcx> {
/// Walks the graph of constraints (where `'a: 'b` is considered
- /// an edge `'b -> 'a`) to find all paths from `from_region` to
+ /// an edge `'a -> 'b`) to find all paths from `from_region` to
/// `to_region`. The paths are accumulated into the vector
/// `results`. The paths are stored as a series of
/// `ConstraintIndex` values -- in other words, a list of *edges*.
- ///
- /// # Parameters
- ///
- /// - `from_region`
- /// When reporting an error, it is useful to be able to determine
- /// which constraints influenced the region being reported as an
- /// error. This function finds all of the paths from the
- /// constraint.
fn find_constraint_paths_between_regions(
&self,
from_region: RegionVid,
stack: &mut Vec<ConstraintIndex>,
results: &mut Vec<Vec<ConstraintIndex>>,
) {
- let dependency_map = self.dependency_map.as_ref().unwrap();
-
// Check if we already visited this region.
if !visited.insert(current_region) {
return;
// Check if we reached the region we were looking for.
if target_test(current_region) {
if !stack.is_empty() {
- assert_eq!(self.constraints[stack[0]].sub, from_region);
+ assert_eq!(self.constraints[stack[0]].sup, from_region);
results.push(stack.clone());
}
return;
}
- self.constraints
- .each_affected_by_dirty(dependency_map[current_region], |constraint| {
- assert_eq!(self.constraints[constraint].sub, current_region);
- stack.push(constraint);
- self.find_constraint_paths_between_regions_helper(
- from_region,
- self.constraints[constraint].sup,
- target_test,
- visited,
- stack,
- results,
- );
- stack.pop();
- });
+ for constraint in self.constraint_graph.outgoing_edges(current_region) {
+ assert_eq!(self.constraints[constraint].sup, current_region);
+ stack.push(constraint);
+ self.find_constraint_paths_between_regions_helper(
+ from_region,
+ self.constraints[constraint].sub,
+ target_test,
+ visited,
+ stack,
+ results,
+ );
+ stack.pop();
+ }
}
/// This function will return true if a constraint is interesting and false if a constraint
}
// Find all paths
- let constraint_paths = self.find_constraint_paths_between_regions(outlived_fr, |r| r == fr);
+ let constraint_paths = self.find_constraint_paths_between_regions(fr, |r| r == outlived_fr);
debug!("report_error: constraint_paths={:#?}", constraint_paths);
// Find the shortest such path.
while changed {
changed = false;
- for constraint in &*self.constraints {
+ for constraint in self.constraints.iter() {
if let Some(n) = result_set[constraint.sup] {
let m = n + 1;
if result_set[constraint.sub]
//! libgraphviz traits, specialized to attaching borrowck analysis
//! data to rendered labels.
+use super::*;
+use borrow_check::nll::constraints::OutlivesConstraint;
use dot::{self, IntoCow};
use rustc_data_structures::indexed_vec::Idx;
use std::borrow::Cow;
use std::io::{self, Write};
-use super::*;
-use borrow_check::nll::constraint_set::OutlivesConstraint;
-
impl<'tcx> RegionInferenceContext<'tcx> {
/// Write out the region constraint graph.
- pub(crate) fn dump_graphviz(&self, mut w: &mut dyn Write) -> io::Result<()> {
- dot::render(self, &mut w)
+ crate fn dump_graphviz_raw_constraints(&self, mut w: &mut dyn Write) -> io::Result<()> {
+ dot::render(&RawConstraints { regioncx: self }, &mut w)
+ }
+
+    /// Write out the region constraint SCC graph.
+ crate fn dump_graphviz_scc_constraints(&self, mut w: &mut dyn Write) -> io::Result<()> {
+ let mut nodes_per_scc: IndexVec<ConstraintSccIndex, _> = self.constraint_sccs
+ .all_sccs()
+ .map(|_| Vec::new())
+ .collect();
+
+ for region in self.definitions.indices() {
+ let scc = self.constraint_sccs.scc(region);
+ nodes_per_scc[scc].push(region);
+ }
+
+ dot::render(&SccConstraints { regioncx: self, nodes_per_scc }, &mut w)
}
}
-impl<'this, 'tcx> dot::Labeller<'this> for RegionInferenceContext<'tcx> {
+struct RawConstraints<'a, 'tcx: 'a> {
+ regioncx: &'a RegionInferenceContext<'tcx>,
+}
+
+impl<'a, 'this, 'tcx> dot::Labeller<'this> for RawConstraints<'a, 'tcx> {
type Node = RegionVid;
type Edge = OutlivesConstraint;
}
}
-impl<'this, 'tcx> dot::GraphWalk<'this> for RegionInferenceContext<'tcx> {
+impl<'a, 'this, 'tcx> dot::GraphWalk<'this> for RawConstraints<'a, 'tcx> {
type Node = RegionVid;
type Edge = OutlivesConstraint;
fn nodes(&'this self) -> dot::Nodes<'this, RegionVid> {
- let vids: Vec<RegionVid> = self.definitions.indices().collect();
+ let vids: Vec<RegionVid> = self.regioncx.definitions.indices().collect();
vids.into_cow()
}
fn edges(&'this self) -> dot::Edges<'this, OutlivesConstraint> {
- (&self.constraints.raw[..]).into_cow()
+ (&self.regioncx.constraints.raw[..]).into_cow()
}
- // Render `a: b` as `a <- b`, indicating the flow
+ // Render `a: b` as `a -> b`, indicating the flow
// of data during inference.
fn source(&'this self, edge: &OutlivesConstraint) -> RegionVid {
- edge.sub
+ edge.sup
}
fn target(&'this self, edge: &OutlivesConstraint) -> RegionVid {
- edge.sup
+ edge.sub
+ }
+}
+
+struct SccConstraints<'a, 'tcx: 'a> {
+ regioncx: &'a RegionInferenceContext<'tcx>,
+ nodes_per_scc: IndexVec<ConstraintSccIndex, Vec<RegionVid>>,
+}
+
+impl<'a, 'this, 'tcx> dot::Labeller<'this> for SccConstraints<'a, 'tcx> {
+ type Node = ConstraintSccIndex;
+ type Edge = (ConstraintSccIndex, ConstraintSccIndex);
+
+ fn graph_id(&'this self) -> dot::Id<'this> {
+ dot::Id::new(format!("RegionInferenceContext")).unwrap()
+ }
+ fn node_id(&'this self, n: &ConstraintSccIndex) -> dot::Id<'this> {
+ dot::Id::new(format!("r{}", n.index())).unwrap()
+ }
+ fn node_shape(&'this self, _node: &ConstraintSccIndex) -> Option<dot::LabelText<'this>> {
+ Some(dot::LabelText::LabelStr(Cow::Borrowed("box")))
+ }
+ fn node_label(&'this self, n: &ConstraintSccIndex) -> dot::LabelText<'this> {
+ let nodes = &self.nodes_per_scc[*n];
+ dot::LabelText::LabelStr(format!("{:?} = {:?}", n, nodes).into_cow())
+ }
+}
+
+impl<'a, 'this, 'tcx> dot::GraphWalk<'this> for SccConstraints<'a, 'tcx> {
+ type Node = ConstraintSccIndex;
+ type Edge = (ConstraintSccIndex, ConstraintSccIndex);
+
+ fn nodes(&'this self) -> dot::Nodes<'this, ConstraintSccIndex> {
+ let vids: Vec<ConstraintSccIndex> = self.regioncx.constraint_sccs.all_sccs().collect();
+ vids.into_cow()
+ }
+ fn edges(&'this self) -> dot::Edges<'this, (ConstraintSccIndex, ConstraintSccIndex)> {
+ let edges: Vec<_> = self.regioncx
+ .constraint_sccs
+ .all_sccs()
+ .flat_map(|scc_a| {
+ self.regioncx
+ .constraint_sccs
+ .successors(scc_a)
+ .iter()
+ .map(move |&scc_b| (scc_a, scc_b))
+ })
+ .collect();
+
+ edges.into_cow()
+ }
+
+ // Render `a: b` as `a -> b`, indicating the flow
+ // of data during inference.
+
+ fn source(&'this self, edge: &(ConstraintSccIndex, ConstraintSccIndex)) -> ConstraintSccIndex {
+ edge.0
+ }
+
+ fn target(&'this self, edge: &(ConstraintSccIndex, ConstraintSccIndex)) -> ConstraintSccIndex {
+ edge.1
}
}
// except according to those terms.
use super::universal_regions::UniversalRegions;
-use borrow_check::nll::constraint_set::{ConstraintIndex, ConstraintSet, OutlivesConstraint};
+use borrow_check::nll::constraints::{
+ ConstraintIndex, ConstraintSccIndex, ConstraintSet, OutlivesConstraint,
+};
+use borrow_check::nll::constraints::graph::ConstraintGraph;
+use borrow_check::nll::region_infer::values::ToElementIndex;
use borrow_check::nll::type_check::Locations;
use rustc::hir::def_id::DefId;
use rustc::infer::canonical::QueryRegionConstraint;
};
use rustc::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable};
use rustc::util::common;
-use rustc_data_structures::bitvec::BitVector;
-use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use rustc_data_structures::graph::scc::Sccs;
+use rustc_data_structures::indexed_set::{IdxSet, IdxSetBuf};
+use rustc_data_structures::indexed_vec::IndexVec;
use std::rc::Rc;
/// regions, these start out empty and steadily grow, though for
/// each universally quantified region R they start out containing
/// the entire CFG and `end(R)`.
- liveness_constraints: RegionValues,
+ liveness_constraints: RegionValues<RegionVid>,
- /// The final inferred values of the inference variables; `None`
- /// until `solve` is invoked.
- inferred_values: Option<RegionValues>,
+ /// The outlives constraints computed by the type-check.
+ constraints: Rc<ConstraintSet>,
- /// For each variable, stores the index of the first constraint
- /// where that variable appears on the RHS. This is the start of a
- /// 'linked list' threaded by the `next` field in `Constraint`.
- ///
- /// This map is build when values are inferred.
- dependency_map: Option<IndexVec<RegionVid, Option<ConstraintIndex>>>,
+ /// The constraint-set, but in graph form, making it easy to traverse
+ /// the constraints adjacent to a particular region. Used to construct
+ /// the SCC (see `constraint_sccs`) and for error reporting.
+ constraint_graph: Rc<ConstraintGraph>,
+
+ /// The SCC computed from `constraints` and
+ /// `constraint_graph`. Used to compute the values of each region.
+ constraint_sccs: Rc<Sccs<RegionVid, ConstraintSccIndex>>,
- /// The constraints we have accumulated and used during solving.
- constraints: ConstraintSet,
+ /// The final inferred values of the region variables; we compute
+ /// one value per SCC. To get the value for any given *region*,
+ /// you first find which scc it is a part of.
+ scc_values: RegionValues<ConstraintSccIndex>,
/// Type constraints that we check after solving.
type_tests: Vec<TypeTest<'tcx>>,
/// Information about the universally quantified regions in scope
/// on this function and their (known) relations to one another.
- universal_regions: UniversalRegions<'tcx>,
+ universal_regions: Rc<UniversalRegions<'tcx>>,
}
struct RegionDefinition<'tcx> {
outlives_constraints: ConstraintSet,
type_tests: Vec<TypeTest<'tcx>>,
) -> Self {
- // The `next` field should not yet have been initialized:
- debug_assert!(outlives_constraints.iter().all(|c| c.next.is_none()));
-
+ let universal_regions = Rc::new(universal_regions);
let num_region_variables = var_infos.len();
let num_universal_regions = universal_regions.len();
let elements = &Rc::new(RegionValueElements::new(mir, num_universal_regions));
// Create a RegionDefinition for each inference variable.
- let definitions = var_infos
+ let definitions: IndexVec<_, _> = var_infos
.into_iter()
.map(|info| RegionDefinition::new(info.origin))
.collect();
+ let constraints = Rc::new(outlives_constraints); // freeze constraints
+ let constraint_graph = Rc::new(constraints.graph(definitions.len()));
+ let constraint_sccs = Rc::new(constraints.compute_sccs(&constraint_graph));
+
+ let scc_values = RegionValues::new(elements, constraint_sccs.num_sccs());
+
let mut result = Self {
definitions,
elements: elements.clone(),
liveness_constraints: RegionValues::new(elements, num_region_variables),
- inferred_values: None,
- dependency_map: None,
- constraints: outlives_constraints,
+ constraints,
+ constraint_sccs,
+ constraint_graph,
+ scc_values,
type_tests,
universal_regions,
};
}
// For each universally quantified region X:
- for variable in self.universal_regions.universal_regions() {
+ let elements = self.elements.clone();
+ let universal_regions = self.universal_regions.clone();
+ for variable in universal_regions.universal_regions() {
// These should be free-region variables.
assert!(match self.definitions[variable].origin {
RegionVariableOrigin::NLL(NLLRegionVariableOrigin::FreeRegion) => true,
self.definitions[variable].is_universal = true;
// Add all nodes in the CFG to liveness constraints
- for point_index in self.elements.all_point_indices() {
- self.liveness_constraints.add_element(variable, point_index);
+ for point_index in elements.all_point_indices() {
+ self.add_live_element(variable, point_index);
}
// Add `end(X)` into the set for X.
- self.liveness_constraints.add_element(variable, variable);
+ self.add_live_element(variable, variable);
}
}
/// Returns true if the region `r` contains the point `p`.
///
/// Panics if called before `solve()` executes,
- pub fn region_contains_point<R>(&self, r: R, p: Location) -> bool
- where
- R: ToRegionVid,
- {
- let inferred_values = self
- .inferred_values
- .as_ref()
- .expect("region values not yet inferred");
- inferred_values.contains(r.to_region_vid(), p)
+ crate fn region_contains(&self, r: impl ToRegionVid, p: impl ToElementIndex) -> bool {
+ let scc = self.constraint_sccs.scc(r.to_region_vid());
+ self.scc_values.contains(scc, p)
}
/// Returns access to the value of `r` for debugging purposes.
crate fn region_value_str(&self, r: RegionVid) -> String {
- let inferred_values = self
- .inferred_values
- .as_ref()
- .expect("region values not yet inferred");
-
- inferred_values.region_value_str(r)
+ let scc = self.constraint_sccs.scc(r.to_region_vid());
+ self.scc_values.region_value_str(scc)
}
/// Indicates that the region variable `v` is live at the point `point`.
///
/// Returns `true` if this constraint is new and `false` is the
/// constraint was already present.
- pub(super) fn add_live_point(&mut self, v: RegionVid, point: Location) -> bool {
- debug!("add_live_point({:?}, {:?})", v, point);
- assert!(self.inferred_values.is_none(), "values already inferred");
+ pub(super) fn add_live_element(
+ &mut self,
+ v: RegionVid,
+ elem: impl ToElementIndex,
+ ) -> bool {
+ debug!("add_live_element({:?}, {:?})", v, elem);
- let element = self.elements.index(point);
- self.liveness_constraints.add_element(v, element)
- }
+ // Add to the liveness values for `v`...
+ if self.liveness_constraints.add_element(v, elem) {
+ // ...but also add to the SCC in which `v` appears.
+ let scc = self.constraint_sccs.scc(v);
+ self.scc_values.add_element(scc, elem);
- /// Indicates that the region variable `sup` must outlive `sub` is live at the point `point`.
- pub(super) fn add_outlives(&mut self, locations: Locations, sup: RegionVid, sub: RegionVid) {
- assert!(self.inferred_values.is_none(), "values already inferred");
- self.constraints.push(OutlivesConstraint {
- locations,
- sup,
- sub,
- next: None,
- })
+ true
+ } else {
+ false
+ }
}
/// Perform region inference and report errors if we see any
mir: &Mir<'tcx>,
mir_def_id: DefId,
) -> Option<ClosureRegionRequirements<'gcx>> {
- assert!(self.inferred_values.is_none(), "values already inferred");
-
self.propagate_constraints(mir);
// If this is a closure, we can propagate unsatisfied
/// for each region variable until all the constraints are
/// satisfied. Note that some values may grow **too** large to be
/// feasible, but we check this later.
- fn propagate_constraints(&mut self, mir: &Mir<'tcx>) {
- self.dependency_map = Some(self.build_dependency_map());
- let inferred_values = self.compute_region_values(mir);
- self.inferred_values = Some(inferred_values);
- }
+ fn propagate_constraints(&mut self, _mir: &Mir<'tcx>) {
+ debug!("propagate_constraints()");
- fn compute_region_values(&self, _mir: &Mir<'tcx>) -> RegionValues {
- debug!("compute_region_values()");
- debug!("compute_region_values: constraints={:#?}", {
+ debug!("propagate_constraints: constraints={:#?}", {
let mut constraints: Vec<_> = self.constraints.iter().collect();
constraints.sort();
constraints
});
- // The initial values for each region are derived from the liveness
- // constraints we have accumulated.
- let mut inferred_values = self.liveness_constraints.clone();
-
- let dependency_map = self.dependency_map.as_ref().unwrap();
-
- // Constraints that may need to be repropagated (initially all):
- let mut dirty_list: Vec<_> = self.constraints.indices().collect();
-
- // Set to 0 for each constraint that is on the dirty list:
- let mut clean_bit_vec = BitVector::new(dirty_list.len());
+ // To propagate constriants, we walk the DAG induced by the
+ // SCC. For each SCC, we visit its successors and compute
+ // their values, then we union all those values to get our
+ // own.
+ let visited = &mut IdxSetBuf::new_empty(self.constraint_sccs.num_sccs());
+ for scc_index in self.constraint_sccs.all_sccs() {
+ self.propagate_constraint_sccs_if_new(scc_index, visited);
+ }
+ }
- debug!("propagate_constraints: --------------------");
- while let Some(constraint_idx) = dirty_list.pop() {
- clean_bit_vec.insert(constraint_idx.index());
+ #[inline]
+ fn propagate_constraint_sccs_if_new(
+ &mut self,
+ scc_a: ConstraintSccIndex,
+ visited: &mut IdxSet<ConstraintSccIndex>,
+ ) {
+ if visited.add(&scc_a) {
+ self.propagate_constraint_sccs_new(scc_a, visited);
+ }
+ }
- let constraint = &self.constraints[constraint_idx];
- debug!("propagate_constraints: constraint={:?}", constraint);
+ fn propagate_constraint_sccs_new(
+ &mut self,
+ scc_a: ConstraintSccIndex,
+ visited: &mut IdxSet<ConstraintSccIndex>,
+ ) {
+ let constraint_sccs = self.constraint_sccs.clone();
- if inferred_values.add_region(constraint.sup, constraint.sub) {
- debug!("propagate_constraints: sub={:?}", constraint.sub);
- debug!("propagate_constraints: sup={:?}", constraint.sup);
+ // Walk each SCC `B` such that `A: B`...
+ for &scc_b in constraint_sccs.successors(scc_a) {
+ debug!(
+ "propagate_constraint_sccs: scc_a = {:?} scc_b = {:?}",
+ scc_a, scc_b
+ );
- self.constraints.each_affected_by_dirty(
- dependency_map[constraint.sup],
- |dep_idx| {
- if clean_bit_vec.remove(dep_idx.index()) {
- dirty_list.push(dep_idx);
- }
- },
- );
- }
+ // ...compute the value of `B`...
+ self.propagate_constraint_sccs_if_new(scc_b, visited);
- debug!("\n");
+ // ...and add elements from `B` into `A`.
+ self.scc_values.add_region(scc_a, scc_b);
}
- inferred_values
- }
-
- /// Builds up a map from each region variable X to a vector with the
- /// indices of constraints that need to be re-evaluated when X changes.
- /// These are constraints like Y: X @ P -- so if X changed, we may
- /// need to grow Y.
- fn build_dependency_map(&mut self) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
- self.constraints.link(self.definitions.len())
+ debug!(
+ "propagate_constraint_sccs: scc_a = {:?} has value {:?}",
+ scc_a,
+ self.scc_values.region_value_str(scc_a),
+ );
}
/// Once regions have been propagated, this method is used to see
if self.universal_regions.is_universal_region(r) {
return self.definitions[r].external_name;
} else {
- let inferred_values = self
- .inferred_values
- .as_ref()
- .expect("region values not yet inferred");
+ let r_scc = self.constraint_sccs.scc(r);
let upper_bound = self.universal_upper_bound(r);
- if inferred_values.contains(r, upper_bound) {
+ if self.scc_values.contains(r_scc, upper_bound) {
self.to_error_region(upper_bound)
} else {
None
// region, which ensures it can be encoded in a `ClosureOutlivesRequirement`.
let lower_bound_plus = self.non_local_universal_upper_bound(*lower_bound);
assert!(self.universal_regions.is_universal_region(lower_bound_plus));
- assert!(
- !self
- .universal_regions
- .is_local_free_region(lower_bound_plus)
- );
+ assert!(!self.universal_regions
+ .is_local_free_region(lower_bound_plus));
propagated_outlives_requirements.push(ClosureOutlivesRequirement {
subject,
) -> Option<ClosureOutlivesSubject<'gcx>> {
let tcx = infcx.tcx;
let gcx = tcx.global_tcx();
- let inferred_values = self
- .inferred_values
- .as_ref()
- .expect("region values not yet inferred");
debug!("try_promote_type_test_subject(ty = {:?})", ty);
// `'static` is not contained in `r`, we would fail to
// find an equivalent.
let upper_bound = self.non_local_universal_upper_bound(region_vid);
- if inferred_values.contains(region_vid, upper_bound) {
+ if self.region_contains(region_vid, upper_bound) {
tcx.mk_region(ty::ReClosureBound(upper_bound))
} else {
// In the case of a failure, use a `ReVar`
/// except that it converts further takes the non-local upper
/// bound of `'y`, so that the final result is non-local.
fn non_local_universal_upper_bound(&self, r: RegionVid) -> RegionVid {
- let inferred_values = self.inferred_values.as_ref().unwrap();
-
debug!(
"non_local_universal_upper_bound(r={:?}={})",
r,
- inferred_values.region_value_str(r)
+ self.region_value_str(r)
);
let lub = self.universal_upper_bound(r);
/// - For each `end('x)` element in `'r`, compute the mutual LUB, yielding
/// a result `'y`.
fn universal_upper_bound(&self, r: RegionVid) -> RegionVid {
- let inferred_values = self.inferred_values.as_ref().unwrap();
-
debug!(
"universal_upper_bound(r={:?}={})",
r,
- inferred_values.region_value_str(r)
+ self.region_value_str(r)
);
// Find the smallest universal region that contains all other
// universal regions within `region`.
let mut lub = self.universal_regions.fr_fn_body;
- for ur in inferred_values.universal_regions_outlived_by(r) {
+ let r_scc = self.constraint_sccs.scc(r);
+ for ur in self.scc_values.universal_regions_outlived_by(r_scc) {
lub = self.universal_regions.postdom_upper_bound(lub, ur);
}
) -> bool {
debug!("eval_outlives({:?}: {:?})", sup_region, sub_region);
- let inferred_values = self
- .inferred_values
- .as_ref()
- .expect("values for regions not yet inferred");
-
debug!(
"eval_outlives: sup_region's value = {:?}",
- inferred_values.region_value_str(sup_region),
+ self.region_value_str(sup_region),
);
debug!(
"eval_outlives: sub_region's value = {:?}",
- inferred_values.region_value_str(sub_region),
+ self.region_value_str(sub_region),
);
+ let sub_region_scc = self.constraint_sccs.scc(sub_region);
+ let sup_region_scc = self.constraint_sccs.scc(sup_region);
+
// Both the `sub_region` and `sup_region` consist of the union
// of some number of universal regions (along with the union
// of various points in the CFG; ignore those points for
// now). Therefore, the sup-region outlives the sub-region if,
// for each universal region R1 in the sub-region, there
// exists some region R2 in the sup-region that outlives R1.
- let universal_outlives = inferred_values
- .universal_regions_outlived_by(sub_region)
+ let universal_outlives = self.scc_values
+ .universal_regions_outlived_by(sub_region_scc)
.all(|r1| {
- inferred_values
- .universal_regions_outlived_by(sup_region)
+ self.scc_values
+ .universal_regions_outlived_by(sup_region_scc)
.any(|r2| self.universal_regions.outlives(r2, r1))
});
return true;
}
- inferred_values.contains_points(sup_region, sub_region)
+ self.scc_values
+ .contains_points(sup_region_scc, sub_region_scc)
}
/// Once regions have been propagated, this method is used to see
) {
// The universal regions are always found in a prefix of the
// full list.
- let universal_definitions = self
- .definitions
+ let universal_definitions = self.definitions
.iter_enumerated()
.take_while(|(_, fr_definition)| fr_definition.is_universal);
longer_fr: RegionVid,
propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
) {
- let inferred_values = self.inferred_values.as_ref().unwrap();
-
debug!("check_universal_region(fr={:?})", longer_fr);
+ let longer_fr_scc = self.constraint_sccs.scc(longer_fr);
+
// Find every region `o` such that `fr: o`
// (because `fr` includes `end(o)`).
- for shorter_fr in inferred_values.universal_regions_outlived_by(longer_fr) {
+ for shorter_fr in self.scc_values.universal_regions_outlived_by(longer_fr_scc) {
// If it is known that `fr: o`, carry on.
if self.universal_regions.outlives(longer_fr, shorter_fr) {
continue;
/// Maps between the various kinds of elements of a region value to
/// the internal indices that w use.
-pub(super) struct RegionValueElements {
+crate struct RegionValueElements {
/// For each basic block, how many points are contained within?
statements_before_block: IndexVec<BasicBlock, usize>,
num_points: usize,
}
impl RegionValueElements {
- pub(super) fn new(mir: &Mir<'_>, num_universal_regions: usize) -> Self {
+ crate fn new(mir: &Mir<'_>, num_universal_regions: usize) -> Self {
let mut num_points = 0;
let statements_before_block = mir
.basic_blocks()
}
/// Total number of element indices that exist.
- pub(super) fn num_elements(&self) -> usize {
+ crate fn num_elements(&self) -> usize {
self.num_points + self.num_universal_regions
}
/// Converts an element of a region value into a `RegionElementIndex`.
- pub(super) fn index<T: ToElementIndex>(&self, elem: T) -> RegionElementIndex {
+ crate fn index<T: ToElementIndex>(&self, elem: T) -> RegionElementIndex {
elem.to_element_index(self)
}
/// Iterates over the `RegionElementIndex` for all points in the CFG.
- pub(super) fn all_point_indices<'a>(&'a self) -> impl Iterator<Item = RegionElementIndex> + 'a {
+ crate fn all_point_indices<'a>(&'a self) -> impl Iterator<Item = RegionElementIndex> + 'a {
(0..self.num_points).map(move |i| RegionElementIndex::new(i + self.num_universal_regions))
}
/// Converts a particular `RegionElementIndex` to the `RegionElement` it represents.
- pub(super) fn to_element(&self, i: RegionElementIndex) -> RegionElement {
+ crate fn to_element(&self, i: RegionElementIndex) -> RegionElement {
debug!("to_element(i={:?})", i);
if let Some(r) = self.to_universal_region(i) {
/// Converts a particular `RegionElementIndex` to a universal
/// region, if that is what it represents. Returns `None`
/// otherwise.
- pub(super) fn to_universal_region(&self, i: RegionElementIndex) -> Option<RegionVid> {
+ crate fn to_universal_region(&self, i: RegionElementIndex) -> Option<RegionVid> {
if i.index() < self.num_universal_regions {
Some(RegionVid::new(i.index()))
} else {
/// An individual element in a region value -- the value of a
/// particular region variable consists of a set of these elements.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-pub(super) enum RegionElement {
+crate enum RegionElement {
/// A point in the control-flow graph.
Location(Location),
UniversalRegion(RegionVid),
}
-pub(super) trait ToElementIndex: Debug + Copy {
+crate trait ToElementIndex: Debug + Copy {
fn to_element_index(self, elements: &RegionValueElements) -> RegionElementIndex;
}
/// variable. The columns consist of either universal regions or
/// points in the CFG.
#[derive(Clone)]
-pub(super) struct RegionValues {
+crate struct RegionValues<N: Idx> {
elements: Rc<RegionValueElements>,
- matrix: SparseBitMatrix<RegionVid, RegionElementIndex>,
+ matrix: SparseBitMatrix<N, RegionElementIndex>,
}
-impl RegionValues {
+impl<N: Idx> RegionValues<N> {
/// Creates a new set of "region values" that tracks causal information.
/// Each of the regions in num_region_variables will be initialized with an
/// empty set of points and no causal information.
- pub(super) fn new(elements: &Rc<RegionValueElements>, num_region_variables: usize) -> Self {
+ crate fn new(elements: &Rc<RegionValueElements>, num_region_variables: usize) -> Self {
assert!(
elements.num_universal_regions <= num_region_variables,
"universal regions are a subset of the region variables"
Self {
elements: elements.clone(),
matrix: SparseBitMatrix::new(
- RegionVid::new(num_region_variables),
+ N::new(num_region_variables),
RegionElementIndex::new(elements.num_elements()),
),
}
/// Adds the given element to the value for the given region. Returns true if
/// the element is newly added (i.e., was not already present).
- pub(super) fn add_element<E: ToElementIndex>(&mut self, r: RegionVid, elem: E) -> bool {
+ crate fn add_element(
+ &mut self,
+ r: N,
+ elem: impl ToElementIndex,
+ ) -> bool {
let i = self.elements.index(elem);
debug!("add(r={:?}, elem={:?})", r, elem);
self.matrix.add(r, i)
/// Add all elements in `r_from` to `r_to` (because e.g. `r_to:
/// r_from`).
- pub(super) fn add_region(&mut self, r_to: RegionVid, r_from: RegionVid) -> bool {
+ crate fn add_region(&mut self, r_to: N, r_from: N) -> bool {
self.matrix.merge(r_from, r_to)
}
/// True if the region `r` contains the given element.
- pub(super) fn contains<E: ToElementIndex>(&self, r: RegionVid, elem: E) -> bool {
+ crate fn contains(&self, r: N, elem: impl ToElementIndex) -> bool {
let i = self.elements.index(elem);
self.matrix.contains(r, i)
}
/// True if `sup_region` contains all the CFG points that
/// `sub_region` contains. Ignores universal regions.
- pub(super) fn contains_points(&self, sup_region: RegionVid, sub_region: RegionVid) -> bool {
+ crate fn contains_points(&self, sup_region: N, sub_region: N) -> bool {
// This could be done faster by comparing the bitsets. But I
// am lazy.
self.element_indices_contained_in(sub_region)
/// Iterate over the value of the region `r`, yielding up element
/// indices. You may prefer `universal_regions_outlived_by` or
/// `elements_contained_in`.
- pub(super) fn element_indices_contained_in<'a>(
+ crate fn element_indices_contained_in<'a>(
&'a self,
- r: RegionVid,
+ r: N,
) -> impl Iterator<Item = RegionElementIndex> + 'a {
self.matrix.iter(r).map(move |i| i)
}
/// Returns just the universal regions that are contained in a given region's value.
- pub(super) fn universal_regions_outlived_by<'a>(
+ crate fn universal_regions_outlived_by<'a>(
&'a self,
- r: RegionVid,
+ r: N,
) -> impl Iterator<Item = RegionVid> + 'a {
self.element_indices_contained_in(r)
.map(move |i| self.elements.to_universal_region(i))
}
/// Returns all the elements contained in a given region's value.
- pub(super) fn elements_contained_in<'a>(
+ crate fn elements_contained_in<'a>(
&'a self,
- r: RegionVid,
+ r: N,
) -> impl Iterator<Item = RegionElement> + 'a {
self.element_indices_contained_in(r)
.map(move |r| self.elements.to_element(r))
}
/// Returns a "pretty" string value of the region. Meant for debugging.
- pub(super) fn region_value_str(&self, r: RegionVid) -> String {
+ crate fn region_value_str(&self, r: N) -> String {
let mut result = String::new();
result.push_str("{");
// except according to those terms.
use borrow_check::location::LocationTable;
-use borrow_check::nll::constraint_set::OutlivesConstraint;
+use borrow_check::nll::constraints::{ConstraintSet, OutlivesConstraint};
use borrow_check::nll::facts::AllFacts;
use borrow_check::nll::region_infer::{RegionTest, TypeTest};
use borrow_check::nll::type_check::Locations;
use borrow_check::nll::universal_regions::UniversalRegions;
-use borrow_check::nll::constraint_set::ConstraintSet;
use rustc::infer::canonical::QueryRegionConstraint;
use rustc::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
use rustc::infer::region_constraints::{GenericKind, VerifyBound};
locations: self.locations,
sub,
sup,
- next: None,
});
}
//! This pass type-checks the MIR to ensure it is not broken.
#![allow(unreachable_code)]
+use borrow_check::borrow_set::BorrowSet;
use borrow_check::location::LocationTable;
-use borrow_check::nll::constraint_set::ConstraintSet;
+use borrow_check::nll::constraints::{ConstraintSet, OutlivesConstraint};
use borrow_check::nll::facts::AllFacts;
use borrow_check::nll::region_infer::{ClosureRegionRequirementsExt, TypeTest};
use borrow_check::nll::universal_regions::UniversalRegions;
+use borrow_check::nll::ToRegionVid;
use dataflow::move_paths::MoveData;
use dataflow::FlowAtLocation;
use dataflow::MaybeInitializedPlaces;
+use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::infer::canonical::QueryRegionConstraint;
use rustc::infer::region_constraints::GenericKind;
mir_def_id: DefId,
universal_regions: &UniversalRegions<'tcx>,
location_table: &LocationTable,
+ borrow_set: &BorrowSet<'tcx>,
liveness: &LivenessResults,
all_facts: &mut Option<AllFacts>,
flow_inits: &mut FlowAtLocation<MaybeInitializedPlaces<'_, 'gcx, 'tcx>>,
Some(BorrowCheckContext {
universal_regions,
location_table,
+ borrow_set,
all_facts,
}),
&mut |cx| {
) -> MirTypeckRegionConstraints<'tcx> {
let mut checker = TypeChecker::new(
infcx,
+ mir,
mir_def_id,
param_env,
region_bound_pairs,
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'gcx>,
last_span: Span,
+ mir: &'a Mir<'tcx>,
mir_def_id: DefId,
region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
universal_regions: &'a UniversalRegions<'tcx>,
location_table: &'a LocationTable,
all_facts: &'a mut Option<AllFacts>,
+ borrow_set: &'a BorrowSet<'tcx>,
}
/// A collection of region constraints that must be satisfied for the
impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
fn new(
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ mir: &'a Mir<'tcx>,
mir_def_id: DefId,
param_env: ty::ParamEnv<'gcx>,
region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
TypeChecker {
infcx,
last_span: DUMMY_SP,
+ mir,
mir_def_id,
param_env,
region_bound_pairs,
}
StatementKind::UserAssertTy(ref c_ty, ref local) => {
let local_ty = mir.local_decls()[*local].ty;
- let (ty, _) = self
- .infcx
+ let (ty, _) = self.infcx
.instantiate_canonical_with_fresh_inference_vars(stmt.source_info.span, c_ty);
debug!(
"check_stmt: user_assert_ty ty={:?} local_ty={:?}",
CastKind::Misc => {}
},
+ Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
+ self.add_reborrow_constraint(location, region, borrowed_place);
+ }
+
// FIXME: These other cases have to be implemented in future PRs
Rvalue::Use(..)
- | Rvalue::Ref(..)
| Rvalue::Len(..)
| Rvalue::BinaryOp(..)
| Rvalue::CheckedBinaryOp(..)
}
}
+ /// Add the constraints that arise from a borrow expression `&'a P` at the location `L`.
+ ///
+ /// # Parameters
+ ///
+ /// - `location`: the location `L` where the borrow expression occurs
+ /// - `borrow_region`: the region `'a` associated with the borrow
+ /// - `borrowed_place`: the place `P` being borrowed
+ fn add_reborrow_constraint(
+ &mut self,
+ location: Location,
+ borrow_region: ty::Region<'tcx>,
+ borrowed_place: &Place<'tcx>,
+ ) {
+ // These constraints are only meaningful during borrowck:
+ let BorrowCheckContext {
+ borrow_set,
+ location_table,
+ all_facts,
+ ..
+ } = match &mut self.borrowck_context {
+ Some(borrowck_context) => borrowck_context,
+ None => return,
+ };
+
+ // In Polonius mode, we also push a `borrow_region` fact
+ // linking the loan to the region (in some cases, though,
+ // there is no loan associated with this borrow expression --
+ // that occurs when we are borrowing an unsafe place, for
+ // example).
+ if let Some(all_facts) = all_facts {
+ if let Some(borrow_index) = borrow_set.location_map.get(&location) {
+ let region_vid = borrow_region.to_region_vid();
+ all_facts.borrow_region.push((
+ region_vid,
+ *borrow_index,
+ location_table.mid_index(location),
+ ));
+ }
+ }
+
+ // If we are reborrowing the referent of another reference, we
+ // need to add outlives relationships. In a case like `&mut
+ // *p`, where the `p` has type `&'b mut Foo`, for example, we
+ // need to ensure that `'b: 'a`.
+
+ let mut borrowed_place = borrowed_place;
+
+ debug!(
+ "add_reborrow_constraint({:?}, {:?}, {:?})",
+ location, borrow_region, borrowed_place
+ );
+ while let Place::Projection(box PlaceProjection { base, elem }) = borrowed_place {
+ debug!("add_reborrow_constraint - iteration {:?}", borrowed_place);
+
+ match *elem {
+ ProjectionElem::Deref => {
+ let tcx = self.infcx.tcx;
+ let base_ty = base.ty(self.mir, tcx).to_ty(tcx);
+
+ debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
+ match base_ty.sty {
+ ty::TyRef(ref_region, _, mutbl) => {
+ self.constraints
+ .outlives_constraints
+ .push(OutlivesConstraint {
+ sup: ref_region.to_region_vid(),
+ sub: borrow_region.to_region_vid(),
+ locations: location.boring(),
+ });
+
+ if let Some(all_facts) = all_facts {
+ all_facts.outlives.push((
+ ref_region.to_region_vid(),
+ borrow_region.to_region_vid(),
+ location_table.mid_index(location),
+ ));
+ }
+
+ match mutbl {
+ hir::Mutability::MutImmutable => {
+ // Immutable reference. We don't need the base
+ // to be valid for the entire lifetime of
+ // the borrow.
+ break;
+ }
+ hir::Mutability::MutMutable => {
+ // Mutable reference. We *do* need the base
+ // to be valid, because after the base becomes
+ // invalid, someone else can use our mutable deref.
+
+ // This is in order to make the following function
+ // illegal:
+ // ```
+ // fn unsafe_deref<'a, 'b>(x: &'a &'b mut T) -> &'b mut T {
+ // &mut *x
+ // }
+ // ```
+ //
+ // As otherwise you could clone `&mut T` using the
+ // following function:
+ // ```
+ // fn bad(x: &mut T) -> (&mut T, &mut T) {
+ // let my_clone = unsafe_deref(&'a x);
+ // ENDREGION 'a;
+ // (my_clone, x)
+ // }
+ // ```
+ }
+ }
+ }
+ ty::TyRawPtr(..) => {
+ // deref of raw pointer, guaranteed to be valid
+ break;
+ }
+ ty::TyAdt(def, _) if def.is_box() => {
+ // deref of `Box`, need the base to be valid - propagate
+ }
+ _ => bug!("unexpected deref ty {:?} in {:?}", base_ty, borrowed_place),
+ }
+ }
+ ProjectionElem::Field(..)
+ | ProjectionElem::Downcast(..)
+ | ProjectionElem::Index(..)
+ | ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Subslice { .. } => {
+ // other field access
+ }
+ }
+
+ // The "propagate" case. We need to check that our base is valid
+ // for the borrow's lifetime.
+ borrowed_place = base;
+ }
+ }
+
fn prove_aggregate_predicates(
&mut self,
aggregate_kind: &AggregateKind<'tcx>,
use rustc::mir::{BasicBlock, Location, Mir, Place};
use rustc::mir::{ProjectionElem, BorrowKind};
use rustc::ty::TyCtxt;
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::graph::dominators::Dominators;
/// Returns true if the borrow represented by `kind` is
/// allowed to be split into separate Reservation and
});
if let Some(scope) = scope {
// schedule a shallow free of that memory, lest we unwind:
- this.schedule_drop(expr_span, scope, &Place::Local(result), value.ty);
+ this.schedule_drop_storage_and_value(
+ expr_span, scope, &Place::Local(result), value.ty,
+ );
}
// malloc some memory of suitable type (thus far, uninitialized):
// anything because no values with a destructor can be created in
// a constant at this time, even if the type may need dropping.
if let Some(temp_lifetime) = temp_lifetime {
- this.schedule_drop(expr_span, temp_lifetime, &Place::Local(temp), expr_ty);
+ this.schedule_drop_storage_and_value(
+ expr_span, temp_lifetime, &Place::Local(temp), expr_ty,
+ );
}
block.and(temp)
use build::{BlockAnd, BlockAndExtension, Builder};
use build::{GuardFrame, GuardFrameLocal, LocalsForNode};
use build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
+use build::scope::{CachedBlock, DropKind};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::bitvec::BitVector;
use rustc::ty::{self, Ty};
source_info,
kind: StatementKind::StorageLive(local_id)
});
- Place::Local(local_id)
+ let place = Place::Local(local_id);
+ let var_ty = self.local_decls[local_id].ty;
+ let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
+ let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
+ self.schedule_drop(
+ span, region_scope, &place, var_ty,
+ DropKind::Storage,
+ );
+ place
}
pub fn schedule_drop_for_binding(&mut self,
let var_ty = self.local_decls[local_id].ty;
let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
- self.schedule_drop(span, region_scope, &Place::Local(local_id), var_ty);
+ self.schedule_drop(
+ span, region_scope, &Place::Local(local_id), var_ty,
+ DropKind::Value {
+ cached_block: CachedBlock::default(),
+ },
+ );
}
pub fn visit_bindings<F>(&mut self, pattern: &Pattern<'tcx>, f: &mut F)
use build;
+use build::scope::{CachedBlock, DropKind};
use hair::cx::Cx;
use hair::{LintLevel, BindingMode, PatternKind};
use rustc::hir;
}
// Make sure we drop (parts of) the argument even when not matched on.
- self.schedule_drop(pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
- argument_scope, &place, ty);
-
+ self.schedule_drop(
+ pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
+ argument_scope, &place, ty,
+ DropKind::Value { cached_block: CachedBlock::default() },
+ );
}
// Enter the argument pattern bindings source scope, if it exists.
/// place to drop
location: Place<'tcx>,
- /// Whether this is a full value Drop, or just a StorageDead.
- kind: DropKind
+ /// Whether this is a value Drop or a StorageDead.
+ kind: DropKind,
}
#[derive(Debug, Default, Clone, Copy)]
-struct CachedBlock {
+pub(crate) struct CachedBlock {
/// The cached block for the cleanups-on-diverge path. This block
/// contains code to run the current drop and all the preceding
/// drops (i.e. those having lower index in Drop’s Scope drop
}
#[derive(Debug)]
-enum DropKind {
+pub(crate) enum DropKind {
Value {
cached_block: CachedBlock,
},
abortblk
}
+ pub fn schedule_drop_storage_and_value(
+ &mut self,
+ span: Span,
+ region_scope: region::Scope,
+ place: &Place<'tcx>,
+ place_ty: Ty<'tcx>,
+ ) {
+ self.schedule_drop(
+ span, region_scope, place, place_ty,
+ DropKind::Storage,
+ );
+ self.schedule_drop(
+ span, region_scope, place, place_ty,
+ DropKind::Value {
+ cached_block: CachedBlock::default(),
+ },
+ );
+ }
+
// Scheduling drops
// ================
/// Indicates that `place` should be dropped on exit from
/// `region_scope`.
- pub fn schedule_drop(&mut self,
- span: Span,
- region_scope: region::Scope,
- place: &Place<'tcx>,
- place_ty: Ty<'tcx>) {
+ ///
+ /// When called with `DropKind::Storage`, `place` should be a local
+ /// with an index higher than the current `self.arg_count`.
+ pub fn schedule_drop(
+ &mut self,
+ span: Span,
+ region_scope: region::Scope,
+ place: &Place<'tcx>,
+ place_ty: Ty<'tcx>,
+ drop_kind: DropKind,
+ ) {
let needs_drop = self.hir.needs_drop(place_ty);
- let drop_kind = if needs_drop {
- DropKind::Value { cached_block: CachedBlock::default() }
- } else {
- // Only temps and vars need their storage dead.
- match *place {
- Place::Local(index) if index.index() > self.arg_count => DropKind::Storage,
- _ => return
+ match drop_kind {
+ DropKind::Value { .. } => if !needs_drop { return },
+ DropKind::Storage => {
+ match *place {
+ Place::Local(index) => if index.index() <= self.arg_count {
+ span_bug!(
+ span, "`schedule_drop` called with index {} and arg_count {}",
+ index.index(),
+ self.arg_count,
+ )
+ },
+ _ => span_bug!(
+ span, "`schedule_drop` called with non-`Local` place {:?}", place
+ ),
+ }
}
- };
+ }
for scope in self.scopes.iter_mut().rev() {
let this_scope = scope.region_scope == region_scope;
});
block = next;
}
- DropKind::Storage => {}
- }
-
- // We do not need to emit StorageDead for generator drops
- if generator_drop {
- continue
- }
+ DropKind::Storage => {
+ // We do not need to emit StorageDead for generator drops
+ if generator_drop {
+ continue
+ }
- // Drop the storage for both value and storage drops.
- // Only temps and vars need their storage dead.
- match drop_data.location {
- Place::Local(index) if index.index() > arg_count => {
- cfg.push(block, Statement {
- source_info,
- kind: StatementKind::StorageDead(index)
- });
+ // Drop the storage for both value and storage drops.
+ // Only temps and vars need their storage dead.
+ match drop_data.location {
+ Place::Local(index) if index.index() > arg_count => {
+ cfg.push(block, Statement {
+ source_info,
+ kind: StatementKind::StorageDead(index)
+ });
+ }
+ _ => unreachable!(),
+ }
}
- _ => continue
}
}
block.unit()
while let Some(location) = stack.pop() {
// If region does not contain a point at the location, then add to list and skip
// successor locations.
- if !regioncx.region_contains_point(borrow_region, location) {
+ if !regioncx.region_contains(borrow_region, location) {
debug!("borrow {:?} gets killed at {:?}", borrow_index, location);
borrows_out_of_scope_at_location
.entry(location)
match directive.subclass {
_ if directive.used.get() ||
directive.vis.get() == ty::Visibility::Public ||
- directive.span.is_dummy() => {}
+ directive.span.is_dummy() => {
+ if let ImportDirectiveSubclass::MacroUse = directive.subclass {
+ if resolver.session.features_untracked().use_extern_macros &&
+ !directive.span.is_dummy() {
+ resolver.session.buffer_lint(
+ lint::builtin::MACRO_USE_EXTERN_CRATE,
+ directive.id,
+ directive.span,
+ "deprecated `#[macro_use]` directive used to \
+ import macros should be replaced at use sites \
+ with a `use` statement to import the macro \
+ instead",
+ );
+ }
+ }
+ }
ImportDirectiveSubclass::ExternCrate(_) => {
resolver.maybe_unused_extern_crates.push((directive.id, directive.span));
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![deny(bare_trait_objects)]
+
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
/// This is the visitor that walks the whole crate.
pub struct Resolver<'a> {
session: &'a Session,
- cstore: &'a CrateStore,
+ cstore: &'a dyn CrateStore,
pub definitions: Definitions,
/// true if `#![feature(use_extern_macros)]`
use_extern_macros: bool,
- crate_loader: &'a mut CrateLoader,
+ crate_loader: &'a mut dyn CrateLoader,
macro_names: FxHashSet<Ident>,
global_macros: FxHashMap<Name, &'a NameBinding<'a>>,
pub all_macros: FxHashMap<Name, Def>,
impl<'a> Resolver<'a> {
pub fn new(session: &'a Session,
- cstore: &'a CrateStore,
+ cstore: &'a dyn CrateStore,
krate: &Crate,
crate_name: &str,
make_glob_map: MakeGlobMap,
- crate_loader: &'a mut CrateLoader,
+ crate_loader: &'a mut dyn CrateLoader,
arenas: &'a ResolverArenas<'a>)
-> Resolver<'a> {
let root_def_id = DefId::local(CRATE_DEF_INDEX);
Err(Determinacy::Determined) => {}
}
+ // Ok at this point we've determined that the `attr` above doesn't
+ // actually resolve at this time, so we may want to report an error.
+ // It could be the case, though, that `attr` won't ever resolve! If
+ // there's a custom derive that could be used it might declare `attr` as
+ // a custom attribute accepted by the derive. In this case we don't want
+ // to report this particular invocation as unresolved, but rather we'd
+ // want to move on to the next invocation.
+ //
+ // This loop here looks through all of the derive annotations in scope
+ // and tries to resolve them. If they themselves successfully resolve
+ // *and* the resolve mentions that this attribute's name is a registered
+ // custom attribute then we flag this attribute as known and update
+ // `invoc` above to point to the next invocation.
+ //
+ // By then returning `Undetermined` we should continue resolution to
+ // resolve the next attribute.
let attr_name = match path.segments.len() {
1 => path.segments[0].ident.name,
_ => return Err(determinacy),
attrs.push(inert_attr);
attrs
});
+ return Err(Determinacy::Undetermined)
}
- return Err(Determinacy::Undetermined);
},
Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined,
Err(Determinacy::Determined) => {}
}
pub struct CallbackOutput<'b> {
- callback: &'b mut FnMut(&Analysis),
+ callback: &'b mut dyn FnMut(&Analysis),
}
impl<'b> DumpOutput for CallbackOutput<'b> {
impl<'b> JsonDumper<CallbackOutput<'b>> {
pub fn with_callback(
- callback: &'b mut FnMut(&Analysis),
+ callback: &'b mut dyn FnMut(&Analysis),
config: Config,
) -> JsonDumper<CallbackOutput<'b>> {
JsonDumper {
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(custom_attribute)]
#![allow(unused_attributes)]
+#![deny(bare_trait_objects)]
#![recursion_limit="256"]
/// Call a callback with the results of save-analysis.
pub struct CallbackHandler<'b> {
- pub callback: &'b mut FnMut(&rls_data::Analysis),
+ pub callback: &'b mut dyn FnMut(&rls_data::Analysis),
}
impl<'b> SaveHandler for CallbackHandler<'b> {
/// This type must not appear anywhere in other converted types.
const TRAIT_OBJECT_DUMMY_SELF: ty::TypeVariants<'static> = ty::TyInfer(ty::FreshTy(0));
-impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
+impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx>+'o {
pub fn ast_region_to_region(&self,
lifetime: &hir::Lifetime,
def: Option<&ty::GenericParamDef>)
/// If there is no expected signature, then we will convert the
/// types that the user gave into a signature.
fn supplied_sig_of_closure(&self, decl: &hir::FnDecl) -> ty::PolyFnSig<'tcx> {
- let astconv: &AstConv = self;
+ let astconv: &dyn AstConv = self;
// First, convert the types that the user supplied (if any).
let supplied_arguments = decl.inputs.iter().map(|a| astconv.ast_ty_to_ty(a));
/// so should yield an error, but returns back a signature where
/// all parameters are of type `TyErr`.
fn error_sig_of_closure(&self, decl: &hir::FnDecl) -> ty::PolyFnSig<'tcx> {
- let astconv: &AstConv = self;
+ let astconv: &dyn AstConv = self;
let supplied_arguments = decl.inputs.iter().map(|a| {
// Convert the types that the user supplied (if any), but ignore them.
pub fn coerce_forced_unit<'a>(&mut self,
fcx: &FnCtxt<'a, 'gcx, 'tcx>,
cause: &ObligationCause<'tcx>,
- augment_error: &mut FnMut(&mut DiagnosticBuilder),
+ augment_error: &mut dyn FnMut(&mut DiagnosticBuilder),
label_unit_as_expected: bool)
{
self.coerce_inner(fcx,
cause: &ObligationCause<'tcx>,
expression: Option<&'gcx hir::Expr>,
mut expression_ty: Ty<'tcx>,
- augment_error: Option<&mut FnMut(&mut DiagnosticBuilder)>,
+ augment_error: Option<&mut dyn FnMut(&mut DiagnosticBuilder)>,
label_expression_as_expected: bool)
{
// Incorporate whatever type inference information we have
}
}
- fn resolve<T>(&self, x: &T, span: &Locatable) -> T::Lifted
+ fn resolve<T>(&self, x: &T, span: &dyn Locatable) -> T::Lifted
where
T: TypeFoldable<'tcx> + ty::Lift<'gcx>,
{
struct Resolver<'cx, 'gcx: 'cx + 'tcx, 'tcx: 'cx> {
tcx: TyCtxt<'cx, 'gcx, 'tcx>,
infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
- span: &'cx Locatable,
+ span: &'cx dyn Locatable,
body: &'gcx hir::Body,
}
impl<'cx, 'gcx, 'tcx> Resolver<'cx, 'gcx, 'tcx> {
fn new(
fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>,
- span: &'cx Locatable,
+ span: &'cx dyn Locatable,
body: &'gcx hir::Body,
) -> Resolver<'cx, 'gcx, 'tcx> {
Resolver {
let cause = ObligationCause::misc(span, impl_node_id);
let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>,
mt_b: ty::TypeAndMut<'gcx>,
- mk_ptr: &Fn(Ty<'gcx>) -> Ty<'gcx>| {
+ mk_ptr: &dyn Fn(Ty<'gcx>) -> Ty<'gcx>| {
if (mt_a.mutbl, mt_b.mutbl) == (hir::MutImmutable, hir::MutMutable) {
infcx.report_mismatched_types(&cause,
mk_ptr(mt_b.ty),
}
// Is it marked with ?Sized
-fn is_unsized<'gcx: 'tcx, 'tcx>(astconv: &AstConv<'gcx, 'tcx>,
+fn is_unsized<'gcx: 'tcx, 'tcx>(astconv: &dyn AstConv<'gcx, 'tcx>,
ast_bounds: &[hir::GenericBound],
span: Span) -> bool
{
/// Translate the AST's notion of ty param bounds (which are an enum consisting of a newtyped Ty or
/// a region) to ty's notion of ty param bounds, which can either be user-defined traits, or the
/// built-in trait (formerly known as kind): Send.
-pub fn compute_bounds<'gcx: 'tcx, 'tcx>(astconv: &AstConv<'gcx, 'tcx>,
+pub fn compute_bounds<'gcx: 'tcx, 'tcx>(astconv: &dyn AstConv<'gcx, 'tcx>,
param_ty: Ty<'tcx>,
ast_bounds: &[hir::GenericBound],
sized_by_default: SizedByDefault,
/// because this can be anywhere from 0 predicates (`T:?Sized` adds no
/// predicates) to 1 (`T:Foo`) to many (`T:Bar<X=i32>` adds `T:Bar`
/// and `<T as Bar>::X == i32`).
-fn predicates_from_bound<'tcx>(astconv: &AstConv<'tcx, 'tcx>,
+fn predicates_from_bound<'tcx>(astconv: &dyn AstConv<'tcx, 'tcx>,
param_ty: Ty<'tcx>,
bound: &hir::GenericBound)
-> Vec<ty::Predicate<'tcx>>
```compile_fail,E0225
fn main() {
- let _: Box<std::io::Read + std::io::Write>;
+ let _: Box<dyn std::io::Read + std::io::Write>;
}
```
```
fn main() {
- let _: Box<std::io::Read + Send + Sync>;
+ let _: Box<dyn std::io::Read + Send + Sync>;
}
```
"##,
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![allow(non_camel_case_types)]
+#![deny(bare_trait_objects)]
#![feature(box_patterns)]
#![feature(box_syntax)]
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_alphabetic)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_alphabetic)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_alphabetic(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_uppercase)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_uppercase)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_uppercase(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_lowercase)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_lowercase)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_lowercase(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_alphanumeric)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_alphanumeric)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_alphanumeric(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_digit)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_digit)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_digit(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_hexdigit)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_hexdigit)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_hexdigit(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_punctuation)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_punctuation)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_punctuation(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_graphic)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_graphic)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_graphic(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_whitespace)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_whitespace)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_whitespace(&self) -> bool { unimplemented!(); }
/// # Note
///
/// This method will be deprecated in favor of the identically-named
- /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+ /// inherent methods on `u8` and `char`.
+ /// For `[u8]` use `.iter().all(u8::is_ascii_control)`.
+ /// For `str` use `.bytes().all(u8::is_ascii_control)`.
#[unstable(feature = "ascii_ctype", issue = "39658")]
#[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
fn is_ascii_control(&self) -> bool { unimplemented!(); }
/// ```
#[rustc_deprecated(since = "1.29.0",
reason = "This function's behavior is unexpected and probably not what you want. \
- Consider using the home_dir function from crates.io/crates/dirs instead.")]
+ Consider using the home_dir function from https://crates.io/crates/dirs instead.")]
#[stable(feature = "env", since = "1.0.0")]
pub fn home_dir() -> Option<PathBuf> {
os_imp::home_dir()
}
}
+#[stable(feature = "more_box_slice_clone", since = "1.29.0")]
+impl Clone for Box<CStr> {
+ #[inline]
+ fn clone(&self) -> Self {
+ (**self).into()
+ }
+}
+
#[stable(feature = "box_from_c_string", since = "1.20.0")]
impl From<CString> for Box<CStr> {
#[inline]
}
}
+#[stable(feature = "more_box_slice_clone", since = "1.29.0")]
+impl Clone for Box<OsStr> {
+ #[inline]
+ fn clone(&self) -> Self {
+ self.to_os_string().into_boxed_os_str()
+ }
+}
+
#[stable(feature = "shared_from_slice2", since = "1.24.0")]
impl From<OsString> for Arc<OsStr> {
#[inline]
}
}
+#[stable(feature = "more_box_slice_clone", since = "1.29.0")]
+impl Clone for Box<Path> {
+ #[inline]
+ fn clone(&self) -> Self {
+ self.to_path_buf().into_boxed_path()
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized + AsRef<OsStr>> From<&'a T> for PathBuf {
fn from(s: &'a T) -> PathBuf {
/// only one [`Receiver`] is supported.
///
/// If the [`Receiver`] is disconnected while trying to [`send`] with the
-/// [`Sender`], the [`send`] method will return a [`SendError`]. Similarly, If the
+/// [`Sender`], the [`send`] method will return a [`SendError`]. Similarly, if the
/// [`Sender`] is disconnected while trying to [`recv`], the [`recv`] method will
/// return a [`RecvError`].
///
// fallback implementation to use as well.
//
// Due to rust-lang/rust#18804, make sure this is not generic!
-#[cfg(target_os = "linux")]
+#[cfg(any(target_os = "linux", target_os = "fuchsia"))]
pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern fn(*mut u8)) {
use libc;
use mem;
_tlv_atexit(dtor, t);
}
-// Just use the thread_local fallback implementation, at least until there's
-// a more direct implementation.
-#[cfg(target_os = "fuchsia")]
-pub use sys_common::thread_local::register_dtor_fallback as register_dtor;
-
pub fn requires_move_before_drop() -> bool {
// The macOS implementation of TLS apparently had an odd aspect to it
// where the pointer we have may be overwritten while this destructor
pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[tokenstream::TokenTree])
- -> Box<base::MacResult + 'cx> {
+ -> Box<dyn base::MacResult + 'cx> {
if !cx.ecfg.enable_asm() {
feature_gate::emit_feature_err(&cx.parse_sess,
"asm",
cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[TokenTree],
-) -> Box<MacResult + 'cx> {
+) -> Box<dyn MacResult + 'cx> {
let mut parser = cx.new_parser_from_tts(tts);
let cond_expr = panictry!(parser.parse_expr());
let custom_msg_args = if parser.eat(&token::Comma) {
pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
sp: Span,
tts: &[tokenstream::TokenTree])
- -> Box<base::MacResult + 'static> {
+ -> Box<dyn base::MacResult + 'static> {
let sp = sp.apply_mark(cx.current_expansion.mark);
let mut p = cx.new_parser_from_tts(tts);
let cfg = panictry!(p.parse_meta_item());
pub fn expand_compile_error<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[tokenstream::TokenTree])
- -> Box<base::MacResult + 'cx> {
+ -> Box<dyn base::MacResult + 'cx> {
let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
None => return DummyResult::expr(sp),
Some(v) => v,
cx: &mut base::ExtCtxt,
sp: syntax_pos::Span,
tts: &[tokenstream::TokenTree],
-) -> Box<base::MacResult + 'static> {
+) -> Box<dyn base::MacResult + 'static> {
let es = match base::get_exprs_from_tts(cx, sp, tts) {
Some(e) => e,
None => return base::DummyResult::expr(sp),
pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[TokenTree])
- -> Box<base::MacResult + 'cx> {
+ -> Box<dyn base::MacResult + 'cx> {
if !cx.ecfg.enable_concat_idents() {
feature_gate::emit_feature_err(&cx.parse_sess,
"concat_idents",
span: Span,
_: &MetaItem,
_: &Annotatable,
- _: &mut FnMut(Annotatable)) {
+ _: &mut dyn FnMut(Annotatable)) {
cx.span_err(span, "this unsafe trait should be implemented explicitly");
}
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
let trait_def = TraitDef {
span,
attributes: Vec::new(),
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
// check if we can use a short form
//
// the short form is `fn clone(&self) -> Self { *self }`
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
let inline = cx.meta_word(span, Symbol::intern("inline"));
let hidden = cx.meta_list_item_word(span, Symbol::intern("hidden"));
let doc = cx.meta_list(span, Symbol::intern("doc"), vec![hidden]);
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)];
let trait_def = TraitDef {
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
// structures are equal if all fields are equal, and non equal, if
// any fields are not equal or if the enum variants are different
fn cs_op(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
macro_rules! md {
($name:expr, $op:expr, $equal:expr) => { {
let inline = cx.meta_word(span, Symbol::intern("inline"));
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
// &mut ::std::fmt::Formatter
let fmtr = Ptr(Box::new(Literal(path_std!(cx, fmt::Formatter))),
Borrowed(None, ast::Mutability::Mutable));
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
expand_deriving_decodable_imp(cx, span, mitem, item, push, "rustc_serialize")
}
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
warn_if_deprecated(cx, span, "Decodable");
expand_deriving_decodable_imp(cx, span, mitem, item, push, "serialize")
}
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable),
+ push: &mut dyn FnMut(Annotatable),
krate: &'static str) {
let typaram = &*deriving::hygienic_type_parameter(item, "__D");
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)];
let trait_def = TraitDef {
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
expand_deriving_encodable_imp(cx, span, mitem, item, push, "rustc_serialize")
}
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
warn_if_deprecated(cx, span, "Encodable");
expand_deriving_encodable_imp(cx, span, mitem, item, push, "serialize")
}
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable),
+ push: &mut dyn FnMut(Annotatable),
krate: &'static str) {
let typaram = &*deriving::hygienic_type_parameter(item, "__S");
/// Combine the values of all the fields together. The last argument is
/// all the fields of all the structures.
pub type CombineSubstructureFunc<'a> =
- Box<FnMut(&mut ExtCtxt, Span, &Substructure) -> P<Expr> + 'a>;
+ Box<dyn FnMut(&mut ExtCtxt, Span, &Substructure) -> P<Expr> + 'a>;
/// Deal with non-matching enum variants. The tuple is a list of
/// identifiers (one for each `Self` argument, which could be any of the
/// holding the variant index value for each of the `Self` arguments. The
/// last argument is all the non-`Self` args of the method being derived.
pub type EnumNonMatchCollapsedFunc<'a> =
- Box<FnMut(&mut ExtCtxt, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>;
+ Box<dyn FnMut(&mut ExtCtxt, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>;
pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>)
-> RefCell<CombineSubstructureFunc<'a>> {
cx: &mut ExtCtxt,
mitem: &ast::MetaItem,
item: &'a Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
self.expand_ext(cx, mitem, item, push, false);
}
cx: &mut ExtCtxt,
mitem: &ast::MetaItem,
item: &'a Annotatable,
- push: &mut FnMut(Annotatable),
+ push: &mut dyn FnMut(Annotatable),
from_scratch: bool) {
match *item {
Annotatable::Item(ref item) => {
span: Span,
mitem: &MetaItem,
item: &Annotatable,
- push: &mut FnMut(Annotatable)) {
+ push: &mut dyn FnMut(Annotatable)) {
let path = Path::new_(pathvec_std!(cx, hash::Hash), None, vec![], PathKind::Std);
}
}
- pub fn register_builtin_derives(resolver: &mut Resolver) {
+ pub fn register_builtin_derives(resolver: &mut dyn Resolver) {
$(
resolver.add_builtin(
ast::Ident::with_empty_ctxt(Symbol::intern($name)),
pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[tokenstream::TokenTree])
- -> Box<base::MacResult + 'cx> {
+ -> Box<dyn base::MacResult + 'cx> {
let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
None => return DummyResult::expr(sp),
Some(v) => v,
pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[tokenstream::TokenTree])
- -> Box<base::MacResult + 'cx> {
+ -> Box<dyn base::MacResult + 'cx> {
let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
Some(ref exprs) if exprs.is_empty() => {
cx.span_err(sp, "env! takes 1 or 2 arguments");
pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt,
mut sp: Span,
tts: &[tokenstream::TokenTree])
- -> Box<base::MacResult + 'cx> {
+ -> Box<dyn base::MacResult + 'cx> {
sp = sp.apply_mark(ecx.current_expansion.mark);
match parse_args(ecx, sp, tts) {
Some((efmt, args, names)) => {
pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
- tts: &[tokenstream::TokenTree]) -> Box<base::MacResult + 'cx> {
+ tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult + 'cx> {
if !cx.ecfg.enable_global_asm() {
feature_gate::emit_feature_err(&cx.parse_sess,
MACRO,
//! Syntax extensions in the Rust compiler.
+#![deny(bare_trait_objects)]
+
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
use syntax::ext::hygiene;
use syntax::symbol::Symbol;
-pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
+pub fn register_builtins(resolver: &mut dyn syntax::ext::base::Resolver,
user_exts: Vec<NamedSyntaxExtension>,
enable_quotes: bool) {
deriving::register_builtin_derives(resolver);
pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt,
sp: syntax_pos::Span,
tts: &[tokenstream::TokenTree])
- -> Box<base::MacResult + 'cx> {
+ -> Box<dyn base::MacResult + 'cx> {
if !cx.ecfg.enable_log_syntax() {
feature_gate::emit_feature_err(&cx.parse_sess,
"log_syntax",
}
pub fn modify(sess: &ParseSess,
- resolver: &mut ::syntax::ext::base::Resolver,
+ resolver: &mut dyn (::syntax::ext::base::Resolver),
mut krate: ast::Crate,
is_proc_macro_crate: bool,
is_test_crate: bool,
}
fn collect_attr_proc_macro(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
- if let Some(_) = attr.meta_item_list() {
- self.handler.span_err(attr.span, "`#[proc_macro_attribute]` attribute
+ if !attr.is_word() {
+ self.handler.span_err(attr.span, "`#[proc_macro_attribute]` attribute \
does not take any arguments");
return;
}
}
fn collect_bang_proc_macro(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
- if let Some(_) = attr.meta_item_list() {
- self.handler.span_err(attr.span, "`#[proc_macro]` attribute
+ if !attr.is_word() {
+ self.handler.span_err(attr.span, "`#[proc_macro]` attribute \
does not take any arguments");
return;
}
pub fn expand_trace_macros(cx: &mut ExtCtxt,
sp: Span,
tt: &[TokenTree])
- -> Box<base::MacResult + 'static> {
+ -> Box<dyn base::MacResult + 'static> {
if !cx.ecfg.enable_trace_macros() {
feature_gate::emit_feature_err(&cx.parse_sess,
"trace_macros",
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -O
+// min-llvm-version 6.0
+
+#![crate_type = "lib"]
+
+// verify that LLVM recognizes a loop involving 0..=n and will const-fold it.
+
+//------------------------------------------------------------------------------
+// Example from original issue #45222
+
+fn foo2(n: u64) -> u64 {
+ let mut count = 0;
+ for _ in 0..n {
+ for j in (0..=n).rev() {
+ count += j;
+ }
+ }
+ count
+}
+
+// CHECK-LABEL: @check_foo2
+#[no_mangle]
+pub fn check_foo2() -> u64 {
+ // CHECK: ret i64 500005000000000
+ foo2(100000)
+}
+
+//------------------------------------------------------------------------------
+// Simplified example of #45222
+
+fn triangle_inc(n: u64) -> u64 {
+ let mut count = 0;
+ for j in 0 ..= n {
+ count += j;
+ }
+ count
+}
+
+// CHECK-LABEL: @check_triangle_inc
+#[no_mangle]
+pub fn check_triangle_inc() -> u64 {
+ // CHECK: ret i64 5000050000
+ triangle_inc(100000)
+}
+
+//------------------------------------------------------------------------------
+// Demo in #48012
+
+fn foo3r(n: u64) -> u64 {
+ let mut count = 0;
+ (0..n).for_each(|_| {
+ (0 ..= n).rev().for_each(|j| {
+ count += j;
+ })
+ });
+ count
+}
+
+// CHECK-LABEL: @check_foo3r
+#[no_mangle]
+pub fn check_foo3r() -> u64 {
+ // CHECK: ret i64 500005000000000
+ foo3r(100000)
+}
// CHECK: [[S__4:%[0-9]+]] = bitcast { i32, i32 }* %_4 to i8*
// CHECK: call void @llvm.lifetime.start{{.*}}(i{{[0-9 ]+}}, i8* [[S__4]])
-// CHECK: [[E_b:%[0-9]+]] = bitcast { i32, i32 }** %b to i8*
-// CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E_b]])
-
// CHECK: [[E__4:%[0-9]+]] = bitcast { i32, i32 }* %_4 to i8*
// CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E__4]])
+
+// CHECK: [[E_b:%[0-9]+]] = bitcast { i32, i32 }** %b to i8*
+// CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E_b]])
}
let c = 1;
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// We must mark a variable whose initialization fails due to an
+// abort statement as StorageDead.
+
+fn main() {
+ loop {
+ let beacon = {
+ match true {
+ false => 4,
+ true => break,
+ }
+ };
+ drop(&beacon);
+ }
+}
+
+// END RUST SOURCE
+// START rustc.main.mir_map.0.mir
+// fn main() -> (){
+// let mut _0: ();
+// scope 1 {
+// }
+// scope 2 {
+// let _2: i32;
+// }
+// let mut _1: ();
+// let mut _3: bool;
+// let mut _4: u8;
+// let mut _5: !;
+// let mut _6: ();
+// let mut _7: &i32;
+// bb0: {
+// goto -> bb1;
+// }
+// bb1: {
+// falseUnwind -> [real: bb3, cleanup: bb4];
+// }
+// bb2: {
+// goto -> bb29;
+// }
+// bb3: {
+// StorageLive(_2);
+// StorageLive(_3);
+// _3 = const true;
+// _4 = discriminant(_3);
+// switchInt(_3) -> [false: bb11, otherwise: bb10];
+// }
+// bb4: {
+// resume;
+// }
+// bb5: {
+// _2 = const 4i32;
+// goto -> bb14;
+// }
+// bb6: {
+// _0 = ();
+// goto -> bb15;
+// }
+// bb7: {
+// falseEdges -> [real: bb12, imaginary: bb8];
+// }
+// bb8: {
+// falseEdges -> [real: bb13, imaginary: bb9];
+// }
+// bb9: {
+// unreachable;
+// }
+// bb10: {
+// goto -> bb8;
+// }
+// bb11: {
+// goto -> bb7;
+// }
+// bb12: {
+// goto -> bb5;
+// }
+// bb13: {
+// goto -> bb6;
+// }
+// bb14: {
+// StorageDead(_3);
+// StorageLive(_7);
+// _7 = &_2;
+// _6 = const std::mem::drop(move _7) -> [return: bb28, unwind: bb4];
+// }
+// bb15: {
+// goto -> bb16;
+// }
+// bb16: {
+// goto -> bb17;
+// }
+// bb17: {
+// goto -> bb18;
+// }
+// bb18: {
+// goto -> bb19;
+// }
+// bb19: {
+// goto -> bb20;
+// }
+// bb20: {
+// StorageDead(_3);
+// goto -> bb21;
+// }
+// bb21: {
+// goto -> bb22;
+// }
+// bb22: {
+// StorageDead(_2);
+// goto -> bb23;
+// }
+// bb23: {
+// goto -> bb24;
+// }
+// bb24: {
+// goto -> bb25;
+// }
+// bb25: {
+// goto -> bb2;
+// }
+// bb26: {
+// _5 = ();
+// unreachable;
+// }
+// bb27: {
+// StorageDead(_5);
+// goto -> bb14;
+// }
+// bb28: {
+// StorageDead(_7);
+// _1 = ();
+// StorageDead(_2);
+// goto -> bb1;
+// }
+// bb29: {
+// return;
+// }
+// }
+// END rustc.main.mir_map.0.mir
// StorageDead(_5);
// _3 = &_4;
// _2 = ();
-// StorageDead(_3);
// StorageDead(_4);
+// StorageDead(_3);
// StorageLive(_6);
// _6 = const 1i32;
// _0 = ();
#[prelude_import]
use std::prelude::v1::*;
#[macro_use]
-extern crate std as std;
+extern crate std;
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
macro_rules! negative(( $ e : expr ) => { $ e < 0 });
fn main() { (1 as i32) < 0; }
-
fn main() {
negative!(1 as i32);
}
-
#[prelude_import]
use std::prelude::v1::*;
#[macro_use]
-extern crate std as std;
+extern crate std;
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro_derive(Foo)]
+pub fn foo(a: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+}
+
+#[proc_macro_derive(Bar, attributes(custom))]
+pub fn bar(a: TokenStream) -> TokenStream {
+ "".parse().unwrap()
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:custom-attr-only-one-derive.rs
+
+#![feature(rust_2018_preview)]
+
+#[macro_use]
+extern crate custom_attr_only_one_derive;
+
+#[derive(Bar, Foo)]
+#[custom = "test"]
+pub enum A {
+ B,
+ C,
+}
+
+fn main() {}
// Test inclusive range syntax.
-use std::ops::{RangeInclusive, RangeToInclusive};
+#![feature(range_is_empty)]
+#![allow(unused_comparisons)]
+
+use std::ops::RangeToInclusive;
fn foo() -> isize { 42 }
// Test that range syntax works in return statements
-fn return_range_to() -> RangeToInclusive<i32> { return ..=1; }
+pub fn return_range_to() -> RangeToInclusive<i32> { return ..=1; }
+
+#[derive(Debug)]
+struct P(u8);
pub fn main() {
let mut count = 0;
assert_eq!(count, 55);
let mut count = 0;
- let mut range = 0_usize..=10;
+ let range = 0_usize..=10;
for i in range {
assert!(i >= 0 && i <= 10);
count += i;
short.next();
assert_eq!(long.size_hint(), (255, Some(255)));
assert_eq!(short.size_hint(), (0, Some(0)));
- assert_eq!(short, 1..=0);
+ assert!(short.is_empty());
assert_eq!(long.len(), 255);
assert_eq!(short.len(), 0);
for i in 3..=251 {
assert_eq!(long.next(), Some(i));
}
- assert_eq!(long, 1..=0);
+ assert!(long.is_empty());
// check underflow
let mut narrow = 1..=0;
assert_eq!(narrow.next_back(), None);
- assert_eq!(narrow, 1..=0);
+ assert!(narrow.is_empty());
let mut zero = 0u8..=0;
assert_eq!(zero.next_back(), Some(0));
assert_eq!(zero.next_back(), None);
- assert_eq!(zero, 1..=0);
+ assert!(zero.is_empty());
let mut high = 255u8..=255;
assert_eq!(high.next_back(), Some(255));
assert_eq!(high.next_back(), None);
- assert_eq!(high, 1..=0);
+ assert!(high.is_empty());
// what happens if you have a nonsense range?
let mut nonsense = 10..=5;
assert_eq!(nonsense.next(), None);
- assert_eq!(nonsense, 10..=5);
+ assert!(nonsense.is_empty());
// output
assert_eq!(format!("{:?}", 0..=10), "0..=10");
assert_eq!(format!("{:?}", ..=10), "..=10");
- assert_eq!(format!("{:?}", long), "1..=0");
+ assert_eq!(format!("{:?}", 9..=6), "9..=6");
+
+    // ensure that constructing a RangeInclusive does not require a PartialOrd bound
+ assert_eq!(format!("{:?}", P(1)..=P(2)), "P(1)..=P(2)");
}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(proc_macro)]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro = "test"] //~ ERROR: does not take any arguments
+pub fn a(a: TokenStream) -> TokenStream { a }
+
+#[proc_macro()] //~ ERROR: does not take any arguments
+pub fn c(a: TokenStream) -> TokenStream { a }
+
+#[proc_macro(x)] //~ ERROR: does not take any arguments
+pub fn d(a: TokenStream) -> TokenStream { a }
+
+#[proc_macro_attribute = "test"] //~ ERROR: does not take any arguments
+pub fn e(_: TokenStream, a: TokenStream) -> TokenStream { a }
+
+#[proc_macro_attribute()] //~ ERROR: does not take any arguments
+pub fn g(_: TokenStream, a: TokenStream) -> TokenStream { a }
+
+#[proc_macro_attribute(x)] //~ ERROR: does not take any arguments
+pub fn h(_: TokenStream, a: TokenStream) -> TokenStream { a }
--- /dev/null
+error: `#[proc_macro]` attribute does not take any arguments
+ --> $DIR/invalid-attributes.rs:20:1
+ |
+LL | #[proc_macro = "test"] //~ ERROR: does not take any arguments
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: `#[proc_macro]` attribute does not take any arguments
+ --> $DIR/invalid-attributes.rs:23:1
+ |
+LL | #[proc_macro()] //~ ERROR: does not take any arguments
+ | ^^^^^^^^^^^^^^^
+
+error: `#[proc_macro]` attribute does not take any arguments
+ --> $DIR/invalid-attributes.rs:26:1
+ |
+LL | #[proc_macro(x)] //~ ERROR: does not take any arguments
+ | ^^^^^^^^^^^^^^^^
+
+error: `#[proc_macro_attribute]` attribute does not take any arguments
+ --> $DIR/invalid-attributes.rs:29:1
+ |
+LL | #[proc_macro_attribute = "test"] //~ ERROR: does not take any arguments
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `#[proc_macro_attribute]` attribute does not take any arguments
+ --> $DIR/invalid-attributes.rs:32:1
+ |
+LL | #[proc_macro_attribute()] //~ ERROR: does not take any arguments
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `#[proc_macro_attribute]` attribute does not take any arguments
+ --> $DIR/invalid-attributes.rs:35:1
+ |
+LL | #[proc_macro_attribute(x)] //~ ERROR: does not take any arguments
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 6 previous errors
+
cell_c,
|_outlives1, _outlives2, _outlives3, x, y| {
// Only works if 'x: 'y:
- let p = x.get();
+ let p = x.get(); //~ ERROR
//~^ WARN not reporting region error due to nll
- demand_y(x, y, p) //~ ERROR
+ demand_y(x, y, p)
},
);
}
warning: not reporting region error due to nll
--> $DIR/propagate-approximated-fail-no-postdom.rs:55:21
|
-LL | let p = x.get();
+LL | let p = x.get(); //~ ERROR
| ^^^^^^^
error: unsatisfied lifetime constraints
- --> $DIR/propagate-approximated-fail-no-postdom.rs:57:13
+ --> $DIR/propagate-approximated-fail-no-postdom.rs:55:21
|
LL | |_outlives1, _outlives2, _outlives3, x, y| {
| ---------- ---------- lifetime `'2` appears in this argument
| |
| lifetime `'1` appears in this argument
-...
-LL | demand_y(x, y, p) //~ ERROR
- | ^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
+LL | // Only works if 'x: 'y:
+LL | let p = x.get(); //~ ERROR
+ | ^^^^^^^ argument requires that `'1` must outlive `'2`
note: No external requirements
--> $DIR/propagate-approximated-fail-no-postdom.rs:53:9
|
LL | / |_outlives1, _outlives2, _outlives3, x, y| {
LL | | // Only works if 'x: 'y:
-LL | | let p = x.get();
+LL | | let p = x.get(); //~ ERROR
LL | | //~^ WARN not reporting region error due to nll
-LL | | demand_y(x, y, p) //~ ERROR
+LL | | demand_y(x, y, p)
LL | | },
| |_________^
|
| ^^^
error: unsatisfied lifetime constraints
- --> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:33:9
+ --> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:33:20
|
LL | foo(cell, |cell_a, cell_x| {
| ------ ------ lifetime `'1` appears in this argument
| lifetime `'2` appears in this argument
LL | //~^ WARNING not reporting region error due to nll
LL | cell_a.set(cell_x.get()); // forces 'x: 'a, error in closure
- | ^^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
+ | ^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
note: No external requirements
--> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:31:15
| ^^^^^^^^^^^^^^^^^^^^^^^
error: unsatisfied lifetime constraints
- --> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:47:9
+ --> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:47:24
|
LL | establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
| --------- - lifetime `'1` appears in this argument
| lifetime `'2` appears in this argument
LL | // Only works if 'x: 'y:
LL | demand_y(x, y, x.get())
- | ^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
+ | ^^^^^^^ argument requires that `'1` must outlive `'2`
note: No external requirements
--> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:45:47
| ^^^^^^^^^^^^^^^^^^^^^^^
error: unsatisfied lifetime constraints
- --> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:51:9
+ --> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:51:24
|
LL | establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
| ---------- ---------- lifetime `'2` appears in this argument
| lifetime `'1` appears in this argument
LL | // Only works if 'x: 'y:
LL | demand_y(x, y, x.get())
- | ^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
+ | ^^^^^^^ argument requires that `'1` must outlive `'2`
note: No external requirements
--> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:49:47
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[macro_export]
+macro_rules! foo { () => () }
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:macro-use-warned-against.rs
+// aux-build:macro-use-warned-against2.rs
+// compile-pass
+
+#![warn(rust_2018_idioms, unused)]
+#![feature(use_extern_macros)]
+
+#[macro_use] //~ WARN should be replaced at use sites with a `use` statement
+extern crate macro_use_warned_against;
+#[macro_use] //~ WARN unused `#[macro_use]`
+extern crate macro_use_warned_against2;
+
+fn main() {
+ foo!();
+}
--- /dev/null
+warning: deprecated `#[macro_use]` directive used to import macros should be replaced at use sites with a `use` statement to import the macro instead
+ --> $DIR/macro-use-warned-against.rs:18:1
+ |
+LL | #[macro_use] //~ WARN should be replaced at use sites with a `use` statement
+ | ^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/macro-use-warned-against.rs:15:9
+ |
+LL | #![warn(rust_2018_idioms, unused)]
+ | ^^^^^^^^^^^^^^^^
+ = note: #[warn(macro_use_extern_crate)] implied by #[warn(rust_2018_idioms)]
+
+warning: unused `#[macro_use]` import
+ --> $DIR/macro-use-warned-against.rs:20:1
+ |
+LL | #[macro_use] //~ WARN unused `#[macro_use]`
+ | ^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/macro-use-warned-against.rs:15:27
+ |
+LL | #![warn(rust_2018_idioms, unused)]
+ | ^^^^^^
+ = note: #[warn(unused_imports)] implied by #[warn(unused)]
+
format!("clippy-{}-{}.tar.gz", self.clippy_release, target)
} else if component == "rustfmt" || component == "rustfmt-preview" {
format!("rustfmt-{}-{}.tar.gz", self.rustfmt_release, target)
- } else if component == "llvm_tools" {
+ } else if component == "llvm-tools" || component == "llvm-tools-preview" {
format!("llvm-tools-{}-{}.tar.gz", self.llvm_tools_release, target)
} else {
format!("{}-{}-{}.tar.gz", component, self.rust_release, target)
fn compare_source(&self, expected: &str, actual: &str) {
if expected != actual {
- self.error("pretty-printed source does not match expected source");
- println!(
- "\n\
+ self.fatal(&format!(
+ "pretty-printed source does not match expected source\n\
expected:\n\
------------------------------------------\n\
{}\n\
{}\n\
------------------------------------------\n\
\n",
- expected, actual
+ expected, actual)
);
}
}