]> git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #52299 - ljedrz:dyn_libserialize, r=cramertj
authorkennytm <kennytm@gmail.com>
Fri, 13 Jul 2018 18:56:46 +0000 (02:56 +0800)
committerGitHub <noreply@github.com>
Fri, 13 Jul 2018 18:56:46 +0000 (02:56 +0800)
Deny bare trait objects in src/libserialize

Enforce `#![deny(bare_trait_objects)]` in `src/libserialize`.

141 files changed:
src/Cargo.lock
src/Cargo.toml
src/bootstrap/bootstrap.py
src/bootstrap/builder.rs
src/bootstrap/cache.rs
src/bootstrap/compile.rs
src/bootstrap/configure.py
src/bootstrap/lib.rs
src/libcore/iter/mod.rs
src/libcore/iter/range.rs
src/libcore/num/flt2dec/strategy/dragon.rs
src/libcore/num/flt2dec/strategy/grisu.rs
src/libcore/ops/range.rs
src/libcore/option.rs
src/libcore/slice/mod.rs
src/libcore/str/mod.rs
src/libcore/tests/lib.rs
src/libcore/tests/option.rs
src/libcore/tests/slice.rs
src/librustc/cfg/construct.rs
src/librustc/cfg/mod.rs
src/librustc/dep_graph/query.rs
src/librustc/infer/lexical_region_resolve/mod.rs
src/librustc/lint/builtin.rs
src/librustc/middle/dataflow.rs
src/librustc/mir/mod.rs
src/librustc_codegen_llvm/mir/analyze.rs
src/librustc_data_structures/control_flow_graph/dominators/mod.rs [deleted file]
src/librustc_data_structures/control_flow_graph/dominators/test.rs [deleted file]
src/librustc_data_structures/control_flow_graph/iterate/mod.rs [deleted file]
src/librustc_data_structures/control_flow_graph/iterate/test.rs [deleted file]
src/librustc_data_structures/control_flow_graph/mod.rs [deleted file]
src/librustc_data_structures/control_flow_graph/reference.rs [deleted file]
src/librustc_data_structures/control_flow_graph/test.rs [deleted file]
src/librustc_data_structures/graph/dominators/mod.rs [new file with mode: 0644]
src/librustc_data_structures/graph/dominators/test.rs [new file with mode: 0644]
src/librustc_data_structures/graph/implementation/mod.rs [new file with mode: 0644]
src/librustc_data_structures/graph/implementation/tests.rs [new file with mode: 0644]
src/librustc_data_structures/graph/iterate/mod.rs [new file with mode: 0644]
src/librustc_data_structures/graph/iterate/test.rs [new file with mode: 0644]
src/librustc_data_structures/graph/mod.rs
src/librustc_data_structures/graph/reference.rs [new file with mode: 0644]
src/librustc_data_structures/graph/scc/mod.rs [new file with mode: 0644]
src/librustc_data_structures/graph/scc/test.rs [new file with mode: 0644]
src/librustc_data_structures/graph/test.rs [new file with mode: 0644]
src/librustc_data_structures/graph/tests.rs [deleted file]
src/librustc_data_structures/indexed_vec.rs
src/librustc_data_structures/lib.rs
src/librustc_incremental/assert_dep_graph.rs
src/librustc_lint/lib.rs
src/librustc_mir/borrow_check/mod.rs
src/librustc_mir/borrow_check/nll/constraint_generation.rs
src/librustc_mir/borrow_check/nll/constraint_set.rs [deleted file]
src/librustc_mir/borrow_check/nll/constraints/graph.rs [new file with mode: 0644]
src/librustc_mir/borrow_check/nll/constraints/mod.rs [new file with mode: 0644]
src/librustc_mir/borrow_check/nll/explain_borrow/find_use.rs
src/librustc_mir/borrow_check/nll/invalidation.rs
src/librustc_mir/borrow_check/nll/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs
src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/graphviz.rs
src/librustc_mir/borrow_check/nll/region_infer/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/values.rs
src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs
src/librustc_mir/borrow_check/nll/type_check/mod.rs
src/librustc_mir/borrow_check/path_utils.rs
src/librustc_mir/build/expr/as_rvalue.rs
src/librustc_mir/build/expr/as_temp.rs
src/librustc_mir/build/matches/mod.rs
src/librustc_mir/build/mod.rs
src/librustc_mir/build/scope.rs
src/librustc_mir/dataflow/impls/borrows.rs
src/librustc_resolve/check_unused.rs
src/librustc_resolve/lib.rs
src/librustc_resolve/macros.rs
src/librustc_save_analysis/json_dumper.rs
src/librustc_save_analysis/lib.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/closure.rs
src/librustc_typeck/check/coercion.rs
src/librustc_typeck/check/writeback.rs
src/librustc_typeck/coherence/builtin.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/diagnostics.rs
src/librustc_typeck/lib.rs
src/libstd/ascii.rs
src/libstd/env.rs
src/libstd/ffi/c_str.rs
src/libstd/ffi/os_str.rs
src/libstd/path.rs
src/libstd/sync/mpsc/mod.rs
src/libstd/sys/unix/fast_thread_local.rs
src/libsyntax_ext/asm.rs
src/libsyntax_ext/assert.rs
src/libsyntax_ext/cfg.rs
src/libsyntax_ext/compile_error.rs
src/libsyntax_ext/concat.rs
src/libsyntax_ext/concat_idents.rs
src/libsyntax_ext/deriving/bounds.rs
src/libsyntax_ext/deriving/clone.rs
src/libsyntax_ext/deriving/cmp/eq.rs
src/libsyntax_ext/deriving/cmp/ord.rs
src/libsyntax_ext/deriving/cmp/partial_eq.rs
src/libsyntax_ext/deriving/cmp/partial_ord.rs
src/libsyntax_ext/deriving/debug.rs
src/libsyntax_ext/deriving/decodable.rs
src/libsyntax_ext/deriving/default.rs
src/libsyntax_ext/deriving/encodable.rs
src/libsyntax_ext/deriving/generic/mod.rs
src/libsyntax_ext/deriving/hash.rs
src/libsyntax_ext/deriving/mod.rs
src/libsyntax_ext/env.rs
src/libsyntax_ext/format.rs
src/libsyntax_ext/global_asm.rs
src/libsyntax_ext/lib.rs
src/libsyntax_ext/log_syntax.rs
src/libsyntax_ext/proc_macro_registrar.rs
src/libsyntax_ext/trace_macros.rs
src/test/codegen/issue-45222.rs [new file with mode: 0644]
src/test/codegen/lifetime_start_end.rs
src/test/mir-opt/issue-49232.rs [new file with mode: 0644]
src/test/mir-opt/storage_ranges.rs
src/test/pretty/cast-lt.pp
src/test/pretty/cast-lt.rs
src/test/pretty/issue-4264.pp
src/test/run-pass-fulldeps/proc-macro/auxiliary/custom-attr-only-one-derive.rs [new file with mode: 0644]
src/test/run-pass-fulldeps/proc-macro/custom-attr-only-one-derive.rs [new file with mode: 0644]
src/test/run-pass/range_inclusive.rs
src/test/ui-fulldeps/proc-macro/invalid-attributes.rs [new file with mode: 0644]
src/test/ui-fulldeps/proc-macro/invalid-attributes.stderr [new file with mode: 0644]
src/test/ui/nll/closure-requirements/propagate-approximated-fail-no-postdom.rs
src/test/ui/nll/closure-requirements/propagate-approximated-fail-no-postdom.stderr
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.stderr
src/test/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.stderr
src/test/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.stderr
src/test/ui/rust-2018/auxiliary/macro-use-warned-against.rs [new file with mode: 0644]
src/test/ui/rust-2018/auxiliary/macro-use-warned-against2.rs [new file with mode: 0644]
src/test/ui/rust-2018/macro-use-warned-against.rs [new file with mode: 0644]
src/test/ui/rust-2018/macro-use-warned-against.stderr [new file with mode: 0644]
src/tools/build-manifest/src/main.rs
src/tools/compiletest/src/runtest.rs

index 73a2630911fa5324c1d2f88b60ce6448d8b054f8..539ab04af1a597af90fa72ca50db0ade79c71a80 100644 (file)
@@ -324,27 +324,6 @@ dependencies = [
 name = "clippy-mini-macro-test"
 version = "0.2.0"
 
-[[package]]
-name = "clippy_lints"
-version = "0.0.211"
-source = "git+https://github.com/rust-lang-nursery/rust-clippy?rev=6c70013f93a18c1ca7990efa8b1464acc6e18ce7#6c70013f93a18c1ca7990efa8b1464acc6e18ce7"
-dependencies = [
- "cargo_metadata 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "if_chain 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 1.0.69 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-normalization 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
 [[package]]
 name = "clippy_lints"
 version = "0.0.211"
@@ -1686,7 +1665,7 @@ version = "0.129.0"
 dependencies = [
  "cargo 0.30.0",
  "cargo_metadata 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "clippy_lints 0.0.211 (git+https://github.com/rust-lang-nursery/rust-clippy?rev=6c70013f93a18c1ca7990efa8b1464acc6e18ce7)",
+ "clippy_lints 0.0.211",
  "env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3062,7 +3041,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum chalk-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "295635afd6853aa9f20baeb7f0204862440c0fe994c5a253d5f479dac41d047e"
 "checksum chrono 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6962c635d530328acc53ac6a955e83093fedc91c5809dfac1fa60fa470830a37"
 "checksum clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b957d88f4b6a63b9d70d5f454ac8011819c6efa7727858f458ab71c756ce2d3e"
-"checksum clippy_lints 0.0.211 (git+https://github.com/rust-lang-nursery/rust-clippy?rev=6c70013f93a18c1ca7990efa8b1464acc6e18ce7)" = "<none>"
 "checksum cmake 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "95470235c31c726d72bf2e1f421adc1e65b9d561bf5529612cbe1a72da1467b3"
 "checksum colored 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b0aa3473e85a3161b59845d6096b289bb577874cafeaf75ea1b1beaa6572c7fc"
 "checksum commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007"
index 135bed6200ba9b9d30d2d285f59f5d44efc3c5bd..3cf48671e76def0327fe9b3a68698b9107ade294 100644 (file)
@@ -72,3 +72,4 @@ rustfmt-nightly = { path = "tools/rustfmt" }
 
 [patch."https://github.com/rust-lang-nursery/rust-clippy"]
 clippy = { path = "tools/clippy" }
+clippy_lints = { path = "tools/clippy/clippy_lints" }
index 512d4d8c5b792ce0e1958574ff59e0168996793c..71c1c61e3d97ebd51e9bd8cf5b9946cbcbe5ca5c 100644 (file)
@@ -303,6 +303,19 @@ def default_build_triple():
     return "{}-{}".format(cputype, ostype)
 
 
+@contextlib.contextmanager
+def output(filepath):
+    tmp = filepath + '.tmp'
+    with open(tmp, 'w') as f:
+        yield f
+    try:
+        os.remove(filepath)  # PermissionError/OSError on Win32 if in use
+        os.rename(tmp, filepath)
+    except OSError:
+        shutil.copy2(tmp, filepath)
+        os.remove(tmp)
+
+
 class RustBuild(object):
     """Provide all the methods required to build Rust"""
     def __init__(self):
@@ -346,7 +359,7 @@ class RustBuild(object):
             self._download_stage0_helper(filename, "rustc")
             self.fix_executable("{}/bin/rustc".format(self.bin_root()))
             self.fix_executable("{}/bin/rustdoc".format(self.bin_root()))
-            with open(self.rustc_stamp(), 'w') as rust_stamp:
+            with output(self.rustc_stamp()) as rust_stamp:
                 rust_stamp.write(self.date)
 
             # This is required so that we don't mix incompatible MinGW
@@ -363,7 +376,7 @@ class RustBuild(object):
             filename = "cargo-{}-{}.tar.gz".format(cargo_channel, self.build)
             self._download_stage0_helper(filename, "cargo")
             self.fix_executable("{}/bin/cargo".format(self.bin_root()))
-            with open(self.cargo_stamp(), 'w') as cargo_stamp:
+            with output(self.cargo_stamp()) as cargo_stamp:
                 cargo_stamp.write(self.date)
 
     def _download_stage0_helper(self, filename, pattern):
@@ -776,7 +789,7 @@ def bootstrap(help_triggered):
     if build.use_vendored_sources:
         if not os.path.exists('.cargo'):
             os.makedirs('.cargo')
-        with open('.cargo/config', 'w') as cargo_config:
+        with output('.cargo/config') as cargo_config:
             cargo_config.write("""
                 [source.crates-io]
                 replace-with = 'vendored-sources'
index 6c3a476d084db706a821ea2ec5589100d08e26de..eb534cb685e87a2afef58e8d5b53e79c99c11286 100644 (file)
@@ -44,7 +44,7 @@ pub struct Builder<'a> {
     pub top_stage: u32,
     pub kind: Kind,
     cache: Cache,
-    stack: RefCell<Vec<Box<Any>>>,
+    stack: RefCell<Vec<Box<dyn Any>>>,
     time_spent_on_dependencies: Cell<Duration>,
     pub paths: Vec<PathBuf>,
     graph_nodes: RefCell<HashMap<String, NodeIndex>>,
index d81c6bc28e52705f06c463ad55b0b137eb7fa2e6..bca5ff85ba23e846052ed1ed1b073f02579fe2ce 100644 (file)
@@ -249,7 +249,7 @@ pub fn intern_path(&self, s: PathBuf) -> Interned<PathBuf> {
 pub struct Cache(
     RefCell<HashMap<
         TypeId,
-        Box<Any>, // actually a HashMap<Step, Interned<Step::Output>>
+        Box<dyn Any>, // actually a HashMap<Step, Interned<Step::Output>>
     >>
 );
 
index 298bd58c6cdfe0545fc55271b948d46134e9e6a0..7d94bac66f7706aaf2fd10c19450c086954bae61 100644 (file)
@@ -1189,7 +1189,7 @@ pub fn run_cargo(builder: &Builder, cargo: &mut Command, stamp: &Path, is_check:
 pub fn stream_cargo(
     builder: &Builder,
     cargo: &mut Command,
-    cb: &mut FnMut(CargoMessage),
+    cb: &mut dyn FnMut(CargoMessage),
 ) -> bool {
     if builder.config.dry_run {
         return true;
index 80fa96509bd87f2a13cd563ebfd4086fdef2a306..9fdba044f4be3da82b035e5bc110c4a477f9085d 100755 (executable)
@@ -432,7 +432,7 @@ for section_key in config:
 # order that we read it in.
 p("")
 p("writing `config.toml` in current directory")
-with open('config.toml', 'w') as f:
+with bootstrap.output('config.toml') as f:
     for section in section_order:
         if section == 'target':
             for target in targets:
@@ -442,7 +442,7 @@ with open('config.toml', 'w') as f:
             for line in sections[section]:
                 f.write(line + "\n")
 
-with open('Makefile', 'w') as f:
+with bootstrap.output('Makefile') as f:
     contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
     contents = open(contents).read()
     contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/')
index 5f66d0b102e2676ae6395ba1ed3d7ad33b66039b..1efff19dfb993a69da84014b34587d77ab484499 100644 (file)
 //! More documentation can be found in each respective module below, and you can
 //! also check out the `src/bootstrap/README.md` file for more information.
 
+#![deny(bare_trait_objects)]
 #![deny(warnings)]
 #![feature(core_intrinsics)]
 #![feature(drain_filter)]
@@ -205,7 +206,8 @@ pub unsafe fn setup(_build: &mut ::Build) {
     "llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume
     "llvm-objdump", // used to disassemble programs
     "llvm-profdata", // used to inspect and merge files generated by profiles
-    "llvm-size", // prints the size of the linker sections of a program
+    "llvm-size", // used to print the size of the linker sections of a program
+    "llvm-strip", // used to discard symbols from binary files to reduce their size
 ];
 
 /// A structure representing a Rust compiler.
@@ -1174,13 +1176,13 @@ pub fn cp_r(&self, src: &Path, dst: &Path) {
     /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
     /// when this function is called. Unwanted files or directories can be skipped
     /// by returning `false` from the filter function.
-    pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &Fn(&Path) -> bool) {
+    pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) {
         // Immediately recurse with an empty relative path
         self.recurse_(src, dst, Path::new(""), filter)
     }
 
     // Inner function does the actual work
-    fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &Fn(&Path) -> bool) {
+    fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &dyn Fn(&Path) -> bool) {
         for f in self.read_dir(src) {
             let path = f.path();
             let name = path.file_name().unwrap();
index 86b297557dddb3961e98fbe33d4549485b59b513..35ae77411069cdf5c9a63fda2fedc313dad341b3 100644 (file)
@@ -787,17 +787,19 @@ impl<T> StepBySpecIterator for StepBy<ops::RangeInclusive<T>>
     #[inline]
     fn spec_next(&mut self) -> Option<Self::Item> {
         self.first_take = false;
-        if !(self.iter.start <= self.iter.end) {
+        self.iter.compute_is_empty();
+        if self.iter.is_empty.unwrap_or_default() {
             return None;
         }
         // add 1 to self.step to get original step size back
         // it was decremented for the general case on construction
         if let Some(n) = self.iter.start.add_usize(self.step+1) {
+            self.iter.is_empty = Some(!(n <= self.iter.end));
             let next = mem::replace(&mut self.iter.start, n);
             Some(next)
         } else {
-            let last = self.iter.start.replace_one();
-            self.iter.end.replace_zero();
+            let last = self.iter.start.clone();
+            self.iter.is_empty = Some(true);
             Some(last)
         }
     }
index 0b279f66b88d6e31ef5e7aa038bac239eb6369c9..651c7a35d413c6478b6e661b23dbd586208be6a6 100644 (file)
@@ -10,7 +10,7 @@
 
 use convert::TryFrom;
 use mem;
-use ops::{self, Add, Sub, Try};
+use ops::{self, Add, Sub};
 use usize;
 
 use super::{FusedIterator, TrustedLen};
@@ -330,23 +330,23 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
 
     #[inline]
     fn next(&mut self) -> Option<A> {
-        if self.start <= self.end {
-            if self.start < self.end {
-                let n = self.start.add_one();
-                Some(mem::replace(&mut self.start, n))
-            } else {
-                let last = self.start.replace_one();
-                self.end.replace_zero();
-                Some(last)
-            }
-        } else {
-            None
+        self.compute_is_empty();
+        if self.is_empty.unwrap_or_default() {
+            return None;
         }
+        let is_iterating = self.start < self.end;
+        self.is_empty = Some(!is_iterating);
+        Some(if is_iterating {
+            let n = self.start.add_one();
+            mem::replace(&mut self.start, n)
+        } else {
+            self.start.clone()
+        })
     }
 
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
-        if !(self.start <= self.end) {
+        if self.is_empty() {
             return (0, Some(0));
         }
 
@@ -358,25 +358,29 @@ fn size_hint(&self) -> (usize, Option<usize>) {
 
     #[inline]
     fn nth(&mut self, n: usize) -> Option<A> {
+        self.compute_is_empty();
+        if self.is_empty.unwrap_or_default() {
+            return None;
+        }
+
         if let Some(plus_n) = self.start.add_usize(n) {
             use cmp::Ordering::*;
 
             match plus_n.partial_cmp(&self.end) {
                 Some(Less) => {
+                    self.is_empty = Some(false);
                     self.start = plus_n.add_one();
                     return Some(plus_n)
                 }
                 Some(Equal) => {
-                    self.start.replace_one();
-                    self.end.replace_zero();
+                    self.is_empty = Some(true);
                     return Some(plus_n)
                 }
                 _ => {}
             }
         }
 
-        self.start.replace_one();
-        self.end.replace_zero();
+        self.is_empty = Some(true);
         None
     }
 
@@ -394,68 +398,24 @@ fn min(mut self) -> Option<A> {
     fn max(mut self) -> Option<A> {
         self.next_back()
     }
-
-    #[inline]
-    fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
-        Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
-    {
-        let mut accum = init;
-        if self.start <= self.end {
-            loop {
-                let (x, done) =
-                    if self.start < self.end {
-                        let n = self.start.add_one();
-                        (mem::replace(&mut self.start, n), false)
-                    } else {
-                        self.end.replace_zero();
-                        (self.start.replace_one(), true)
-                    };
-                accum = f(accum, x)?;
-                if done { break }
-            }
-        }
-        Try::from_ok(accum)
-    }
 }
 
 #[stable(feature = "inclusive_range", since = "1.26.0")]
 impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
     #[inline]
     fn next_back(&mut self) -> Option<A> {
-        if self.start <= self.end {
-            if self.start < self.end {
-                let n = self.end.sub_one();
-                Some(mem::replace(&mut self.end, n))
-            } else {
-                let last = self.end.replace_zero();
-                self.start.replace_one();
-                Some(last)
-            }
-        } else {
-            None
+        self.compute_is_empty();
+        if self.is_empty.unwrap_or_default() {
+            return None;
         }
-    }
-
-    #[inline]
-    fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
-        Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
-    {
-        let mut accum = init;
-        if self.start <= self.end {
-            loop {
-                let (x, done) =
-                    if self.start < self.end {
-                        let n = self.end.sub_one();
-                        (mem::replace(&mut self.end, n), false)
-                    } else {
-                        self.start.replace_one();
-                        (self.end.replace_zero(), true)
-                    };
-                accum = f(accum, x)?;
-                if done { break }
-            }
-        }
-        Try::from_ok(accum)
+        let is_iterating = self.start < self.end;
+        self.is_empty = Some(!is_iterating);
+        Some(if is_iterating {
+            let n = self.end.sub_one();
+            mem::replace(&mut self.end, n)
+        } else {
+            self.end.clone()
+        })
     }
 }
 
index 9c9e531c593c6cf44ea501a07e183785413c2a36..aa6a08cb2057ea34a4e1faed0dfa1c700a56b5a6 100644 (file)
@@ -8,12 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-/*!
-Almost direct (but slightly optimized) Rust translation of Figure 3 of \[1\].
-
-\[1\] Burger, R. G. and Dybvig, R. K. 1996. Printing floating-point numbers
-    quickly and accurately. SIGPLAN Not. 31, 5 (May. 1996), 108-116.
-*/
+//! Almost direct (but slightly optimized) Rust translation of Figure 3 of "Printing
+//! Floating-Point Numbers Quickly and Accurately"[^1].
+//!
+//! [^1]: Burger, R. G. and Dybvig, R. K. 1996. Printing floating-point numbers
+//!   quickly and accurately. SIGPLAN Not. 31, 5 (May. 1996), 108-116.
 
 use cmp::Ordering;
 
index 5c023a191db555fef2f38fee801a0b04fa9c08ae..f33186e59c2e6a65107aee8c436c8f852ab44e67 100644 (file)
@@ -8,13 +8,12 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-/*!
-Rust adaptation of Grisu3 algorithm described in \[1\]. It uses about
-1KB of precomputed table, and in turn, it's very quick for most inputs.
-
-\[1\] Florian Loitsch. 2010. Printing floating-point numbers quickly and
-    accurately with integers. SIGPLAN Not. 45, 6 (June 2010), 233-243.
-*/
+//! Rust adaptation of the Grisu3 algorithm described in "Printing Floating-Point Numbers Quickly
+//! and Accurately with Integers"[^1]. It uses about 1KB of precomputed table, and in turn, it's
+//! very quick for most inputs.
+//!
+//! [^1]: Florian Loitsch. 2010. Printing floating-point numbers quickly and
+//!   accurately with integers. SIGPLAN Not. 45, 6 (June 2010), 233-243.
 
 use num::diy_float::Fp;
 use num::flt2dec::{Decoded, MAX_SIG_DIGITS, round_up};
index 01e279589da98453c32a0293493164b4b25dc215..9c635678d7aa0b2da528bdaad32761238d60e431 100644 (file)
@@ -9,6 +9,7 @@
 // except according to those terms.
 
 use fmt;
+use hash::{Hash, Hasher};
 
 /// An unbounded range (`..`).
 ///
@@ -326,15 +327,56 @@ pub fn contains<U>(&self, item: &U) -> bool
 /// assert_eq!(arr[1..=2], [  1,2  ]);  // RangeInclusive
 /// ```
 #[doc(alias = "..=")]
-#[derive(Clone, PartialEq, Eq, Hash)]  // not Copy -- see #27186
+#[derive(Clone)]  // not Copy -- see #27186
 #[stable(feature = "inclusive_range", since = "1.26.0")]
 pub struct RangeInclusive<Idx> {
-    // FIXME: The current representation follows RFC 1980,
-    // but it is known that LLVM is not able to optimize loops following that RFC.
-    // Consider adding an extra `bool` field to indicate emptiness of the range.
-    // See #45222 for performance test cases.
     pub(crate) start: Idx,
     pub(crate) end: Idx,
+    pub(crate) is_empty: Option<bool>,
+    // This field is:
+    //  - `None` when next() or next_back() was never called
+    //  - `Some(false)` when `start <= end` assuming no overflow
+    //  - `Some(true)` otherwise
+    // The field cannot be a simple `bool` because the `..=` constructor can
+    // accept non-PartialOrd types, also we want the constructor to be const.
+}
+
+trait RangeInclusiveEquality: Sized {
+    fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool;
+}
+impl<T> RangeInclusiveEquality for T {
+    #[inline]
+    default fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool {
+        range.is_empty.unwrap_or_default()
+    }
+}
+impl<T: PartialOrd> RangeInclusiveEquality for T {
+    #[inline]
+    fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool {
+        range.is_empty()
+    }
+}
+
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl<Idx: PartialEq> PartialEq for RangeInclusive<Idx> {
+    #[inline]
+    fn eq(&self, other: &Self) -> bool {
+        self.start == other.start && self.end == other.end
+            && RangeInclusiveEquality::canonicalized_is_empty(self)
+                == RangeInclusiveEquality::canonicalized_is_empty(other)
+    }
+}
+
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl<Idx: Eq> Eq for RangeInclusive<Idx> {}
+
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl<Idx: Hash> Hash for RangeInclusive<Idx> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.start.hash(state);
+        self.end.hash(state);
+        RangeInclusiveEquality::canonicalized_is_empty(self).hash(state);
+    }
 }
 
 impl<Idx> RangeInclusive<Idx> {
@@ -350,7 +392,7 @@ impl<Idx> RangeInclusive<Idx> {
     #[stable(feature = "inclusive_range_methods", since = "1.27.0")]
     #[inline]
     pub const fn new(start: Idx, end: Idx) -> Self {
-        Self { start, end }
+        Self { start, end, is_empty: None }
     }
 
     /// Returns the lower bound of the range (inclusive).
@@ -492,8 +534,17 @@ pub fn contains<U>(&self, item: &U) -> bool
     /// assert!(r.is_empty());
     /// ```
     #[unstable(feature = "range_is_empty", reason = "recently added", issue = "48111")]
+    #[inline]
     pub fn is_empty(&self) -> bool {
-        !(self.start <= self.end)
+        self.is_empty.unwrap_or_else(|| !(self.start <= self.end))
+    }
+
+    // If this range's `is_empty` field is unknown (`None`), update it to be a concrete value.
+    #[inline]
+    pub(crate) fn compute_is_empty(&mut self) {
+        if self.is_empty.is_none() {
+            self.is_empty = Some(!(self.start <= self.end));
+        }
     }
 }
 
index 20bc173f7e1548043c5c28bebfb57649e5518472..f3e823670aaaba907d3871911e0aa39f1274363a 100644 (file)
@@ -845,6 +845,33 @@ pub fn get_or_insert_with<F: FnOnce() -> T>(&mut self, f: F) -> &mut T {
     pub fn take(&mut self) -> Option<T> {
         mem::replace(self, None)
     }
+
+    /// Replaces the actual value in the option with the given value,
+    /// returning the old value if present,
+    /// leaving a [`Some`] in its place without deinitializing either one.
+    ///
+    /// [`Some`]: #variant.Some
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(option_replace)]
+    ///
+    /// let mut x = Some(2);
+    /// let old = x.replace(5);
+    /// assert_eq!(x, Some(5));
+    /// assert_eq!(old, Some(2));
+    ///
+    /// let mut x = None;
+    /// let old = x.replace(3);
+    /// assert_eq!(x, Some(3));
+    /// assert_eq!(old, None);
+    /// ```
+    #[inline]
+    #[unstable(feature = "option_replace", issue = "51998")]
+    pub fn replace(&mut self, value: T) -> Option<T> {
+        mem::replace(self, Some(value))
+    }
 }
 
 impl<'a, T: Clone> Option<&'a T> {
index ed29d80cb903c58db79067c5c256cbae2135510a..b766140ffe99a1e37f292e729df55ecb4850ad57 100644 (file)
@@ -729,7 +729,8 @@ pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
     /// Returns an iterator over `chunk_size` elements of the slice at a
     /// time. The chunks are slices and do not overlap. If `chunk_size` does
     /// not divide the length of the slice, then the last up to `chunk_size-1`
-    /// elements will be omitted.
+    /// elements will be omitted and can be retrieved from the `remainder`
+    /// function of the iterator.
     ///
     /// Due to each chunk having exactly `chunk_size` elements, the compiler
     /// can often optimize the resulting code better than in the case of
@@ -758,14 +759,15 @@ pub fn exact_chunks(&self, chunk_size: usize) -> ExactChunks<T> {
         assert!(chunk_size != 0);
         let rem = self.len() % chunk_size;
         let len = self.len() - rem;
-        ExactChunks { v: &self[..len], chunk_size: chunk_size}
+        let (fst, snd) = self.split_at(len);
+        ExactChunks { v: fst, rem: snd, chunk_size: chunk_size}
     }
 
     /// Returns an iterator over `chunk_size` elements of the slice at a time.
     /// The chunks are mutable slices, and do not overlap. If `chunk_size` does
     /// not divide the length of the slice, then the last up to `chunk_size-1`
-    /// elements will be omitted.
-    ///
+    /// elements will be omitted and can be retrieved from the `into_remainder`
+    /// function of the iterator.
     ///
     /// Due to each chunk having exactly `chunk_size` elements, the compiler
     /// can often optimize the resulting code better than in the case of
@@ -799,7 +801,8 @@ pub fn exact_chunks_mut(&mut self, chunk_size: usize) -> ExactChunksMut<T> {
         assert!(chunk_size != 0);
         let rem = self.len() % chunk_size;
         let len = self.len() - rem;
-        ExactChunksMut { v: &mut self[..len], chunk_size: chunk_size}
+        let (fst, snd) = self.split_at_mut(len);
+        ExactChunksMut { v: fst, rem: snd, chunk_size: chunk_size}
     }
 
     /// Divides one slice into two at an index.
@@ -2262,36 +2265,36 @@ impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
 
     #[inline]
     fn get(self, slice: &[T]) -> Option<&[T]> {
-        if self.end == usize::max_value() { None }
-        else { (self.start..self.end + 1).get(slice) }
+        if *self.end() == usize::max_value() { None }
+        else { (*self.start()..self.end() + 1).get(slice) }
     }
 
     #[inline]
     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
-        if self.end == usize::max_value() { None }
-        else { (self.start..self.end + 1).get_mut(slice) }
+        if *self.end() == usize::max_value() { None }
+        else { (*self.start()..self.end() + 1).get_mut(slice) }
     }
 
     #[inline]
     unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
-        (self.start..self.end + 1).get_unchecked(slice)
+        (*self.start()..self.end() + 1).get_unchecked(slice)
     }
 
     #[inline]
     unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
-        (self.start..self.end + 1).get_unchecked_mut(slice)
+        (*self.start()..self.end() + 1).get_unchecked_mut(slice)
     }
 
     #[inline]
     fn index(self, slice: &[T]) -> &[T] {
-        if self.end == usize::max_value() { slice_index_overflow_fail(); }
-        (self.start..self.end + 1).index(slice)
+        if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
+        (*self.start()..self.end() + 1).index(slice)
     }
 
     #[inline]
     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
-        if self.end == usize::max_value() { slice_index_overflow_fail(); }
-        (self.start..self.end + 1).index_mut(slice)
+        if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
+        (*self.start()..self.end() + 1).index_mut(slice)
     }
 }
 
@@ -3657,25 +3660,39 @@ fn may_have_side_effect() -> bool { false }
 /// time).
 ///
 /// When the slice len is not evenly divided by the chunk size, the last
-/// up to `chunk_size-1` elements will be omitted.
+/// up to `chunk_size-1` elements will be omitted but can be retrieved from
+/// the [`remainder`] function from the iterator.
 ///
 /// This struct is created by the [`exact_chunks`] method on [slices].
 ///
 /// [`exact_chunks`]: ../../std/primitive.slice.html#method.exact_chunks
+/// [`remainder`]: ../../std/slice/struct.ExactChunks.html#method.remainder
 /// [slices]: ../../std/primitive.slice.html
 #[derive(Debug)]
 #[unstable(feature = "exact_chunks", issue = "47115")]
 pub struct ExactChunks<'a, T:'a> {
     v: &'a [T],
+    rem: &'a [T],
     chunk_size: usize
 }
 
+#[unstable(feature = "exact_chunks", issue = "47115")]
+impl<'a, T> ExactChunks<'a, T> {
+    /// Return the remainder of the original slice that is not going to be
+    /// returned by the iterator. The returned slice has at most `chunk_size-1`
+    /// elements.
+    pub fn remainder(&self) -> &'a [T] {
+        self.rem
+    }
+}
+
 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
 #[unstable(feature = "exact_chunks", issue = "47115")]
 impl<'a, T> Clone for ExactChunks<'a, T> {
     fn clone(&self) -> ExactChunks<'a, T> {
         ExactChunks {
             v: self.v,
+            rem: self.rem,
             chunk_size: self.chunk_size,
         }
     }
@@ -3763,20 +3780,35 @@ fn may_have_side_effect() -> bool { false }
 }
 
 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
-/// elements at a time). When the slice len is not evenly divided by the chunk
-/// size, the last up to `chunk_size-1` elements will be omitted.
+/// elements at a time).
+///
+/// When the slice len is not evenly divided by the chunk size, the last up to
+/// `chunk_size-1` elements will be omitted but can be retrieved from the
+/// [`into_remainder`] function from the iterator.
 ///
 /// This struct is created by the [`exact_chunks_mut`] method on [slices].
 ///
 /// [`exact_chunks_mut`]: ../../std/primitive.slice.html#method.exact_chunks_mut
+/// [`into_remainder`]: ../../std/slice/struct.ExactChunksMut.html#method.into_remainder
 /// [slices]: ../../std/primitive.slice.html
 #[derive(Debug)]
 #[unstable(feature = "exact_chunks", issue = "47115")]
 pub struct ExactChunksMut<'a, T:'a> {
     v: &'a mut [T],
+    rem: &'a mut [T],
     chunk_size: usize
 }
 
+#[unstable(feature = "exact_chunks", issue = "47115")]
+impl<'a, T> ExactChunksMut<'a, T> {
+    /// Return the remainder of the original slice that is not going to be
+    /// returned by the iterator. The returned slice has at most `chunk_size-1`
+    /// elements.
+    pub fn into_remainder(self) -> &'a mut [T] {
+        self.rem
+    }
+}
+
 #[unstable(feature = "exact_chunks", issue = "47115")]
 impl<'a, T> Iterator for ExactChunksMut<'a, T> {
     type Item = &'a mut [T];
index 5ae2f6349e5b7e335846eeafb779c40e7ca106eb..255e8a07d75492dc96aee5535f7b6f48e56ac2a7 100644 (file)
@@ -2004,31 +2004,31 @@ impl SliceIndex<str> for ops::RangeInclusive<usize> {
         type Output = str;
         #[inline]
         fn get(self, slice: &str) -> Option<&Self::Output> {
-            if self.end == usize::max_value() { None }
-            else { (self.start..self.end+1).get(slice) }
+            if *self.end() == usize::max_value() { None }
+            else { (*self.start()..self.end()+1).get(slice) }
         }
         #[inline]
         fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> {
-            if self.end == usize::max_value() { None }
-            else { (self.start..self.end+1).get_mut(slice) }
+            if *self.end() == usize::max_value() { None }
+            else { (*self.start()..self.end()+1).get_mut(slice) }
         }
         #[inline]
         unsafe fn get_unchecked(self, slice: &str) -> &Self::Output {
-            (self.start..self.end+1).get_unchecked(slice)
+            (*self.start()..self.end()+1).get_unchecked(slice)
         }
         #[inline]
         unsafe fn get_unchecked_mut(self, slice: &mut str) -> &mut Self::Output {
-            (self.start..self.end+1).get_unchecked_mut(slice)
+            (*self.start()..self.end()+1).get_unchecked_mut(slice)
         }
         #[inline]
         fn index(self, slice: &str) -> &Self::Output {
-            if self.end == usize::max_value() { str_index_overflow_fail(); }
-            (self.start..self.end+1).index(slice)
+            if *self.end() == usize::max_value() { str_index_overflow_fail(); }
+            (*self.start()..self.end()+1).index(slice)
         }
         #[inline]
         fn index_mut(self, slice: &mut str) -> &mut Self::Output {
-            if self.end == usize::max_value() { str_index_overflow_fail(); }
-            (self.start..self.end+1).index_mut(slice)
+            if *self.end() == usize::max_value() { str_index_overflow_fail(); }
+            (*self.start()..self.end()+1).index_mut(slice)
         }
     }
 
index 9d4a5213992a19bc81c50d20e75a1ecadc6e1a0b..ca7db6e4639a522cd1b9c35b6f686f3fd1f261c4 100644 (file)
@@ -44,6 +44,7 @@
 #![feature(reverse_bits)]
 #![feature(iterator_find_map)]
 #![feature(slice_internals)]
+#![feature(option_replace)]
 
 extern crate core;
 extern crate test;
index 22109e28edd9b810534dffb3cbf371b77945b5f6..bc3e61a4f541f71f86d62aaee68119b9888d7102 100644 (file)
@@ -297,3 +297,18 @@ fn try_option_err() -> Result<u8, NoneError> {
     }
     assert_eq!(try_option_err(), Err(NoneError));
 }
+
+#[test]
+fn test_replace() {
+    let mut x = Some(2);
+    let old = x.replace(5);
+
+    assert_eq!(x, Some(5));
+    assert_eq!(old, Some(2));
+
+    let mut x = None;
+    let old = x.replace(3);
+
+    assert_eq!(x, Some(3));
+    assert_eq!(old, None);
+}
index 7981567067dad3c1f241d175c39474e29b819cfc..2b37acdfe3e81386d4a4c3b5d6a7bb606df29e3d 100644 (file)
@@ -259,6 +259,13 @@ fn test_exact_chunks_last() {
     assert_eq!(c2.last().unwrap(), &[2, 3]);
 }
 
+#[test]
+fn test_exact_chunks_remainder() {
+    let v: &[i32] = &[0, 1, 2, 3, 4];
+    let c = v.exact_chunks(2);
+    assert_eq!(c.remainder(), &[4]);
+}
+
 #[test]
 fn test_exact_chunks_zip() {
     let v1: &[i32] = &[0, 1, 2, 3, 4];
@@ -310,6 +317,13 @@ fn test_exact_chunks_mut_last() {
     assert_eq!(c2.last().unwrap(), &[2, 3]);
 }
 
+#[test]
+fn test_exact_chunks_mut_remainder() {
+    let v: &mut [i32] = &mut [0, 1, 2, 3, 4];
+    let c = v.exact_chunks_mut(2);
+    assert_eq!(c.into_remainder(), &[4]);
+}
+
 #[test]
 fn test_exact_chunks_mut_zip() {
     let v1: &mut [i32] = &mut [0, 1, 2, 3, 4];
index f52d201abecb90a6bee88466146fefe33650519e..aab70456dc18dbcef2e484c231cbe2916feadaca 100644 (file)
@@ -8,11 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use rustc_data_structures::graph;
 use cfg::*;
 use middle::region;
-use ty::{self, TyCtxt};
+use rustc_data_structures::graph::implementation as graph;
 use syntax::ptr::P;
+use ty::{self, TyCtxt};
 
 use hir::{self, PatKind};
 use hir::def_id::DefId;
index b379d3956e944b01b399b508ae025241ac12450b..cf9c24cc58a623176f400e5968e3f3f230e1ab1d 100644 (file)
@@ -11,7 +11,7 @@
 //! Module that constructs a control-flow graph representing an item.
 //! Uses `Graph` as the underlying representation.
 
-use rustc_data_structures::graph;
+use rustc_data_structures::graph::implementation as graph;
 use ty::TyCtxt;
 use hir;
 use hir::def_id::DefId;
index ea83a4f8b3104dd2de1113fa6eba381d1d043444..ce0b5557a34bf1c2ac15fccbba3a317b893200f2 100644 (file)
@@ -9,7 +9,9 @@
 // except according to those terms.
 
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::graph::{Direction, INCOMING, Graph, NodeIndex, OUTGOING};
+use rustc_data_structures::graph::implementation::{
+    Direction, INCOMING, Graph, NodeIndex, OUTGOING
+};
 
 use super::DepNode;
 
index 5984a831e6fa0eeb4028a539040af99763d2de31..120b45ec01e5ec018337c617b4a5c165ab28bd18 100644 (file)
@@ -20,7 +20,7 @@
 use middle::free_region::RegionRelations;
 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
 use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING};
+use rustc_data_structures::graph::implementation::{Graph, Direction, NodeIndex, INCOMING, OUTGOING};
 use std::fmt;
 use std::u32;
 use ty::{self, TyCtxt};
@@ -99,7 +99,7 @@ struct RegionAndOrigin<'tcx> {
     origin: SubregionOrigin<'tcx>,
 }
 
-type RegionGraph<'tcx> = graph::Graph<(), Constraint<'tcx>>;
+type RegionGraph<'tcx> = Graph<(), Constraint<'tcx>>;
 
 struct LexicalResolver<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
     region_rels: &'cx RegionRelations<'cx, 'gcx, 'tcx>,
@@ -501,7 +501,7 @@ fn collect_var_errors(
     fn construct_graph(&self) -> RegionGraph<'tcx> {
         let num_vars = self.num_vars();
 
-        let mut graph = graph::Graph::new();
+        let mut graph = Graph::new();
 
         for _ in 0..num_vars {
             graph.add_node(());
@@ -550,9 +550,9 @@ fn collect_error_for_expanding_node(
         // Errors in expanding nodes result from a lower-bound that is
         // not contained by an upper-bound.
         let (mut lower_bounds, lower_dup) =
-            self.collect_concrete_regions(graph, node_idx, graph::INCOMING, dup_vec);
+            self.collect_concrete_regions(graph, node_idx, INCOMING, dup_vec);
         let (mut upper_bounds, upper_dup) =
-            self.collect_concrete_regions(graph, node_idx, graph::OUTGOING, dup_vec);
+            self.collect_concrete_regions(graph, node_idx, OUTGOING, dup_vec);
 
         if lower_dup || upper_dup {
             return;
index efc2d9311c1dcbe67ee58d39cc4d14e7e2da43a7..a46b31206224732568d9094dacb59c764fe8aff2 100644 (file)
     "detects proc macro derives using inaccessible names from parent modules"
 }
 
+declare_lint! {
+    pub MACRO_USE_EXTERN_CRATE,
+    Allow,
+    "the `#[macro_use]` attribute is now deprecated in favor of using macros \
+     via the module system"
+}
+
 /// Does nothing as a lint pass, but registers some `Lint`s
 /// which are used by other parts of the compiler.
 #[derive(Copy, Clone)]
@@ -379,6 +386,7 @@ fn get_lints(&self) -> LintArray {
             INTRA_DOC_LINK_RESOLUTION_FAILURE,
             WHERE_CLAUSES_OBJECT_SAFETY,
             PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
+            MACRO_USE_EXTERN_CRATE,
         )
     }
 }
index 5c86554f9079011e12b1244f40dce588c6f84309..b949fd02126ba37d2912d93530389f08c399d2b0 100644 (file)
@@ -22,7 +22,7 @@
 use std::usize;
 use syntax::print::pprust::PrintState;
 
-use rustc_data_structures::graph::OUTGOING;
+use rustc_data_structures::graph::implementation::OUTGOING;
 
 use util::nodemap::FxHashMap;
 use hir;
index f8f8753e214e85af15f9218388712ee4af5a05c9..f6076896385a6876ff423e2d5a3ff2cce19f3257 100644 (file)
@@ -21,9 +21,8 @@
 use mir::visit::MirVisitable;
 use rustc_apfloat::ieee::{Double, Single};
 use rustc_apfloat::Float;
-use rustc_data_structures::control_flow_graph::dominators::{dominators, Dominators};
-use rustc_data_structures::control_flow_graph::ControlFlowGraph;
-use rustc_data_structures::control_flow_graph::{GraphPredecessors, GraphSuccessors};
+use rustc_data_structures::graph::dominators::{dominators, Dominators};
+use rustc_data_structures::graph::{self, GraphPredecessors, GraphSuccessors};
 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
 use rustc_data_structures::small_vec::SmallVec;
 use rustc_data_structures::sync::Lrc;
@@ -2289,23 +2288,32 @@ fn item_path_str(def_id: DefId) -> String {
     ty::tls::with(|tcx| tcx.item_path_str(def_id))
 }
 
-impl<'tcx> ControlFlowGraph for Mir<'tcx> {
+impl<'tcx> graph::DirectedGraph for Mir<'tcx> {
     type Node = BasicBlock;
+}
 
+impl<'tcx> graph::WithNumNodes for Mir<'tcx> {
     fn num_nodes(&self) -> usize {
         self.basic_blocks.len()
     }
+}
 
+impl<'tcx> graph::WithStartNode for Mir<'tcx> {
     fn start_node(&self) -> Self::Node {
         START_BLOCK
     }
+}
 
+impl<'tcx> graph::WithPredecessors for Mir<'tcx> {
     fn predecessors<'graph>(
         &'graph self,
         node: Self::Node,
     ) -> <Self as GraphPredecessors<'graph>>::Iter {
         self.predecessors_for(node).clone().into_iter()
     }
+}
+
+impl<'tcx> graph::WithSuccessors for Mir<'tcx> {
     fn successors<'graph>(
         &'graph self,
         node: Self::Node,
@@ -2314,12 +2322,12 @@ fn successors<'graph>(
     }
 }
 
-impl<'a, 'b> GraphPredecessors<'b> for Mir<'a> {
+impl<'a, 'b> graph::GraphPredecessors<'b> for Mir<'a> {
     type Item = BasicBlock;
     type Iter = IntoIter<BasicBlock>;
 }
 
-impl<'a, 'b> GraphSuccessors<'b> for Mir<'a> {
+impl<'a, 'b> graph::GraphSuccessors<'b> for Mir<'a> {
     type Item = BasicBlock;
     type Iter = iter::Cloned<Successors<'b>>;
 }
index 9e5298eb736a33e18c7d97710f1739fa3b925ebc..efd829c283f063ff8c1a20f49a162fad01ef12cf 100644 (file)
@@ -12,7 +12,7 @@
 //! which do not.
 
 use rustc_data_structures::bitvec::BitVector;
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::graph::dominators::Dominators;
 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
 use rustc::mir::{self, Location, TerminatorKind};
 use rustc::mir::visit::{Visitor, PlaceContext};
diff --git a/src/librustc_data_structures/control_flow_graph/dominators/mod.rs b/src/librustc_data_structures/control_flow_graph/dominators/mod.rs
deleted file mode 100644 (file)
index 5440765..0000000
+++ /dev/null
@@ -1,209 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Algorithm citation:
-//! A Simple, Fast Dominance Algorithm.
-//! Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy
-//! Rice Computer Science TS-06-33870
-//! <https://www.cs.rice.edu/~keith/EMBED/dom.pdf>
-
-use super::ControlFlowGraph;
-use super::iterate::reverse_post_order;
-use super::super::indexed_vec::{IndexVec, Idx};
-
-use std::fmt;
-
-#[cfg(test)]
-mod test;
-
-pub fn dominators<G: ControlFlowGraph>(graph: &G) -> Dominators<G::Node> {
-    let start_node = graph.start_node();
-    let rpo = reverse_post_order(graph, start_node);
-    dominators_given_rpo(graph, &rpo)
-}
-
-pub fn dominators_given_rpo<G: ControlFlowGraph>(graph: &G,
-                                                 rpo: &[G::Node])
-                                                 -> Dominators<G::Node> {
-    let start_node = graph.start_node();
-    assert_eq!(rpo[0], start_node);
-
-    // compute the post order index (rank) for each node
-    let mut post_order_rank: IndexVec<G::Node, usize> = IndexVec::from_elem_n(usize::default(),
-                                                                              graph.num_nodes());
-    for (index, node) in rpo.iter().rev().cloned().enumerate() {
-        post_order_rank[node] = index;
-    }
-
-    let mut immediate_dominators: IndexVec<G::Node, Option<G::Node>> =
-        IndexVec::from_elem_n(Option::default(), graph.num_nodes());
-    immediate_dominators[start_node] = Some(start_node);
-
-    let mut changed = true;
-    while changed {
-        changed = false;
-
-        for &node in &rpo[1..] {
-            let mut new_idom = None;
-            for pred in graph.predecessors(node) {
-                if immediate_dominators[pred].is_some() {
-                    // (*)
-                    // (*) dominators for `pred` have been calculated
-                    new_idom = intersect_opt(&post_order_rank,
-                                             &immediate_dominators,
-                                             new_idom,
-                                             Some(pred));
-                }
-            }
-
-            if new_idom != immediate_dominators[node] {
-                immediate_dominators[node] = new_idom;
-                changed = true;
-            }
-        }
-    }
-
-    Dominators {
-        post_order_rank,
-        immediate_dominators,
-    }
-}
-
-fn intersect_opt<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
-                            immediate_dominators: &IndexVec<Node, Option<Node>>,
-                            node1: Option<Node>,
-                            node2: Option<Node>)
-                            -> Option<Node> {
-    match (node1, node2) {
-        (None, None) => None,
-        (Some(n), None) | (None, Some(n)) => Some(n),
-        (Some(n1), Some(n2)) => Some(intersect(post_order_rank, immediate_dominators, n1, n2)),
-    }
-}
-
-fn intersect<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
-                        immediate_dominators: &IndexVec<Node, Option<Node>>,
-                        mut node1: Node,
-                        mut node2: Node)
-                        -> Node {
-    while node1 != node2 {
-        while post_order_rank[node1] < post_order_rank[node2] {
-            node1 = immediate_dominators[node1].unwrap();
-        }
-
-        while post_order_rank[node2] < post_order_rank[node1] {
-            node2 = immediate_dominators[node2].unwrap();
-        }
-    }
-    return node1;
-}
-
-#[derive(Clone, Debug)]
-pub struct Dominators<N: Idx> {
-    post_order_rank: IndexVec<N, usize>,
-    immediate_dominators: IndexVec<N, Option<N>>,
-}
-
-impl<Node: Idx> Dominators<Node> {
-    pub fn is_reachable(&self, node: Node) -> bool {
-        self.immediate_dominators[node].is_some()
-    }
-
-    pub fn immediate_dominator(&self, node: Node) -> Node {
-        assert!(self.is_reachable(node), "node {:?} is not reachable", node);
-        self.immediate_dominators[node].unwrap()
-    }
-
-    pub fn dominators(&self, node: Node) -> Iter<Node> {
-        assert!(self.is_reachable(node), "node {:?} is not reachable", node);
-        Iter {
-            dominators: self,
-            node: Some(node),
-        }
-    }
-
-    pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {
-        // FIXME -- could be optimized by using post-order-rank
-        self.dominators(node).any(|n| n == dom)
-    }
-
-    #[cfg(test)]
-    fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
-        &self.immediate_dominators
-    }
-}
-
-pub struct Iter<'dom, Node: Idx + 'dom> {
-    dominators: &'dom Dominators<Node>,
-    node: Option<Node>,
-}
-
-impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {
-    type Item = Node;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        if let Some(node) = self.node {
-            let dom = self.dominators.immediate_dominator(node);
-            if dom == node {
-                self.node = None; // reached the root
-            } else {
-                self.node = Some(dom);
-            }
-            return Some(node);
-        } else {
-            return None;
-        }
-    }
-}
-
-pub struct DominatorTree<N: Idx> {
-    root: N,
-    children: IndexVec<N, Vec<N>>,
-}
-
-impl<Node: Idx> DominatorTree<Node> {
-    pub fn children(&self, node: Node) -> &[Node] {
-        &self.children[node]
-    }
-}
-
-impl<Node: Idx> fmt::Debug for DominatorTree<Node> {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Debug::fmt(&DominatorTreeNode {
-                            tree: self,
-                            node: self.root,
-                        },
-                        fmt)
-    }
-}
-
-struct DominatorTreeNode<'tree, Node: Idx> {
-    tree: &'tree DominatorTree<Node>,
-    node: Node,
-}
-
-impl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        let subtrees: Vec<_> = self.tree
-            .children(self.node)
-            .iter()
-            .map(|&child| {
-                DominatorTreeNode {
-                    tree: self.tree,
-                    node: child,
-                }
-            })
-            .collect();
-        fmt.debug_tuple("")
-            .field(&self.node)
-            .field(&subtrees)
-            .finish()
-    }
-}
diff --git a/src/librustc_data_structures/control_flow_graph/dominators/test.rs b/src/librustc_data_structures/control_flow_graph/dominators/test.rs
deleted file mode 100644 (file)
index 0af878c..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::super::test::TestGraph;
-
-use super::*;
-
-#[test]
-fn diamond() {
-    let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
-
-    let dominators = dominators(&graph);
-    let immediate_dominators = dominators.all_immediate_dominators();
-    assert_eq!(immediate_dominators[0], Some(0));
-    assert_eq!(immediate_dominators[1], Some(0));
-    assert_eq!(immediate_dominators[2], Some(0));
-    assert_eq!(immediate_dominators[3], Some(0));
-}
-
-#[test]
-fn paper() {
-    // example from the paper:
-    let graph = TestGraph::new(6,
-                               &[(6, 5), (6, 4), (5, 1), (4, 2), (4, 3), (1, 2), (2, 3), (3, 2),
-                                 (2, 1)]);
-
-    let dominators = dominators(&graph);
-    let immediate_dominators = dominators.all_immediate_dominators();
-    assert_eq!(immediate_dominators[0], None); // <-- note that 0 is not in graph
-    assert_eq!(immediate_dominators[1], Some(6));
-    assert_eq!(immediate_dominators[2], Some(6));
-    assert_eq!(immediate_dominators[3], Some(6));
-    assert_eq!(immediate_dominators[4], Some(6));
-    assert_eq!(immediate_dominators[5], Some(6));
-    assert_eq!(immediate_dominators[6], Some(6));
-}
diff --git a/src/librustc_data_structures/control_flow_graph/iterate/mod.rs b/src/librustc_data_structures/control_flow_graph/iterate/mod.rs
deleted file mode 100644 (file)
index 2d70b40..0000000
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::ControlFlowGraph;
-use super::super::indexed_vec::IndexVec;
-
-#[cfg(test)]
-mod test;
-
-pub fn post_order_from<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
-    post_order_from_to(graph, start_node, None)
-}
-
-pub fn post_order_from_to<G: ControlFlowGraph>(graph: &G,
-                                               start_node: G::Node,
-                                               end_node: Option<G::Node>)
-                                               -> Vec<G::Node> {
-    let mut visited: IndexVec<G::Node, bool> = IndexVec::from_elem_n(false, graph.num_nodes());
-    let mut result: Vec<G::Node> = Vec::with_capacity(graph.num_nodes());
-    if let Some(end_node) = end_node {
-        visited[end_node] = true;
-    }
-    post_order_walk(graph, start_node, &mut result, &mut visited);
-    result
-}
-
-fn post_order_walk<G: ControlFlowGraph>(graph: &G,
-                                        node: G::Node,
-                                        result: &mut Vec<G::Node>,
-                                        visited: &mut IndexVec<G::Node, bool>) {
-    if visited[node] {
-        return;
-    }
-    visited[node] = true;
-
-    for successor in graph.successors(node) {
-        post_order_walk(graph, successor, result, visited);
-    }
-
-    result.push(node);
-}
-
-pub fn reverse_post_order<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
-    let mut vec = post_order_from(graph, start_node);
-    vec.reverse();
-    vec
-}
diff --git a/src/librustc_data_structures/control_flow_graph/iterate/test.rs b/src/librustc_data_structures/control_flow_graph/iterate/test.rs
deleted file mode 100644 (file)
index 100881d..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::super::test::TestGraph;
-
-use super::*;
-
-#[test]
-fn diamond_post_order() {
-    let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
-
-    let result = post_order_from(&graph, 0);
-    assert_eq!(result, vec![3, 1, 2, 0]);
-}
diff --git a/src/librustc_data_structures/control_flow_graph/mod.rs b/src/librustc_data_structures/control_flow_graph/mod.rs
deleted file mode 100644 (file)
index 7bf7766..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::indexed_vec::Idx;
-
-pub mod dominators;
-pub mod iterate;
-mod reference;
-
-#[cfg(test)]
-mod test;
-
-pub trait ControlFlowGraph
-    where Self: for<'graph> GraphPredecessors<'graph, Item=<Self as ControlFlowGraph>::Node>,
-          Self: for<'graph> GraphSuccessors<'graph, Item=<Self as ControlFlowGraph>::Node>
-{
-    type Node: Idx;
-
-    fn num_nodes(&self) -> usize;
-    fn start_node(&self) -> Self::Node;
-    fn predecessors<'graph>(&'graph self, node: Self::Node)
-                            -> <Self as GraphPredecessors<'graph>>::Iter;
-    fn successors<'graph>(&'graph self, node: Self::Node)
-                            -> <Self as GraphSuccessors<'graph>>::Iter;
-}
-
-pub trait GraphPredecessors<'graph> {
-    type Item;
-    type Iter: Iterator<Item = Self::Item>;
-}
-
-pub trait GraphSuccessors<'graph> {
-    type Item;
-    type Iter: Iterator<Item = Self::Item>;
-}
diff --git a/src/librustc_data_structures/control_flow_graph/reference.rs b/src/librustc_data_structures/control_flow_graph/reference.rs
deleted file mode 100644 (file)
index 3b8b01f..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::*;
-
-impl<'graph, G: ControlFlowGraph> ControlFlowGraph for &'graph G {
-    type Node = G::Node;
-
-    fn num_nodes(&self) -> usize {
-        (**self).num_nodes()
-    }
-
-    fn start_node(&self) -> Self::Node {
-        (**self).start_node()
-    }
-
-    fn predecessors<'iter>(&'iter self,
-                           node: Self::Node)
-                           -> <Self as GraphPredecessors<'iter>>::Iter {
-        (**self).predecessors(node)
-    }
-
-    fn successors<'iter>(&'iter self, node: Self::Node) -> <Self as GraphSuccessors<'iter>>::Iter {
-        (**self).successors(node)
-    }
-}
-
-impl<'iter, 'graph, G: ControlFlowGraph> GraphPredecessors<'iter> for &'graph G {
-    type Item = G::Node;
-    type Iter = <G as GraphPredecessors<'iter>>::Iter;
-}
-
-impl<'iter, 'graph, G: ControlFlowGraph> GraphSuccessors<'iter> for &'graph G {
-    type Item = G::Node;
-    type Iter = <G as GraphSuccessors<'iter>>::Iter;
-}
diff --git a/src/librustc_data_structures/control_flow_graph/test.rs b/src/librustc_data_structures/control_flow_graph/test.rs
deleted file mode 100644 (file)
index f04b536..0000000
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::collections::HashMap;
-use std::cmp::max;
-use std::slice;
-use std::iter;
-
-use super::{ControlFlowGraph, GraphPredecessors, GraphSuccessors};
-
-pub struct TestGraph {
-    num_nodes: usize,
-    start_node: usize,
-    successors: HashMap<usize, Vec<usize>>,
-    predecessors: HashMap<usize, Vec<usize>>,
-}
-
-impl TestGraph {
-    pub fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
-        let mut graph = TestGraph {
-            num_nodes: start_node + 1,
-            start_node,
-            successors: HashMap::new(),
-            predecessors: HashMap::new(),
-        };
-        for &(source, target) in edges {
-            graph.num_nodes = max(graph.num_nodes, source + 1);
-            graph.num_nodes = max(graph.num_nodes, target + 1);
-            graph.successors.entry(source).or_insert(vec![]).push(target);
-            graph.predecessors.entry(target).or_insert(vec![]).push(source);
-        }
-        for node in 0..graph.num_nodes {
-            graph.successors.entry(node).or_insert(vec![]);
-            graph.predecessors.entry(node).or_insert(vec![]);
-        }
-        graph
-    }
-}
-
-impl ControlFlowGraph for TestGraph {
-    type Node = usize;
-
-    fn start_node(&self) -> usize {
-        self.start_node
-    }
-
-    fn num_nodes(&self) -> usize {
-        self.num_nodes
-    }
-
-    fn predecessors<'graph>(&'graph self,
-                            node: usize)
-                            -> <Self as GraphPredecessors<'graph>>::Iter {
-        self.predecessors[&node].iter().cloned()
-    }
-
-    fn successors<'graph>(&'graph self, node: usize) -> <Self as GraphSuccessors<'graph>>::Iter {
-        self.successors[&node].iter().cloned()
-    }
-}
-
-impl<'graph> GraphPredecessors<'graph> for TestGraph {
-    type Item = usize;
-    type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
-}
-
-impl<'graph> GraphSuccessors<'graph> for TestGraph {
-    type Item = usize;
-    type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
-}
diff --git a/src/librustc_data_structures/graph/dominators/mod.rs b/src/librustc_data_structures/graph/dominators/mod.rs
new file mode 100644 (file)
index 0000000..d134fad
--- /dev/null
@@ -0,0 +1,214 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Algorithm citation:
+//! A Simple, Fast Dominance Algorithm.
+//! Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy
+//! Rice Computer Science TS-06-33870
+//! <https://www.cs.rice.edu/~keith/EMBED/dom.pdf>
+
+use super::super::indexed_vec::{Idx, IndexVec};
+use super::iterate::reverse_post_order;
+use super::ControlFlowGraph;
+
+use std::fmt;
+
+#[cfg(test)]
+mod test;
+
+pub fn dominators<G: ControlFlowGraph>(graph: &G) -> Dominators<G::Node> {
+    let start_node = graph.start_node();
+    let rpo = reverse_post_order(graph, start_node);
+    dominators_given_rpo(graph, &rpo)
+}
+
+pub fn dominators_given_rpo<G: ControlFlowGraph>(
+    graph: &G,
+    rpo: &[G::Node],
+) -> Dominators<G::Node> {
+    let start_node = graph.start_node();
+    assert_eq!(rpo[0], start_node);
+
+    // compute the post order index (rank) for each node
+    let mut post_order_rank: IndexVec<G::Node, usize> =
+        IndexVec::from_elem_n(usize::default(), graph.num_nodes());
+    for (index, node) in rpo.iter().rev().cloned().enumerate() {
+        post_order_rank[node] = index;
+    }
+
+    let mut immediate_dominators: IndexVec<G::Node, Option<G::Node>> =
+        IndexVec::from_elem_n(Option::default(), graph.num_nodes());
+    immediate_dominators[start_node] = Some(start_node);
+
+    let mut changed = true;
+    while changed {
+        changed = false;
+
+        for &node in &rpo[1..] {
+            let mut new_idom = None;
+            for pred in graph.predecessors(node) {
+                if immediate_dominators[pred].is_some() {
+                    // (*)
+                    // (*) dominators for `pred` have been calculated
+                    new_idom = intersect_opt(
+                        &post_order_rank,
+                        &immediate_dominators,
+                        new_idom,
+                        Some(pred),
+                    );
+                }
+            }
+
+            if new_idom != immediate_dominators[node] {
+                immediate_dominators[node] = new_idom;
+                changed = true;
+            }
+        }
+    }
+
+    Dominators {
+        post_order_rank,
+        immediate_dominators,
+    }
+}
+
+fn intersect_opt<Node: Idx>(
+    post_order_rank: &IndexVec<Node, usize>,
+    immediate_dominators: &IndexVec<Node, Option<Node>>,
+    node1: Option<Node>,
+    node2: Option<Node>,
+) -> Option<Node> {
+    match (node1, node2) {
+        (None, None) => None,
+        (Some(n), None) | (None, Some(n)) => Some(n),
+        (Some(n1), Some(n2)) => Some(intersect(post_order_rank, immediate_dominators, n1, n2)),
+    }
+}
+
+fn intersect<Node: Idx>(
+    post_order_rank: &IndexVec<Node, usize>,
+    immediate_dominators: &IndexVec<Node, Option<Node>>,
+    mut node1: Node,
+    mut node2: Node,
+) -> Node {
+    while node1 != node2 {
+        while post_order_rank[node1] < post_order_rank[node2] {
+            node1 = immediate_dominators[node1].unwrap();
+        }
+
+        while post_order_rank[node2] < post_order_rank[node1] {
+            node2 = immediate_dominators[node2].unwrap();
+        }
+    }
+    return node1;
+}
+
+#[derive(Clone, Debug)]
+pub struct Dominators<N: Idx> {
+    post_order_rank: IndexVec<N, usize>,
+    immediate_dominators: IndexVec<N, Option<N>>,
+}
+
+impl<Node: Idx> Dominators<Node> {
+    pub fn is_reachable(&self, node: Node) -> bool {
+        self.immediate_dominators[node].is_some()
+    }
+
+    pub fn immediate_dominator(&self, node: Node) -> Node {
+        assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+        self.immediate_dominators[node].unwrap()
+    }
+
+    pub fn dominators(&self, node: Node) -> Iter<Node> {
+        assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+        Iter {
+            dominators: self,
+            node: Some(node),
+        }
+    }
+
+    pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {
+        // FIXME -- could be optimized by using post-order-rank
+        self.dominators(node).any(|n| n == dom)
+    }
+
+    #[cfg(test)]
+    fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
+        &self.immediate_dominators
+    }
+}
+
+pub struct Iter<'dom, Node: Idx + 'dom> {
+    dominators: &'dom Dominators<Node>,
+    node: Option<Node>,
+}
+
+impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {
+    type Item = Node;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if let Some(node) = self.node {
+            let dom = self.dominators.immediate_dominator(node);
+            if dom == node {
+                self.node = None; // reached the root
+            } else {
+                self.node = Some(dom);
+            }
+            return Some(node);
+        } else {
+            return None;
+        }
+    }
+}
+
+pub struct DominatorTree<N: Idx> {
+    root: N,
+    children: IndexVec<N, Vec<N>>,
+}
+
+impl<Node: Idx> DominatorTree<Node> {
+    pub fn children(&self, node: Node) -> &[Node] {
+        &self.children[node]
+    }
+}
+
+impl<Node: Idx> fmt::Debug for DominatorTree<Node> {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(
+            &DominatorTreeNode {
+                tree: self,
+                node: self.root,
+            },
+            fmt,
+        )
+    }
+}
+
+struct DominatorTreeNode<'tree, Node: Idx> {
+    tree: &'tree DominatorTree<Node>,
+    node: Node,
+}
+
+impl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        let subtrees: Vec<_> = self.tree
+            .children(self.node)
+            .iter()
+            .map(|&child| DominatorTreeNode {
+                tree: self.tree,
+                node: child,
+            })
+            .collect();
+        fmt.debug_tuple("")
+            .field(&self.node)
+            .field(&subtrees)
+            .finish()
+    }
+}
diff --git a/src/librustc_data_structures/graph/dominators/test.rs b/src/librustc_data_structures/graph/dominators/test.rs
new file mode 100644 (file)
index 0000000..0af878c
--- /dev/null
@@ -0,0 +1,43 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::test::TestGraph;
+
+use super::*;
+
+#[test]
+fn diamond() {
+    let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
+
+    let dominators = dominators(&graph);
+    let immediate_dominators = dominators.all_immediate_dominators();
+    assert_eq!(immediate_dominators[0], Some(0));
+    assert_eq!(immediate_dominators[1], Some(0));
+    assert_eq!(immediate_dominators[2], Some(0));
+    assert_eq!(immediate_dominators[3], Some(0));
+}
+
+#[test]
+fn paper() {
+    // example from the paper:
+    let graph = TestGraph::new(6,
+                               &[(6, 5), (6, 4), (5, 1), (4, 2), (4, 3), (1, 2), (2, 3), (3, 2),
+                                 (2, 1)]);
+
+    let dominators = dominators(&graph);
+    let immediate_dominators = dominators.all_immediate_dominators();
+    assert_eq!(immediate_dominators[0], None); // <-- note that 0 is not in graph
+    assert_eq!(immediate_dominators[1], Some(6));
+    assert_eq!(immediate_dominators[2], Some(6));
+    assert_eq!(immediate_dominators[3], Some(6));
+    assert_eq!(immediate_dominators[4], Some(6));
+    assert_eq!(immediate_dominators[5], Some(6));
+    assert_eq!(immediate_dominators[6], Some(6));
+}
diff --git a/src/librustc_data_structures/graph/implementation/mod.rs b/src/librustc_data_structures/graph/implementation/mod.rs
new file mode 100644 (file)
index 0000000..e2b3930
--- /dev/null
@@ -0,0 +1,417 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A graph module for use in dataflow, region resolution, and elsewhere.
+//!
+//! # Interface details
+//!
+//! You customize the graph by specifying a "node data" type `N` and an
+//! "edge data" type `E`. You can then later gain access (mutable or
+//! immutable) to these "user-data" bits. Currently, you can only add
+//! nodes or edges to the graph. You cannot remove or modify them once
+//! added. This could be changed if we have a need.
+//!
+//! # Implementation details
+//!
+//! The main tricky thing about this code is the way that edges are
+//! stored. The edges are stored in a central array, but they are also
+//! threaded onto two linked lists for each node, one for incoming edges
+//! and one for outgoing edges. Note that every edge is a member of some
+//! incoming list and some outgoing list.  Basically you can load the
+//! first index of the linked list from the node data structures (the
+//! field `first_edge`) and then, for each edge, load the next index from
+//! the field `next_edge`. Each of those fields is an array that should
+//! be indexed by the direction (see the type `Direction`).
+
+use bitvec::BitVector;
+use std::fmt::Debug;
+use std::usize;
+use snapshot_vec::{SnapshotVec, SnapshotVecDelegate};
+
+#[cfg(test)]
+mod tests;
+
+pub struct Graph<N, E> {
+    nodes: SnapshotVec<Node<N>>,
+    edges: SnapshotVec<Edge<E>>,
+}
+
+pub struct Node<N> {
+    first_edge: [EdgeIndex; 2], // see module comment
+    pub data: N,
+}
+
+#[derive(Debug)]
+pub struct Edge<E> {
+    next_edge: [EdgeIndex; 2], // see module comment
+    source: NodeIndex,
+    target: NodeIndex,
+    pub data: E,
+}
+
+impl<N> SnapshotVecDelegate for Node<N> {
+    type Value = Node<N>;
+    type Undo = ();
+
+    fn reverse(_: &mut Vec<Node<N>>, _: ()) {}
+}
+
+impl<N> SnapshotVecDelegate for Edge<N> {
+    type Value = Edge<N>;
+    type Undo = ();
+
+    fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub struct NodeIndex(pub usize);
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub struct EdgeIndex(pub usize);
+
+pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);
+
+// Use a private field here to guarantee no more instances are created:
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub struct Direction {
+    repr: usize,
+}
+
+pub const OUTGOING: Direction = Direction { repr: 0 };
+
+pub const INCOMING: Direction = Direction { repr: 1 };
+
+impl NodeIndex {
+    /// Returns unique id (unique with respect to the graph holding associated node).
+    pub fn node_id(&self) -> usize {
+        self.0
+    }
+}
+
+impl<N: Debug, E: Debug> Graph<N, E> {
+    pub fn new() -> Graph<N, E> {
+        Graph {
+            nodes: SnapshotVec::new(),
+            edges: SnapshotVec::new(),
+        }
+    }
+
+    pub fn with_capacity(nodes: usize, edges: usize) -> Graph<N, E> {
+        Graph {
+            nodes: SnapshotVec::with_capacity(nodes),
+            edges: SnapshotVec::with_capacity(edges),
+        }
+    }
+
+    // # Simple accessors
+
+    #[inline]
+    pub fn all_nodes(&self) -> &[Node<N>] {
+        &self.nodes
+    }
+
+    #[inline]
+    pub fn len_nodes(&self) -> usize {
+        self.nodes.len()
+    }
+
+    #[inline]
+    pub fn all_edges(&self) -> &[Edge<E>] {
+        &self.edges
+    }
+
+    #[inline]
+    pub fn len_edges(&self) -> usize {
+        self.edges.len()
+    }
+
+    // # Node construction
+
+    pub fn next_node_index(&self) -> NodeIndex {
+        NodeIndex(self.nodes.len())
+    }
+
+    pub fn add_node(&mut self, data: N) -> NodeIndex {
+        let idx = self.next_node_index();
+        self.nodes.push(Node {
+            first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],
+            data,
+        });
+        idx
+    }
+
+    pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {
+        &mut self.nodes[idx.0].data
+    }
+
+    pub fn node_data(&self, idx: NodeIndex) -> &N {
+        &self.nodes[idx.0].data
+    }
+
+    pub fn node(&self, idx: NodeIndex) -> &Node<N> {
+        &self.nodes[idx.0]
+    }
+
+    // # Edge construction and queries
+
+    pub fn next_edge_index(&self) -> EdgeIndex {
+        EdgeIndex(self.edges.len())
+    }
+
+    pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {
+        debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data);
+
+        let idx = self.next_edge_index();
+
+        // read current first of the list of edges from each node
+        let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];
+        let target_first = self.nodes[target.0].first_edge[INCOMING.repr];
+
+        // create the new edge, with the previous firsts from each node
+        // as the next pointers
+        self.edges.push(Edge {
+            next_edge: [source_first, target_first],
+            source,
+            target,
+            data,
+        });
+
+        // adjust the firsts for each node to be the new edge.
+        self.nodes[source.0].first_edge[OUTGOING.repr] = idx;
+        self.nodes[target.0].first_edge[INCOMING.repr] = idx;
+
+        return idx;
+    }
+
+    pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
+        &self.edges[idx.0]
+    }
+
+    // # Iterating over nodes, edges
+
+    pub fn enumerated_nodes(&self) -> impl Iterator<Item = (NodeIndex, &Node<N>)> {
+        self.nodes
+            .iter()
+            .enumerate()
+            .map(|(idx, n)| (NodeIndex(idx), n))
+    }
+
+    pub fn enumerated_edges(&self) -> impl Iterator<Item = (EdgeIndex, &Edge<E>)> {
+        self.edges
+            .iter()
+            .enumerate()
+            .map(|(idx, e)| (EdgeIndex(idx), e))
+    }
+
+    pub fn each_node<'a>(&'a self, mut f: impl FnMut(NodeIndex, &'a Node<N>) -> bool) -> bool {
+        //! Iterates over all nodes defined in the graph.
+        self.enumerated_nodes()
+            .all(|(node_idx, node)| f(node_idx, node))
+    }
+
+    pub fn each_edge<'a>(&'a self, mut f: impl FnMut(EdgeIndex, &'a Edge<E>) -> bool) -> bool {
+        //! Iterates over all edges defined in the graph
+        self.enumerated_edges()
+            .all(|(edge_idx, edge)| f(edge_idx, edge))
+    }
+
+    pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
+        self.adjacent_edges(source, OUTGOING)
+    }
+
+    pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
+        self.adjacent_edges(source, INCOMING)
+    }
+
+    pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges<N, E> {
+        let first_edge = self.node(source).first_edge[direction.repr];
+        AdjacentEdges {
+            graph: self,
+            direction,
+            next: first_edge,
+        }
+    }
+
+    pub fn successor_nodes<'a>(
+        &'a self,
+        source: NodeIndex,
+    ) -> impl Iterator<Item = NodeIndex> + 'a {
+        self.outgoing_edges(source).targets()
+    }
+
+    pub fn predecessor_nodes<'a>(
+        &'a self,
+        target: NodeIndex,
+    ) -> impl Iterator<Item = NodeIndex> + 'a {
+        self.incoming_edges(target).sources()
+    }
+
+    pub fn depth_traverse<'a>(
+        &'a self,
+        start: NodeIndex,
+        direction: Direction,
+    ) -> DepthFirstTraversal<'a, N, E> {
+        DepthFirstTraversal::with_start_node(self, start, direction)
+    }
+
+    pub fn nodes_in_postorder<'a>(
+        &'a self,
+        direction: Direction,
+        entry_node: NodeIndex,
+    ) -> Vec<NodeIndex> {
+        let mut visited = BitVector::new(self.len_nodes());
+        let mut stack = vec![];
+        let mut result = Vec::with_capacity(self.len_nodes());
+        let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| {
+            if visited.insert(node.0) {
+                stack.push((node, self.adjacent_edges(node, direction)));
+            }
+        };
+
+        for node in Some(entry_node)
+            .into_iter()
+            .chain(self.enumerated_nodes().map(|(node, _)| node))
+        {
+            push_node(&mut stack, node);
+            while let Some((node, mut iter)) = stack.pop() {
+                if let Some((_, child)) = iter.next() {
+                    let target = child.source_or_target(direction);
+                    // the current node needs more processing, so
+                    // add it back to the stack
+                    stack.push((node, iter));
+                    // and then push the new node
+                    push_node(&mut stack, target);
+                } else {
+                    result.push(node);
+                }
+            }
+        }
+
+        assert_eq!(result.len(), self.len_nodes());
+        result
+    }
+}
+
+// # Iterators
+
+pub struct AdjacentEdges<'g, N, E>
+where
+    N: 'g,
+    E: 'g,
+{
+    graph: &'g Graph<N, E>,
+    direction: Direction,
+    next: EdgeIndex,
+}
+
+impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> {
+    fn targets(self) -> impl Iterator<Item = NodeIndex> + 'g {
+        self.into_iter().map(|(_, edge)| edge.target)
+    }
+
+    fn sources(self) -> impl Iterator<Item = NodeIndex> + 'g {
+        self.into_iter().map(|(_, edge)| edge.source)
+    }
+}
+
+impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {
+    type Item = (EdgeIndex, &'g Edge<E>);
+
+    fn next(&mut self) -> Option<(EdgeIndex, &'g Edge<E>)> {
+        let edge_index = self.next;
+        if edge_index == INVALID_EDGE_INDEX {
+            return None;
+        }
+
+        let edge = self.graph.edge(edge_index);
+        self.next = edge.next_edge[self.direction.repr];
+        Some((edge_index, edge))
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // At most, all the edges in the graph.
+        (0, Some(self.graph.len_edges()))
+    }
+}
+
+pub struct DepthFirstTraversal<'g, N, E>
+where
+    N: 'g,
+    E: 'g,
+{
+    graph: &'g Graph<N, E>,
+    stack: Vec<NodeIndex>,
+    visited: BitVector,
+    direction: Direction,
+}
+
+impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
+    pub fn with_start_node(
+        graph: &'g Graph<N, E>,
+        start_node: NodeIndex,
+        direction: Direction,
+    ) -> Self {
+        let mut visited = BitVector::new(graph.len_nodes());
+        visited.insert(start_node.node_id());
+        DepthFirstTraversal {
+            graph,
+            stack: vec![start_node],
+            visited,
+            direction,
+        }
+    }
+
+    fn visit(&mut self, node: NodeIndex) {
+        if self.visited.insert(node.node_id()) {
+            self.stack.push(node);
+        }
+    }
+}
+
+impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
+    type Item = NodeIndex;
+
+    fn next(&mut self) -> Option<NodeIndex> {
+        let next = self.stack.pop();
+        if let Some(idx) = next {
+            for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {
+                let target = edge.source_or_target(self.direction);
+                self.visit(target);
+            }
+        }
+        next
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // We will visit every node in the graph exactly once.
+        let remaining = self.graph.len_nodes() - self.visited.count();
+        (remaining, Some(remaining))
+    }
+}
+
+impl<'g, N: Debug, E: Debug> ExactSizeIterator for DepthFirstTraversal<'g, N, E> {}
+
+impl<E> Edge<E> {
+    pub fn source(&self) -> NodeIndex {
+        self.source
+    }
+
+    pub fn target(&self) -> NodeIndex {
+        self.target
+    }
+
+    pub fn source_or_target(&self, direction: Direction) -> NodeIndex {
+        if direction == OUTGOING {
+            self.target
+        } else {
+            self.source
+        }
+    }
+}
diff --git a/src/librustc_data_structures/graph/implementation/tests.rs b/src/librustc_data_structures/graph/implementation/tests.rs
new file mode 100644 (file)
index 0000000..3814827
--- /dev/null
@@ -0,0 +1,139 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use graph::implementation::*;
+use std::fmt::Debug;
+
+type TestGraph = Graph<&'static str, &'static str>;
+
+fn create_graph() -> TestGraph {
+    let mut graph = Graph::new();
+
+    // Create a simple graph
+    //
+    //          F
+    //          |
+    //          V
+    //    A --> B --> C
+    //          |     ^
+    //          v     |
+    //          D --> E
+
+    let a = graph.add_node("A");
+    let b = graph.add_node("B");
+    let c = graph.add_node("C");
+    let d = graph.add_node("D");
+    let e = graph.add_node("E");
+    let f = graph.add_node("F");
+
+    graph.add_edge(a, b, "AB");
+    graph.add_edge(b, c, "BC");
+    graph.add_edge(b, d, "BD");
+    graph.add_edge(d, e, "DE");
+    graph.add_edge(e, c, "EC");
+    graph.add_edge(f, b, "FB");
+
+    return graph;
+}
+
+#[test]
+fn each_node() {
+    let graph = create_graph();
+    let expected = ["A", "B", "C", "D", "E", "F"];
+    graph.each_node(|idx, node| {
+        assert_eq!(&expected[idx.0], graph.node_data(idx));
+        assert_eq!(expected[idx.0], node.data);
+        true
+    });
+}
+
+#[test]
+fn each_edge() {
+    let graph = create_graph();
+    let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
+    graph.each_edge(|idx, edge| {
+        assert_eq!(expected[idx.0], edge.data);
+        true
+    });
+}
+
+fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph<N, E>,
+                                                                   start_index: NodeIndex,
+                                                                   start_data: N,
+                                                                   expected_incoming: &[(E, N)],
+                                                                   expected_outgoing: &[(E, N)]) {
+    assert!(graph.node_data(start_index) == &start_data);
+
+    let mut counter = 0;
+    for (edge_index, edge) in graph.incoming_edges(start_index) {
+        assert!(counter < expected_incoming.len());
+        debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
+               counter,
+               expected_incoming[counter],
+               edge_index,
+               edge);
+        match expected_incoming[counter] {
+            (ref e, ref n) => {
+                assert!(e == &edge.data);
+                assert!(n == graph.node_data(edge.source()));
+                assert!(start_index == edge.target);
+            }
+        }
+        counter += 1;
+    }
+    assert_eq!(counter, expected_incoming.len());
+
+    let mut counter = 0;
+    for (edge_index, edge) in graph.outgoing_edges(start_index) {
+        assert!(counter < expected_outgoing.len());
+        debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
+               counter,
+               expected_outgoing[counter],
+               edge_index,
+               edge);
+        match expected_outgoing[counter] {
+            (ref e, ref n) => {
+                assert!(e == &edge.data);
+                assert!(start_index == edge.source);
+                assert!(n == graph.node_data(edge.target));
+            }
+        }
+        counter += 1;
+    }
+    assert_eq!(counter, expected_outgoing.len());
+}
+
+#[test]
+fn each_adjacent_from_a() {
+    let graph = create_graph();
+    test_adjacent_edges(&graph, NodeIndex(0), "A", &[], &[("AB", "B")]);
+}
+
+#[test]
+fn each_adjacent_from_b() {
+    let graph = create_graph();
+    test_adjacent_edges(&graph,
+                        NodeIndex(1),
+                        "B",
+                        &[("FB", "F"), ("AB", "A")],
+                        &[("BD", "D"), ("BC", "C")]);
+}
+
+#[test]
+fn each_adjacent_from_c() {
+    let graph = create_graph();
+    test_adjacent_edges(&graph, NodeIndex(2), "C", &[("EC", "E"), ("BC", "B")], &[]);
+}
+
+#[test]
+fn each_adjacent_from_d() {
+    let graph = create_graph();
+    test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]);
+}
diff --git a/src/librustc_data_structures/graph/iterate/mod.rs b/src/librustc_data_structures/graph/iterate/mod.rs
new file mode 100644 (file)
index 0000000..3afdc88
--- /dev/null
@@ -0,0 +1,63 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::indexed_vec::IndexVec;
+use super::{DirectedGraph, WithSuccessors, WithNumNodes};
+
+#[cfg(test)]
+mod test;
+
+pub fn post_order_from<G: DirectedGraph + WithSuccessors + WithNumNodes>(
+    graph: &G,
+    start_node: G::Node,
+) -> Vec<G::Node> {
+    post_order_from_to(graph, start_node, None)
+}
+
+pub fn post_order_from_to<G: DirectedGraph + WithSuccessors + WithNumNodes>(
+    graph: &G,
+    start_node: G::Node,
+    end_node: Option<G::Node>,
+) -> Vec<G::Node> {
+    let mut visited: IndexVec<G::Node, bool> = IndexVec::from_elem_n(false, graph.num_nodes());
+    let mut result: Vec<G::Node> = Vec::with_capacity(graph.num_nodes());
+    if let Some(end_node) = end_node {
+        visited[end_node] = true;
+    }
+    post_order_walk(graph, start_node, &mut result, &mut visited);
+    result
+}
+
+fn post_order_walk<G: DirectedGraph + WithSuccessors + WithNumNodes>(
+    graph: &G,
+    node: G::Node,
+    result: &mut Vec<G::Node>,
+    visited: &mut IndexVec<G::Node, bool>,
+) {
+    if visited[node] {
+        return;
+    }
+    visited[node] = true;
+
+    for successor in graph.successors(node) {
+        post_order_walk(graph, successor, result, visited);
+    }
+
+    result.push(node);
+}
+
+pub fn reverse_post_order<G: DirectedGraph + WithSuccessors + WithNumNodes>(
+    graph: &G,
+    start_node: G::Node,
+) -> Vec<G::Node> {
+    let mut vec = post_order_from(graph, start_node);
+    vec.reverse();
+    vec
+}
diff --git a/src/librustc_data_structures/graph/iterate/test.rs b/src/librustc_data_structures/graph/iterate/test.rs
new file mode 100644 (file)
index 0000000..100881d
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::test::TestGraph;
+
+use super::*;
+
+#[test]
+fn diamond_post_order() {
+    let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
+
+    let result = post_order_from(&graph, 0);
+    assert_eq!(result, vec![3, 1, 2, 0]);
+}
index e2b393071ff5c4ec0333aaf2f343ed31db734902..7265e4e8c7c66090ebb1600577c965355d1866cc 100644 (file)
@@ -1,4 +1,4 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! A graph module for use in dataflow, region resolution, and elsewhere.
-//!
-//! # Interface details
-//!
-//! You customize the graph by specifying a "node data" type `N` and an
-//! "edge data" type `E`. You can then later gain access (mutable or
-//! immutable) to these "user-data" bits. Currently, you can only add
-//! nodes or edges to the graph. You cannot remove or modify them once
-//! added. This could be changed if we have a need.
-//!
-//! # Implementation details
-//!
-//! The main tricky thing about this code is the way that edges are
-//! stored. The edges are stored in a central array, but they are also
-//! threaded onto two linked lists for each node, one for incoming edges
-//! and one for outgoing edges. Note that every edge is a member of some
-//! incoming list and some outgoing list.  Basically you can load the
-//! first index of the linked list from the node data structures (the
-//! field `first_edge`) and then, for each edge, load the next index from
-//! the field `next_edge`). Each of those fields is an array that should
-//! be indexed by the direction (see the type `Direction`).
+use super::indexed_vec::Idx;
 
-use bitvec::BitVector;
-use std::fmt::Debug;
-use std::usize;
-use snapshot_vec::{SnapshotVec, SnapshotVecDelegate};
+pub mod dominators;
+pub mod implementation;
+pub mod iterate;
+mod reference;
+pub mod scc;
 
 #[cfg(test)]
-mod tests;
+mod test;
 
-pub struct Graph<N, E> {
-    nodes: SnapshotVec<Node<N>>,
-    edges: SnapshotVec<Edge<E>>,
+pub trait DirectedGraph {
+    type Node: Idx;
 }
 
-pub struct Node<N> {
-    first_edge: [EdgeIndex; 2], // see module comment
-    pub data: N,
+pub trait WithNumNodes: DirectedGraph {
+    fn num_nodes(&self) -> usize;
 }
 
-#[derive(Debug)]
-pub struct Edge<E> {
-    next_edge: [EdgeIndex; 2], // see module comment
-    source: NodeIndex,
-    target: NodeIndex,
-    pub data: E,
-}
-
-impl<N> SnapshotVecDelegate for Node<N> {
-    type Value = Node<N>;
-    type Undo = ();
-
-    fn reverse(_: &mut Vec<Node<N>>, _: ()) {}
-}
-
-impl<N> SnapshotVecDelegate for Edge<N> {
-    type Value = Edge<N>;
-    type Undo = ();
-
-    fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct NodeIndex(pub usize);
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct EdgeIndex(pub usize);
-
-pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);
-
-// Use a private field here to guarantee no more instances are created:
-#[derive(Copy, Clone, Debug, PartialEq)]
-pub struct Direction {
-    repr: usize,
-}
-
-pub const OUTGOING: Direction = Direction { repr: 0 };
-
-pub const INCOMING: Direction = Direction { repr: 1 };
-
-impl NodeIndex {
-    /// Returns unique id (unique with respect to the graph holding associated node).
-    pub fn node_id(&self) -> usize {
-        self.0
-    }
-}
-
-impl<N: Debug, E: Debug> Graph<N, E> {
-    pub fn new() -> Graph<N, E> {
-        Graph {
-            nodes: SnapshotVec::new(),
-            edges: SnapshotVec::new(),
-        }
-    }
-
-    pub fn with_capacity(nodes: usize, edges: usize) -> Graph<N, E> {
-        Graph {
-            nodes: SnapshotVec::with_capacity(nodes),
-            edges: SnapshotVec::with_capacity(edges),
-        }
-    }
-
-    // # Simple accessors
-
-    #[inline]
-    pub fn all_nodes(&self) -> &[Node<N>] {
-        &self.nodes
-    }
-
-    #[inline]
-    pub fn len_nodes(&self) -> usize {
-        self.nodes.len()
-    }
-
-    #[inline]
-    pub fn all_edges(&self) -> &[Edge<E>] {
-        &self.edges
-    }
-
-    #[inline]
-    pub fn len_edges(&self) -> usize {
-        self.edges.len()
-    }
-
-    // # Node construction
-
-    pub fn next_node_index(&self) -> NodeIndex {
-        NodeIndex(self.nodes.len())
-    }
-
-    pub fn add_node(&mut self, data: N) -> NodeIndex {
-        let idx = self.next_node_index();
-        self.nodes.push(Node {
-            first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],
-            data,
-        });
-        idx
-    }
-
-    pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {
-        &mut self.nodes[idx.0].data
-    }
-
-    pub fn node_data(&self, idx: NodeIndex) -> &N {
-        &self.nodes[idx.0].data
-    }
-
-    pub fn node(&self, idx: NodeIndex) -> &Node<N> {
-        &self.nodes[idx.0]
-    }
-
-    // # Edge construction and queries
-
-    pub fn next_edge_index(&self) -> EdgeIndex {
-        EdgeIndex(self.edges.len())
-    }
-
-    pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {
-        debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data);
-
-        let idx = self.next_edge_index();
-
-        // read current first of the list of edges from each node
-        let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];
-        let target_first = self.nodes[target.0].first_edge[INCOMING.repr];
-
-        // create the new edge, with the previous firsts from each node
-        // as the next pointers
-        self.edges.push(Edge {
-            next_edge: [source_first, target_first],
-            source,
-            target,
-            data,
-        });
-
-        // adjust the firsts for each node target be the next object.
-        self.nodes[source.0].first_edge[OUTGOING.repr] = idx;
-        self.nodes[target.0].first_edge[INCOMING.repr] = idx;
-
-        return idx;
-    }
-
-    pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
-        &self.edges[idx.0]
-    }
-
-    // # Iterating over nodes, edges
-
-    pub fn enumerated_nodes(&self) -> impl Iterator<Item = (NodeIndex, &Node<N>)> {
-        self.nodes
-            .iter()
-            .enumerate()
-            .map(|(idx, n)| (NodeIndex(idx), n))
-    }
-
-    pub fn enumerated_edges(&self) -> impl Iterator<Item = (EdgeIndex, &Edge<E>)> {
-        self.edges
-            .iter()
-            .enumerate()
-            .map(|(idx, e)| (EdgeIndex(idx), e))
-    }
-
-    pub fn each_node<'a>(&'a self, mut f: impl FnMut(NodeIndex, &'a Node<N>) -> bool) -> bool {
-        //! Iterates over all edges defined in the graph.
-        self.enumerated_nodes()
-            .all(|(node_idx, node)| f(node_idx, node))
-    }
-
-    pub fn each_edge<'a>(&'a self, mut f: impl FnMut(EdgeIndex, &'a Edge<E>) -> bool) -> bool {
-        //! Iterates over all edges defined in the graph
-        self.enumerated_edges()
-            .all(|(edge_idx, edge)| f(edge_idx, edge))
-    }
-
-    pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
-        self.adjacent_edges(source, OUTGOING)
-    }
-
-    pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
-        self.adjacent_edges(source, INCOMING)
-    }
-
-    pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges<N, E> {
-        let first_edge = self.node(source).first_edge[direction.repr];
-        AdjacentEdges {
-            graph: self,
-            direction,
-            next: first_edge,
-        }
-    }
-
-    pub fn successor_nodes<'a>(
-        &'a self,
-        source: NodeIndex,
-    ) -> impl Iterator<Item = NodeIndex> + 'a {
-        self.outgoing_edges(source).targets()
-    }
-
-    pub fn predecessor_nodes<'a>(
-        &'a self,
-        target: NodeIndex,
-    ) -> impl Iterator<Item = NodeIndex> + 'a {
-        self.incoming_edges(target).sources()
-    }
-
-    pub fn depth_traverse<'a>(
-        &'a self,
-        start: NodeIndex,
-        direction: Direction,
-    ) -> DepthFirstTraversal<'a, N, E> {
-        DepthFirstTraversal::with_start_node(self, start, direction)
-    }
-
-    pub fn nodes_in_postorder<'a>(
-        &'a self,
-        direction: Direction,
-        entry_node: NodeIndex,
-    ) -> Vec<NodeIndex> {
-        let mut visited = BitVector::new(self.len_nodes());
-        let mut stack = vec![];
-        let mut result = Vec::with_capacity(self.len_nodes());
-        let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| {
-            if visited.insert(node.0) {
-                stack.push((node, self.adjacent_edges(node, direction)));
-            }
-        };
-
-        for node in Some(entry_node)
-            .into_iter()
-            .chain(self.enumerated_nodes().map(|(node, _)| node))
-        {
-            push_node(&mut stack, node);
-            while let Some((node, mut iter)) = stack.pop() {
-                if let Some((_, child)) = iter.next() {
-                    let target = child.source_or_target(direction);
-                    // the current node needs more processing, so
-                    // add it back to the stack
-                    stack.push((node, iter));
-                    // and then push the new node
-                    push_node(&mut stack, target);
-                } else {
-                    result.push(node);
-                }
-            }
-        }
-
-        assert_eq!(result.len(), self.len_nodes());
-        result
-    }
-}
-
-// # Iterators
-
-pub struct AdjacentEdges<'g, N, E>
+pub trait WithSuccessors: DirectedGraph
 where
-    N: 'g,
-    E: 'g,
+    Self: for<'graph> GraphSuccessors<'graph, Item = <Self as DirectedGraph>::Node>,
 {
-    graph: &'g Graph<N, E>,
-    direction: Direction,
-    next: EdgeIndex,
+    fn successors<'graph>(
+        &'graph self,
+        node: Self::Node,
+    ) -> <Self as GraphSuccessors<'graph>>::Iter;
 }
 
-impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> {
-    fn targets(self) -> impl Iterator<Item = NodeIndex> + 'g {
-        self.into_iter().map(|(_, edge)| edge.target)
-    }
-
-    fn sources(self) -> impl Iterator<Item = NodeIndex> + 'g {
-        self.into_iter().map(|(_, edge)| edge.source)
-    }
+pub trait GraphSuccessors<'graph> {
+    type Item;
+    type Iter: Iterator<Item = Self::Item>;
 }
 
-impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {
-    type Item = (EdgeIndex, &'g Edge<E>);
-
-    fn next(&mut self) -> Option<(EdgeIndex, &'g Edge<E>)> {
-        let edge_index = self.next;
-        if edge_index == INVALID_EDGE_INDEX {
-            return None;
-        }
-
-        let edge = self.graph.edge(edge_index);
-        self.next = edge.next_edge[self.direction.repr];
-        Some((edge_index, edge))
-    }
-
-    fn size_hint(&self) -> (usize, Option<usize>) {
-        // At most, all the edges in the graph.
-        (0, Some(self.graph.len_edges()))
-    }
-}
-
-pub struct DepthFirstTraversal<'g, N, E>
+pub trait WithPredecessors: DirectedGraph
 where
-    N: 'g,
-    E: 'g,
+    Self: for<'graph> GraphPredecessors<'graph, Item = <Self as DirectedGraph>::Node>,
 {
-    graph: &'g Graph<N, E>,
-    stack: Vec<NodeIndex>,
-    visited: BitVector,
-    direction: Direction,
+    fn predecessors<'graph>(
+        &'graph self,
+        node: Self::Node,
+    ) -> <Self as GraphPredecessors<'graph>>::Iter;
 }
 
-impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
-    pub fn with_start_node(
-        graph: &'g Graph<N, E>,
-        start_node: NodeIndex,
-        direction: Direction,
-    ) -> Self {
-        let mut visited = BitVector::new(graph.len_nodes());
-        visited.insert(start_node.node_id());
-        DepthFirstTraversal {
-            graph,
-            stack: vec![start_node],
-            visited,
-            direction,
-        }
-    }
-
-    fn visit(&mut self, node: NodeIndex) {
-        if self.visited.insert(node.node_id()) {
-            self.stack.push(node);
-        }
-    }
+pub trait GraphPredecessors<'graph> {
+    type Item;
+    type Iter: Iterator<Item = Self::Item>;
 }
 
-impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
-    type Item = NodeIndex;
-
-    fn next(&mut self) -> Option<NodeIndex> {
-        let next = self.stack.pop();
-        if let Some(idx) = next {
-            for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {
-                let target = edge.source_or_target(self.direction);
-                self.visit(target);
-            }
-        }
-        next
-    }
-
-    fn size_hint(&self) -> (usize, Option<usize>) {
-        // We will visit every node in the graph exactly once.
-        let remaining = self.graph.len_nodes() - self.visited.count();
-        (remaining, Some(remaining))
-    }
+pub trait WithStartNode: DirectedGraph {
+    fn start_node(&self) -> Self::Node;
 }
 
-impl<'g, N: Debug, E: Debug> ExactSizeIterator for DepthFirstTraversal<'g, N, E> {}
-
-impl<E> Edge<E> {
-    pub fn source(&self) -> NodeIndex {
-        self.source
-    }
-
-    pub fn target(&self) -> NodeIndex {
-        self.target
-    }
+pub trait ControlFlowGraph:
+    DirectedGraph + WithStartNode + WithPredecessors + WithStartNode + WithSuccessors + WithNumNodes
+{
+    // convenient trait
+}
 
-    pub fn source_or_target(&self, direction: Direction) -> NodeIndex {
-        if direction == OUTGOING {
-            self.target
-        } else {
-            self.source
-        }
-    }
+impl<T> ControlFlowGraph for T
+where
+    T: DirectedGraph
+        + WithStartNode
+        + WithPredecessors
+        + WithStartNode
+        + WithSuccessors
+        + WithNumNodes,
+{
 }
diff --git a/src/librustc_data_structures/graph/reference.rs b/src/librustc_data_structures/graph/reference.rs
new file mode 100644 (file)
index 0000000..a7b763d
--- /dev/null
@@ -0,0 +1,51 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::*;
+
+impl<'graph, G: DirectedGraph> DirectedGraph for &'graph G {
+    type Node = G::Node;
+}
+
+impl<'graph, G: WithNumNodes> WithNumNodes for &'graph G {
+    fn num_nodes(&self) -> usize {
+        (**self).num_nodes()
+    }
+}
+
+impl<'graph, G: WithStartNode> WithStartNode for &'graph G {
+    fn start_node(&self) -> Self::Node {
+        (**self).start_node()
+    }
+}
+
+impl<'graph, G: WithSuccessors> WithSuccessors for &'graph G {
+    fn successors<'iter>(&'iter self, node: Self::Node) -> <Self as GraphSuccessors<'iter>>::Iter {
+        (**self).successors(node)
+    }
+}
+
+impl<'graph, G: WithPredecessors> WithPredecessors for &'graph G {
+    fn predecessors<'iter>(&'iter self,
+                           node: Self::Node)
+                           -> <Self as GraphPredecessors<'iter>>::Iter {
+        (**self).predecessors(node)
+    }
+}
+
+impl<'iter, 'graph, G: WithPredecessors> GraphPredecessors<'iter> for &'graph G {
+    type Item = G::Node;
+    type Iter = <G as GraphPredecessors<'iter>>::Iter;
+}
+
+impl<'iter, 'graph, G: WithSuccessors> GraphSuccessors<'iter> for &'graph G {
+    type Item = G::Node;
+    type Iter = <G as GraphSuccessors<'iter>>::Iter;
+}
diff --git a/src/librustc_data_structures/graph/scc/mod.rs b/src/librustc_data_structures/graph/scc/mod.rs
new file mode 100644 (file)
index 0000000..a989a54
--- /dev/null
@@ -0,0 +1,361 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Routine to compute the strongly connected components (SCCs) of a
+//! graph, as well as the resulting DAG if each SCC is replaced with a
+//! node in the graph. This uses Tarjan's algorithm that completes in
+//! O(n) time.
+
+use fx::FxHashSet;
+use graph::{DirectedGraph, WithNumNodes, WithSuccessors};
+use indexed_vec::{Idx, IndexVec};
+use std::ops::Range;
+
+mod test;
+
+/// Strongly connected components (SCC) of a graph. The type `N` is
+/// the index type for the graph nodes and `S` is the index type for
+/// the SCCs. We can map from each node to the SCC that it
+/// participates in, and we also have the successors of each SCC.
+pub struct Sccs<N: Idx, S: Idx> {
+    /// For each node, what is the SCC index of the SCC to which it
+    /// belongs.
+    scc_indices: IndexVec<N, S>,
+
+    /// Data about each SCC.
+    scc_data: SccData<S>,
+}
+
+struct SccData<S: Idx> {
+    /// For each SCC, the range of `all_successors` where its
+    /// successors can be found.
+    ranges: IndexVec<S, Range<usize>>,
+
+    /// Contains the successors for all the Sccs, concatenated. The
+    /// range of indices corresponding to a given SCC is found in its
+    /// SccData.
+    all_successors: Vec<S>,
+}
+
+impl<N: Idx, S: Idx> Sccs<N, S> {
+    pub fn new(graph: &(impl DirectedGraph<Node = N> + WithNumNodes + WithSuccessors)) -> Self {
+        SccsConstruction::construct(graph)
+    }
+
+    /// Returns the number of SCCs in the graph.
+    pub fn num_sccs(&self) -> usize {
+        self.scc_data.len()
+    }
+
+    /// Returns an iterator over the SCCs in the graph.
+    pub fn all_sccs(&self) -> impl Iterator<Item = S> {
+        (0 .. self.scc_data.len()).map(S::new)
+    }
+
+    /// Returns the SCC to which a node `r` belongs.
+    pub fn scc(&self, r: N) -> S {
+        self.scc_indices[r]
+    }
+
+    /// Returns the successors of the given SCC.
+    pub fn successors(&self, scc: S) -> &[S] {
+        self.scc_data.successors(scc)
+    }
+}
+
+impl<S: Idx> SccData<S> {
+    /// Number of SCCs.
+    fn len(&self) -> usize {
+        self.ranges.len()
+    }
+
+    /// Returns the successors of the given SCC.
+    fn successors(&self, scc: S) -> &[S] {
+        // Annoyingly, `range` does not implement `Copy`, so we have
+        // to do `range.start..range.end`:
+        let range = &self.ranges[scc];
+        &self.all_successors[range.start..range.end]
+    }
+
+    /// Creates a new SCC with `successors` as its successors and
+    /// returns the resulting index.
+    fn create_scc(&mut self, successors: impl IntoIterator<Item = S>) -> S {
+        // Store the successors on `scc_successors_vec`, remembering
+        // the range of indices.
+        let all_successors_start = self.all_successors.len();
+        self.all_successors.extend(successors);
+        let all_successors_end = self.all_successors.len();
+
+        debug!(
+            "create_scc({:?}) successors={:?}",
+            self.ranges.len(),
+            &self.all_successors[all_successors_start..all_successors_end],
+        );
+
+        self.ranges.push(all_successors_start..all_successors_end)
+    }
+}
+
+struct SccsConstruction<'c, G: DirectedGraph + WithNumNodes + WithSuccessors + 'c, S: Idx> {
+    graph: &'c G,
+
+    /// The state of each node; used during walk to record the stack
+    /// and after walk to record what cycle each node ended up being
+    /// in.
+    node_states: IndexVec<G::Node, NodeState<G::Node, S>>,
+
+    /// The stack of nodes that we are visiting as part of the DFS.
+    node_stack: Vec<G::Node>,
+
+    /// The stack of successors: as we visit a node, we mark our
+    /// position in this stack, and when we encounter a successor SCC,
+    /// we push it on the stack. When we complete an SCC, we can pop
+    /// everything off the stack that was found along the way.
+    successors_stack: Vec<S>,
+
+    /// A set used to strip duplicates. As we accumulate successors
+    /// into the successors_stack, we sometimes get duplicate entries.
+    /// We use this set to remove those -- we also keep its storage
+    /// around between successors to amortize memory allocation costs.
+    duplicate_set: FxHashSet<S>,
+
+    scc_data: SccData<S>,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum NodeState<N, S> {
+    /// This node has not yet been visited as part of the DFS.
+    ///
+    /// After SCC construction is complete, this state ought to be
+    /// impossible.
+    NotVisited,
+
+    /// This node is currently being walked as part of our DFS. It is on
+    /// the stack at the depth `depth`.
+    ///
+    /// After SCC construction is complete, this state ought to be
+    /// impossible.
+    BeingVisited { depth: usize },
+
+    /// Indicates that this node is a member of the given cycle.
+    InCycle { scc_index: S },
+
+    /// Indicates that this node is a member of whatever cycle
+    /// `parent` is a member of. This state is transient: whenever we
+    /// see it, we try to overwrite it with the current state of
+    /// `parent` (this is the "path compression" step of a union-find
+    /// algorithm).
+    InCycleWith { parent: N },
+}
+
+#[derive(Copy, Clone, Debug)]
+enum WalkReturn<S> {
+    Cycle { min_depth: usize },
+    Complete { scc_index: S },
+}
+
+impl<'c, G, S> SccsConstruction<'c, G, S>
+where
+    G: DirectedGraph + WithNumNodes + WithSuccessors,
+    S: Idx,
+{
+    /// Identifies SCCs in the graph `G` and computes the resulting
+    /// DAG. This uses a variant of [Tarjan's
+    /// algorithm][wikipedia]. The high-level summary of the algorithm
+    /// is that we do a depth-first search. Along the way, we keep a
+    /// stack of each node whose successors are being visited. We
+    /// track the depth of each node on this stack (there is no depth
+    /// if the node is not on the stack). When we find that some node
+    /// N with depth D can reach some other node N' with lower depth
+    /// D' (i.e., D' < D), we know that N, N', and all nodes in
+    /// between them on the stack are part of an SCC.
+    ///
+    /// [wikipedia]: https://bit.ly/2EZIx84
+    fn construct(graph: &'c G) -> Sccs<G::Node, S> {
+        let num_nodes = graph.num_nodes();
+
+        let mut this = Self {
+            graph,
+            node_states: IndexVec::from_elem_n(NodeState::NotVisited, num_nodes),
+            node_stack: Vec::with_capacity(num_nodes),
+            successors_stack: Vec::new(),
+            scc_data: SccData {
+                ranges: IndexVec::new(),
+                all_successors: Vec::new(),
+            },
+            duplicate_set: FxHashSet::default(),
+        };
+
+        let scc_indices = (0..num_nodes)
+            .map(G::Node::new)
+            .map(|node| match this.walk_node(0, node) {
+                WalkReturn::Complete { scc_index } => scc_index,
+                WalkReturn::Cycle { min_depth } => panic!(
+                    "`walk_node(0, {:?})` returned cycle with depth {:?}",
+                    node, min_depth
+                ),
+            })
+            .collect();
+
+        Sccs {
+            scc_indices,
+            scc_data: this.scc_data,
+        }
+    }
+
+    /// Visit a node during the DFS. We first examine its current
+    /// state -- if it is not yet visited (`NotVisited`), we can push
+    /// it onto the stack and start walking its successors.
+    ///
+    /// If it is already on the DFS stack it will be in the state
+    /// `BeingVisited`. In that case, we have found a cycle and we
+    /// return the depth from the stack.
+    ///
+    /// Otherwise, we are looking at a node that has already been
+    /// completely visited. We therefore return `WalkReturn::Complete`
+    /// with its associated SCC index.
+    fn walk_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {
+        debug!("walk_node(depth = {:?}, node = {:?})", depth, node);
+        match self.find_state(node) {
+            NodeState::InCycle { scc_index } => WalkReturn::Complete { scc_index },
+
+            NodeState::BeingVisited { depth: min_depth } => WalkReturn::Cycle { min_depth },
+
+            NodeState::NotVisited => self.walk_unvisited_node(depth, node),
+
+            NodeState::InCycleWith { parent } => panic!(
+                "`find_state` returned `InCycleWith({:?})`, which ought to be impossible",
+                parent
+            ),
+        }
+    }
+
+    /// Fetches the state of the node `r`. If `r` is recorded as being
+    /// in a cycle with some other node `r2`, then fetches the state
+    /// of `r2` (and updates `r` to reflect current result). This is
+    /// basically the "find" part of a standard union-find algorithm
+    /// (with path compression).
+    fn find_state(&mut self, r: G::Node) -> NodeState<G::Node, S> {
+        debug!("find_state(r = {:?} in state {:?})", r, self.node_states[r]);
+        match self.node_states[r] {
+            NodeState::InCycle { scc_index } => NodeState::InCycle { scc_index },
+            NodeState::BeingVisited { depth } => NodeState::BeingVisited { depth },
+            NodeState::NotVisited => NodeState::NotVisited,
+            NodeState::InCycleWith { parent } => {
+                let parent_state = self.find_state(parent);
+                debug!("find_state: parent_state = {:?}", parent_state);
+                match parent_state {
+                    NodeState::InCycle { .. } => {
+                        self.node_states[r] = parent_state;
+                        parent_state
+                    }
+
+                    NodeState::BeingVisited { depth } => {
+                        self.node_states[r] = NodeState::InCycleWith {
+                            parent: self.node_stack[depth],
+                        };
+                        parent_state
+                    }
+
+                    NodeState::NotVisited | NodeState::InCycleWith { .. } => {
+                        panic!("invalid parent state: {:?}", parent_state)
+                    }
+                }
+            }
+        }
+    }
+
+    /// Walks a node that has never been visited before.
+    fn walk_unvisited_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {
+        debug!(
+            "walk_unvisited_node(depth = {:?}, node = {:?})",
+            depth, node
+        );
+
+        debug_assert!(match self.node_states[node] {
+            NodeState::NotVisited => true,
+            _ => false,
+        });
+
+        // Push `node` onto the stack.
+        self.node_states[node] = NodeState::BeingVisited { depth };
+        self.node_stack.push(node);
+
+        // Walk each successor of the node, looking to see if any of
+        // them can reach a node that is presently on the stack. If
+        // so, that means they can also reach us.
+        let mut min_depth = depth;
+        let mut min_cycle_root = node;
+        let successors_len = self.successors_stack.len();
+        for successor_node in self.graph.successors(node) {
+            debug!(
+                "walk_unvisited_node: node = {:?} successor_ode = {:?}",
+                node, successor_node
+            );
+            match self.walk_node(depth + 1, successor_node) {
+                WalkReturn::Cycle {
+                    min_depth: successor_min_depth,
+                } => {
+                    // Track the minimum depth we can reach.
+                    assert!(successor_min_depth <= depth);
+                    if successor_min_depth < min_depth {
+                        debug!(
+                            "walk_unvisited_node: node = {:?} successor_min_depth = {:?}",
+                            node, successor_min_depth
+                        );
+                        min_depth = successor_min_depth;
+                        min_cycle_root = successor_node;
+                    }
+                }
+
+                WalkReturn::Complete {
+                    scc_index: successor_scc_index,
+                } => {
+                    // Push the completed SCC indices onto
+                    // the `successors_stack` for later.
+                    debug!(
+                        "walk_unvisited_node: node = {:?} successor_scc_index = {:?}",
+                        node, successor_scc_index
+                    );
+                    self.successors_stack.push(successor_scc_index);
+                }
+            }
+        }
+
+        // Completed walk, remove `node` from the stack.
+        let r = self.node_stack.pop();
+        debug_assert_eq!(r, Some(node));
+
+        // If `min_depth == depth`, then we are the root of the
+        // cycle: we can't reach anyone further down the stack.
+        if min_depth == depth {
+            // Note that successor stack may have duplicates, so we
+            // want to remove those:
+            let deduplicated_successors = {
+                let duplicate_set = &mut self.duplicate_set;
+                duplicate_set.clear();
+                self.successors_stack
+                    .drain(successors_len..)
+                    .filter(move |&i| duplicate_set.insert(i))
+            };
+            let scc_index = self.scc_data.create_scc(deduplicated_successors);
+            self.node_states[node] = NodeState::InCycle { scc_index };
+            WalkReturn::Complete { scc_index }
+        } else {
+            // We are not the head of the cycle. Return back to our
+            // caller. They will take ownership of the
+            // `self.successors` data that we pushed.
+            self.node_states[node] = NodeState::InCycleWith {
+                parent: min_cycle_root,
+            };
+            WalkReturn::Cycle { min_depth }
+        }
+    }
+}
diff --git a/src/librustc_data_structures/graph/scc/test.rs b/src/librustc_data_structures/graph/scc/test.rs
new file mode 100644 (file)
index 0000000..405e1b3
--- /dev/null
@@ -0,0 +1,180 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![cfg(test)]
+
+use graph::test::TestGraph;
+use super::*;
+
+#[test]
+fn diamond() {
+    let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
+    let sccs: Sccs<_, usize> = Sccs::new(&graph);
+    assert_eq!(sccs.num_sccs(), 4);
+    assert_eq!(sccs.num_sccs(), 4);
+}
+
+#[test]
+fn test_big_scc() {
+    // The order in which things will be visited is important to this
+    // test.
+    //
+    // We will visit:
+    //
+    // 0 -> 1 -> 2 -> 0
+    //
+    // and at this point detect a cycle. 2 will return back to 1 which
+    // will visit 3. 3 will visit 2 before the cycle is complete, and
+    // hence it too will return a cycle.
+
+    /*
++-> 0
+|   |
+|   v
+|   1 -> 3
+|   |    |
+|   v    |
++-- 2 <--+
+     */
+    let graph = TestGraph::new(0, &[
+        (0, 1),
+        (1, 2),
+        (1, 3),
+        (2, 0),
+        (3, 2),
+    ]);
+    let sccs: Sccs<_, usize> = Sccs::new(&graph);
+    assert_eq!(sccs.num_sccs(), 1);
+}
+
+#[test]
+fn test_three_sccs() {
+    /*
+    0
+    |
+    v
++-> 1    3
+|   |    |
+|   v    |
++-- 2 <--+
+     */
+    let graph = TestGraph::new(0, &[
+        (0, 1),
+        (1, 2),
+        (2, 1),
+        (3, 2),
+    ]);
+    let sccs: Sccs<_, usize> = Sccs::new(&graph);
+    assert_eq!(sccs.num_sccs(), 3);
+    assert_eq!(sccs.scc(0), 1);
+    assert_eq!(sccs.scc(1), 0);
+    assert_eq!(sccs.scc(2), 0);
+    assert_eq!(sccs.scc(3), 2);
+    assert_eq!(sccs.successors(0), &[]);
+    assert_eq!(sccs.successors(1), &[0]);
+    assert_eq!(sccs.successors(2), &[0]);
+}
+
+#[test]
+fn test_find_state_2() {
+    // The order in which things will be visited is important to this
+    // test. It tests part of the `find_state` behavior. Here is the
+    // graph:
+    //
+    //
+    //       /----+
+    //     0 <--+ |
+    //     |    | |
+    //     v    | |
+    // +-> 1 -> 3 4
+    // |   |      |
+    // |   v      |
+    // +-- 2 <----+
+
+    let graph = TestGraph::new(0, &[
+        (0, 1),
+        (0, 4),
+        (1, 2),
+        (1, 3),
+        (2, 1),
+        (3, 0),
+        (4, 2),
+    ]);
+
+    // For this graph, we will start in our DFS by visiting:
+    //
+    // 0 -> 1 -> 2 -> 1
+    //
+    // and at this point detect a cycle. The state of 2 will thus be
+    // `InCycleWith { 1 }`.  We will then visit the 1 -> 3 edge, which
+    // will attempt to visit 0 as well, thus going to the state
+    // `InCycleWith { 0 }`. Finally, node 1 will complete; the lowest
+    // depth of any successor was 3 which had depth 0, and thus it
+    // will be in the state `InCycleWith { 3 }`.
+    //
+    // When we finally traverse the `0 -> 4` edge and then visit node 2,
+    // the states of the nodes are:
+    //
+    // 0 BeingVisited { 0 }
+    // 1 InCycleWith { 3 }
+    // 2 InCycleWith { 1 }
+    // 3 InCycleWith { 0 }
+    //
+    // and hence 4 will traverse the links, finding an ultimate depth of 0.
+    // It will also collapse the states to the following:
+    //
+    // 0 BeingVisited { 0 }
+    // 1 InCycleWith { 3 }
+    // 2 InCycleWith { 1 }
+    // 3 InCycleWith { 0 }
+
+    let sccs: Sccs<_, usize> = Sccs::new(&graph);
+    assert_eq!(sccs.num_sccs(), 1);
+    assert_eq!(sccs.scc(0), 0);
+    assert_eq!(sccs.scc(1), 0);
+    assert_eq!(sccs.scc(2), 0);
+    assert_eq!(sccs.scc(3), 0);
+    assert_eq!(sccs.scc(4), 0);
+    assert_eq!(sccs.successors(0), &[]);
+}
+
+#[test]
+fn test_find_state_3() {
+    /*
+      /----+
+    0 <--+ |
+    |    | |
+    v    | |
++-> 1 -> 3 4 5
+|   |      | |
+|   v      | |
++-- 2 <----+-+
+     */
+    let graph = TestGraph::new(0, &[
+        (0, 1),
+        (0, 4),
+        (1, 2),
+        (1, 3),
+        (2, 1),
+        (3, 0),
+        (4, 2),
+        (5, 2),
+    ]);
+    let sccs: Sccs<_, usize> = Sccs::new(&graph);
+    assert_eq!(sccs.num_sccs(), 2);
+    assert_eq!(sccs.scc(0), 0);
+    assert_eq!(sccs.scc(1), 0);
+    assert_eq!(sccs.scc(2), 0);
+    assert_eq!(sccs.scc(3), 0);
+    assert_eq!(sccs.scc(4), 0);
+    assert_eq!(sccs.scc(5), 1);
+    assert_eq!(sccs.successors(0), &[]);
+    assert_eq!(sccs.successors(1), &[0]);
+}
diff --git a/src/librustc_data_structures/graph/test.rs b/src/librustc_data_structures/graph/test.rs
new file mode 100644 (file)
index 0000000..48b6547
--- /dev/null
@@ -0,0 +1,85 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::collections::HashMap;
+use std::cmp::max;
+use std::slice;
+use std::iter;
+
+use super::*;
+
+pub struct TestGraph {
+    num_nodes: usize,
+    start_node: usize,
+    successors: HashMap<usize, Vec<usize>>,
+    predecessors: HashMap<usize, Vec<usize>>,
+}
+
+impl TestGraph {
+    pub fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
+        let mut graph = TestGraph {
+            num_nodes: start_node + 1,
+            start_node,
+            successors: HashMap::new(),
+            predecessors: HashMap::new(),
+        };
+        for &(source, target) in edges {
+            graph.num_nodes = max(graph.num_nodes, source + 1);
+            graph.num_nodes = max(graph.num_nodes, target + 1);
+            graph.successors.entry(source).or_insert(vec![]).push(target);
+            graph.predecessors.entry(target).or_insert(vec![]).push(source);
+        }
+        for node in 0..graph.num_nodes {
+            graph.successors.entry(node).or_insert(vec![]);
+            graph.predecessors.entry(node).or_insert(vec![]);
+        }
+        graph
+    }
+}
+
+impl DirectedGraph for TestGraph {
+    type Node = usize;
+}
+
+impl WithStartNode for TestGraph {
+    fn start_node(&self) -> usize {
+        self.start_node
+    }
+}
+
+impl WithNumNodes for TestGraph {
+    fn num_nodes(&self) -> usize {
+        self.num_nodes
+    }
+}
+
+impl WithPredecessors for TestGraph {
+    fn predecessors<'graph>(&'graph self,
+                            node: usize)
+                            -> <Self as GraphPredecessors<'graph>>::Iter {
+        self.predecessors[&node].iter().cloned()
+    }
+}
+
+impl WithSuccessors for TestGraph {
+    fn successors<'graph>(&'graph self, node: usize) -> <Self as GraphSuccessors<'graph>>::Iter {
+        self.successors[&node].iter().cloned()
+    }
+}
+
+impl<'graph> GraphPredecessors<'graph> for TestGraph {
+    type Item = usize;
+    type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
+}
+
+impl<'graph> GraphSuccessors<'graph> for TestGraph {
+    type Item = usize;
+    type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
+}
diff --git a/src/librustc_data_structures/graph/tests.rs b/src/librustc_data_structures/graph/tests.rs
deleted file mode 100644 (file)
index 0077043..0000000
+++ /dev/null
@@ -1,139 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use graph::*;
-use std::fmt::Debug;
-
-type TestGraph = Graph<&'static str, &'static str>;
-
-fn create_graph() -> TestGraph {
-    let mut graph = Graph::new();
-
-    // Create a simple graph
-    //
-    //          F
-    //          |
-    //          V
-    //    A --> B --> C
-    //          |     ^
-    //          v     |
-    //          D --> E
-
-    let a = graph.add_node("A");
-    let b = graph.add_node("B");
-    let c = graph.add_node("C");
-    let d = graph.add_node("D");
-    let e = graph.add_node("E");
-    let f = graph.add_node("F");
-
-    graph.add_edge(a, b, "AB");
-    graph.add_edge(b, c, "BC");
-    graph.add_edge(b, d, "BD");
-    graph.add_edge(d, e, "DE");
-    graph.add_edge(e, c, "EC");
-    graph.add_edge(f, b, "FB");
-
-    return graph;
-}
-
-#[test]
-fn each_node() {
-    let graph = create_graph();
-    let expected = ["A", "B", "C", "D", "E", "F"];
-    graph.each_node(|idx, node| {
-        assert_eq!(&expected[idx.0], graph.node_data(idx));
-        assert_eq!(expected[idx.0], node.data);
-        true
-    });
-}
-
-#[test]
-fn each_edge() {
-    let graph = create_graph();
-    let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
-    graph.each_edge(|idx, edge| {
-        assert_eq!(expected[idx.0], edge.data);
-        true
-    });
-}
-
-fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(graph: &Graph<N, E>,
-                                                                   start_index: NodeIndex,
-                                                                   start_data: N,
-                                                                   expected_incoming: &[(E, N)],
-                                                                   expected_outgoing: &[(E, N)]) {
-    assert!(graph.node_data(start_index) == &start_data);
-
-    let mut counter = 0;
-    for (edge_index, edge) in graph.incoming_edges(start_index) {
-        assert!(counter < expected_incoming.len());
-        debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
-               counter,
-               expected_incoming[counter],
-               edge_index,
-               edge);
-        match expected_incoming[counter] {
-            (ref e, ref n) => {
-                assert!(e == &edge.data);
-                assert!(n == graph.node_data(edge.source()));
-                assert!(start_index == edge.target);
-            }
-        }
-        counter += 1;
-    }
-    assert_eq!(counter, expected_incoming.len());
-
-    let mut counter = 0;
-    for (edge_index, edge) in graph.outgoing_edges(start_index) {
-        assert!(counter < expected_outgoing.len());
-        debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
-               counter,
-               expected_outgoing[counter],
-               edge_index,
-               edge);
-        match expected_outgoing[counter] {
-            (ref e, ref n) => {
-                assert!(e == &edge.data);
-                assert!(start_index == edge.source);
-                assert!(n == graph.node_data(edge.target));
-            }
-        }
-        counter += 1;
-    }
-    assert_eq!(counter, expected_outgoing.len());
-}
-
-#[test]
-fn each_adjacent_from_a() {
-    let graph = create_graph();
-    test_adjacent_edges(&graph, NodeIndex(0), "A", &[], &[("AB", "B")]);
-}
-
-#[test]
-fn each_adjacent_from_b() {
-    let graph = create_graph();
-    test_adjacent_edges(&graph,
-                        NodeIndex(1),
-                        "B",
-                        &[("FB", "F"), ("AB", "A")],
-                        &[("BD", "D"), ("BC", "C")]);
-}
-
-#[test]
-fn each_adjacent_from_c() {
-    let graph = create_graph();
-    test_adjacent_edges(&graph, NodeIndex(2), "C", &[("EC", "E"), ("BC", "B")], &[]);
-}
-
-#[test]
-fn each_adjacent_from_d() {
-    let graph = create_graph();
-    test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]);
-}
index ad3710e9536f1af1c401821992c3fec0e23476c0..26de2191090e3a8d70dc9dd2b789fa56aadc3b0e 100644 (file)
@@ -14,6 +14,7 @@
 use std::marker::PhantomData;
 use std::ops::{Index, IndexMut, Range, RangeBounds};
 use std::fmt;
+use std::hash::Hash;
 use std::vec;
 use std::u32;
 
@@ -22,7 +23,7 @@
 /// Represents some newtyped `usize` wrapper.
 ///
 /// (purpose: avoid mixing indexes for different bitvector domains.)
-pub trait Idx: Copy + 'static + Eq + Debug {
+pub trait Idx: Copy + 'static + Ord + Debug + Hash {
     fn new(idx: usize) -> Self;
     fn index(self) -> usize;
 }
index 2cca31f70a092ce82fa07146bad432148d121e10..508dc567fa01af71c632b569c5efe02c7a9b2524 100644 (file)
@@ -61,7 +61,6 @@
 pub mod base_n;
 pub mod bitslice;
 pub mod bitvec;
-pub mod graph;
 pub mod indexed_set;
 pub mod indexed_vec;
 pub mod obligation_forest;
@@ -73,7 +72,7 @@
 pub use ena::unify;
 pub mod fx;
 pub mod tuple_slice;
-pub mod control_flow_graph;
+pub mod graph;
 pub mod flock;
 pub mod sync;
 pub mod owning_ref;
index c317d31b95ab9d4cf3ae199831679201346ed5de..a78a2008eecc5087d0c22d03d87ddf67b96f0f1f 100644 (file)
@@ -49,7 +49,9 @@
 use rustc::hir::def_id::DefId;
 use rustc::ty::TyCtxt;
 use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::graph::{Direction, INCOMING, OUTGOING, NodeIndex};
+use rustc_data_structures::graph::implementation::{
+    Direction, INCOMING, OUTGOING, NodeIndex
+};
 use rustc::hir;
 use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};
 use rustc::ich::{ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED};
index 359b056b5a2d1f5da0fe7e14e6cbeb10e31c14a1..5348a47eed628cd080f8d5dda578d29ba8e0328d 100644 (file)
@@ -43,6 +43,7 @@
 use rustc::lint;
 use rustc::lint::{LateContext, LateLintPass, LintPass, LintArray};
 use rustc::lint::builtin::{BARE_TRAIT_OBJECTS, ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE};
+use rustc::lint::builtin::MACRO_USE_EXTERN_CRATE;
 use rustc::session;
 use rustc::util;
 use rustc::hir;
@@ -179,6 +180,7 @@ macro_rules! add_lint_group {
                     BARE_TRAIT_OBJECTS,
                     UNREACHABLE_PUB,
                     UNUSED_EXTERN_CRATES,
+                    MACRO_USE_EXTERN_CRATE,
                     ELLIPSIS_INCLUSIVE_RANGE_PATTERNS);
 
     // Guidelines for creating a future incompatibility lint:
index 1a66a2d2cb902a0614f0e5c2b45ad3c227d0755b..9c5203f43d23e8863874693f9f4a0b639905b57d 100644 (file)
@@ -23,7 +23,7 @@
 use rustc::ty::query::Providers;
 use rustc::ty::{self, ParamEnv, TyCtxt};
 
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::graph::dominators::Dominators;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::indexed_set::IdxSetBuf;
 use rustc_data_structures::indexed_vec::Idx;
index 25a0123755f2c586e540d18fd7bbecf34f1f388d..68484888477c843400c60a734ce48fe1bd972ed4 100644 (file)
 use borrow_check::nll::ToRegionVid;
 use borrow_check::nll::facts::AllFacts;
 use borrow_check::nll::region_infer::RegionInferenceContext;
-use borrow_check::nll::type_check::AtLocation;
-use rustc::hir;
 use rustc::infer::InferCtxt;
 use rustc::mir::visit::TyContext;
 use rustc::mir::visit::Visitor;
-use rustc::mir::Place::Projection;
 use rustc::mir::{BasicBlock, BasicBlockData, Location, Mir, Place, Rvalue};
-use rustc::mir::{Local, PlaceProjection, ProjectionElem, Statement, Terminator};
+use rustc::mir::{Local, Statement, Terminator};
 use rustc::ty::fold::TypeFoldable;
 use rustc::ty::subst::Substs;
 use rustc::ty::{self, CanonicalTy, ClosureSubsts, GeneratorSubsts};
@@ -41,7 +38,6 @@ pub(super) fn generate_constraints<'cx, 'gcx, 'tcx>(
         regioncx,
         location_table,
         all_facts,
-        mir,
     };
 
     cg.add_region_liveness_constraints_from_type_check(liveness_set_from_typeck);
@@ -57,7 +53,6 @@ struct ConstraintGeneration<'cg, 'cx: 'cg, 'gcx: 'tcx, 'tcx: 'cx> {
     all_facts: &'cg mut Option<AllFacts>,
     location_table: &'cg LocationTable,
     regioncx: &'cg mut RegionInferenceContext<'tcx>,
-    mir: &'cg Mir<'tcx>,
     borrow_set: &'cg BorrowSet<'tcx>,
 }
 
@@ -184,41 +179,6 @@ fn visit_terminator(
         self.super_terminator(block, terminator, location);
     }
 
-    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
-        debug!("visit_rvalue(rvalue={:?}, location={:?})", rvalue, location);
-
-        match rvalue {
-            Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
-                // In some cases, e.g. when borrowing from an unsafe
-                // place, we don't bother to create a loan, since
-                // there are no conditions to validate.
-                if let Some(all_facts) = self.all_facts {
-                    if let Some(borrow_index) = self.borrow_set.location_map.get(&location) {
-                        let region_vid = region.to_region_vid();
-                        all_facts.borrow_region.push((
-                            region_vid,
-                            *borrow_index,
-                            self.location_table.mid_index(location),
-                        ));
-                    }
-                }
-
-                // Look for an rvalue like:
-                //
-                //     & L
-                //
-                // where L is the path that is borrowed. In that case, we have
-                // to add the reborrow constraints (which don't fall out
-                // naturally from the type-checker).
-                self.add_reborrow_constraint(location, region, borrowed_place);
-            }
-
-            _ => {}
-        }
-
-        self.super_rvalue(rvalue, location);
-    }
-
     fn visit_user_assert_ty(
         &mut self,
         _c_ty: &CanonicalTy<'tcx>,
@@ -250,7 +210,7 @@ fn add_region_liveness_constraints_from_type_check(
         for (region, location) in liveness_set {
             debug!("generate: {:#?} is live at {:#?}", region, location);
             let region_vid = regioncx.to_region_vid(region);
-            regioncx.add_live_point(region_vid, *location);
+            regioncx.add_live_element(region_vid, *location);
         }
 
         if let Some(all_facts) = all_facts {
@@ -282,103 +242,7 @@ fn add_regular_live_constraint<T>(&mut self, live_ty: T, location: Location)
             .tcx
             .for_each_free_region(&live_ty, |live_region| {
                 let vid = live_region.to_region_vid();
-                self.regioncx.add_live_point(vid, location);
+                self.regioncx.add_live_element(vid, location);
             });
     }
-
-    // Add the reborrow constraint at `location` so that `borrowed_place`
-    // is valid for `borrow_region`.
-    fn add_reborrow_constraint(
-        &mut self,
-        location: Location,
-        borrow_region: ty::Region<'tcx>,
-        borrowed_place: &Place<'tcx>,
-    ) {
-        let mut borrowed_place = borrowed_place;
-
-        debug!(
-            "add_reborrow_constraint({:?}, {:?}, {:?})",
-            location, borrow_region, borrowed_place
-        );
-        while let Projection(box PlaceProjection { base, elem }) = borrowed_place {
-            debug!("add_reborrow_constraint - iteration {:?}", borrowed_place);
-
-            match *elem {
-                ProjectionElem::Deref => {
-                    let tcx = self.infcx.tcx;
-                    let base_ty = base.ty(self.mir, tcx).to_ty(tcx);
-
-                    debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
-                    match base_ty.sty {
-                        ty::TyRef(ref_region, _, mutbl) => {
-                            self.regioncx.add_outlives(
-                                location.boring(),
-                                ref_region.to_region_vid(),
-                                borrow_region.to_region_vid(),
-                            );
-
-                            if let Some(all_facts) = self.all_facts {
-                                all_facts.outlives.push((
-                                    ref_region.to_region_vid(),
-                                    borrow_region.to_region_vid(),
-                                    self.location_table.mid_index(location),
-                                ));
-                            }
-
-                            match mutbl {
-                                hir::Mutability::MutImmutable => {
-                                    // Immutable reference. We don't need the base
-                                    // to be valid for the entire lifetime of
-                                    // the borrow.
-                                    break;
-                                }
-                                hir::Mutability::MutMutable => {
-                                    // Mutable reference. We *do* need the base
-                                    // to be valid, because after the base becomes
-                                    // invalid, someone else can use our mutable deref.
-
-                                    // This is in order to make the following function
-                                    // illegal:
-                                    // ```
-                                    // fn unsafe_deref<'a, 'b>(x: &'a &'b mut T) -> &'b mut T {
-                                    //     &mut *x
-                                    // }
-                                    // ```
-                                    //
-                                    // As otherwise you could clone `&mut T` using the
-                                    // following function:
-                                    // ```
-                                    // fn bad(x: &mut T) -> (&mut T, &mut T) {
-                                    //     let my_clone = unsafe_deref(&'a x);
-                                    //     ENDREGION 'a;
-                                    //     (my_clone, x)
-                                    // }
-                                    // ```
-                                }
-                            }
-                        }
-                        ty::TyRawPtr(..) => {
-                            // deref of raw pointer, guaranteed to be valid
-                            break;
-                        }
-                        ty::TyAdt(def, _) if def.is_box() => {
-                            // deref of `Box`, need the base to be valid - propagate
-                        }
-                        _ => bug!("unexpected deref ty {:?} in {:?}", base_ty, borrowed_place),
-                    }
-                }
-                ProjectionElem::Field(..)
-                | ProjectionElem::Downcast(..)
-                | ProjectionElem::Index(..)
-                | ProjectionElem::ConstantIndex { .. }
-                | ProjectionElem::Subslice { .. } => {
-                    // other field access
-                }
-            }
-
-            // The "propagate" case. We need to check that our base is valid
-            // for the borrow's lifetime.
-            borrowed_place = base;
-        }
-    }
 }
diff --git a/src/librustc_mir/borrow_check/nll/constraint_set.rs b/src/librustc_mir/borrow_check/nll/constraint_set.rs
deleted file mode 100644 (file)
index 3bdf78f..0000000
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::ty::RegionVid;
-use rustc_data_structures::indexed_vec::{Idx, IndexVec};
-use borrow_check::nll::type_check::Locations;
-
-use std::fmt;
-use std::ops::Deref;
-
-#[derive(Clone, Default)]
-crate struct ConstraintSet {
-    constraints: IndexVec<ConstraintIndex, OutlivesConstraint>,
-}
-
-impl ConstraintSet {
-    pub fn push(&mut self, constraint: OutlivesConstraint) {
-        debug!(
-            "add_outlives({:?}: {:?} @ {:?})",
-            constraint.sup, constraint.sub, constraint.locations
-        );
-        if constraint.sup == constraint.sub {
-            // 'a: 'a is pretty uninteresting
-            return;
-        }
-        self.constraints.push(constraint);
-    }
-
-    /// Once all constraints have been added, `link()` is used to thread together the constraints
-    /// based on which would be affected when a particular region changes. See the next field of
-    /// `OutlivesContraint` for more details.
-    /// link returns a map that is needed later by `each_affected_by_dirty`.
-    pub fn link(&mut self, len: usize) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
-        let mut map = IndexVec::from_elem_n(None, len);
-
-        for (idx, constraint) in self.constraints.iter_enumerated_mut().rev() {
-            let mut head = &mut map[constraint.sub];
-            debug_assert!(constraint.next.is_none());
-            constraint.next = *head;
-            *head = Some(idx);
-        }
-
-        map
-    }
-
-    /// When a region R1 changes, we need to reprocess all constraints R2: R1 to take into account
-    /// any new elements that R1 now has. This method will quickly enumerate all such constraints
-    /// (that is, constraints where R1 is in the "subregion" position).
-    /// To use it, invoke with `map[R1]` where map is the map returned by `link`;
-    /// the callback op will be invoked for each affected constraint.
-    pub fn each_affected_by_dirty(
-        &self,
-        mut opt_dep_idx: Option<ConstraintIndex>,
-        mut op: impl FnMut(ConstraintIndex),
-    ) {
-        while let Some(dep_idx) = opt_dep_idx {
-            op(dep_idx);
-            opt_dep_idx = self.constraints[dep_idx].next;
-        }
-    }
-}
-
-impl Deref for ConstraintSet {
-    type Target = IndexVec<ConstraintIndex, OutlivesConstraint>;
-
-    fn deref(&self) -> &Self::Target { &self.constraints }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct OutlivesConstraint {
-    // NB. The ordering here is not significant for correctness, but
-    // it is for convenience. Before we dump the constraints in the
-    // debugging logs, we sort them, and we'd like the "super region"
-    // to be first, etc. (In particular, span should remain last.)
-    /// The region SUP must outlive SUB...
-    pub sup: RegionVid,
-
-    /// Region that must be outlived.
-    pub sub: RegionVid,
-
-    /// Later on, we thread the constraints onto a linked list
-    /// grouped by their `sub` field. So if you had:
-    ///
-    /// Index | Constraint | Next Field
-    /// ----- | ---------- | ----------
-    /// 0     | `'a: 'b`   | Some(2)
-    /// 1     | `'b: 'c`   | None
-    /// 2     | `'c: 'b`   | None
-    pub next: Option<ConstraintIndex>,
-
-    /// Where did this constraint arise?
-    pub locations: Locations,
-}
-
-impl fmt::Debug for OutlivesConstraint {
-    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-        write!(
-            formatter,
-            "({:?}: {:?}) due to {:?}",
-            self.sup, self.sub, self.locations
-        )
-    }
-}
-
-newtype_index!(ConstraintIndex { DEBUG_FORMAT = "ConstraintIndex({})" });
diff --git a/src/librustc_mir/borrow_check/nll/constraints/graph.rs b/src/librustc_mir/borrow_check/nll/constraints/graph.rs
new file mode 100644 (file)
index 0000000..45ed37a
--- /dev/null
@@ -0,0 +1,134 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use borrow_check::nll::constraints::{ConstraintIndex, ConstraintSet};
+use rustc::ty::RegionVid;
+use rustc_data_structures::graph;
+use rustc_data_structures::indexed_vec::IndexVec;
+
+crate struct ConstraintGraph {
+    first_constraints: IndexVec<RegionVid, Option<ConstraintIndex>>,
+    next_constraints: IndexVec<ConstraintIndex, Option<ConstraintIndex>>,
+}
+
+impl ConstraintGraph {
+    /// Create a "dependency graph" where each region constraint `R1:
+    /// R2` is treated as an edge `R1 -> R2`. We use this graph to
+    /// construct SCCs for region inference but also for error
+    /// reporting.
+    crate fn new(set: &ConstraintSet, num_region_vars: usize) -> Self {
+        let mut first_constraints = IndexVec::from_elem_n(None, num_region_vars);
+        let mut next_constraints = IndexVec::from_elem(None, &set.constraints);
+
+        for (idx, constraint) in set.constraints.iter_enumerated().rev() {
+            let mut head = &mut first_constraints[constraint.sup];
+            let mut next = &mut next_constraints[idx];
+            debug_assert!(next.is_none());
+            *next = *head;
+            *head = Some(idx);
+        }
+
+        Self {
+            first_constraints,
+            next_constraints,
+        }
+    }
+
+    /// Given a region `R`, iterate over all constraints `R: R1`.
+    crate fn outgoing_edges(&self, region_sup: RegionVid) -> Edges<'_> {
+        let first = self.first_constraints[region_sup];
+        Edges {
+            graph: self,
+            pointer: first,
+        }
+    }
+}
+
+crate struct Edges<'s> {
+    graph: &'s ConstraintGraph,
+    pointer: Option<ConstraintIndex>,
+}
+
+impl<'s> Iterator for Edges<'s> {
+    type Item = ConstraintIndex;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if let Some(p) = self.pointer {
+            self.pointer = self.graph.next_constraints[p];
+            Some(p)
+        } else {
+            None
+        }
+    }
+}
+
+crate struct RegionGraph<'s> {
+    set: &'s ConstraintSet,
+    constraint_graph: &'s ConstraintGraph,
+}
+
+impl<'s> RegionGraph<'s> {
+    /// Create a "dependency graph" where each region constraint `R1:
+    /// R2` is treated as an edge `R1 -> R2`. We use this graph to
+    /// construct SCCs for region inference but also for error
+    /// reporting.
+    crate fn new(set: &'s ConstraintSet, constraint_graph: &'s ConstraintGraph) -> Self {
+        Self {
+            set,
+            constraint_graph,
+        }
+    }
+
+    /// Given a region `R`, iterate over all regions `R1` such that
+    /// there exists a constraint `R: R1`.
+    crate fn sub_regions(&self, region_sup: RegionVid) -> Successors<'_> {
+        Successors {
+            set: self.set,
+            edges: self.constraint_graph.outgoing_edges(region_sup),
+        }
+    }
+}
+
+crate struct Successors<'s> {
+    set: &'s ConstraintSet,
+    edges: Edges<'s>,
+}
+
+impl<'s> Iterator for Successors<'s> {
+    type Item = RegionVid;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.edges.next().map(|c| self.set[c].sub)
+    }
+}
+
+impl<'s> graph::DirectedGraph for RegionGraph<'s> {
+    type Node = RegionVid;
+}
+
+impl<'s> graph::WithNumNodes for RegionGraph<'s> {
+    fn num_nodes(&self) -> usize {
+        self.constraint_graph.first_constraints.len()
+    }
+}
+
+impl<'s> graph::WithSuccessors for RegionGraph<'s> {
+    fn successors<'graph>(
+        &'graph self,
+        node: Self::Node,
+    ) -> <Self as graph::GraphSuccessors<'graph>>::Iter {
+        self.sub_regions(node)
+    }
+}
+
+impl<'s, 'graph> graph::GraphSuccessors<'graph> for RegionGraph<'s> {
+    type Item = RegionVid;
+    type Iter = Successors<'graph>;
+}
diff --git a/src/librustc_mir/borrow_check/nll/constraints/mod.rs b/src/librustc_mir/borrow_check/nll/constraints/mod.rs
new file mode 100644 (file)
index 0000000..5972412
--- /dev/null
@@ -0,0 +1,95 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::ty::RegionVid;
+use rustc_data_structures::graph::scc::Sccs;
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use borrow_check::nll::type_check::Locations;
+
+use std::fmt;
+use std::ops::Deref;
+
+crate mod graph;
+
+#[derive(Clone, Default)]
+crate struct ConstraintSet {
+    constraints: IndexVec<ConstraintIndex, OutlivesConstraint>,
+}
+
+impl ConstraintSet {
+    crate fn push(&mut self, constraint: OutlivesConstraint) {
+        debug!(
+            "ConstraintSet::push({:?}: {:?} @ {:?}",
+            constraint.sup, constraint.sub, constraint.locations
+        );
+        if constraint.sup == constraint.sub {
+            // 'a: 'a is pretty uninteresting
+            return;
+        }
+        self.constraints.push(constraint);
+    }
+
+    /// Constructs a graph from the constraint set; the graph makes it
+    /// easy to find the constraints affecting a particular region
+    /// (you should not mutate the set once this graph is
+    /// constructed).
+    crate fn graph(&self, num_region_vars: usize) -> graph::ConstraintGraph {
+        graph::ConstraintGraph::new(self, num_region_vars)
+    }
+
+    /// Compute cycles (SCCs) in the graph of regions. In particular,
+    /// find all regions R1, R2 such that R1: R2 and R2: R1 and group
+    /// them into an SCC, and find the relationships between SCCs.
+    crate fn compute_sccs(
+        &self,
+        constraint_graph: &graph::ConstraintGraph,
+    ) -> Sccs<RegionVid, ConstraintSccIndex> {
+        let region_graph = &graph::RegionGraph::new(self, constraint_graph);
+        Sccs::new(region_graph)
+    }
+}
+
+impl Deref for ConstraintSet {
+    type Target = IndexVec<ConstraintIndex, OutlivesConstraint>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.constraints
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct OutlivesConstraint {
+    // NB. The ordering here is not significant for correctness, but
+    // it is for convenience. Before we dump the constraints in the
+    // debugging logs, we sort them, and we'd like the "super region"
+    // to be first, etc. (In particular, span should remain last.)
+    /// The region SUP must outlive SUB...
+    pub sup: RegionVid,
+
+    /// Region that must be outlived.
+    pub sub: RegionVid,
+
+    /// Where did this constraint arise?
+    pub locations: Locations,
+}
+
+impl fmt::Debug for OutlivesConstraint {
+    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        write!(
+            formatter,
+            "({:?}: {:?}) due to {:?}",
+            self.sup, self.sub, self.locations
+        )
+    }
+}
+
+newtype_index!(ConstraintIndex { DEBUG_FORMAT = "ConstraintIndex({})" });
+
+newtype_index!(ConstraintSccIndex { DEBUG_FORMAT = "ConstraintSccIndex({})" });
index a65019690e3077488bf9c341721fafac2d0c943b..9fd9d6cd97c597e193c9fe42027cdf35645f29e4 100644 (file)
@@ -57,7 +57,7 @@ fn find(&mut self) -> Option<Cause> {
 
         queue.push_back(self.start_point);
         while let Some(p) = queue.pop_front() {
-            if !self.regioncx.region_contains_point(self.region_vid, p) {
+            if !self.regioncx.region_contains(self.region_vid, p) {
                 continue;
             }
 
index 46026cdc9412121ecef012b186f858f41f1226c6..301999cc4a51e120755a6755cde8d6d6a1e07cae 100644 (file)
@@ -29,7 +29,7 @@
 use rustc::mir::{Field, Operand, BorrowKind};
 use rustc::ty::{self, ParamEnv};
 use rustc_data_structures::indexed_vec::Idx;
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::graph::dominators::Dominators;
 
 pub(super) fn generate_invalidates<'cx, 'gcx, 'tcx>(
     infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
index 16506800c9e16025f63d81db5b3e0e42fe76f24e..acd9223e42545c40453e49f78377eb415ecd4df4 100644 (file)
@@ -45,7 +45,7 @@
 crate mod type_check;
 mod universal_regions;
 
-crate mod constraint_set;
+mod constraints;
 
 use self::facts::AllFacts;
 use self::region_infer::RegionInferenceContext;
@@ -108,6 +108,7 @@ pub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
         def_id,
         &universal_regions,
         location_table,
+        borrow_set,
         &liveness,
         &mut all_facts,
         flow_inits,
@@ -294,8 +295,15 @@ fn dump_mir_results<'a, 'gcx, 'tcx>(
     // Also dump the inference graph constraints as a graphviz file.
     let _: io::Result<()> = do catch {
         let mut file =
-            pretty::create_dump_file(infcx.tcx, "regioncx.dot", None, "nll", &0, source)?;
-        regioncx.dump_graphviz(&mut file)?;
+            pretty::create_dump_file(infcx.tcx, "regioncx.all.dot", None, "nll", &0, source)?;
+        regioncx.dump_graphviz_raw_constraints(&mut file)?;
+    };
+
+    // Also dump the inference graph constraints as a graphviz file.
+    let _: io::Result<()> = do catch {
+        let mut file =
+            pretty::create_dump_file(infcx.tcx, "regioncx.scc.dot", None, "nll", &0, source)?;
+        regioncx.dump_graphviz_scc_constraints(&mut file)?;
     };
 }
 
index 88d9f46e340d33e204753915ecfbaec18790a45e..3c73203706dcb633f019328719ea51c2be7a647e 100644 (file)
@@ -83,7 +83,6 @@ fn for_each_constraint(
                 sup,
                 sub,
                 locations,
-                next: _,
             } = constraint;
             with_msg(&format!(
                 "{:?}: {:?} due to {:?}",
index 786b6a77d2bdbc77a4a8091f63a24066d7b1cc30..c1b73fac893888f35e574edae22a167fd17fc9e7 100644 (file)
@@ -50,18 +50,10 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 
 impl<'tcx> RegionInferenceContext<'tcx> {
     /// Walks the graph of constraints (where `'a: 'b` is considered
-    /// an edge `'b -> 'a`) to find all paths from `from_region` to
+    /// an edge `'a -> 'b`) to find all paths from `from_region` to
     /// `to_region`. The paths are accumulated into the vector
     /// `results`. The paths are stored as a series of
     /// `ConstraintIndex` values -- in other words, a list of *edges*.
-    ///
-    /// # Parameters
-    ///
-    /// - `from_region`
-    /// When reporting an error, it is useful to be able to determine
-    /// which constraints influenced the region being reported as an
-    /// error. This function finds all of the paths from the
-    /// constraint.
     fn find_constraint_paths_between_regions(
         &self,
         from_region: RegionVid,
@@ -89,8 +81,6 @@ fn find_constraint_paths_between_regions_helper(
         stack: &mut Vec<ConstraintIndex>,
         results: &mut Vec<Vec<ConstraintIndex>>,
     ) {
-        let dependency_map = self.dependency_map.as_ref().unwrap();
-
         // Check if we already visited this region.
         if !visited.insert(current_region) {
             return;
@@ -99,26 +89,25 @@ fn find_constraint_paths_between_regions_helper(
         // Check if we reached the region we were looking for.
         if target_test(current_region) {
             if !stack.is_empty() {
-                assert_eq!(self.constraints[stack[0]].sub, from_region);
+                assert_eq!(self.constraints[stack[0]].sup, from_region);
                 results.push(stack.clone());
             }
             return;
         }
 
-        self.constraints
-            .each_affected_by_dirty(dependency_map[current_region], |constraint| {
-                assert_eq!(self.constraints[constraint].sub, current_region);
-                stack.push(constraint);
-                self.find_constraint_paths_between_regions_helper(
-                    from_region,
-                    self.constraints[constraint].sup,
-                    target_test,
-                    visited,
-                    stack,
-                    results,
-                );
-                stack.pop();
-            });
+        for constraint in self.constraint_graph.outgoing_edges(current_region) {
+            assert_eq!(self.constraints[constraint].sup, current_region);
+            stack.push(constraint);
+            self.find_constraint_paths_between_regions_helper(
+                from_region,
+                self.constraints[constraint].sub,
+                target_test,
+                visited,
+                stack,
+                results,
+            );
+            stack.pop();
+        }
     }
 
     /// This function will return true if a constraint is interesting and false if a constraint
@@ -210,7 +199,7 @@ pub(super) fn report_error(
         }
 
         // Find all paths
-        let constraint_paths = self.find_constraint_paths_between_regions(outlived_fr, |r| r == fr);
+        let constraint_paths = self.find_constraint_paths_between_regions(fr, |r| r == outlived_fr);
         debug!("report_error: constraint_paths={:#?}", constraint_paths);
 
         // Find the shortest such path.
@@ -319,7 +308,7 @@ fn dependencies(&self, r0: RegionVid) -> IndexVec<RegionVid, Option<usize>> {
 
         while changed {
             changed = false;
-            for constraint in &*self.constraints {
+            for constraint in self.constraints.iter() {
                 if let Some(n) = result_set[constraint.sup] {
                     let m = n + 1;
                     if result_set[constraint.sub]
index 0116fbcfc8860d39e648cb4b7cf7e774985624be..13d754f5cab5c2c6877b1e3ad15bdafa98539ace 100644 (file)
 //! libgraphviz traits, specialized to attaching borrowck analysis
 //! data to rendered labels.
 
+use super::*;
+use borrow_check::nll::constraints::OutlivesConstraint;
 use dot::{self, IntoCow};
 use rustc_data_structures::indexed_vec::Idx;
 use std::borrow::Cow;
 use std::io::{self, Write};
-use super::*;
-use borrow_check::nll::constraint_set::OutlivesConstraint;
-
 
 impl<'tcx> RegionInferenceContext<'tcx> {
     /// Write out the region constraint graph.
-    pub(crate) fn dump_graphviz(&self, mut w: &mut dyn Write) -> io::Result<()> {
-        dot::render(self, &mut w)
+    crate fn dump_graphviz_raw_constraints(&self, mut w: &mut dyn Write) -> io::Result<()> {
+        dot::render(&RawConstraints { regioncx: self }, &mut w)
+    }
+
+    /// Write out the region constraint graph.
+    crate fn dump_graphviz_scc_constraints(&self, mut w: &mut dyn Write) -> io::Result<()> {
+        let mut nodes_per_scc: IndexVec<ConstraintSccIndex, _> = self.constraint_sccs
+            .all_sccs()
+            .map(|_| Vec::new())
+            .collect();
+
+        for region in self.definitions.indices() {
+            let scc = self.constraint_sccs.scc(region);
+            nodes_per_scc[scc].push(region);
+        }
+
+        dot::render(&SccConstraints { regioncx: self, nodes_per_scc }, &mut w)
     }
 }
 
-impl<'this, 'tcx> dot::Labeller<'this> for RegionInferenceContext<'tcx> {
+struct RawConstraints<'a, 'tcx: 'a> {
+    regioncx: &'a RegionInferenceContext<'tcx>,
+}
+
+impl<'a, 'this, 'tcx> dot::Labeller<'this> for RawConstraints<'a, 'tcx> {
     type Node = RegionVid;
     type Edge = OutlivesConstraint;
 
@@ -48,26 +66,86 @@ fn edge_label(&'this self, e: &OutlivesConstraint) -> dot::LabelText<'this> {
     }
 }
 
-impl<'this, 'tcx> dot::GraphWalk<'this> for RegionInferenceContext<'tcx> {
+impl<'a, 'this, 'tcx> dot::GraphWalk<'this> for RawConstraints<'a, 'tcx> {
     type Node = RegionVid;
     type Edge = OutlivesConstraint;
 
     fn nodes(&'this self) -> dot::Nodes<'this, RegionVid> {
-        let vids: Vec<RegionVid> = self.definitions.indices().collect();
+        let vids: Vec<RegionVid> = self.regioncx.definitions.indices().collect();
         vids.into_cow()
     }
     fn edges(&'this self) -> dot::Edges<'this, OutlivesConstraint> {
-        (&self.constraints.raw[..]).into_cow()
+        (&self.regioncx.constraints.raw[..]).into_cow()
     }
 
-    // Render `a: b` as `a <- b`, indicating the flow
+    // Render `a: b` as `a -> b`, indicating the flow
     // of data during inference.
 
     fn source(&'this self, edge: &OutlivesConstraint) -> RegionVid {
-        edge.sub
+        edge.sup
     }
 
     fn target(&'this self, edge: &OutlivesConstraint) -> RegionVid {
-        edge.sup
+        edge.sub
+    }
+}
+
+struct SccConstraints<'a, 'tcx: 'a> {
+    regioncx: &'a RegionInferenceContext<'tcx>,
+    nodes_per_scc: IndexVec<ConstraintSccIndex, Vec<RegionVid>>,
+}
+
+impl<'a, 'this, 'tcx> dot::Labeller<'this> for SccConstraints<'a, 'tcx> {
+    type Node = ConstraintSccIndex;
+    type Edge = (ConstraintSccIndex, ConstraintSccIndex);
+
+    fn graph_id(&'this self) -> dot::Id<'this> {
+        dot::Id::new(format!("RegionInferenceContext")).unwrap()
+    }
+    fn node_id(&'this self, n: &ConstraintSccIndex) -> dot::Id<'this> {
+        dot::Id::new(format!("r{}", n.index())).unwrap()
+    }
+    fn node_shape(&'this self, _node: &ConstraintSccIndex) -> Option<dot::LabelText<'this>> {
+        Some(dot::LabelText::LabelStr(Cow::Borrowed("box")))
+    }
+    fn node_label(&'this self, n: &ConstraintSccIndex) -> dot::LabelText<'this> {
+        let nodes = &self.nodes_per_scc[*n];
+        dot::LabelText::LabelStr(format!("{:?} = {:?}", n, nodes).into_cow())
+    }
+}
+
+impl<'a, 'this, 'tcx> dot::GraphWalk<'this> for SccConstraints<'a, 'tcx> {
+    type Node = ConstraintSccIndex;
+    type Edge = (ConstraintSccIndex, ConstraintSccIndex);
+
+    fn nodes(&'this self) -> dot::Nodes<'this, ConstraintSccIndex> {
+        let vids: Vec<ConstraintSccIndex> = self.regioncx.constraint_sccs.all_sccs().collect();
+        vids.into_cow()
+    }
+    fn edges(&'this self) -> dot::Edges<'this, (ConstraintSccIndex, ConstraintSccIndex)> {
+        let edges: Vec<_> = self.regioncx
+            .constraint_sccs
+            .all_sccs()
+            .flat_map(|scc_a| {
+                self.regioncx
+                    .constraint_sccs
+                    .successors(scc_a)
+                    .iter()
+                    .map(move |&scc_b| (scc_a, scc_b))
+            })
+            .collect();
+
+        edges.into_cow()
+    }
+
+    // Render `a: b` as `a -> b`, indicating the flow
+    // of data during inference.
+
+    fn source(&'this self, edge: &(ConstraintSccIndex, ConstraintSccIndex)) -> ConstraintSccIndex {
+        edge.0
+    }
+
+    fn target(&'this self, edge: &(ConstraintSccIndex, ConstraintSccIndex)) -> ConstraintSccIndex {
+        edge.1
     }
 }
index 164941203e05443a2319c59aac6ed8f5bc54ca15..369f6bd36f84603538ee34db60c3f099c5346385 100644 (file)
@@ -9,7 +9,11 @@
 // except according to those terms.
 
 use super::universal_regions::UniversalRegions;
-use borrow_check::nll::constraint_set::{ConstraintIndex, ConstraintSet, OutlivesConstraint};
+use borrow_check::nll::constraints::{
+    ConstraintIndex, ConstraintSccIndex, ConstraintSet, OutlivesConstraint,
+};
+use borrow_check::nll::constraints::graph::ConstraintGraph;
+use borrow_check::nll::region_infer::values::ToElementIndex;
 use borrow_check::nll::type_check::Locations;
 use rustc::hir::def_id::DefId;
 use rustc::infer::canonical::QueryRegionConstraint;
@@ -23,8 +27,9 @@
 };
 use rustc::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable};
 use rustc::util::common;
-use rustc_data_structures::bitvec::BitVector;
-use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use rustc_data_structures::graph::scc::Sccs;
+use rustc_data_structures::indexed_set::{IdxSet, IdxSetBuf};
+use rustc_data_structures::indexed_vec::IndexVec;
 
 use std::rc::Rc;
 
@@ -51,28 +56,31 @@ pub struct RegionInferenceContext<'tcx> {
     /// regions, these start out empty and steadily grow, though for
     /// each universally quantified region R they start out containing
     /// the entire CFG and `end(R)`.
-    liveness_constraints: RegionValues,
+    liveness_constraints: RegionValues<RegionVid>,
 
-    /// The final inferred values of the inference variables; `None`
-    /// until `solve` is invoked.
-    inferred_values: Option<RegionValues>,
+    /// The outlives constraints computed by the type-check.
+    constraints: Rc<ConstraintSet>,
 
-    /// For each variable, stores the index of the first constraint
-    /// where that variable appears on the RHS. This is the start of a
-    /// 'linked list' threaded by the `next` field in `Constraint`.
-    ///
-    /// This map is build when values are inferred.
-    dependency_map: Option<IndexVec<RegionVid, Option<ConstraintIndex>>>,
+    /// The constraint-set, but in graph form, making it easy to traverse
+    /// the constraints adjacent to a particular region. Used to construct
+    /// the SCC (see `constraint_sccs`) and for error reporting.
+    constraint_graph: Rc<ConstraintGraph>,
+
+    /// The SCC computed from `constraints` and
+    /// `constraint_graph`. Used to compute the values of each region.
+    constraint_sccs: Rc<Sccs<RegionVid, ConstraintSccIndex>>,
 
-    /// The constraints we have accumulated and used during solving.
-    constraints: ConstraintSet,
+    /// The final inferred values of the region variables; we compute
+    /// one value per SCC. To get the value for any given *region*,
+    /// you first find which scc it is a part of.
+    scc_values: RegionValues<ConstraintSccIndex>,
 
     /// Type constraints that we check after solving.
     type_tests: Vec<TypeTest<'tcx>>,
 
     /// Information about the universally quantified regions in scope
     /// on this function and their (known) relations to one another.
-    universal_regions: UniversalRegions<'tcx>,
+    universal_regions: Rc<UniversalRegions<'tcx>>,
 }
 
 struct RegionDefinition<'tcx> {
@@ -203,27 +211,32 @@ pub(crate) fn new(
         outlives_constraints: ConstraintSet,
         type_tests: Vec<TypeTest<'tcx>>,
     ) -> Self {
-        // The `next` field should not yet have been initialized:
-        debug_assert!(outlives_constraints.iter().all(|c| c.next.is_none()));
-
+        let universal_regions = Rc::new(universal_regions);
         let num_region_variables = var_infos.len();
         let num_universal_regions = universal_regions.len();
 
         let elements = &Rc::new(RegionValueElements::new(mir, num_universal_regions));
 
         // Create a RegionDefinition for each inference variable.
-        let definitions = var_infos
+        let definitions: IndexVec<_, _> = var_infos
             .into_iter()
             .map(|info| RegionDefinition::new(info.origin))
             .collect();
 
+        let constraints = Rc::new(outlives_constraints); // freeze constraints
+        let constraint_graph = Rc::new(constraints.graph(definitions.len()));
+        let constraint_sccs = Rc::new(constraints.compute_sccs(&constraint_graph));
+
+        let scc_values = RegionValues::new(elements, constraint_sccs.num_sccs());
+
         let mut result = Self {
             definitions,
             elements: elements.clone(),
             liveness_constraints: RegionValues::new(elements, num_region_variables),
-            inferred_values: None,
-            dependency_map: None,
-            constraints: outlives_constraints,
+            constraints,
+            constraint_sccs,
+            constraint_graph,
+            scc_values,
             type_tests,
             universal_regions,
         };
@@ -265,7 +278,9 @@ fn init_universal_regions(&mut self) {
         }
 
         // For each universally quantified region X:
-        for variable in self.universal_regions.universal_regions() {
+        let elements = self.elements.clone();
+        let universal_regions = self.universal_regions.clone();
+        for variable in universal_regions.universal_regions() {
             // These should be free-region variables.
             assert!(match self.definitions[variable].origin {
                 RegionVariableOrigin::NLL(NLLRegionVariableOrigin::FreeRegion) => true,
@@ -275,12 +290,12 @@ fn init_universal_regions(&mut self) {
             self.definitions[variable].is_universal = true;
 
             // Add all nodes in the CFG to liveness constraints
-            for point_index in self.elements.all_point_indices() {
-                self.liveness_constraints.add_element(variable, point_index);
+            for point_index in elements.all_point_indices() {
+                self.add_live_element(variable, point_index);
             }
 
             // Add `end(X)` into the set for X.
-            self.liveness_constraints.add_element(variable, variable);
+            self.add_live_element(variable, variable);
         }
     }
 
@@ -300,48 +315,38 @@ pub fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid {
     /// Returns true if the region `r` contains the point `p`.
     ///
     /// Panics if called before `solve()` executes,
-    pub fn region_contains_point<R>(&self, r: R, p: Location) -> bool
-    where
-        R: ToRegionVid,
-    {
-        let inferred_values = self
-            .inferred_values
-            .as_ref()
-            .expect("region values not yet inferred");
-        inferred_values.contains(r.to_region_vid(), p)
+    crate fn region_contains(&self, r: impl ToRegionVid, p: impl ToElementIndex) -> bool {
+        let scc = self.constraint_sccs.scc(r.to_region_vid());
+        self.scc_values.contains(scc, p)
     }
 
     /// Returns access to the value of `r` for debugging purposes.
     crate fn region_value_str(&self, r: RegionVid) -> String {
-        let inferred_values = self
-            .inferred_values
-            .as_ref()
-            .expect("region values not yet inferred");
-
-        inferred_values.region_value_str(r)
+        let scc = self.constraint_sccs.scc(r.to_region_vid());
+        self.scc_values.region_value_str(scc)
     }
 
     /// Indicates that the region variable `v` is live at the point `point`.
     ///
     /// Returns `true` if this constraint is new and `false` is the
     /// constraint was already present.
-    pub(super) fn add_live_point(&mut self, v: RegionVid, point: Location) -> bool {
-        debug!("add_live_point({:?}, {:?})", v, point);
-        assert!(self.inferred_values.is_none(), "values already inferred");
+    pub(super) fn add_live_element(
+        &mut self,
+        v: RegionVid,
+        elem: impl ToElementIndex,
+    ) -> bool {
+        debug!("add_live_element({:?}, {:?})", v, elem);
 
-        let element = self.elements.index(point);
-        self.liveness_constraints.add_element(v, element)
-    }
+        // Add to the liveness values for `v`...
+        if self.liveness_constraints.add_element(v, elem) {
+            // ...but also add to the SCC in which `v` appears.
+            let scc = self.constraint_sccs.scc(v);
+            self.scc_values.add_element(scc, elem);
 
-    /// Indicates that the region variable `sup` must outlive `sub` is live at the point `point`.
-    pub(super) fn add_outlives(&mut self, locations: Locations, sup: RegionVid, sub: RegionVid) {
-        assert!(self.inferred_values.is_none(), "values already inferred");
-        self.constraints.push(OutlivesConstraint {
-            locations,
-            sup,
-            sub,
-            next: None,
-        })
+            true
+        } else {
+            false
+        }
     }
 
     /// Perform region inference and report errors if we see any
@@ -366,8 +371,6 @@ fn solve_inner<'gcx>(
         mir: &Mir<'tcx>,
         mir_def_id: DefId,
     ) -> Option<ClosureRegionRequirements<'gcx>> {
-        assert!(self.inferred_values.is_none(), "values already inferred");
-
         self.propagate_constraints(mir);
 
         // If this is a closure, we can propagate unsatisfied
@@ -402,65 +405,62 @@ fn solve_inner<'gcx>(
     /// for each region variable until all the constraints are
     /// satisfied. Note that some values may grow **too** large to be
     /// feasible, but we check this later.
-    fn propagate_constraints(&mut self, mir: &Mir<'tcx>) {
-        self.dependency_map = Some(self.build_dependency_map());
-        let inferred_values = self.compute_region_values(mir);
-        self.inferred_values = Some(inferred_values);
-    }
+    fn propagate_constraints(&mut self, _mir: &Mir<'tcx>) {
+        debug!("propagate_constraints()");
 
-    fn compute_region_values(&self, _mir: &Mir<'tcx>) -> RegionValues {
-        debug!("compute_region_values()");
-        debug!("compute_region_values: constraints={:#?}", {
+        debug!("propagate_constraints: constraints={:#?}", {
             let mut constraints: Vec<_> = self.constraints.iter().collect();
             constraints.sort();
             constraints
         });
 
-        // The initial values for each region are derived from the liveness
-        // constraints we have accumulated.
-        let mut inferred_values = self.liveness_constraints.clone();
-
-        let dependency_map = self.dependency_map.as_ref().unwrap();
-
-        // Constraints that may need to be repropagated (initially all):
-        let mut dirty_list: Vec<_> = self.constraints.indices().collect();
-
-        // Set to 0 for each constraint that is on the dirty list:
-        let mut clean_bit_vec = BitVector::new(dirty_list.len());
+        // To propagate constriants, we walk the DAG induced by the
+        // SCC. For each SCC, we visit its successors and compute
+        // their values, then we union all those values to get our
+        // own.
+        let visited = &mut IdxSetBuf::new_empty(self.constraint_sccs.num_sccs());
+        for scc_index in self.constraint_sccs.all_sccs() {
+            self.propagate_constraint_sccs_if_new(scc_index, visited);
+        }
+    }
 
-        debug!("propagate_constraints: --------------------");
-        while let Some(constraint_idx) = dirty_list.pop() {
-            clean_bit_vec.insert(constraint_idx.index());
+    #[inline]
+    fn propagate_constraint_sccs_if_new(
+        &mut self,
+        scc_a: ConstraintSccIndex,
+        visited: &mut IdxSet<ConstraintSccIndex>,
+    ) {
+        if visited.add(&scc_a) {
+            self.propagate_constraint_sccs_new(scc_a, visited);
+        }
+    }
 
-            let constraint = &self.constraints[constraint_idx];
-            debug!("propagate_constraints: constraint={:?}", constraint);
+    fn propagate_constraint_sccs_new(
+        &mut self,
+        scc_a: ConstraintSccIndex,
+        visited: &mut IdxSet<ConstraintSccIndex>,
+    ) {
+        let constraint_sccs = self.constraint_sccs.clone();
 
-            if inferred_values.add_region(constraint.sup, constraint.sub) {
-                debug!("propagate_constraints:   sub={:?}", constraint.sub);
-                debug!("propagate_constraints:   sup={:?}", constraint.sup);
+        // Walk each SCC `B` such that `A: B`...
+        for &scc_b in constraint_sccs.successors(scc_a) {
+            debug!(
+                "propagate_constraint_sccs: scc_a = {:?} scc_b = {:?}",
+                scc_a, scc_b
+            );
 
-                self.constraints.each_affected_by_dirty(
-                    dependency_map[constraint.sup],
-                    |dep_idx| {
-                        if clean_bit_vec.remove(dep_idx.index()) {
-                            dirty_list.push(dep_idx);
-                        }
-                    },
-                );
-            }
+            // ...compute the value of `B`...
+            self.propagate_constraint_sccs_if_new(scc_b, visited);
 
-            debug!("\n");
+            // ...and add elements from `B` into `A`.
+            self.scc_values.add_region(scc_a, scc_b);
         }
 
-        inferred_values
-    }
-
-    /// Builds up a map from each region variable X to a vector with the
-    /// indices of constraints that need to be re-evaluated when X changes.
-    /// These are constraints like Y: X @ P -- so if X changed, we may
-    /// need to grow Y.
-    fn build_dependency_map(&mut self) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
-        self.constraints.link(self.definitions.len())
+        debug!(
+            "propagate_constraint_sccs: scc_a = {:?} has value {:?}",
+            scc_a,
+            self.scc_values.region_value_str(scc_a),
+        );
     }
 
     /// Once regions have been propagated, this method is used to see
@@ -535,12 +535,9 @@ pub fn to_error_region(&self, r: RegionVid) -> Option<ty::Region<'tcx>> {
         if self.universal_regions.is_universal_region(r) {
             return self.definitions[r].external_name;
         } else {
-            let inferred_values = self
-                .inferred_values
-                .as_ref()
-                .expect("region values not yet inferred");
+            let r_scc = self.constraint_sccs.scc(r);
             let upper_bound = self.universal_upper_bound(r);
-            if inferred_values.contains(r, upper_bound) {
+            if self.scc_values.contains(r_scc, upper_bound) {
                 self.to_error_region(upper_bound)
             } else {
                 None
@@ -575,11 +572,8 @@ fn try_promote_type_test<'gcx>(
         // region, which ensures it can be encoded in a `ClosureOutlivesRequirement`.
         let lower_bound_plus = self.non_local_universal_upper_bound(*lower_bound);
         assert!(self.universal_regions.is_universal_region(lower_bound_plus));
-        assert!(
-            !self
-                .universal_regions
-                .is_local_free_region(lower_bound_plus)
-        );
+        assert!(!self.universal_regions
+            .is_local_free_region(lower_bound_plus));
 
         propagated_outlives_requirements.push(ClosureOutlivesRequirement {
             subject,
@@ -607,10 +601,6 @@ fn try_promote_type_test_subject<'gcx>(
     ) -> Option<ClosureOutlivesSubject<'gcx>> {
         let tcx = infcx.tcx;
         let gcx = tcx.global_tcx();
-        let inferred_values = self
-            .inferred_values
-            .as_ref()
-            .expect("region values not yet inferred");
 
         debug!("try_promote_type_test_subject(ty = {:?})", ty);
 
@@ -653,7 +643,7 @@ fn try_promote_type_test_subject<'gcx>(
             // `'static` is not contained in `r`, we would fail to
             // find an equivalent.
             let upper_bound = self.non_local_universal_upper_bound(region_vid);
-            if inferred_values.contains(region_vid, upper_bound) {
+            if self.region_contains(region_vid, upper_bound) {
                 tcx.mk_region(ty::ReClosureBound(upper_bound))
             } else {
                 // In the case of a failure, use a `ReVar`
@@ -686,12 +676,10 @@ fn try_promote_type_test_subject<'gcx>(
     /// except that it converts further takes the non-local upper
     /// bound of `'y`, so that the final result is non-local.
     fn non_local_universal_upper_bound(&self, r: RegionVid) -> RegionVid {
-        let inferred_values = self.inferred_values.as_ref().unwrap();
-
         debug!(
             "non_local_universal_upper_bound(r={:?}={})",
             r,
-            inferred_values.region_value_str(r)
+            self.region_value_str(r)
         );
 
         let lub = self.universal_upper_bound(r);
@@ -723,18 +711,17 @@ fn non_local_universal_upper_bound(&self, r: RegionVid) -> RegionVid {
     /// - For each `end('x)` element in `'r`, compute the mutual LUB, yielding
     ///   a result `'y`.
     fn universal_upper_bound(&self, r: RegionVid) -> RegionVid {
-        let inferred_values = self.inferred_values.as_ref().unwrap();
-
         debug!(
             "universal_upper_bound(r={:?}={})",
             r,
-            inferred_values.region_value_str(r)
+            self.region_value_str(r)
         );
 
         // Find the smallest universal region that contains all other
         // universal regions within `region`.
         let mut lub = self.universal_regions.fr_fn_body;
-        for ur in inferred_values.universal_regions_outlived_by(r) {
+        let r_scc = self.constraint_sccs.scc(r);
+        for ur in self.scc_values.universal_regions_outlived_by(r_scc) {
             lub = self.universal_regions.postdom_upper_bound(lub, ur);
         }
 
@@ -779,31 +766,29 @@ fn eval_outlives(
     ) -> bool {
         debug!("eval_outlives({:?}: {:?})", sup_region, sub_region);
 
-        let inferred_values = self
-            .inferred_values
-            .as_ref()
-            .expect("values for regions not yet inferred");
-
         debug!(
             "eval_outlives: sup_region's value = {:?}",
-            inferred_values.region_value_str(sup_region),
+            self.region_value_str(sup_region),
         );
         debug!(
             "eval_outlives: sub_region's value = {:?}",
-            inferred_values.region_value_str(sub_region),
+            self.region_value_str(sub_region),
         );
 
+        let sub_region_scc = self.constraint_sccs.scc(sub_region);
+        let sup_region_scc = self.constraint_sccs.scc(sup_region);
+
         // Both the `sub_region` and `sup_region` consist of the union
         // of some number of universal regions (along with the union
         // of various points in the CFG; ignore those points for
         // now). Therefore, the sup-region outlives the sub-region if,
         // for each universal region R1 in the sub-region, there
         // exists some region R2 in the sup-region that outlives R1.
-        let universal_outlives = inferred_values
-            .universal_regions_outlived_by(sub_region)
+        let universal_outlives = self.scc_values
+            .universal_regions_outlived_by(sub_region_scc)
             .all(|r1| {
-                inferred_values
-                    .universal_regions_outlived_by(sup_region)
+                self.scc_values
+                    .universal_regions_outlived_by(sup_region_scc)
                     .any(|r2| self.universal_regions.outlives(r2, r1))
             });
 
@@ -819,7 +804,8 @@ fn eval_outlives(
             return true;
         }
 
-        inferred_values.contains_points(sup_region, sub_region)
+        self.scc_values
+            .contains_points(sup_region_scc, sub_region_scc)
     }
 
     /// Once regions have been propagated, this method is used to see
@@ -848,8 +834,7 @@ fn check_universal_regions<'gcx>(
     ) {
         // The universal regions are always found in a prefix of the
         // full list.
-        let universal_definitions = self
-            .definitions
+        let universal_definitions = self.definitions
             .iter_enumerated()
             .take_while(|(_, fr_definition)| fr_definition.is_universal);
 
@@ -883,13 +868,13 @@ fn check_universal_region<'gcx>(
         longer_fr: RegionVid,
         propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
     ) {
-        let inferred_values = self.inferred_values.as_ref().unwrap();
-
         debug!("check_universal_region(fr={:?})", longer_fr);
 
+        let longer_fr_scc = self.constraint_sccs.scc(longer_fr);
+
         // Find every region `o` such that `fr: o`
         // (because `fr` includes `end(o)`).
-        for shorter_fr in inferred_values.universal_regions_outlived_by(longer_fr) {
+        for shorter_fr in self.scc_values.universal_regions_outlived_by(longer_fr_scc) {
             // If it is known that `fr: o`, carry on.
             if self.universal_regions.outlives(longer_fr, shorter_fr) {
                 continue;
index 1039e6d7b972cbcc1acda41f8303b100775e47c6..c5bfb1fc6a588addd9254223423593d56f1e6c78 100644 (file)
@@ -18,7 +18,7 @@
 
 /// Maps between the various kinds of elements of a region value to
 /// the internal indices that w use.
-pub(super) struct RegionValueElements {
+crate struct RegionValueElements {
     /// For each basic block, how many points are contained within?
     statements_before_block: IndexVec<BasicBlock, usize>,
     num_points: usize,
@@ -26,7 +26,7 @@ pub(super) struct RegionValueElements {
 }
 
 impl RegionValueElements {
-    pub(super) fn new(mir: &Mir<'_>, num_universal_regions: usize) -> Self {
+    crate fn new(mir: &Mir<'_>, num_universal_regions: usize) -> Self {
         let mut num_points = 0;
         let statements_before_block = mir
             .basic_blocks()
@@ -56,22 +56,22 @@ pub(super) fn new(mir: &Mir<'_>, num_universal_regions: usize) -> Self {
     }
 
     /// Total number of element indices that exist.
-    pub(super) fn num_elements(&self) -> usize {
+    crate fn num_elements(&self) -> usize {
         self.num_points + self.num_universal_regions
     }
 
     /// Converts an element of a region value into a `RegionElementIndex`.
-    pub(super) fn index<T: ToElementIndex>(&self, elem: T) -> RegionElementIndex {
+    crate fn index<T: ToElementIndex>(&self, elem: T) -> RegionElementIndex {
         elem.to_element_index(self)
     }
 
     /// Iterates over the `RegionElementIndex` for all points in the CFG.
-    pub(super) fn all_point_indices<'a>(&'a self) -> impl Iterator<Item = RegionElementIndex> + 'a {
+    crate fn all_point_indices<'a>(&'a self) -> impl Iterator<Item = RegionElementIndex> + 'a {
         (0..self.num_points).map(move |i| RegionElementIndex::new(i + self.num_universal_regions))
     }
 
     /// Converts a particular `RegionElementIndex` to the `RegionElement` it represents.
-    pub(super) fn to_element(&self, i: RegionElementIndex) -> RegionElement {
+    crate fn to_element(&self, i: RegionElementIndex) -> RegionElement {
         debug!("to_element(i={:?})", i);
 
         if let Some(r) = self.to_universal_region(i) {
@@ -114,7 +114,7 @@ pub(super) fn to_element(&self, i: RegionElementIndex) -> RegionElement {
     /// Converts a particular `RegionElementIndex` to a universal
     /// region, if that is what it represents. Returns `None`
     /// otherwise.
-    pub(super) fn to_universal_region(&self, i: RegionElementIndex) -> Option<RegionVid> {
+    crate fn to_universal_region(&self, i: RegionElementIndex) -> Option<RegionVid> {
         if i.index() < self.num_universal_regions {
             Some(RegionVid::new(i.index()))
         } else {
@@ -138,7 +138,7 @@ pub(super) fn to_universal_region(&self, i: RegionElementIndex) -> Option<Region
 /// An individual element in a region value -- the value of a
 /// particular region variable consists of a set of these elements.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-pub(super) enum RegionElement {
+crate enum RegionElement {
     /// A point in the control-flow graph.
     Location(Location),
 
@@ -146,7 +146,7 @@ pub(super) enum RegionElement {
     UniversalRegion(RegionVid),
 }
 
-pub(super) trait ToElementIndex: Debug + Copy {
+crate trait ToElementIndex: Debug + Copy {
     fn to_element_index(self, elements: &RegionValueElements) -> RegionElementIndex;
 }
 
@@ -179,16 +179,16 @@ fn to_element_index(self, _elements: &RegionValueElements) -> RegionElementIndex
 /// variable. The columns consist of either universal regions or
 /// points in the CFG.
 #[derive(Clone)]
-pub(super) struct RegionValues {
+crate struct RegionValues<N: Idx> {
     elements: Rc<RegionValueElements>,
-    matrix: SparseBitMatrix<RegionVid, RegionElementIndex>,
+    matrix: SparseBitMatrix<N, RegionElementIndex>,
 }
 
-impl RegionValues {
+impl<N: Idx> RegionValues<N> {
     /// Creates a new set of "region values" that tracks causal information.
     /// Each of the regions in num_region_variables will be initialized with an
     /// empty set of points and no causal information.
-    pub(super) fn new(elements: &Rc<RegionValueElements>, num_region_variables: usize) -> Self {
+    crate fn new(elements: &Rc<RegionValueElements>, num_region_variables: usize) -> Self {
         assert!(
             elements.num_universal_regions <= num_region_variables,
             "universal regions are a subset of the region variables"
@@ -197,7 +197,7 @@ pub(super) fn new(elements: &Rc<RegionValueElements>, num_region_variables: usiz
         Self {
             elements: elements.clone(),
             matrix: SparseBitMatrix::new(
-                RegionVid::new(num_region_variables),
+                N::new(num_region_variables),
                 RegionElementIndex::new(elements.num_elements()),
             ),
         }
@@ -205,7 +205,11 @@ pub(super) fn new(elements: &Rc<RegionValueElements>, num_region_variables: usiz
 
     /// Adds the given element to the value for the given region. Returns true if
     /// the element is newly added (i.e., was not already present).
-    pub(super) fn add_element<E: ToElementIndex>(&mut self, r: RegionVid, elem: E) -> bool {
+    crate fn add_element(
+        &mut self,
+        r: N,
+        elem: impl ToElementIndex,
+    ) -> bool {
         let i = self.elements.index(elem);
         debug!("add(r={:?}, elem={:?})", r, elem);
         self.matrix.add(r, i)
@@ -213,19 +217,19 @@ pub(super) fn add_element<E: ToElementIndex>(&mut self, r: RegionVid, elem: E) -
 
     /// Add all elements in `r_from` to `r_to` (because e.g. `r_to:
     /// r_from`).
-    pub(super) fn add_region(&mut self, r_to: RegionVid, r_from: RegionVid) -> bool {
+    crate fn add_region(&mut self, r_to: N, r_from: N) -> bool {
         self.matrix.merge(r_from, r_to)
     }
 
     /// True if the region `r` contains the given element.
-    pub(super) fn contains<E: ToElementIndex>(&self, r: RegionVid, elem: E) -> bool {
+    crate fn contains(&self, r: N, elem: impl ToElementIndex) -> bool {
         let i = self.elements.index(elem);
         self.matrix.contains(r, i)
     }
 
     /// True if `sup_region` contains all the CFG points that
     /// `sub_region` contains. Ignores universal regions.
-    pub(super) fn contains_points(&self, sup_region: RegionVid, sub_region: RegionVid) -> bool {
+    crate fn contains_points(&self, sup_region: N, sub_region: N) -> bool {
         // This could be done faster by comparing the bitsets. But I
         // am lazy.
         self.element_indices_contained_in(sub_region)
@@ -236,17 +240,17 @@ pub(super) fn contains_points(&self, sup_region: RegionVid, sub_region: RegionVi
     /// Iterate over the value of the region `r`, yielding up element
     /// indices. You may prefer `universal_regions_outlived_by` or
     /// `elements_contained_in`.
-    pub(super) fn element_indices_contained_in<'a>(
+    crate fn element_indices_contained_in<'a>(
         &'a self,
-        r: RegionVid,
+        r: N,
     ) -> impl Iterator<Item = RegionElementIndex> + 'a {
         self.matrix.iter(r).map(move |i| i)
     }
 
     /// Returns just the universal regions that are contained in a given region's value.
-    pub(super) fn universal_regions_outlived_by<'a>(
+    crate fn universal_regions_outlived_by<'a>(
         &'a self,
-        r: RegionVid,
+        r: N,
     ) -> impl Iterator<Item = RegionVid> + 'a {
         self.element_indices_contained_in(r)
             .map(move |i| self.elements.to_universal_region(i))
@@ -255,16 +259,16 @@ pub(super) fn universal_regions_outlived_by<'a>(
     }
 
     /// Returns all the elements contained in a given region's value.
-    pub(super) fn elements_contained_in<'a>(
+    crate fn elements_contained_in<'a>(
         &'a self,
-        r: RegionVid,
+        r: N,
     ) -> impl Iterator<Item = RegionElement> + 'a {
         self.element_indices_contained_in(r)
             .map(move |r| self.elements.to_element(r))
     }
 
     /// Returns a "pretty" string value of the region. Meant for debugging.
-    pub(super) fn region_value_str(&self, r: RegionVid) -> String {
+    crate fn region_value_str(&self, r: N) -> String {
         let mut result = String::new();
         result.push_str("{");
 
index 27bd50427772df4a465f5dadac14d7af27488c4b..64a61972a2206de18fdb52f5b5be8b20e67945d4 100644 (file)
@@ -9,12 +9,11 @@
 // except according to those terms.
 
 use borrow_check::location::LocationTable;
-use borrow_check::nll::constraint_set::OutlivesConstraint;
+use borrow_check::nll::constraints::{ConstraintSet, OutlivesConstraint};
 use borrow_check::nll::facts::AllFacts;
 use borrow_check::nll::region_infer::{RegionTest, TypeTest};
 use borrow_check::nll::type_check::Locations;
 use borrow_check::nll::universal_regions::UniversalRegions;
-use borrow_check::nll::constraint_set::ConstraintSet;
 use rustc::infer::canonical::QueryRegionConstraint;
 use rustc::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
 use rustc::infer::region_constraints::{GenericKind, VerifyBound};
@@ -186,7 +185,6 @@ fn add_outlives(&mut self, sup: ty::RegionVid, sub: ty::RegionVid) {
             locations: self.locations,
             sub,
             sup,
-            next: None,
         });
     }
 
index 2b47d50b4c2c7a2d29ee5796cbedc5603d6e5435..25f2be231772d58cfaaea9e108a6ca6cda522802 100644 (file)
 //! This pass type-checks the MIR to ensure it is not broken.
 #![allow(unreachable_code)]
 
+use borrow_check::borrow_set::BorrowSet;
 use borrow_check::location::LocationTable;
-use borrow_check::nll::constraint_set::ConstraintSet;
+use borrow_check::nll::constraints::{ConstraintSet, OutlivesConstraint};
 use borrow_check::nll::facts::AllFacts;
 use borrow_check::nll::region_infer::{ClosureRegionRequirementsExt, TypeTest};
 use borrow_check::nll::universal_regions::UniversalRegions;
+use borrow_check::nll::ToRegionVid;
 use dataflow::move_paths::MoveData;
 use dataflow::FlowAtLocation;
 use dataflow::MaybeInitializedPlaces;
+use rustc::hir;
 use rustc::hir::def_id::DefId;
 use rustc::infer::canonical::QueryRegionConstraint;
 use rustc::infer::region_constraints::GenericKind;
@@ -103,6 +106,7 @@ pub(crate) fn type_check<'gcx, 'tcx>(
     mir_def_id: DefId,
     universal_regions: &UniversalRegions<'tcx>,
     location_table: &LocationTable,
+    borrow_set: &BorrowSet<'tcx>,
     liveness: &LivenessResults,
     all_facts: &mut Option<AllFacts>,
     flow_inits: &mut FlowAtLocation<MaybeInitializedPlaces<'_, 'gcx, 'tcx>>,
@@ -119,6 +123,7 @@ pub(crate) fn type_check<'gcx, 'tcx>(
         Some(BorrowCheckContext {
             universal_regions,
             location_table,
+            borrow_set,
             all_facts,
         }),
         &mut |cx| {
@@ -141,6 +146,7 @@ fn type_check_internal<'gcx, 'tcx>(
 ) -> MirTypeckRegionConstraints<'tcx> {
     let mut checker = TypeChecker::new(
         infcx,
+        mir,
         mir_def_id,
         param_env,
         region_bound_pairs,
@@ -592,6 +598,7 @@ struct TypeChecker<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
     infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
     param_env: ty::ParamEnv<'gcx>,
     last_span: Span,
+    mir: &'a Mir<'tcx>,
     mir_def_id: DefId,
     region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
     implicit_region_bound: Option<ty::Region<'tcx>>,
@@ -604,6 +611,7 @@ struct BorrowCheckContext<'a, 'tcx: 'a> {
     universal_regions: &'a UniversalRegions<'tcx>,
     location_table: &'a LocationTable,
     all_facts: &'a mut Option<AllFacts>,
+    borrow_set: &'a BorrowSet<'tcx>,
 }
 
 /// A collection of region constraints that must be satisfied for the
@@ -704,6 +712,7 @@ pub fn span(&self, mir: &Mir<'_>) -> Span {
 impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
     fn new(
         infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+        mir: &'a Mir<'tcx>,
         mir_def_id: DefId,
         param_env: ty::ParamEnv<'gcx>,
         region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
@@ -713,6 +722,7 @@ fn new(
         TypeChecker {
             infcx,
             last_span: DUMMY_SP,
+            mir,
             mir_def_id,
             param_env,
             region_bound_pairs,
@@ -857,8 +867,7 @@ fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Loca
             }
             StatementKind::UserAssertTy(ref c_ty, ref local) => {
                 let local_ty = mir.local_decls()[*local].ty;
-                let (ty, _) = self
-                    .infcx
+                let (ty, _) = self.infcx
                     .instantiate_canonical_with_fresh_inference_vars(stmt.source_info.span, c_ty);
                 debug!(
                     "check_stmt: user_assert_ty ty={:?} local_ty={:?}",
@@ -1400,9 +1409,12 @@ fn check_rvalue(&mut self, mir: &Mir<'tcx>, rvalue: &Rvalue<'tcx>, location: Loc
                 CastKind::Misc => {}
             },
 
+            Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
+                self.add_reborrow_constraint(location, region, borrowed_place);
+            }
+
             // FIXME: These other cases have to be implemented in future PRs
             Rvalue::Use(..)
-            | Rvalue::Ref(..)
             | Rvalue::Len(..)
             | Rvalue::BinaryOp(..)
             | Rvalue::CheckedBinaryOp(..)
@@ -1457,6 +1469,141 @@ fn check_aggregate_rvalue(
         }
     }
 
+    /// Add the constraints that arise from a borrow expression `&'a P` at the location `L`.
+    ///
+    /// # Parameters
+    ///
+    /// - `location`: the location `L` where the borrow expression occurs
+    /// - `borrow_region`: the region `'a` associated with the borrow
+    /// - `borrowed_place`: the place `P` being borrowed
+    fn add_reborrow_constraint(
+        &mut self,
+        location: Location,
+        borrow_region: ty::Region<'tcx>,
+        borrowed_place: &Place<'tcx>,
+    ) {
+        // These constraints are only meaningful during borrowck:
+        let BorrowCheckContext {
+            borrow_set,
+            location_table,
+            all_facts,
+            ..
+        } = match &mut self.borrowck_context {
+            Some(borrowck_context) => borrowck_context,
+            None => return,
+        };
+
+        // In Polonius mode, we also push a `borrow_region` fact
+        // linking the loan to the region (in some cases, though,
+        // there is no loan associated with this borrow expression --
+        // that occurs when we are borrowing an unsafe place, for
+        // example).
+        if let Some(all_facts) = all_facts {
+            if let Some(borrow_index) = borrow_set.location_map.get(&location) {
+                let region_vid = borrow_region.to_region_vid();
+                all_facts.borrow_region.push((
+                    region_vid,
+                    *borrow_index,
+                    location_table.mid_index(location),
+                ));
+            }
+        }
+
+        // If we are reborrowing the referent of another reference, we
+        // need to add outlives relationships. In a case like `&mut
+        // *p`, where the `p` has type `&'b mut Foo`, for example, we
+        // need to ensure that `'b: 'a`.
+
+        let mut borrowed_place = borrowed_place;
+
+        debug!(
+            "add_reborrow_constraint({:?}, {:?}, {:?})",
+            location, borrow_region, borrowed_place
+        );
+        while let Place::Projection(box PlaceProjection { base, elem }) = borrowed_place {
+            debug!("add_reborrow_constraint - iteration {:?}", borrowed_place);
+
+            match *elem {
+                ProjectionElem::Deref => {
+                    let tcx = self.infcx.tcx;
+                    let base_ty = base.ty(self.mir, tcx).to_ty(tcx);
+
+                    debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
+                    match base_ty.sty {
+                        ty::TyRef(ref_region, _, mutbl) => {
+                            self.constraints
+                                .outlives_constraints
+                                .push(OutlivesConstraint {
+                                    sup: ref_region.to_region_vid(),
+                                    sub: borrow_region.to_region_vid(),
+                                    locations: location.boring(),
+                                });
+
+                            if let Some(all_facts) = all_facts {
+                                all_facts.outlives.push((
+                                    ref_region.to_region_vid(),
+                                    borrow_region.to_region_vid(),
+                                    location_table.mid_index(location),
+                                ));
+                            }
+
+                            match mutbl {
+                                hir::Mutability::MutImmutable => {
+                                    // Immutable reference. We don't need the base
+                                    // to be valid for the entire lifetime of
+                                    // the borrow.
+                                    break;
+                                }
+                                hir::Mutability::MutMutable => {
+                                    // Mutable reference. We *do* need the base
+                                    // to be valid, because after the base becomes
+                                    // invalid, someone else can use our mutable deref.
+
+                                    // This is in order to make the following function
+                                    // illegal:
+                                    // ```
+                                    // fn unsafe_deref<'a, 'b>(x: &'a &'b mut T) -> &'b mut T {
+                                    //     &mut *x
+                                    // }
+                                    // ```
+                                    //
+                                    // As otherwise you could clone `&mut T` using the
+                                    // following function:
+                                    // ```
+                                    // fn bad(x: &mut T) -> (&mut T, &mut T) {
+                                    //     let my_clone = unsafe_deref(&'a x);
+                                    //     ENDREGION 'a;
+                                    //     (my_clone, x)
+                                    // }
+                                    // ```
+                                }
+                            }
+                        }
+                        ty::TyRawPtr(..) => {
+                            // deref of raw pointer, guaranteed to be valid
+                            break;
+                        }
+                        ty::TyAdt(def, _) if def.is_box() => {
+                            // deref of `Box`, need the base to be valid - propagate
+                        }
+                        _ => bug!("unexpected deref ty {:?} in {:?}", base_ty, borrowed_place),
+                    }
+                }
+                ProjectionElem::Field(..)
+                | ProjectionElem::Downcast(..)
+                | ProjectionElem::Index(..)
+                | ProjectionElem::ConstantIndex { .. }
+                | ProjectionElem::Subslice { .. } => {
+                    // other field access
+                }
+            }
+
+            // The "propagate" case. We need to check that our base is valid
+            // for the borrow's lifetime.
+            borrowed_place = base;
+        }
+    }
+
     fn prove_aggregate_predicates(
         &mut self,
         aggregate_kind: &AggregateKind<'tcx>,
index ca2a120ceb7375e3ece7b6eb3ed23e376a41fdbc..499170acee31d980093e5e11593378c2b25da047 100644 (file)
@@ -16,7 +16,7 @@
 use rustc::mir::{BasicBlock, Location, Mir, Place};
 use rustc::mir::{ProjectionElem, BorrowKind};
 use rustc::ty::TyCtxt;
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::graph::dominators::Dominators;
 
 /// Returns true if the borrow represented by `kind` is
 /// allowed to be split into separate Reservation and
index 9a756cdfb418e1dc49938a3a53e2c3d26937bd14..7bd9a241a534ef3eaa72b58000e894f18dc5f4f0 100644 (file)
@@ -102,7 +102,9 @@ fn expr_as_rvalue(&mut self,
                 });
                 if let Some(scope) = scope {
                     // schedule a shallow free of that memory, lest we unwind:
-                    this.schedule_drop(expr_span, scope, &Place::Local(result), value.ty);
+                    this.schedule_drop_storage_and_value(
+                        expr_span, scope, &Place::Local(result), value.ty,
+                    );
                 }
 
                 // malloc some memory of suitable type (thus far, uninitialized):
index d905b38331607da2cc4e6f923001e94c75592c11..f66fe763b759d621147579bf7f8fecb27462382f 100644 (file)
@@ -62,7 +62,9 @@ fn expr_as_temp(&mut self,
         // anything because no values with a destructor can be created in
         // a constant at this time, even if the type may need dropping.
         if let Some(temp_lifetime) = temp_lifetime {
-            this.schedule_drop(expr_span, temp_lifetime, &Place::Local(temp), expr_ty);
+            this.schedule_drop_storage_and_value(
+                expr_span, temp_lifetime, &Place::Local(temp), expr_ty,
+            );
         }
 
         block.and(temp)
index 79dbdfefeb8e129fc194dab843d5d286f9b0cde0..3a6c7dc9754a751c0603b8625ef41b2d717c96f1 100644 (file)
@@ -16,6 +16,7 @@
 use build::{BlockAnd, BlockAndExtension, Builder};
 use build::{GuardFrame, GuardFrameLocal, LocalsForNode};
 use build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
+use build::scope::{CachedBlock, DropKind};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::bitvec::BitVector;
 use rustc::ty::{self, Ty};
@@ -367,7 +368,15 @@ pub fn storage_live_binding(&mut self,
             source_info,
             kind: StatementKind::StorageLive(local_id)
         });
-        Place::Local(local_id)
+        let place = Place::Local(local_id);
+        let var_ty = self.local_decls[local_id].ty;
+        let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
+        let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
+        self.schedule_drop(
+            span, region_scope, &place, var_ty,
+            DropKind::Storage,
+        );
+        place
     }
 
     pub fn schedule_drop_for_binding(&mut self,
@@ -378,7 +387,12 @@ pub fn schedule_drop_for_binding(&mut self,
         let var_ty = self.local_decls[local_id].ty;
         let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
         let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
-        self.schedule_drop(span, region_scope, &Place::Local(local_id), var_ty);
+        self.schedule_drop(
+            span, region_scope, &Place::Local(local_id), var_ty,
+            DropKind::Value {
+                cached_block: CachedBlock::default(),
+            },
+        );
     }
 
     pub fn visit_bindings<F>(&mut self, pattern: &Pattern<'tcx>, f: &mut F)
index a3f97376e90d386e62b62cba08f773a259264c6d..cfdb8b0048a863024d38098fad80ea4a6d1b95dc 100644 (file)
@@ -10,6 +10,7 @@
 
 
 use build;
+use build::scope::{CachedBlock, DropKind};
 use hair::cx::Cx;
 use hair::{LintLevel, BindingMode, PatternKind};
 use rustc::hir;
@@ -744,9 +745,11 @@ fn args_and_body(&mut self,
             }
 
             // Make sure we drop (parts of) the argument even when not matched on.
-            self.schedule_drop(pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
-                               argument_scope, &place, ty);
-
+            self.schedule_drop(
+                pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
+                argument_scope, &place, ty,
+                DropKind::Value { cached_block: CachedBlock::default() },
+            );
         }
 
         // Enter the argument pattern bindings source scope, if it exists.
index 502091e5192012872f41b15386ca76ae26ae4eba..2dc5138c6f08248b77810c6f176e78a8234f1096 100644 (file)
@@ -144,12 +144,12 @@ struct DropData<'tcx> {
     /// place to drop
     location: Place<'tcx>,
 
-    /// Whether this is a full value Drop, or just a StorageDead.
-    kind: DropKind
+    /// Whether this is a value Drop or a StorageDead.
+    kind: DropKind,
 }
 
 #[derive(Debug, Default, Clone, Copy)]
-struct CachedBlock {
+pub(crate) struct CachedBlock {
     /// The cached block for the cleanups-on-diverge path. This block
     /// contains code to run the current drop and all the preceding
     /// drops (i.e. those having lower index in Drop’s Scope drop
@@ -166,7 +166,7 @@ struct CachedBlock {
 }
 
 #[derive(Debug)]
-enum DropKind {
+pub(crate) enum DropKind {
     Value {
         cached_block: CachedBlock,
     },
@@ -622,25 +622,58 @@ pub fn schedule_abort(&mut self) -> BasicBlock {
         abortblk
     }
 
+    pub fn schedule_drop_storage_and_value(
+        &mut self,
+        span: Span,
+        region_scope: region::Scope,
+        place: &Place<'tcx>,
+        place_ty: Ty<'tcx>,
+    ) {
+        self.schedule_drop(
+            span, region_scope, place, place_ty,
+            DropKind::Storage,
+        );
+        self.schedule_drop(
+            span, region_scope, place, place_ty,
+            DropKind::Value {
+                cached_block: CachedBlock::default(),
+            },
+        );
+    }
+
     // Scheduling drops
     // ================
     /// Indicates that `place` should be dropped on exit from
     /// `region_scope`.
-    pub fn schedule_drop(&mut self,
-                         span: Span,
-                         region_scope: region::Scope,
-                         place: &Place<'tcx>,
-                         place_ty: Ty<'tcx>) {
+    ///
+    /// When called with `DropKind::Storage`, `place` should be a local
+    /// with an index higher than the current `self.arg_count`.
+    pub fn schedule_drop(
+        &mut self,
+        span: Span,
+        region_scope: region::Scope,
+        place: &Place<'tcx>,
+        place_ty: Ty<'tcx>,
+        drop_kind: DropKind,
+    ) {
         let needs_drop = self.hir.needs_drop(place_ty);
-        let drop_kind = if needs_drop {
-            DropKind::Value { cached_block: CachedBlock::default() }
-        } else {
-            // Only temps and vars need their storage dead.
-            match *place {
-                Place::Local(index) if index.index() > self.arg_count => DropKind::Storage,
-                _ => return
+        match drop_kind {
+            DropKind::Value { .. } => if !needs_drop { return },
+            DropKind::Storage => {
+                match *place {
+                    Place::Local(index) => if index.index() <= self.arg_count {
+                        span_bug!(
+                            span, "`schedule_drop` called with index {} and arg_count {}",
+                            index.index(),
+                            self.arg_count,
+                        )
+                    },
+                    _ => span_bug!(
+                        span, "`schedule_drop` called with non-`Local` place {:?}", place
+                    ),
+                }
             }
-        };
+        }
 
         for scope in self.scopes.iter_mut().rev() {
             let this_scope = scope.region_scope == region_scope;
@@ -895,24 +928,24 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>,
                 });
                 block = next;
             }
-            DropKind::Storage => {}
-        }
-
-        // We do not need to emit StorageDead for generator drops
-        if generator_drop {
-            continue
-        }
+            DropKind::Storage => {
+                // We do not need to emit StorageDead for generator drops
+                if generator_drop {
+                    continue
+                }
 
-        // Drop the storage for both value and storage drops.
-        // Only temps and vars need their storage dead.
-        match drop_data.location {
-            Place::Local(index) if index.index() > arg_count => {
-                cfg.push(block, Statement {
-                    source_info,
-                    kind: StatementKind::StorageDead(index)
-                });
+                // Drop the storage for both value and storage drops.
+                // Only temps and vars need their storage dead.
+                match drop_data.location {
+                    Place::Local(index) if index.index() > arg_count => {
+                        cfg.push(block, Statement {
+                            source_info,
+                            kind: StatementKind::StorageDead(index)
+                        });
+                    }
+                    _ => unreachable!(),
+                }
             }
-            _ => continue
         }
     }
     block.unit()
index a109389aa312c30d562247623ac1bb78b05dc6fe..9736ab797b2c14967f68ff9cbd93fad62239b6d4 100644 (file)
@@ -76,7 +76,7 @@ fn precompute_borrows_out_of_scope<'a, 'tcx>(
     while let Some(location) = stack.pop() {
         // If region does not contain a point at the location, then add to list and skip
         // successor locations.
-        if !regioncx.region_contains_point(borrow_region, location) {
+        if !regioncx.region_contains(borrow_region, location) {
             debug!("borrow {:?} gets killed at {:?}", borrow_index, location);
             borrows_out_of_scope_at_location
                 .entry(location)
index 0c4b9a546cbd18617a52bdb3d9f670d08638f985..ef24a201e0f5b2fe668307ec7409a7f24e58344d 100644 (file)
@@ -129,7 +129,22 @@ pub fn check_crate(resolver: &mut Resolver, krate: &ast::Crate) {
         match directive.subclass {
             _ if directive.used.get() ||
                  directive.vis.get() == ty::Visibility::Public ||
-                 directive.span.is_dummy() => {}
+                 directive.span.is_dummy() => {
+                if let ImportDirectiveSubclass::MacroUse = directive.subclass {
+                    if resolver.session.features_untracked().use_extern_macros &&
+                        !directive.span.is_dummy() {
+                        resolver.session.buffer_lint(
+                            lint::builtin::MACRO_USE_EXTERN_CRATE,
+                            directive.id,
+                            directive.span,
+                            "deprecated `#[macro_use]` directive used to \
+                             import macros should be replaced at use sites \
+                             with a `use` statement to import the macro \
+                             instead",
+                        );
+                    }
+                }
+            }
             ImportDirectiveSubclass::ExternCrate(_) => {
                 resolver.maybe_unused_extern_crates.push((directive.id, directive.span));
             }
index 8a47b8ea6485a341ab8de71453186ccd5b562260..aed70861e33837d1d8cd462e8f2694f08dcc98b8 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![deny(bare_trait_objects)]
+
 #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/")]
@@ -1292,7 +1294,7 @@ fn intern(&mut self, string: &str, primitive_type: PrimTy) {
 /// This is the visitor that walks the whole crate.
 pub struct Resolver<'a> {
     session: &'a Session,
-    cstore: &'a CrateStore,
+    cstore: &'a dyn CrateStore,
 
     pub definitions: Definitions,
 
@@ -1388,7 +1390,7 @@ pub struct Resolver<'a> {
     /// true if `#![feature(use_extern_macros)]`
     use_extern_macros: bool,
 
-    crate_loader: &'a mut CrateLoader,
+    crate_loader: &'a mut dyn CrateLoader,
     macro_names: FxHashSet<Ident>,
     global_macros: FxHashMap<Name, &'a NameBinding<'a>>,
     pub all_macros: FxHashMap<Name, Def>,
@@ -1604,11 +1606,11 @@ fn resolve_hir_path_cb<F>(&mut self, path: &mut hir::Path, is_value: bool, error
 
 impl<'a> Resolver<'a> {
     pub fn new(session: &'a Session,
-               cstore: &'a CrateStore,
+               cstore: &'a dyn CrateStore,
                krate: &Crate,
                crate_name: &str,
                make_glob_map: MakeGlobMap,
-               crate_loader: &'a mut CrateLoader,
+               crate_loader: &'a mut dyn CrateLoader,
                arenas: &'a ResolverArenas<'a>)
                -> Resolver<'a> {
         let root_def_id = DefId::local(CRATE_DEF_INDEX);
index 9ce1e21d0d03ebfef3fb899ed2a8db6f5820d63d..024506ed7f8e73d11a5841fb2902e7830947c687 100644 (file)
@@ -385,6 +385,22 @@ fn resolve_invoc_to_def(&mut self, invoc: &mut Invocation, scope: Mark, force: b
             Err(Determinacy::Determined) => {}
         }
 
+        // Ok at this point we've determined that the `attr` above doesn't
+        // actually resolve at this time, so we may want to report an error.
+        // It could be the case, though, that `attr` won't ever resolve! If
+        // there's a custom derive that could be used it might declare `attr` as
+        // a custom attribute accepted by the derive. In this case we don't want
+        // to report this particular invocation as unresolved, but rather we'd
+        // want to move on to the next invocation.
+        //
+        // This loop here looks through all of the derive annotations in scope
+        // and tries to resolve them. If they themselves successfully resolve
+        // *and* the resolve mentions that this attribute's name is a registered
+        // custom attribute then we flag this attribute as known and update
+        // `invoc` above to point to the next invocation.
+        //
+        // By then returning `Undetermined` we should continue resolution to
+        // resolve the next attribute.
         let attr_name = match path.segments.len() {
             1 => path.segments[0].ident.name,
             _ => return Err(determinacy),
@@ -406,8 +422,8 @@ fn resolve_invoc_to_def(&mut self, invoc: &mut Invocation, scope: Mark, force: b
                             attrs.push(inert_attr);
                             attrs
                         });
+                        return Err(Determinacy::Undetermined)
                     }
-                    return Err(Determinacy::Undetermined);
                 },
                 Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined,
                 Err(Determinacy::Determined) => {}
index 1b09df16a7d16ba88771ad809646b5c72eb85bf8..2fe7d73de8aa02b3532edeebfa029da9ca32bd1e 100644 (file)
@@ -46,7 +46,7 @@ fn dump(&mut self, result: &Analysis) {
 }
 
 pub struct CallbackOutput<'b> {
-    callback: &'b mut FnMut(&Analysis),
+    callback: &'b mut dyn FnMut(&Analysis),
 }
 
 impl<'b> DumpOutput for CallbackOutput<'b> {
@@ -67,7 +67,7 @@ pub fn new(writer: &'b mut W, config: Config) -> JsonDumper<WriteOutput<'b, W>>
 
 impl<'b> JsonDumper<CallbackOutput<'b>> {
     pub fn with_callback(
-        callback: &'b mut FnMut(&Analysis),
+        callback: &'b mut dyn FnMut(&Analysis),
         config: Config,
     ) -> JsonDumper<CallbackOutput<'b>> {
         JsonDumper {
index 447b5f1fe47e71b94c080c2da4979a701fd316ea..055fbb236d8fa0a27169ee0d57f8b557d9bcf6ea 100644 (file)
@@ -13,6 +13,7 @@
        html_root_url = "https://doc.rust-lang.org/nightly/")]
 #![feature(custom_attribute)]
 #![allow(unused_attributes)]
+#![deny(bare_trait_objects)]
 
 #![recursion_limit="256"]
 
@@ -1088,7 +1089,7 @@ fn save<'l, 'tcx>(
 
 /// Call a callback with the results of save-analysis.
 pub struct CallbackHandler<'b> {
-    pub callback: &'b mut FnMut(&rls_data::Analysis),
+    pub callback: &'b mut dyn FnMut(&rls_data::Analysis),
 }
 
 impl<'b> SaveHandler for CallbackHandler<'b> {
index 2e467d315bedd7d7baf21a8f6b15955be39307f3..5e38c0bbcb4a3c5e6670409ea3ccd864e34109f4 100644 (file)
@@ -98,7 +98,7 @@ struct ParamRange {
 /// This type must not appear anywhere in other converted types.
 const TRAIT_OBJECT_DUMMY_SELF: ty::TypeVariants<'static> = ty::TyInfer(ty::FreshTy(0));
 
-impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
+impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx>+'o {
     pub fn ast_region_to_region(&self,
         lifetime: &hir::Lifetime,
         def: Option<&ty::GenericParamDef>)
index cfe9e420c5fee79b739b4f1fe923880baf6efbd1..f2745d06390e87f469ce7ecc28fdb641af856a86 100644 (file)
@@ -604,7 +604,7 @@ fn check_supplied_sig_against_expectation(
     /// If there is no expected signature, then we will convert the
     /// types that the user gave into a signature.
     fn supplied_sig_of_closure(&self, decl: &hir::FnDecl) -> ty::PolyFnSig<'tcx> {
-        let astconv: &AstConv = self;
+        let astconv: &dyn AstConv = self;
 
         // First, convert the types that the user supplied (if any).
         let supplied_arguments = decl.inputs.iter().map(|a| astconv.ast_ty_to_ty(a));
@@ -630,7 +630,7 @@ fn supplied_sig_of_closure(&self, decl: &hir::FnDecl) -> ty::PolyFnSig<'tcx> {
     /// so should yield an error, but returns back a signature where
     /// all parameters are of type `TyErr`.
     fn error_sig_of_closure(&self, decl: &hir::FnDecl) -> ty::PolyFnSig<'tcx> {
-        let astconv: &AstConv = self;
+        let astconv: &dyn AstConv = self;
 
         let supplied_arguments = decl.inputs.iter().map(|a| {
             // Convert the types that the user supplied (if any), but ignore them.
index e276dcff0601dca195a53556bd141d68365b28bc..e3b0b8cccf31c073e069913044ed20d9fd62a88d 100644 (file)
@@ -1071,7 +1071,7 @@ pub fn coerce<'a>(&mut self,
     pub fn coerce_forced_unit<'a>(&mut self,
                                   fcx: &FnCtxt<'a, 'gcx, 'tcx>,
                                   cause: &ObligationCause<'tcx>,
-                                  augment_error: &mut FnMut(&mut DiagnosticBuilder),
+                                  augment_error: &mut dyn FnMut(&mut DiagnosticBuilder),
                                   label_unit_as_expected: bool)
     {
         self.coerce_inner(fcx,
@@ -1090,7 +1090,7 @@ fn coerce_inner<'a>(&mut self,
                         cause: &ObligationCause<'tcx>,
                         expression: Option<&'gcx hir::Expr>,
                         mut expression_ty: Ty<'tcx>,
-                        augment_error: Option<&mut FnMut(&mut DiagnosticBuilder)>,
+                        augment_error: Option<&mut dyn FnMut(&mut DiagnosticBuilder)>,
                         label_expression_as_expected: bool)
     {
         // Incorporate whatever type inference information we have
index 2445cae98607ae4a93bd01a65b8892782bcbfb85..b7233217d5f3b8c1c1e7ed8a0f9c37d2546faaf2 100644 (file)
@@ -526,7 +526,7 @@ fn visit_fru_field_types(&mut self) {
         }
     }
 
-    fn resolve<T>(&self, x: &T, span: &Locatable) -> T::Lifted
+    fn resolve<T>(&self, x: &T, span: &dyn Locatable) -> T::Lifted
     where
         T: TypeFoldable<'tcx> + ty::Lift<'gcx>,
     {
@@ -580,14 +580,14 @@ fn to_span(&self, tcx: &TyCtxt) -> Span {
 struct Resolver<'cx, 'gcx: 'cx + 'tcx, 'tcx: 'cx> {
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
-    span: &'cx Locatable,
+    span: &'cx dyn Locatable,
     body: &'gcx hir::Body,
 }
 
 impl<'cx, 'gcx, 'tcx> Resolver<'cx, 'gcx, 'tcx> {
     fn new(
         fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>,
-        span: &'cx Locatable,
+        span: &'cx dyn Locatable,
         body: &'gcx hir::Body,
     ) -> Resolver<'cx, 'gcx, 'tcx> {
         Resolver {
index dde13680260687af7f97085f97a9f40e3640d1d1..393904583ca42560c862b94940110d8337a48aff 100644 (file)
@@ -212,7 +212,7 @@ pub fn coerce_unsized_info<'a, 'gcx>(gcx: TyCtxt<'a, 'gcx, 'gcx>,
         let cause = ObligationCause::misc(span, impl_node_id);
         let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>,
                            mt_b: ty::TypeAndMut<'gcx>,
-                           mk_ptr: &Fn(Ty<'gcx>) -> Ty<'gcx>| {
+                           mk_ptr: &dyn Fn(Ty<'gcx>) -> Ty<'gcx>| {
             if (mt_a.mutbl, mt_b.mutbl) == (hir::MutImmutable, hir::MutMutable) {
                 infcx.report_mismatched_types(&cause,
                                              mk_ptr(mt_b.ty),
index fa2f9885964dee3bfe3b4c007a11c4299b858941..5fa98e3ebe6951b1477271aa40bab9ccb9c87cbb 100644 (file)
@@ -1244,7 +1244,7 @@ fn impl_polarity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 }
 
 // Is it marked with ?Sized
-fn is_unsized<'gcx: 'tcx, 'tcx>(astconv: &AstConv<'gcx, 'tcx>,
+fn is_unsized<'gcx: 'tcx, 'tcx>(astconv: &dyn AstConv<'gcx, 'tcx>,
                                 ast_bounds: &[hir::GenericBound],
                                 span: Span) -> bool
 {
@@ -1598,7 +1598,7 @@ pub enum SizedByDefault { Yes, No, }
 /// Translate the AST's notion of ty param bounds (which are an enum consisting of a newtyped Ty or
 /// a region) to ty's notion of ty param bounds, which can either be user-defined traits, or the
 /// built-in trait (formerly known as kind): Send.
-pub fn compute_bounds<'gcx: 'tcx, 'tcx>(astconv: &AstConv<'gcx, 'tcx>,
+pub fn compute_bounds<'gcx: 'tcx, 'tcx>(astconv: &dyn AstConv<'gcx, 'tcx>,
                                         param_ty: Ty<'tcx>,
                                         ast_bounds: &[hir::GenericBound],
                                         sized_by_default: SizedByDefault,
@@ -1646,7 +1646,7 @@ pub fn compute_bounds<'gcx: 'tcx, 'tcx>(astconv: &AstConv<'gcx, 'tcx>,
 /// because this can be anywhere from 0 predicates (`T:?Sized` adds no
 /// predicates) to 1 (`T:Foo`) to many (`T:Bar<X=i32>` adds `T:Bar`
 /// and `<T as Bar>::X == i32`).
-fn predicates_from_bound<'tcx>(astconv: &AstConv<'tcx, 'tcx>,
+fn predicates_from_bound<'tcx>(astconv: &dyn AstConv<'tcx, 'tcx>,
                                param_ty: Ty<'tcx>,
                                bound: &hir::GenericBound)
                                -> Vec<ty::Predicate<'tcx>>
index dd09bf96da594e3dedd478073b545b808fc0cc66..4d957c9aa4520eed64f80847bbf0e9ece29c547a 100644 (file)
@@ -2338,7 +2338,7 @@ fn main() {
 
 ```compile_fail,E0225
 fn main() {
-    let _: Box<std::io::Read + std::io::Write>;
+    let _: Box<dyn std::io::Read + std::io::Write>;
 }
 ```
 
@@ -2348,7 +2348,7 @@ fn main() {
 
 ```
 fn main() {
-    let _: Box<std::io::Read + Send + Sync>;
+    let _: Box<dyn std::io::Read + Send + Sync>;
 }
 ```
 "##,
index b18e5ca54ff476775b6ce75b0ddbe379a65f8768..b50f55effad456e1052b5da2ddf9b66d4ea30420 100644 (file)
@@ -70,6 +70,7 @@
       html_root_url = "https://doc.rust-lang.org/nightly/")]
 
 #![allow(non_camel_case_types)]
+#![deny(bare_trait_objects)]
 
 #![feature(box_patterns)]
 #![feature(box_syntax)]
index 6472edb0aa7d3981c8629d98185955c0244f1d89..376410677346cea1be104a402d4334d0e734880f 100644 (file)
@@ -163,7 +163,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_alphabetic)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_alphabetic)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_alphabetic(&self) -> bool { unimplemented!(); }
@@ -176,7 +178,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_uppercase)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_uppercase)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_uppercase(&self) -> bool { unimplemented!(); }
@@ -189,7 +193,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_lowercase)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_lowercase)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_lowercase(&self) -> bool { unimplemented!(); }
@@ -203,7 +209,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_alphanumeric)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_alphanumeric)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_alphanumeric(&self) -> bool { unimplemented!(); }
@@ -216,7 +224,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_digit)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_digit)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_digit(&self) -> bool { unimplemented!(); }
@@ -230,7 +240,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_hexdigit)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_hexdigit)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_hexdigit(&self) -> bool { unimplemented!(); }
@@ -248,7 +260,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_punctuation)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_punctuation)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_punctuation(&self) -> bool { unimplemented!(); }
@@ -261,7 +275,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_graphic)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_graphic)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_graphic(&self) -> bool { unimplemented!(); }
@@ -291,7 +307,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_whitespace)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_whitespace)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_whitespace(&self) -> bool { unimplemented!(); }
@@ -304,7 +322,9 @@ pub trait AsciiExt {
     /// # Note
     ///
     /// This method will be deprecated in favor of the identically-named
-    /// inherent methods on `u8`, `char`, `[u8]` and `str`.
+    /// inherent methods on `u8` and `char`.
+    /// For `[u8]` use `.iter().all(u8::is_ascii_control)`.
+    /// For `str` use `.bytes().all(u8::is_ascii_control)`.
     #[unstable(feature = "ascii_ctype", issue = "39658")]
     #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")]
     fn is_ascii_control(&self) -> bool { unimplemented!(); }
index c0e1e2533a0f01975d28c89fe2940e1488436185..9066c0b7694793b9319789634df49d5587250486 100644 (file)
@@ -541,7 +541,7 @@ fn description(&self) -> &str { self.inner.description() }
 /// ```
 #[rustc_deprecated(since = "1.29.0",
     reason = "This function's behavior is unexpected and probably not what you want. \
-              Consider using the home_dir function from crates.io/crates/dirs instead.")]
+              Consider using the home_dir function from https://crates.io/crates/dirs instead.")]
 #[stable(feature = "env", since = "1.0.0")]
 pub fn home_dir() -> Option<PathBuf> {
     os_imp::home_dir()
index 6513d11dd517c436c25afe16c7def1f54d1cea0c..b816f4b7850efb17aaea0f3e4b0bcca774a838a4 100644 (file)
@@ -706,6 +706,14 @@ fn from(s: Box<CStr>) -> CString {
     }
 }
 
+#[stable(feature = "more_box_slice_clone", since = "1.29.0")]
+impl Clone for Box<CStr> {
+    #[inline]
+    fn clone(&self) -> Self {
+        (**self).into()
+    }
+}
+
 #[stable(feature = "box_from_c_string", since = "1.20.0")]
 impl From<CString> for Box<CStr> {
     #[inline]
index 4ada6a77a8eec9ff1d24388103d48cf2e4134db6..b1c6e7af693d0a00eae72517ef9fe984dfe644d9 100644 (file)
@@ -642,6 +642,14 @@ fn from(s: OsString) -> Box<OsStr> {
     }
 }
 
+#[stable(feature = "more_box_slice_clone", since = "1.29.0")]
+impl Clone for Box<OsStr> {
+    #[inline]
+    fn clone(&self) -> Self {
+        self.to_os_string().into_boxed_os_str()
+    }
+}
+
 #[stable(feature = "shared_from_slice2", since = "1.24.0")]
 impl From<OsString> for Arc<OsStr> {
     #[inline]
index 3dc1e9c3dadc85ff4c5c250d8f0d58a68c997fe5..2d8686292788448fa13b98156fdaab240cfabcb4 100644 (file)
@@ -1410,6 +1410,14 @@ fn from(p: PathBuf) -> Box<Path> {
     }
 }
 
+#[stable(feature = "more_box_slice_clone", since = "1.29.0")]
+impl Clone for Box<Path> {
+    #[inline]
+    fn clone(&self) -> Self {
+        self.to_path_buf().into_boxed_path()
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a, T: ?Sized + AsRef<OsStr>> From<&'a T> for PathBuf {
     fn from(s: &'a T) -> PathBuf {
index 2dd3aebe6108ee839a83e5088d387bc8f76e0a53..cbda5afadcdfc14858813589527d7ba76615fe14 100644 (file)
@@ -689,7 +689,7 @@ fn inner_unsafe(&self) -> &UnsafeCell<Flavor<T>> {
 /// only one [`Receiver`] is supported.
 ///
 /// If the [`Receiver`] is disconnected while trying to [`send`] with the
-/// [`Sender`], the [`send`] method will return a [`SendError`]. Similarly, If the
+/// [`Sender`], the [`send`] method will return a [`SendError`]. Similarly, if the
 /// [`Sender`] is disconnected while trying to [`recv`], the [`recv`] method will
 /// return a [`RecvError`].
 ///
index 6cdbe5df75d5162b2d0f051fb134af80ea95f612..d59d800a579474b5c320eb40be2003737530fab2 100644 (file)
@@ -20,7 +20,7 @@
 // fallback implementation to use as well.
 //
 // Due to rust-lang/rust#18804, make sure this is not generic!
-#[cfg(target_os = "linux")]
+#[cfg(any(target_os = "linux", target_os = "fuchsia"))]
 pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern fn(*mut u8)) {
     use libc;
     use mem;
@@ -55,11 +55,6 @@ fn _tlv_atexit(dtor: unsafe extern fn(*mut u8),
     _tlv_atexit(dtor, t);
 }
 
-// Just use the thread_local fallback implementation, at least until there's
-// a more direct implementation.
-#[cfg(target_os = "fuchsia")]
-pub use sys_common::thread_local::register_dtor_fallback as register_dtor;
-
 pub fn requires_move_before_drop() -> bool {
     // The macOS implementation of TLS apparently had an odd aspect to it
     // where the pointer we have may be overwritten while this destructor
index dd8f79d20abda429638cfc6f2cdce96b7430bdb1..4ebb1fcb65393da183cdf489cd0166cb93aa5017 100644 (file)
@@ -50,7 +50,7 @@ fn next(&self) -> State {
 pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
                        sp: Span,
                        tts: &[tokenstream::TokenTree])
-                       -> Box<base::MacResult + 'cx> {
+                       -> Box<dyn base::MacResult + 'cx> {
     if !cx.ecfg.enable_asm() {
         feature_gate::emit_feature_err(&cx.parse_sess,
                                        "asm",
index fe4d599d8242633a34ffd173f04adcf331ab4214..8d0a04831fcb48c4b5cb5f0da5a166604099ef00 100644 (file)
@@ -22,7 +22,7 @@ pub fn expand_assert<'cx>(
     cx: &'cx mut ExtCtxt,
     sp: Span,
     tts: &[TokenTree],
-) -> Box<MacResult + 'cx> {
+) -> Box<dyn MacResult + 'cx> {
     let mut parser = cx.new_parser_from_tts(tts);
     let cond_expr = panictry!(parser.parse_expr());
     let custom_msg_args = if parser.eat(&token::Comma) {
index 6acc578d07e783040ba0aec680b6b242f4d008c0..2384b6a796e198cc1e0aab1c3c552f7d7f4789ea 100644 (file)
@@ -23,7 +23,7 @@
 pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
                        sp: Span,
                        tts: &[tokenstream::TokenTree])
-                       -> Box<base::MacResult + 'static> {
+                       -> Box<dyn base::MacResult + 'static> {
     let sp = sp.apply_mark(cx.current_expansion.mark);
     let mut p = cx.new_parser_from_tts(tts);
     let cfg = panictry!(p.parse_meta_item());
index 7bc7afba63cb4d8f43539e851847391d74c84df7..ce7fb400bd547b51b6e08b8195f8654d86020967 100644 (file)
@@ -18,7 +18,7 @@
 pub fn expand_compile_error<'cx>(cx: &'cx mut ExtCtxt,
                               sp: Span,
                               tts: &[tokenstream::TokenTree])
-                              -> Box<base::MacResult + 'cx> {
+                              -> Box<dyn base::MacResult + 'cx> {
     let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
         None => return DummyResult::expr(sp),
         Some(v) => v,
index 1c6f0089503e20d0e1c003ce45b0467916f6c256..69b4a83764e4624f999864f50f4a23d12b073f6d 100644 (file)
@@ -21,7 +21,7 @@ pub fn expand_syntax_ext(
     cx: &mut base::ExtCtxt,
     sp: syntax_pos::Span,
     tts: &[tokenstream::TokenTree],
-) -> Box<base::MacResult + 'static> {
+) -> Box<dyn base::MacResult + 'static> {
     let es = match base::get_exprs_from_tts(cx, sp, tts) {
         Some(e) => e,
         None => return base::DummyResult::expr(sp),
index 828c24708416732e2708a77c8e5284b4fa2be032..a3c5c3df66e4c424e4c2d3709d689d9d21f724f4 100644 (file)
@@ -21,7 +21,7 @@
 pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
                               sp: Span,
                               tts: &[TokenTree])
-                              -> Box<base::MacResult + 'cx> {
+                              -> Box<dyn base::MacResult + 'cx> {
     if !cx.ecfg.enable_concat_idents() {
         feature_gate::emit_feature_err(&cx.parse_sess,
                                        "concat_idents",
index 7f03001d9c6ebf92259aba08c3211a2a95560d7d..41e980b334616ade5931bb301b9880daaa96544c 100644 (file)
@@ -19,7 +19,7 @@ pub fn expand_deriving_unsafe_bound(cx: &mut ExtCtxt,
                                     span: Span,
                                     _: &MetaItem,
                                     _: &Annotatable,
-                                    _: &mut FnMut(Annotatable)) {
+                                    _: &mut dyn FnMut(Annotatable)) {
     cx.span_err(span, "this unsafe trait should be implemented explicitly");
 }
 
@@ -27,7 +27,7 @@ pub fn expand_deriving_copy(cx: &mut ExtCtxt,
                             span: Span,
                             mitem: &MetaItem,
                             item: &Annotatable,
-                            push: &mut FnMut(Annotatable)) {
+                            push: &mut dyn FnMut(Annotatable)) {
     let trait_def = TraitDef {
         span,
         attributes: Vec::new(),
index 9aeac5b1ddb2ae9937cb952897cde1b90e4cda49..ec935b3e72f230955fc8884210c09e8d7c6d200d 100644 (file)
@@ -25,7 +25,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt,
                              span: Span,
                              mitem: &MetaItem,
                              item: &Annotatable,
-                             push: &mut FnMut(Annotatable)) {
+                             push: &mut dyn FnMut(Annotatable)) {
     // check if we can use a short form
     //
     // the short form is `fn clone(&self) -> Self { *self }`
index 00ab39032acbd2489dbdc6273ca1812d1a8f9259..f202bc4e524d2204ca854861d8a499e85a8766b0 100644 (file)
@@ -23,7 +23,7 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt,
                           span: Span,
                           mitem: &MetaItem,
                           item: &Annotatable,
-                          push: &mut FnMut(Annotatable)) {
+                          push: &mut dyn FnMut(Annotatable)) {
     let inline = cx.meta_word(span, Symbol::intern("inline"));
     let hidden = cx.meta_list_item_word(span, Symbol::intern("hidden"));
     let doc = cx.meta_list(span, Symbol::intern("doc"), vec![hidden]);
index 99b6f752e9406a48c91e12f1ef8a806897cb9d71..117bedf453e6c33b17cdcc11b56d686915fd599b 100644 (file)
@@ -23,7 +23,7 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt,
                            span: Span,
                            mitem: &MetaItem,
                            item: &Annotatable,
-                           push: &mut FnMut(Annotatable)) {
+                           push: &mut dyn FnMut(Annotatable)) {
     let inline = cx.meta_word(span, Symbol::intern("inline"));
     let attrs = vec![cx.attribute(span, inline)];
     let trait_def = TraitDef {
index c259733d81abdc8e8e288b3f7a91c940672b40d5..24a3a7542fb669507be2dc3044d056c172269d68 100644 (file)
@@ -23,7 +23,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,
                                   span: Span,
                                   mitem: &MetaItem,
                                   item: &Annotatable,
-                                  push: &mut FnMut(Annotatable)) {
+                                  push: &mut dyn FnMut(Annotatable)) {
     // structures are equal if all fields are equal, and non equal, if
     // any fields are not equal or if the enum variants are different
     fn cs_op(cx: &mut ExtCtxt,
index 2b3930063f369028e9c23122ff398e1885d61c0a..3705a245584d02f1127599ab2de717e103bd38e0 100644 (file)
@@ -25,7 +25,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt,
                                    span: Span,
                                    mitem: &MetaItem,
                                    item: &Annotatable,
-                                   push: &mut FnMut(Annotatable)) {
+                                   push: &mut dyn FnMut(Annotatable)) {
     macro_rules! md {
         ($name:expr, $op:expr, $equal:expr) => { {
             let inline = cx.meta_word(span, Symbol::intern("inline"));
index b546f5df15799467ad2daf2f8dd9408c4692a9e1..c2a7dea331673f835394c07edf53b026f82e52ba 100644 (file)
@@ -23,7 +23,7 @@ pub fn expand_deriving_debug(cx: &mut ExtCtxt,
                              span: Span,
                              mitem: &MetaItem,
                              item: &Annotatable,
-                             push: &mut FnMut(Annotatable)) {
+                             push: &mut dyn FnMut(Annotatable)) {
     // &mut ::std::fmt::Formatter
     let fmtr = Ptr(Box::new(Literal(path_std!(cx, fmt::Formatter))),
                    Borrowed(None, ast::Mutability::Mutable));
index 7618fe63ab3354461b499c423792defbe1f3a93b..1e04d8fa22a673e522c6b9f80ee1067572c65bb3 100644 (file)
@@ -27,7 +27,7 @@ pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt,
                                        span: Span,
                                        mitem: &MetaItem,
                                        item: &Annotatable,
-                                       push: &mut FnMut(Annotatable)) {
+                                       push: &mut dyn FnMut(Annotatable)) {
     expand_deriving_decodable_imp(cx, span, mitem, item, push, "rustc_serialize")
 }
 
@@ -35,7 +35,7 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt,
                                  span: Span,
                                  mitem: &MetaItem,
                                  item: &Annotatable,
-                                 push: &mut FnMut(Annotatable)) {
+                                 push: &mut dyn FnMut(Annotatable)) {
     warn_if_deprecated(cx, span, "Decodable");
     expand_deriving_decodable_imp(cx, span, mitem, item, push, "serialize")
 }
@@ -44,7 +44,7 @@ fn expand_deriving_decodable_imp(cx: &mut ExtCtxt,
                                  span: Span,
                                  mitem: &MetaItem,
                                  item: &Annotatable,
-                                 push: &mut FnMut(Annotatable),
+                                 push: &mut dyn FnMut(Annotatable),
                                  krate: &'static str) {
     let typaram = &*deriving::hygienic_type_parameter(item, "__D");
 
index cbd6a257b77e8ae2ae23951d0915495547c09624..958116f7809bbb3d078455fbc80be8f863763795 100644 (file)
@@ -23,7 +23,7 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt,
                                span: Span,
                                mitem: &MetaItem,
                                item: &Annotatable,
-                               push: &mut FnMut(Annotatable)) {
+                               push: &mut dyn FnMut(Annotatable)) {
     let inline = cx.meta_word(span, Symbol::intern("inline"));
     let attrs = vec![cx.attribute(span, inline)];
     let trait_def = TraitDef {
index 8b409df1f09964616c0c941db37cf1b04d1110ea..5438c8b52af05e283bd9038549ef159aa58cc7ce 100644 (file)
@@ -108,7 +108,7 @@ pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt,
                                        span: Span,
                                        mitem: &MetaItem,
                                        item: &Annotatable,
-                                       push: &mut FnMut(Annotatable)) {
+                                       push: &mut dyn FnMut(Annotatable)) {
     expand_deriving_encodable_imp(cx, span, mitem, item, push, "rustc_serialize")
 }
 
@@ -116,7 +116,7 @@ pub fn expand_deriving_encodable(cx: &mut ExtCtxt,
                                  span: Span,
                                  mitem: &MetaItem,
                                  item: &Annotatable,
-                                 push: &mut FnMut(Annotatable)) {
+                                 push: &mut dyn FnMut(Annotatable)) {
     warn_if_deprecated(cx, span, "Encodable");
     expand_deriving_encodable_imp(cx, span, mitem, item, push, "serialize")
 }
@@ -125,7 +125,7 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt,
                                  span: Span,
                                  mitem: &MetaItem,
                                  item: &Annotatable,
-                                 push: &mut FnMut(Annotatable),
+                                 push: &mut dyn FnMut(Annotatable),
                                  krate: &'static str) {
     let typaram = &*deriving::hygienic_type_parameter(item, "__S");
 
index 3ea0eb8bbd842e63155a3c1369b9116a540892fa..aad69c109f9795f75bee0ffa5776f90947a34068 100644 (file)
@@ -330,7 +330,7 @@ pub enum SubstructureFields<'a> {
 /// Combine the values of all the fields together. The last argument is
 /// all the fields of all the structures.
 pub type CombineSubstructureFunc<'a> =
-    Box<FnMut(&mut ExtCtxt, Span, &Substructure) -> P<Expr> + 'a>;
+    Box<dyn FnMut(&mut ExtCtxt, Span, &Substructure) -> P<Expr> + 'a>;
 
 /// Deal with non-matching enum variants.  The tuple is a list of
 /// identifiers (one for each `Self` argument, which could be any of the
@@ -338,7 +338,7 @@ pub enum SubstructureFields<'a> {
 /// holding the variant index value for each of the `Self` arguments.  The
 /// last argument is all the non-`Self` args of the method being derived.
 pub type EnumNonMatchCollapsedFunc<'a> =
-    Box<FnMut(&mut ExtCtxt, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>;
+    Box<dyn FnMut(&mut ExtCtxt, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>;
 
 pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>)
                                 -> RefCell<CombineSubstructureFunc<'a>> {
@@ -398,7 +398,7 @@ pub fn expand(self,
                   cx: &mut ExtCtxt,
                   mitem: &ast::MetaItem,
                   item: &'a Annotatable,
-                  push: &mut FnMut(Annotatable)) {
+                  push: &mut dyn FnMut(Annotatable)) {
         self.expand_ext(cx, mitem, item, push, false);
     }
 
@@ -406,7 +406,7 @@ pub fn expand_ext(self,
                       cx: &mut ExtCtxt,
                       mitem: &ast::MetaItem,
                       item: &'a Annotatable,
-                      push: &mut FnMut(Annotatable),
+                      push: &mut dyn FnMut(Annotatable),
                       from_scratch: bool) {
         match *item {
             Annotatable::Item(ref item) => {
index 67096cdb49a3c5da2d589f47c4978b8a12a9cc3d..7d22998487ba7598010c5996c677e03150b02a93 100644 (file)
@@ -22,7 +22,7 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt,
                             span: Span,
                             mitem: &MetaItem,
                             item: &Annotatable,
-                            push: &mut FnMut(Annotatable)) {
+                            push: &mut dyn FnMut(Annotatable)) {
 
     let path = Path::new_(pathvec_std!(cx, hash::Hash), None, vec![], PathKind::Std);
 
index e6a1434ca9d10ca540432f668a1008ed6be7da3f..2f5e42d2f7b17f6d70be355adea003a2c34f953d 100644 (file)
@@ -72,7 +72,7 @@ pub fn is_builtin_trait(name: ast::Name) -> bool {
             }
         }
 
-        pub fn register_builtin_derives(resolver: &mut Resolver) {
+        pub fn register_builtin_derives(resolver: &mut dyn Resolver) {
             $(
                 resolver.add_builtin(
                     ast::Ident::with_empty_ctxt(Symbol::intern($name)),
index bbc5b03d6885eaa3f1a69cf2119b7d12f3550827..3c34bf496da594a460022f81773d3291755299fe 100644 (file)
@@ -26,7 +26,7 @@
 pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
                               sp: Span,
                               tts: &[tokenstream::TokenTree])
-                              -> Box<base::MacResult + 'cx> {
+                              -> Box<dyn base::MacResult + 'cx> {
     let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
         None => return DummyResult::expr(sp),
         Some(v) => v,
@@ -57,7 +57,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
 pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
                        sp: Span,
                        tts: &[tokenstream::TokenTree])
-                       -> Box<base::MacResult + 'cx> {
+                       -> Box<dyn base::MacResult + 'cx> {
     let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
         Some(ref exprs) if exprs.is_empty() => {
             cx.span_err(sp, "env! takes 1 or 2 arguments");
index 4bf764b1101fa6f1ffae26c15a65f5aaff2a31c7..8587d11b2278650fe2547e3e6a3ebd0076421821 100644 (file)
@@ -679,7 +679,7 @@ fn format_arg(ecx: &ExtCtxt,
 pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt,
                                mut sp: Span,
                                tts: &[tokenstream::TokenTree])
-                               -> Box<base::MacResult + 'cx> {
+                               -> Box<dyn base::MacResult + 'cx> {
     sp = sp.apply_mark(ecx.current_expansion.mark);
     match parse_args(ecx, sp, tts) {
         Some((efmt, args, names)) => {
index 642aa0e5b125da1ea99803eb361f3e516266ab8e..40ecd6e1519c3f91584731673a5205ba43aea758 100644 (file)
@@ -34,7 +34,7 @@
 
 pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt,
                               sp: Span,
-                              tts: &[tokenstream::TokenTree]) -> Box<base::MacResult + 'cx> {
+                              tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult + 'cx> {
     if !cx.ecfg.enable_global_asm() {
         feature_gate::emit_feature_err(&cx.parse_sess,
                                        MACRO,
index 311251832664e960e25285aee14d6c6bb80c3233..bdf7a8d704042bd92c4e39f1489fa38cde56bc19 100644 (file)
@@ -10,6 +10,8 @@
 
 //! Syntax extensions in the Rust compiler.
 
+#![deny(bare_trait_objects)]
+
 #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
        html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
        html_root_url = "https://doc.rust-lang.org/nightly/")]
@@ -59,7 +61,7 @@
 use syntax::ext::hygiene;
 use syntax::symbol::Symbol;
 
-pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
+pub fn register_builtins(resolver: &mut dyn syntax::ext::base::Resolver,
                          user_exts: Vec<NamedSyntaxExtension>,
                          enable_quotes: bool) {
     deriving::register_builtin_derives(resolver);
index 71f1951d5d455c168d7c9d23d784d8ed8a19691b..7b76b1e8914684a292bc0947dad5a796eff6cbf0 100644 (file)
@@ -17,7 +17,7 @@
 pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt,
                               sp: syntax_pos::Span,
                               tts: &[tokenstream::TokenTree])
-                              -> Box<base::MacResult + 'cx> {
+                              -> Box<dyn base::MacResult + 'cx> {
     if !cx.ecfg.enable_log_syntax() {
         feature_gate::emit_feature_err(&cx.parse_sess,
                                        "log_syntax",
index ef29e5a6b022b624498854b93e13b4d81be48a22..85aa84acc4221f71279c51be50294cad20257a33 100644 (file)
@@ -55,7 +55,7 @@ struct CollectProcMacros<'a> {
 }
 
 pub fn modify(sess: &ParseSess,
-              resolver: &mut ::syntax::ext::base::Resolver,
+              resolver: &mut dyn (::syntax::ext::base::Resolver),
               mut krate: ast::Crate,
               is_proc_macro_crate: bool,
               is_test_crate: bool,
@@ -200,8 +200,8 @@ fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribut
     }
 
     fn collect_attr_proc_macro(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
-        if let Some(_) = attr.meta_item_list() {
-            self.handler.span_err(attr.span, "`#[proc_macro_attribute]` attribute
+        if !attr.is_word() {
+            self.handler.span_err(attr.span, "`#[proc_macro_attribute]` attribute \
                 does not take any arguments");
             return;
         }
@@ -223,8 +223,8 @@ fn collect_attr_proc_macro(&mut self, item: &'a ast::Item, attr: &'a ast::Attrib
     }
 
     fn collect_bang_proc_macro(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
-        if let Some(_) = attr.meta_item_list() {
-            self.handler.span_err(attr.span, "`#[proc_macro]` attribute
+        if !attr.is_word() {
+            self.handler.span_err(attr.span, "`#[proc_macro]` attribute \
                 does not take any arguments");
             return;
         }
index 48be8e0c53c2ed2c41363da6e7c843efb8053135..256b525b8bea636f40dc98b745c147c8bc10db5d 100644 (file)
@@ -18,7 +18,7 @@
 pub fn expand_trace_macros(cx: &mut ExtCtxt,
                            sp: Span,
                            tt: &[TokenTree])
-                           -> Box<base::MacResult + 'static> {
+                           -> Box<dyn base::MacResult + 'static> {
     if !cx.ecfg.enable_trace_macros() {
         feature_gate::emit_feature_err(&cx.parse_sess,
                                        "trace_macros",
diff --git a/src/test/codegen/issue-45222.rs b/src/test/codegen/issue-45222.rs
new file mode 100644 (file)
index 0000000..30a0324
--- /dev/null
@@ -0,0 +1,74 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -O
+// min-llvm-version 6.0
+
+#![crate_type = "lib"]
+
+// verify that LLVM recognizes a loop involving 0..=n and will const-fold it.
+
+//------------------------------------------------------------------------------
+// Example from original issue #45222
+
+fn foo2(n: u64) -> u64 {
+    let mut count = 0;
+    for _ in 0..n {
+        for j in (0..=n).rev() {
+            count += j;
+        }
+    }
+    count
+}
+
+// CHECK-LABEL: @check_foo2
+#[no_mangle]
+pub fn check_foo2() -> u64 {
+    // CHECK: ret i64 500005000000000
+    foo2(100000)
+}
+
+//------------------------------------------------------------------------------
+// Simplified example of #45222
+
+fn triangle_inc(n: u64) -> u64 {
+    let mut count = 0;
+    for j in 0 ..= n {
+        count += j;
+    }
+    count
+}
+
+// CHECK-LABEL: @check_triangle_inc
+#[no_mangle]
+pub fn check_triangle_inc() -> u64 {
+    // CHECK: ret i64 5000050000
+    triangle_inc(100000)
+}
+
+//------------------------------------------------------------------------------
+// Demo in #48012
+
+fn foo3r(n: u64) -> u64 {
+    let mut count = 0;
+    (0..n).for_each(|_| {
+        (0 ..= n).rev().for_each(|j| {
+            count += j;
+        })
+    });
+    count
+}
+
+// CHECK-LABEL: @check_foo3r
+#[no_mangle]
+pub fn check_foo3r() -> u64 {
+    // CHECK: ret i64 500005000000000
+    foo3r(100000)
+}
index ea3f0de5d082ecf114fc4c0dae9d18ab1f3f4370..9f5170cc89ee422436abe5a2a8c3ff0b6b3fd146 100644 (file)
@@ -31,11 +31,11 @@ pub fn test() {
 // CHECK: [[S__4:%[0-9]+]] = bitcast { i32, i32 }* %_4 to i8*
 // CHECK: call void @llvm.lifetime.start{{.*}}(i{{[0-9 ]+}}, i8* [[S__4]])
 
-// CHECK: [[E_b:%[0-9]+]] = bitcast { i32, i32 }** %b to i8*
-// CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E_b]])
-
 // CHECK: [[E__4:%[0-9]+]] = bitcast { i32, i32 }* %_4 to i8*
 // CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E__4]])
+
+// CHECK: [[E_b:%[0-9]+]] = bitcast { i32, i32 }** %b to i8*
+// CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E_b]])
     }
 
     let c = 1;
diff --git a/src/test/mir-opt/issue-49232.rs b/src/test/mir-opt/issue-49232.rs
new file mode 100644 (file)
index 0000000..8e5a94a
--- /dev/null
@@ -0,0 +1,148 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// We must mark a variable whose initialization fails due to an
+// abort statement as StorageDead.
+
+fn main() {
+    loop {
+        let beacon = {
+            match true {
+                false => 4,
+                true => break,
+            }
+        };
+        drop(&beacon);
+    }
+}
+
+// END RUST SOURCE
+// START rustc.main.mir_map.0.mir
+// fn main() -> (){
+//     let mut _0: ();
+//     scope 1 {
+//     }
+//     scope 2 {
+//         let _2: i32;
+//     }
+//     let mut _1: ();
+//     let mut _3: bool;
+//     let mut _4: u8;
+//     let mut _5: !;
+//     let mut _6: ();
+//     let mut _7: &i32;
+//     bb0: {
+//         goto -> bb1;
+//     }
+//     bb1: {
+//         falseUnwind -> [real: bb3, cleanup: bb4];
+//     }
+//     bb2: {
+//         goto -> bb29;
+//     }
+//     bb3: {
+//         StorageLive(_2);
+//         StorageLive(_3);
+//         _3 = const true;
+//         _4 = discriminant(_3);
+//         switchInt(_3) -> [false: bb11, otherwise: bb10];
+//     }
+//     bb4: {
+//         resume;
+//     }
+//     bb5: {
+//         _2 = const 4i32;
+//         goto -> bb14;
+//     }
+//     bb6: {
+//         _0 = ();
+//         goto -> bb15;
+//     }
+//     bb7: {
+//         falseEdges -> [real: bb12, imaginary: bb8];
+//     }
+//     bb8: {
+//         falseEdges -> [real: bb13, imaginary: bb9];
+//     }
+//     bb9: {
+//         unreachable;
+//     }
+//     bb10: {
+//         goto -> bb8;
+//     }
+//     bb11: {
+//         goto -> bb7;
+//     }
+//     bb12: {
+//         goto -> bb5;
+//     }
+//     bb13: {
+//         goto -> bb6;
+//     }
+//     bb14: {
+//         StorageDead(_3);
+//         StorageLive(_7);
+//         _7 = &_2;
+//         _6 = const std::mem::drop(move _7) -> [return: bb28, unwind: bb4];
+//     }
+//     bb15: {
+//         goto -> bb16;
+//     }
+//     bb16: {
+//         goto -> bb17;
+//     }
+//     bb17: {
+//         goto -> bb18;
+//     }
+//     bb18: {
+//         goto -> bb19;
+//     }
+//     bb19: {
+//         goto -> bb20;
+//     }
+//     bb20: {
+//         StorageDead(_3);
+//         goto -> bb21;
+//     }
+//     bb21: {
+//         goto -> bb22;
+//     }
+//     bb22: {
+//         StorageDead(_2);
+//         goto -> bb23;
+//     }
+//     bb23: {
+//         goto -> bb24;
+//     }
+//     bb24: {
+//         goto -> bb25;
+//     }
+//     bb25: {
+//         goto -> bb2;
+//     }
+//     bb26: {
+//         _5 = ();
+//         unreachable;
+//     }
+//     bb27: {
+//         StorageDead(_5);
+//         goto -> bb14;
+//     }
+//     bb28: {
+//         StorageDead(_7);
+//         _1 = ();
+//         StorageDead(_2);
+//         goto -> bb1;
+//     }
+//     bb29: {
+//         return;
+//     }
+// }
+// END rustc.main.mir_map.0.mir
index 41eaf67d292a616aad60032ff876ad4681a5b256..16e30f84d17565788afca1912eb879a402d5132b 100644 (file)
@@ -31,8 +31,8 @@ fn main() {
 //         StorageDead(_5);
 //         _3 = &_4;
 //         _2 = ();
-//         StorageDead(_3);
 //         StorageDead(_4);
+//         StorageDead(_3);
 //         StorageLive(_6);
 //         _6 = const 1i32;
 //         _0 = ();
index b21158abfe551d53be911e1bbd635bdfce2f82eb..f1b4b4f5a0c8a05d16a6e2fe12fcb8db95577a51 100644 (file)
@@ -3,7 +3,7 @@
 #[prelude_import]
 use std::prelude::v1::*;
 #[macro_use]
-extern crate std as std;
+extern crate std;
 // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
@@ -21,4 +21,3 @@ extern crate std as std;
 macro_rules! negative(( $ e : expr ) => { $ e < 0 });
 
 fn main() { (1 as i32) < 0; }
-
index 87b5274545f385425b4b4ac682c7ef5c97bf587f..ca4c364c6315778aa8743534914c836001370c1e 100644 (file)
@@ -19,4 +19,3 @@ macro_rules! negative {
 fn main() {
       negative!(1 as i32);
 }
-
index 81518b0b87271c85d5168bc4ec076240a604ea35..a4380d9212fdf1e596fc25c7b72a611701bffd53 100644 (file)
@@ -1,7 +1,7 @@
 #[prelude_import]
 use std::prelude::v1::*;
 #[macro_use]
-extern crate std as std;
+extern crate std;
 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/custom-attr-only-one-derive.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/custom-attr-only-one-derive.rs
new file mode 100644 (file)
index 0000000..4609f57
--- /dev/null
@@ -0,0 +1,27 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro_derive(Foo)]
+pub fn foo(a: TokenStream) -> TokenStream {
+    "".parse().unwrap()
+}
+
+#[proc_macro_derive(Bar, attributes(custom))]
+pub fn bar(a: TokenStream) -> TokenStream {
+    "".parse().unwrap()
+}
diff --git a/src/test/run-pass-fulldeps/proc-macro/custom-attr-only-one-derive.rs b/src/test/run-pass-fulldeps/proc-macro/custom-attr-only-one-derive.rs
new file mode 100644 (file)
index 0000000..3b2833a
--- /dev/null
@@ -0,0 +1,25 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:custom-attr-only-one-derive.rs
+
+#![feature(rust_2018_preview)]
+
+#[macro_use]
+extern crate custom_attr_only_one_derive;
+
+#[derive(Bar, Foo)]
+#[custom = "test"]
+pub enum A {
+    B,
+    C,
+}
+
+fn main() {}
index b08d16e5088d767ac9568037a2b53905e3449495..2bedfc133b5819485986a71f14eb517b8a1087d5 100644 (file)
 
 // Test inclusive range syntax.
 
-use std::ops::{RangeInclusive, RangeToInclusive};
+#![feature(range_is_empty)]
+#![allow(unused_comparisons)]
+
+use std::ops::RangeToInclusive;
 
 fn foo() -> isize { 42 }
 
 // Test that range syntax works in return statements
-fn return_range_to() -> RangeToInclusive<i32> { return ..=1; }
+pub fn return_range_to() -> RangeToInclusive<i32> { return ..=1; }
+
+#[derive(Debug)]
+struct P(u8);
 
 pub fn main() {
     let mut count = 0;
@@ -26,7 +32,7 @@ pub fn main() {
     assert_eq!(count, 55);
 
     let mut count = 0;
-    let mut range = 0_usize..=10;
+    let range = 0_usize..=10;
     for i in range {
         assert!(i >= 0 && i <= 10);
         count += i;
@@ -80,7 +86,7 @@ pub fn main() {
     short.next();
     assert_eq!(long.size_hint(), (255, Some(255)));
     assert_eq!(short.size_hint(), (0, Some(0)));
-    assert_eq!(short, 1..=0);
+    assert!(short.is_empty());
 
     assert_eq!(long.len(), 255);
     assert_eq!(short.len(), 0);
@@ -95,28 +101,31 @@ pub fn main() {
     for i in 3..=251 {
         assert_eq!(long.next(), Some(i));
     }
-    assert_eq!(long, 1..=0);
+    assert!(long.is_empty());
 
     // check underflow
     let mut narrow = 1..=0;
     assert_eq!(narrow.next_back(), None);
-    assert_eq!(narrow, 1..=0);
+    assert!(narrow.is_empty());
     let mut zero = 0u8..=0;
     assert_eq!(zero.next_back(), Some(0));
     assert_eq!(zero.next_back(), None);
-    assert_eq!(zero, 1..=0);
+    assert!(zero.is_empty());
     let mut high = 255u8..=255;
     assert_eq!(high.next_back(), Some(255));
     assert_eq!(high.next_back(), None);
-    assert_eq!(high, 1..=0);
+    assert!(high.is_empty());
 
     // what happens if you have a nonsense range?
     let mut nonsense = 10..=5;
     assert_eq!(nonsense.next(), None);
-    assert_eq!(nonsense, 10..=5);
+    assert!(nonsense.is_empty());
 
     // output
     assert_eq!(format!("{:?}", 0..=10), "0..=10");
     assert_eq!(format!("{:?}", ..=10), "..=10");
-    assert_eq!(format!("{:?}", long), "1..=0");
+    assert_eq!(format!("{:?}", 9..=6), "9..=6");
+
+    // ensure that constructing a RangeInclusive does not need PartialOrd bound
+    assert_eq!(format!("{:?}", P(1)..=P(2)), "P(1)..=P(2)");
 }
diff --git a/src/test/ui-fulldeps/proc-macro/invalid-attributes.rs b/src/test/ui-fulldeps/proc-macro/invalid-attributes.rs
new file mode 100644 (file)
index 0000000..c06f98e
--- /dev/null
@@ -0,0 +1,36 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(proc_macro)]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro = "test"] //~ ERROR: does not take any arguments
+pub fn a(a: TokenStream) -> TokenStream { a }
+
+#[proc_macro()] //~ ERROR: does not take any arguments
+pub fn c(a: TokenStream) -> TokenStream { a }
+
+#[proc_macro(x)] //~ ERROR: does not take any arguments
+pub fn d(a: TokenStream) -> TokenStream { a }
+
+#[proc_macro_attribute = "test"] //~ ERROR: does not take any arguments
+pub fn e(_: TokenStream, a: TokenStream) -> TokenStream { a }
+
+#[proc_macro_attribute()] //~ ERROR: does not take any arguments
+pub fn g(_: TokenStream, a: TokenStream) -> TokenStream { a }
+
+#[proc_macro_attribute(x)] //~ ERROR: does not take any arguments
+pub fn h(_: TokenStream, a: TokenStream) -> TokenStream { a }
diff --git a/src/test/ui-fulldeps/proc-macro/invalid-attributes.stderr b/src/test/ui-fulldeps/proc-macro/invalid-attributes.stderr
new file mode 100644 (file)
index 0000000..c480bcb
--- /dev/null
@@ -0,0 +1,38 @@
+error: `#[proc_macro]` attribute does not take any arguments
+  --> $DIR/invalid-attributes.rs:20:1
+   |
+LL | #[proc_macro = "test"] //~ ERROR: does not take any arguments
+   | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: `#[proc_macro]` attribute does not take any arguments
+  --> $DIR/invalid-attributes.rs:23:1
+   |
+LL | #[proc_macro()] //~ ERROR: does not take any arguments
+   | ^^^^^^^^^^^^^^^
+
+error: `#[proc_macro]` attribute does not take any arguments
+  --> $DIR/invalid-attributes.rs:26:1
+   |
+LL | #[proc_macro(x)] //~ ERROR: does not take any arguments
+   | ^^^^^^^^^^^^^^^^
+
+error: `#[proc_macro_attribute]` attribute does not take any arguments
+  --> $DIR/invalid-attributes.rs:29:1
+   |
+LL | #[proc_macro_attribute = "test"] //~ ERROR: does not take any arguments
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `#[proc_macro_attribute]` attribute does not take any arguments
+  --> $DIR/invalid-attributes.rs:32:1
+   |
+LL | #[proc_macro_attribute()] //~ ERROR: does not take any arguments
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `#[proc_macro_attribute]` attribute does not take any arguments
+  --> $DIR/invalid-attributes.rs:35:1
+   |
+LL | #[proc_macro_attribute(x)] //~ ERROR: does not take any arguments
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 6 previous errors
+
index b879f9a33986d0b8c184a73f28d9378f38fa3aaa..39050864768ae3875f20830288e75c67f198a4b3 100644 (file)
@@ -52,9 +52,9 @@ fn supply<'a, 'b, 'c>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>, cell_c: Cell
         cell_c,
         |_outlives1, _outlives2, _outlives3, x, y| {
             // Only works if 'x: 'y:
-            let p = x.get();
+            let p = x.get(); //~ ERROR
             //~^ WARN not reporting region error due to nll
-            demand_y(x, y, p) //~ ERROR
+            demand_y(x, y, p)
         },
     );
 }
index a7a50a3a029817e099b4a2433a4ac47608c4f135..6588cbe8bdf26c297f93e52a44527fe69ecbaed3 100644 (file)
@@ -1,28 +1,28 @@
 warning: not reporting region error due to nll
   --> $DIR/propagate-approximated-fail-no-postdom.rs:55:21
    |
-LL |             let p = x.get();
+LL |             let p = x.get(); //~ ERROR
    |                     ^^^^^^^
 
 error: unsatisfied lifetime constraints
-  --> $DIR/propagate-approximated-fail-no-postdom.rs:57:13
+  --> $DIR/propagate-approximated-fail-no-postdom.rs:55:21
    |
 LL |         |_outlives1, _outlives2, _outlives3, x, y| {
    |          ----------              ---------- lifetime `'2` appears in this argument
    |          |
    |          lifetime `'1` appears in this argument
-...
-LL |             demand_y(x, y, p) //~ ERROR
-   |             ^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
+LL |             // Only works if 'x: 'y:
+LL |             let p = x.get(); //~ ERROR
+   |                     ^^^^^^^ argument requires that `'1` must outlive `'2`
 
 note: No external requirements
   --> $DIR/propagate-approximated-fail-no-postdom.rs:53:9
    |
 LL | /         |_outlives1, _outlives2, _outlives3, x, y| {
 LL | |             // Only works if 'x: 'y:
-LL | |             let p = x.get();
+LL | |             let p = x.get(); //~ ERROR
 LL | |             //~^ WARN not reporting region error due to nll
-LL | |             demand_y(x, y, p) //~ ERROR
+LL | |             demand_y(x, y, p)
 LL | |         },
    | |_________^
    |
index 96f3d6a6a533ab69ce8cde1c86487c072ed76632..8fd5e898c8d9bff140e95e3701a7118ad3d50096 100644 (file)
@@ -5,7 +5,7 @@ LL |     foo(cell, |cell_a, cell_x| {
    |     ^^^
 
 error: unsatisfied lifetime constraints
-  --> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:33:9
+  --> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:33:20
    |
 LL |     foo(cell, |cell_a, cell_x| {
    |                ------  ------ lifetime `'1` appears in this argument
@@ -13,7 +13,7 @@ LL |     foo(cell, |cell_a, cell_x| {
    |                lifetime `'2` appears in this argument
 LL |         //~^ WARNING not reporting region error due to nll
 LL |         cell_a.set(cell_x.get()); // forces 'x: 'a, error in closure
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
+   |                    ^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
 
 note: No external requirements
   --> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:31:15
index fb98c506c7d280a913b785441d731b06f4f7c1d7..c75b3e6670cdc018ff06ec43a3d3bf3eea0ef96e 100644 (file)
@@ -5,7 +5,7 @@ LL |         demand_y(x, y, x.get())
    |         ^^^^^^^^^^^^^^^^^^^^^^^
 
 error: unsatisfied lifetime constraints
-  --> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:47:9
+  --> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:47:24
    |
 LL |     establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
    |                                                ---------  - lifetime `'1` appears in this argument
@@ -13,7 +13,7 @@ LL |     establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
    |                                                lifetime `'2` appears in this argument
 LL |         // Only works if 'x: 'y:
 LL |         demand_y(x, y, x.get())
-   |         ^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
+   |                        ^^^^^^^ argument requires that `'1` must outlive `'2`
 
 note: No external requirements
   --> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:45:47
index 73d39a8502b6438060cf366c70aad478c2dd4daa..2465219ee552a809155510f9178d46559f1f5df0 100644 (file)
@@ -5,7 +5,7 @@ LL |         demand_y(x, y, x.get())
    |         ^^^^^^^^^^^^^^^^^^^^^^^
 
 error: unsatisfied lifetime constraints
-  --> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:51:9
+  --> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:51:24
    |
 LL |     establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
    |                                                ----------  ---------- lifetime `'2` appears in this argument
@@ -13,7 +13,7 @@ LL |     establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y
    |                                                lifetime `'1` appears in this argument
 LL |         // Only works if 'x: 'y:
 LL |         demand_y(x, y, x.get())
-   |         ^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
+   |                        ^^^^^^^ argument requires that `'1` must outlive `'2`
 
 note: No external requirements
   --> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:49:47
diff --git a/src/test/ui/rust-2018/auxiliary/macro-use-warned-against.rs b/src/test/ui/rust-2018/auxiliary/macro-use-warned-against.rs
new file mode 100644 (file)
index 0000000..9487fd8
--- /dev/null
@@ -0,0 +1,12 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[macro_export]
+macro_rules! foo { () => () }
diff --git a/src/test/ui/rust-2018/auxiliary/macro-use-warned-against2.rs b/src/test/ui/rust-2018/auxiliary/macro-use-warned-against2.rs
new file mode 100644 (file)
index 0000000..6391db8
--- /dev/null
@@ -0,0 +1,10 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
diff --git a/src/test/ui/rust-2018/macro-use-warned-against.rs b/src/test/ui/rust-2018/macro-use-warned-against.rs
new file mode 100644 (file)
index 0000000..f7a6b56
--- /dev/null
@@ -0,0 +1,25 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:macro-use-warned-against.rs
+// aux-build:macro-use-warned-against2.rs
+// compile-pass
+
+#![warn(rust_2018_idioms, unused)]
+#![feature(use_extern_macros)]
+
+#[macro_use] //~ WARN should be replaced at use sites with a `use` statement
+extern crate macro_use_warned_against;
+#[macro_use] //~ WARN unused `#[macro_use]`
+extern crate macro_use_warned_against2;
+
+fn main() {
+    foo!();
+}
diff --git a/src/test/ui/rust-2018/macro-use-warned-against.stderr b/src/test/ui/rust-2018/macro-use-warned-against.stderr
new file mode 100644 (file)
index 0000000..bebad31
--- /dev/null
@@ -0,0 +1,26 @@
+warning: deprecated `#[macro_use]` directive used to import macros should be replaced at use sites with a `use` statement to import the macro instead
+  --> $DIR/macro-use-warned-against.rs:18:1
+   |
+LL | #[macro_use] //~ WARN should be replaced at use sites with a `use` statement
+   | ^^^^^^^^^^^^
+   |
+note: lint level defined here
+  --> $DIR/macro-use-warned-against.rs:15:9
+   |
+LL | #![warn(rust_2018_idioms, unused)]
+   |         ^^^^^^^^^^^^^^^^
+   = note: #[warn(macro_use_extern_crate)] implied by #[warn(rust_2018_idioms)]
+
+warning: unused `#[macro_use]` import
+  --> $DIR/macro-use-warned-against.rs:20:1
+   |
+LL | #[macro_use] //~ WARN unused `#[macro_use]`
+   | ^^^^^^^^^^^^
+   |
+note: lint level defined here
+  --> $DIR/macro-use-warned-against.rs:15:27
+   |
+LL | #![warn(rust_2018_idioms, unused)]
+   |                           ^^^^^^
+   = note: #[warn(unused_imports)] implied by #[warn(unused)]
+
index c2f0687a5a994e3dbc8ec257b4812e25c2c5a433..bb20678d4a11ba707fab252518638b751a84def9 100644 (file)
@@ -492,7 +492,7 @@ fn filename(&self, component: &str, target: &str) -> String {
             format!("clippy-{}-{}.tar.gz", self.clippy_release, target)
         } else if component == "rustfmt" || component == "rustfmt-preview" {
             format!("rustfmt-{}-{}.tar.gz", self.rustfmt_release, target)
-        } else if component == "llvm_tools" {
+        } else if component == "llvm-tools" || component == "llvm-tools-preview" {
             format!("llvm-tools-{}-{}.tar.gz", self.llvm_tools_release, target)
         } else {
             format!("{}-{}-{}.tar.gz", component, self.rust_release, target)
index 408eda5ba5bb58aa6d19848ac6be270c2a15ee4d..59ddc16715d3b6e52ad8264e894a84e2f5bdeb08 100644 (file)
@@ -519,9 +519,8 @@ fn print_source(&self, src: String, pretty_type: &str) -> ProcRes {
 
     fn compare_source(&self, expected: &str, actual: &str) {
         if expected != actual {
-            self.error("pretty-printed source does not match expected source");
-            println!(
-                "\n\
+            self.fatal(&format!(
+                "pretty-printed source does not match expected source\n\
                  expected:\n\
                  ------------------------------------------\n\
                  {}\n\
@@ -531,7 +530,7 @@ fn compare_source(&self, expected: &str, actual: &str) {
                  {}\n\
                  ------------------------------------------\n\
                  \n",
-                expected, actual
+                expected, actual)
             );
         }
     }