git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #35528 - Vassah:master, r=jonathandturner
authorJonathan Turner <jonathandturner@users.noreply.github.com>
Thu, 11 Aug 2016 13:33:59 +0000 (06:33 -0700)
committerGitHub <noreply@github.com>
Thu, 11 Aug 2016 13:33:59 +0000 (06:33 -0700)
Update Error Format for E0091 and E0092

Addresses [#35228](https://github.com/rust-lang/rust/issues/35228) and [#35229](https://github.com/rust-lang/rust/issues/35229) as part of [#35233](https://github.com/rust-lang/rust/issues/35233).

Please let me know if there are any issues; first time contributor.

r? @jonathandturner

160 files changed:
.gitignore
mk/rustllvm.mk
src/bootstrap/compile.rs
src/bootstrap/lib.rs
src/doc/book/crates-and-modules.md
src/doc/book/patterns.md
src/doc/reference.md
src/libcollections/slice.rs
src/libcollections/string.rs
src/libcore/cell.rs
src/libcore/macros.rs
src/libcore/slice.rs
src/librustc/dep_graph/README.md
src/librustc/dep_graph/dep_node.rs
src/librustc/dep_graph/dep_tracking_map.rs
src/librustc/dep_graph/visit.rs
src/librustc/hir/lowering.rs
src/librustc/hir/map/def_collector.rs
src/librustc/hir/map/definitions.rs
src/librustc/hir/map/mod.rs
src/librustc/infer/error_reporting.rs
src/librustc/middle/cstore.rs
src/librustc/mir/mir_map.rs
src/librustc/mir/transform.rs
src/librustc/session/config.rs
src/librustc/session/mod.rs
src/librustc/ty/context.rs
src/librustc/ty/ivar.rs
src/librustc/ty/mod.rs
src/librustc_back/target/mod.rs
src/librustc_borrowck/borrowck/mod.rs
src/librustc_const_eval/check_match.rs
src/librustc_const_eval/eval.rs
src/librustc_data_structures/bitvec.rs
src/librustc_data_structures/graph/mod.rs
src/librustc_data_structures/transitive_relation.rs
src/librustc_driver/driver.rs
src/librustc_driver/lib.rs
src/librustc_driver/pretty.rs
src/librustc_driver/test.rs
src/librustc_errors/emitter.rs
src/librustc_errors/lib.rs
src/librustc_errors/snippet.rs
src/librustc_incremental/assert_dep_graph.rs
src/librustc_incremental/calculate_svh.rs [deleted file]
src/librustc_incremental/calculate_svh/mod.rs [new file with mode: 0644]
src/librustc_incremental/calculate_svh/svh_visitor.rs [new file with mode: 0644]
src/librustc_incremental/persist/data.rs
src/librustc_incremental/persist/directory.rs
src/librustc_incremental/persist/dirty_clean.rs
src/librustc_incremental/persist/hash.rs
src/librustc_incremental/persist/load.rs
src/librustc_incremental/persist/mod.rs
src/librustc_incremental/persist/preds.rs [new file with mode: 0644]
src/librustc_incremental/persist/save.rs
src/librustc_llvm/build.rs
src/librustc_llvm/ffi.rs
src/librustc_metadata/astencode.rs
src/librustc_metadata/csearch.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/def_key.rs
src/librustc_metadata/encoder.rs
src/librustc_mir/graphviz.rs
src/librustc_mir/hair/cx/mod.rs
src/librustc_mir/mir_map.rs
src/librustc_mir/pretty.rs
src/librustc_mir/transform/qualify_consts.rs
src/librustc_mir/transform/type_check.rs
src/librustc_passes/ast_validation.rs
src/librustc_resolve/lib.rs
src/librustc_trans/back/symbol_names.rs
src/librustc_trans/back/write.rs
src/librustc_trans/common.rs
src/librustc_trans/consts.rs
src/librustc_trans/context.rs
src/librustc_trans/debuginfo/metadata.rs
src/librustc_trans/trans_item.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/_match.rs
src/librustc_typeck/check/compare_method.rs
src/librustc_typeck/check/method/probe.rs
src/librustc_typeck/check/method/suggest.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/coherence/mod.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/lib.rs
src/librustdoc/core.rs
src/librustdoc/test.rs
src/libstd/collections/hash/map.rs
src/libstd/error.rs
src/libstd/ffi/c_str.rs
src/libstd/lib.rs
src/libstd/panic.rs
src/libstd/panicking.rs
src/libstd/rt.rs
src/libstd/sys/unix/os.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/mod.rs
src/libtest/lib.rs
src/llvm
src/rustllvm/PassWrapper.cpp
src/rustllvm/llvm-auto-clean-trigger
src/test/compile-fail/E0017.rs
src/test/compile-fail/E0023.rs
src/test/compile-fail/E0026.rs
src/test/compile-fail/E0081.rs
src/test/compile-fail/E0087.rs
src/test/compile-fail/E0130.rs
src/test/compile-fail/E0162.rs
src/test/compile-fail/E0191.rs
src/test/compile-fail/E0205.rs
src/test/compile-fail/E0206.rs
src/test/compile-fail/E0214.rs
src/test/compile-fail/E0248.rs
src/test/compile-fail/associated-const-impl-wrong-type.rs
src/test/compile-fail/associated-type-projection-from-multiple-supertraits.rs
src/test/compile-fail/coherence-impls-copy.rs
src/test/compile-fail/issue-15524.rs
src/test/compile-fail/issue-23024.rs
src/test/compile-fail/issue-33784.rs [new file with mode: 0644]
src/test/compile-fail/no-patterns-in-args.rs
src/test/incremental/callee_caller_cross_crate/b.rs
src/test/incremental/dirty_clean.rs
src/test/incremental/foreign.rs [new file with mode: 0644]
src/test/incremental/hello_world.rs
src/test/incremental/ich_method_call_trait_scope.rs [new file with mode: 0644]
src/test/incremental/ich_nested_items.rs [new file with mode: 0644]
src/test/incremental/ich_resolve_results.rs [new file with mode: 0644]
src/test/incremental/inlined_hir_34991/main.rs [new file with mode: 0644]
src/test/incremental/krate_reassign_34991/auxiliary/a.rs [new file with mode: 0644]
src/test/incremental/krate_reassign_34991/main.rs [new file with mode: 0644]
src/test/incremental/rlib_cross_crate/b.rs
src/test/incremental/spike.rs
src/test/incremental/string_constant.rs
src/test/incremental/struct_add_field.rs
src/test/incremental/struct_change_field_name.rs
src/test/incremental/struct_change_field_type.rs
src/test/incremental/struct_change_field_type_cross_crate/b.rs
src/test/incremental/struct_change_nothing.rs
src/test/incremental/struct_remove_field.rs
src/test/incremental/type_alias_cross_crate/b.rs
src/test/run-make/error-found-staticlib-instead-crate/Makefile
src/test/run-make/many-crates-but-no-match/Makefile
src/test/run-make/missing-crate-dependency/Makefile
src/test/run-make/unicode-input/Makefile [deleted file]
src/test/run-make/unicode-input/multiple_files.rs [deleted file]
src/test/run-make/unicode-input/span_length.rs [deleted file]
src/test/run-pass/issue-29053.rs [new file with mode: 0644]
src/test/run-pass/issue-33498.rs [new file with mode: 0644]
src/test/run-pass/slice_binary_search.rs [new file with mode: 0644]
src/tools/cargotest/main.rs
src/tools/compiletest/src/common.rs
src/tools/compiletest/src/errors.rs
src/tools/compiletest/src/header.rs
src/tools/compiletest/src/json.rs
src/tools/compiletest/src/procsrv.rs
src/tools/compiletest/src/raise_fd_limit.rs
src/tools/compiletest/src/runtest.rs
src/tools/compiletest/src/uidiff.rs
src/tools/compiletest/src/util.rs

index 5e8c40d03fbeff96ea8c04fe9f9717056b285663..6de43f471d8860229cb72c236b260d9303de1553 100644 (file)
@@ -57,6 +57,7 @@ __pycache__/
 .project
 .settings/
 .valgrindrc
+.vscode/
 /*-*-*-*/
 /*-*-*/
 /Makefile
index b50dbd01ad0cc0071bcf20cf8b5ba366ca724dc2..2d63f69960f78a949b952d4fdf8e916c1559c398 100644 (file)
@@ -32,6 +32,11 @@ RUSTLLVM_INCS_$(1) = $$(LLVM_EXTRA_INCDIRS_$(1)) \
                      $$(call CFG_CC_INCLUDE_$(1),$$(S)src/rustllvm/include)
 RUSTLLVM_OBJS_OBJS_$(1) := $$(RUSTLLVM_OBJS_CS_$(1):rustllvm/%.cpp=$(1)/rustllvm/%.o)
 
+# Flag that we are building with Rust's llvm fork
+ifeq ($(CFG_LLVM_ROOT),)
+RUSTLLVM_CXXFLAGS_$(1) := -DLLVM_RUSTLLVM
+endif
+
 # Note that we appease `cl.exe` and its need for some sort of exception
 # handling flag with the `EHsc` argument here as well.
 ifeq ($$(findstring msvc,$(1)),msvc)
@@ -55,6 +60,7 @@ $(1)/rustllvm/%.o: $(S)src/rustllvm/%.cpp $$(MKFILE_DEPS) $$(LLVM_CONFIG_$(1))
        $$(Q)$$(call CFG_COMPILE_CXX_$(1), $$@,) \
                $$(subst  /,//,$$(LLVM_CXXFLAGS_$(1))) \
                $$(RUSTLLVM_COMPONENTS_$(1)) \
+               $$(RUSTLLVM_CXXFLAGS_$(1)) \
                $$(EXTRA_RUSTLLVM_CXXFLAGS_$(1)) \
                $$(RUSTLLVM_INCS_$(1)) \
                $$<
index 061192ebd1340b318aedbea2441339eebb4282fe..155848901cdb49c8919d1d77f9d0756f18328b4d 100644 (file)
@@ -198,6 +198,10 @@ pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
     if !build.unstable_features {
         cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
     }
+    // Flag that rust llvm is in use
+    if build.is_rust_llvm(target) {
+        cargo.env("LLVM_RUSTLLVM", "1");
+    }
     cargo.env("LLVM_CONFIG", build.llvm_config(target));
     if build.config.llvm_static_stdcpp {
         cargo.env("LLVM_STATIC_STDCPP",
index acb7e0fadd90a5049aeed4de71982625d2cc5a75..5d61abe5e086ad23c54c2d2e42a70ece43d30ff7 100644 (file)
@@ -727,6 +727,16 @@ fn llvm_out(&self, target: &str) -> PathBuf {
         self.out.join(target).join("llvm")
     }
 
+    /// Returns true if no custom `llvm-config` is set for the specified target.
+    ///
+    /// If no custom `llvm-config` was specified then Rust's llvm will be used.
+    fn is_rust_llvm(&self, target: &str) -> bool {
+        match self.config.target_config.get(target) {
+            Some(ref c) => c.llvm_config.is_none(),
+            None => true
+        }
+    }
+
     /// Returns the path to `llvm-config` for the specified target.
     ///
     /// If a custom `llvm-config` was specified for target then that's returned
index 67fe8ba2c11a48e0d5bde95d9a769caeef0a4541..fcb7e0bc7eacd9e2caff84ff60d71026264c9075 100644 (file)
@@ -22,6 +22,7 @@ As an example, let’s make a *phrases* crate, which will give us various phrase
 in different languages. To keep things simple, we’ll stick to ‘greetings’ and
 ‘farewells’ as two kinds of phrases, and use English and Japanese (日本語) as
 two languages for those phrases to be in. We’ll use this module layout:
+
 ```text
                                     +-----------+
                                 +---| greetings |
index a0245d4c7b163f724157366db677946dbdd1d8f3..910b13754767facc46acd73885ae9ba02ef73fd1 100644 (file)
@@ -109,14 +109,14 @@ struct Point {
     y: i32,
 }
 
-let origin = Point { x: 0, y: 0 };
+let point = Point { x: 2, y: 3 };
 
-match origin {
+match point {
     Point { x, .. } => println!("x is {}", x),
 }
 ```
 
-This prints `x is 0`.
+This prints `x is 2`.
 
 You can do this kind of match on any member, not only the first:
 
@@ -126,14 +126,14 @@ struct Point {
     y: i32,
 }
 
-let origin = Point { x: 0, y: 0 };
+let point = Point { x: 2, y: 3 };
 
-match origin {
+match point {
     Point { y, .. } => println!("y is {}", y),
 }
 ```
 
-This prints `y is 0`.
+This prints `y is 3`.
 
 This ‘destructuring’ behavior works on any compound data type, like
 [tuples][tuples] or [enums][enums].
index f4ffe5774d27cd0f3dd121f42bb89255e0e11da6..f0ab1488d4015962f432ba4971aeb0b9bc2a2f79 100644 (file)
@@ -3039,7 +3039,7 @@ The precedence of Rust binary operators is ordered as follows, going from
 strong to weak:
 
 ```{.text .precedence}
-as
+as :
 * / %
 + -
 << >>
@@ -3050,6 +3050,7 @@ as
 &&
 ||
 .. ...
+<-
 =
 ```
 
index ff2b8cdea22789076a31416392827ecfee6ba742..5cdf4ee88c00cd511016b2620908251d1f0dc126 100644 (file)
@@ -973,8 +973,8 @@ pub fn binary_search(&self, x: &T) -> Result<usize, usize>
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
-    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
-        where F: FnMut(&T) -> Ordering
+    pub fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
+        where F: FnMut(&'a T) -> Ordering
     {
         core_slice::SliceExt::binary_search_by(self, f)
     }
@@ -1009,8 +1009,8 @@ pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
     /// ```
     #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
     #[inline]
-    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
-        where F: FnMut(&T) -> B,
+    pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize>
+        where F: FnMut(&'a T) -> B,
               B: Ord
     {
         core_slice::SliceExt::binary_search_by_key(self, b, f)
index 06952253ef3b09540017a1211dccd8e75b21bfe5..70b514afd035f1d17d446c0dbbee6f158b1eb950 100644 (file)
@@ -1874,6 +1874,27 @@ fn from(s: String) -> Cow<'a, str> {
     }
 }
 
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a> FromIterator<char> for Cow<'a, str> {
+    fn from_iter<I: IntoIterator<Item = char>>(it: I) -> Cow<'a, str> {
+        Cow::Owned(FromIterator::from_iter(it))
+    }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a, 'b> FromIterator<&'b str> for Cow<'a, str> {
+    fn from_iter<I: IntoIterator<Item = &'b str>>(it: I) -> Cow<'a, str> {
+        Cow::Owned(FromIterator::from_iter(it))
+    }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a> FromIterator<String> for Cow<'a, str> {
+    fn from_iter<I: IntoIterator<Item = String>>(it: I) -> Cow<'a, str> {
+        Cow::Owned(FromIterator::from_iter(it))
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl Into<Vec<u8>> for String {
     fn into(self) -> Vec<u8> {
index 06af200e47839b9c5a5d5ed6aa6c249264a71fb5..434084d3af8653f9888fb3f41cc75c1a19f5ffd2 100644 (file)
 use clone::Clone;
 use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering};
 use default::Default;
-use marker::{Copy, Send, Sync, Sized, Unsize};
+use fmt::{self, Debug, Display};
+use marker::{Copy, PhantomData, Send, Sync, Sized, Unsize};
 use ops::{Deref, DerefMut, Drop, FnOnce, CoerceUnsized};
 use option::Option;
 use option::Option::{None, Some};
+use result::Result;
+use result::Result::{Ok, Err};
 
 /// A mutable memory location that admits only `Copy` data.
 ///
@@ -347,6 +350,46 @@ pub enum BorrowState {
     Unused,
 }
 
+/// An error returned by [`RefCell::try_borrow`](struct.RefCell.html#method.try_borrow).
+#[unstable(feature = "try_borrow", issue = "35070")]
+pub struct BorrowError<'a, T: 'a + ?Sized> {
+    marker: PhantomData<&'a RefCell<T>>,
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized> Debug for BorrowError<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("BorrowError").finish()
+    }
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized> Display for BorrowError<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Display::fmt("already mutably borrowed", f)
+    }
+}
+
+/// An error returned by [`RefCell::try_borrow_mut`](struct.RefCell.html#method.try_borrow_mut).
+#[unstable(feature = "try_borrow", issue = "35070")]
+pub struct BorrowMutError<'a, T: 'a + ?Sized> {
+    marker: PhantomData<&'a RefCell<T>>,
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized> Debug for BorrowMutError<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("BorrowMutError").finish()
+    }
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized> Display for BorrowMutError<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        Display::fmt("already borrowed", f)
+    }
+}
+
 // Values [1, MAX-1] represent the number of `Ref` active
 // (will not outgrow its range since `usize` is the size of the address space)
 type BorrowFlag = usize;
@@ -432,7 +475,8 @@ pub fn borrow_state(&self) -> BorrowState {
     ///
     /// # Panics
     ///
-    /// Panics if the value is currently mutably borrowed.
+    /// Panics if the value is currently mutably borrowed. For a non-panicking variant, use
+    /// [`try_borrow`](#method.try_borrow).
     ///
     /// # Examples
     ///
@@ -463,12 +507,45 @@ pub fn borrow_state(&self) -> BorrowState {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     pub fn borrow(&self) -> Ref<T> {
+        self.try_borrow().expect("already mutably borrowed")
+    }
+
+    /// Immutably borrows the wrapped value, returning an error if the value is currently mutably
+    /// borrowed.
+    ///
+    /// The borrow lasts until the returned `Ref` exits scope. Multiple immutable borrows can be
+    /// taken out at the same time.
+    ///
+    /// This is the non-panicking variant of [`borrow`](#method.borrow).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(try_borrow)]
+    ///
+    /// use std::cell::RefCell;
+    ///
+    /// let c = RefCell::new(5);
+    ///
+    /// {
+    ///     let m = c.borrow_mut();
+    ///     assert!(c.try_borrow().is_err());
+    /// }
+    ///
+    /// {
+    ///     let m = c.borrow();
+    ///     assert!(c.try_borrow().is_ok());
+    /// }
+    /// ```
+    #[unstable(feature = "try_borrow", issue = "35070")]
+    #[inline]
+    pub fn try_borrow(&self) -> Result<Ref<T>, BorrowError<T>> {
         match BorrowRef::new(&self.borrow) {
-            Some(b) => Ref {
+            Some(b) => Ok(Ref {
                 value: unsafe { &*self.value.get() },
                 borrow: b,
-            },
-            None => panic!("RefCell<T> already mutably borrowed"),
+            }),
+            None => Err(BorrowError { marker: PhantomData }),
         }
     }
 
@@ -479,7 +556,8 @@ pub fn borrow(&self) -> Ref<T> {
     ///
     /// # Panics
     ///
-    /// Panics if the value is currently borrowed.
+    /// Panics if the value is currently borrowed. For a non-panicking variant, use
+    /// [`try_borrow_mut`](#method.try_borrow_mut).
     ///
     /// # Examples
     ///
@@ -511,12 +589,41 @@ pub fn borrow(&self) -> Ref<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     pub fn borrow_mut(&self) -> RefMut<T> {
+        self.try_borrow_mut().expect("already borrowed")
+    }
+
+    /// Mutably borrows the wrapped value, returning an error if the value is currently borrowed.
+    ///
+    /// The borrow lasts until the returned `RefMut` exits scope. The value cannot be borrowed
+    /// while this borrow is active.
+    ///
+    /// This is the non-panicking variant of [`borrow_mut`](#method.borrow_mut).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(try_borrow)]
+    ///
+    /// use std::cell::RefCell;
+    ///
+    /// let c = RefCell::new(5);
+    ///
+    /// {
+    ///     let m = c.borrow();
+    ///     assert!(c.try_borrow_mut().is_err());
+    /// }
+    ///
+    /// assert!(c.try_borrow_mut().is_ok());
+    /// ```
+    #[unstable(feature = "try_borrow", issue = "35070")]
+    #[inline]
+    pub fn try_borrow_mut(&self) -> Result<RefMut<T>, BorrowMutError<T>> {
         match BorrowRefMut::new(&self.borrow) {
-            Some(b) => RefMut {
+            Some(b) => Ok(RefMut {
                 value: unsafe { &mut *self.value.get() },
                 borrow: b,
-            },
-            None => panic!("RefCell<T> already borrowed"),
+            }),
+            None => Err(BorrowMutError { marker: PhantomData }),
         }
     }
 
index b0c79a3a88547b0551436df2d562bc4ea6d0c6d9..c916ad930ff105988b7be772c07b45a595f3aa25 100644 (file)
@@ -229,14 +229,28 @@ macro_rules! try {
     })
 }
 
-/// Use the `format!` syntax to write data into a buffer.
+/// Write formatted data into a buffer
 ///
-/// This macro is typically used with a buffer of `&mut `[`Write`][write].
+/// This macro accepts any value with `write_fmt` method as a writer, a format string, and a list
+/// of arguments to format.
+///
+/// `write_fmt` method usually comes from an implementation of [`std::fmt::Write`][fmt_write] or
+/// [`std::io::Write`][io_write] traits. These are sometimes called 'writers'.
+///
+/// Passed arguments will be formatted according to the specified format string and the resulting
+/// string will be passed to the writer.
 ///
 /// See [`std::fmt`][fmt] for more information on format syntax.
 ///
+/// Return value is completely dependent on the 'write_fmt' method.
+///
+/// Common return values are: [`Result`][enum_result], [`io::Result`][type_result]
+///
 /// [fmt]: ../std/fmt/index.html
-/// [write]: ../std/io/trait.Write.html
+/// [fmt_write]: ../std/fmt/trait.Write.html
+/// [io_write]: ../std/io/trait.Write.html
+/// [enum_result]: ../std/result/enum.Result.html
+/// [type_result]: ../std/io/type.Result.html
 ///
 /// # Examples
 ///
@@ -255,16 +269,31 @@ macro_rules! write {
     ($dst:expr, $($arg:tt)*) => ($dst.write_fmt(format_args!($($arg)*)))
 }
 
-/// Use the `format!` syntax to write data into a buffer, appending a newline.
-/// On all platforms, the newline is the LINE FEED character (`\n`/`U+000A`)
-/// alone (no additional CARRIAGE RETURN (`\r`/`U+000D`).
+/// Write formatted data into a buffer, with appending a newline.
+///
+/// On all platforms, the newline is the LINE FEED character (`\n`/`U+000A`) alone
+/// (no additional CARRIAGE RETURN (`\r`/`U+000D`).
 ///
-/// This macro is typically used with a buffer of `&mut `[`Write`][write].
+/// This macro accepts any value with `write_fmt` method as a writer, a format string, and a list
+/// of arguments to format.
+///
+/// `write_fmt` method usually comes from an implementation of [`std::fmt::Write`][fmt_write] or
+/// [`std::io::Write`][io_write] traits. These are sometimes called 'writers'.
+///
+/// Passed arguments will be formatted according to the specified format string and the resulting
+/// string will be passed to the writer.
 ///
 /// See [`std::fmt`][fmt] for more information on format syntax.
 ///
+/// Return value is completely dependent on the 'write_fmt' method.
+///
+/// Common return values are: [`Result`][enum_result], [`io::Result`][type_result]
+///
 /// [fmt]: ../std/fmt/index.html
-/// [write]: ../std/io/trait.Write.html
+/// [fmt_write]: ../std/fmt/trait.Write.html
+/// [io_write]: ../std/io/trait.Write.html
+/// [enum_result]: ../std/result/enum.Result.html
+/// [type_result]: ../std/io/type.Result.html
 ///
 /// # Examples
 ///
index d8a11581c3b69be0de75868e938add9a8d135a20..3141c289e931ce0b5e001213edec511ca0dbe996 100644 (file)
@@ -105,11 +105,11 @@ fn rsplitn<P>(&self,  n: usize, pred: P) -> RSplitN<Self::Item, P>
     fn binary_search(&self, x: &Self::Item) -> Result<usize, usize>
         where Self::Item: Ord;
     #[stable(feature = "core", since = "1.6.0")]
-    fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
-        where F: FnMut(&Self::Item) -> Ordering;
+    fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
+        where F: FnMut(&'a Self::Item) -> Ordering;
     #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
-    fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
-        where F: FnMut(&Self::Item) -> B,
+    fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize>
+        where F: FnMut(&'a Self::Item) -> B,
               B: Ord;
     #[stable(feature = "core", since = "1.6.0")]
     fn len(&self) -> usize;
@@ -301,8 +301,8 @@ fn as_ptr(&self) -> *const T {
         self as *const [T] as *const T
     }
 
-    fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize> where
-        F: FnMut(&T) -> Ordering
+    fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
+        where F: FnMut(&'a T) -> Ordering
     {
         let mut base = 0usize;
         let mut s = self;
@@ -514,8 +514,8 @@ fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
     }
 
     #[inline]
-    fn binary_search_by_key<B, F>(&self, b: &B, mut f: F) -> Result<usize, usize>
-        where F: FnMut(&Self::Item) -> B,
+    fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
+        where F: FnMut(&'a Self::Item) -> B,
               B: Ord
     {
         self.binary_search_by(|k| f(k).cmp(b))
index ece5819829baa67b05adbca4f30a3375dfeda7a7..f16a9b386bb8aeb3c985b6d227cc92a86fc45198 100644 (file)
@@ -134,6 +134,10 @@ to read from it. Similarly, reading from the `tcache` map for item `X`
 (which is a `DepTrackingMap`, described below) automatically invokes
 `dep_graph.read(ItemSignature(X))`.
 
+**Note:** adding `Hir` nodes requires a bit of caution due to the
+"inlining" that old trans and constant evaluation still use. See the
+section on inlining below.
+
 To make this strategy work, a certain amount of indirection is
 required. For example, modules in the HIR do not have direct pointers
 to the items that they contain. Rather, they contain node-ids -- one
@@ -387,3 +391,24 @@ RUST_DEP_GRAPH_FILTER='Hir&foo -> TypeckItemBody & bar'
 This will dump out all the nodes that lead from `Hir(foo)` to
 `TypeckItemBody(bar)`, from which you can (hopefully) see the source
 of the erroneous edge.
+
+### Inlining of HIR nodes
+
+For the time being, at least, we still sometimes "inline" HIR nodes
+from other crates into the current HIR map. This creates a weird
+scenario where the same logical item (let's call it `X`) has two
+def-ids: the original def-id `X` and a new, inlined one `X'`. `X'` is
+in the current crate, but it's not like other HIR nodes: in
+particular, when we restart compilation, it will not be available to
+hash. Therefore, we do not want `Hir(X')` nodes appearing in our
+graph.  Instead, we want a "read" of `Hir(X')` to be represented as a
+read of `MetaData(X)`, since the metadata for `X` is where the inlined
+representation originated in the first place.
+
+To achieve this, the HIR map will detect if the def-id originates in
+an inlined node and add a dependency to a suitable `MetaData` node
+instead. If you are reading a HIR node and are not sure if it may be
+inlined or not, you can use `tcx.map.read(node_id)` and it will detect
+whether the node is inlined or not and do the right thing.  You can
+also use `tcx.map.is_inlined_def_id()` and
+`tcx.map.is_inlined_node_id()` to test.
index c9247539990a9a8273508a2ea673a6ce6d65a0aa..40fd3dede3d08b5399312d8fc2fa82719c188d42 100644 (file)
@@ -20,7 +20,7 @@ macro_rules! try_opt {
     )
 }
 
-#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
 pub enum DepNode<D: Clone + Debug> {
     // The `D` type is "how definitions are identified".
     // During compilation, it is always `DefId`, but when serializing
@@ -82,9 +82,11 @@ pub enum DepNode<D: Clone + Debug> {
     Privacy,
     IntrinsicCheck(D),
     MatchCheck(D),
-    MirMapConstruction(D),
-    MirPass(D),
-    MirTypeck(D),
+
+    // Represents the MIR for a fn; also used as the task node for
+    // things read/modify that MIR.
+    Mir(D),
+
     BorrowCheck(D),
     RvalueCheck(D),
     Reachability,
@@ -148,6 +150,7 @@ macro_rules! check {
         check! {
             CollectItem,
             BorrowCheck,
+            Hir,
             TransCrateItem,
             TypeckItemType,
             TypeckItemBody,
@@ -214,9 +217,7 @@ pub fn map_def<E, OP>(&self, mut op: OP) -> Option<DepNode<E>>
             CheckConst(ref d) => op(d).map(CheckConst),
             IntrinsicCheck(ref d) => op(d).map(IntrinsicCheck),
             MatchCheck(ref d) => op(d).map(MatchCheck),
-            MirMapConstruction(ref d) => op(d).map(MirMapConstruction),
-            MirPass(ref d) => op(d).map(MirPass),
-            MirTypeck(ref d) => op(d).map(MirTypeck),
+            Mir(ref d) => op(d).map(Mir),
             BorrowCheck(ref d) => op(d).map(BorrowCheck),
             RvalueCheck(ref d) => op(d).map(RvalueCheck),
             TransCrateItem(ref d) => op(d).map(TransCrateItem),
@@ -245,6 +246,6 @@ pub fn map_def<E, OP>(&self, mut op: OP) -> Option<DepNode<E>>
 /// some independent path or string that persists between runs without
 /// the need to be mapped or unmapped. (This ensures we can serialize
 /// them even in the absence of a tcx.)
-#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
 pub struct WorkProductId(pub String);
 
index 922d32a3067969f626a299f4a19d01ddbc83f69b..88cd1efd3459ac6f703ea35c57f45ac3a5e4d66d 100644 (file)
@@ -61,6 +61,12 @@ pub fn get(&self, k: &M::Key) -> Option<&M::Value> {
         self.map.get(k)
     }
 
+    pub fn get_mut(&mut self, k: &M::Key) -> Option<&mut M::Value> {
+        self.read(k);
+        self.write(k);
+        self.map.get_mut(k)
+    }
+
     pub fn insert(&mut self, k: M::Key, v: M::Value) -> Option<M::Value> {
         self.write(&k);
         self.map.insert(k, v)
@@ -70,6 +76,10 @@ pub fn contains_key(&self, k: &M::Key) -> bool {
         self.read(k);
         self.map.contains_key(k)
     }
+
+    pub fn keys(&self) -> Vec<M::Key> {
+        self.map.keys().cloned().collect()
+    }
 }
 
 impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> {
index 5dd71db2f1832870b8e87171499b772c32acf02e..d085c24036cef88ec83b52e4772ff4a0fdebaecd 100644 (file)
@@ -41,6 +41,7 @@ fn visit_item(&mut self, i: &'tcx hir::Item) {
             let task_id = (self.dep_node_fn)(item_def_id);
             let _task = self.tcx.dep_graph.in_task(task_id.clone());
             debug!("Started task {:?}", task_id);
+            assert!(!self.tcx.map.is_inlined_def_id(item_def_id));
             self.tcx.dep_graph.read(DepNode::Hir(item_def_id));
             self.visitor.visit_item(i);
             debug!("Ended task {:?}", task_id);
index 9d124dadb766adb911324f8a5cdc78f1eed57e38..789b70ccfa412998157b36c220c0715baafc33e3 100644 (file)
@@ -1854,7 +1854,7 @@ fn pat_ident_binding_mode(&mut self, span: Span, name: Name, bm: hir::BindingMod
 
         let parent_def = self.parent_def;
         let def = self.resolver.definitions().map(|defs| {
-            let def_path_data = DefPathData::Binding(name);
+            let def_path_data = DefPathData::Binding(name.as_str());
             let def_index = defs.create_def_with_parent(parent_def, pat.id, def_path_data);
             Def::Local(DefId::local(def_index), pat.id)
         }).unwrap_or(Def::Err);
index 2b89695ab41cad2fdbb39f846547a314f1ece101..58bbd8add26d96e660b1164e9e02675185c210b0 100644 (file)
@@ -135,11 +135,11 @@ fn visit_item(&mut self, i: &Item) {
                 DefPathData::Impl,
             ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Trait(..) |
             ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) =>
-                DefPathData::TypeNs(i.ident.name),
-            ItemKind::Mod(..) => DefPathData::Module(i.ident.name),
+                DefPathData::TypeNs(i.ident.name.as_str()),
+            ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_str()),
             ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
-                DefPathData::ValueNs(i.ident.name),
-            ItemKind::Mac(..) => DefPathData::MacroDef(i.ident.name),
+                DefPathData::ValueNs(i.ident.name.as_str()),
+            ItemKind::Mac(..) => DefPathData::MacroDef(i.ident.name.as_str()),
             ItemKind::Use(..) => DefPathData::Misc,
         };
         let def = self.create_def(i.id, def_data);
@@ -150,12 +150,12 @@ fn visit_item(&mut self, i: &Item) {
                     for v in &enum_definition.variants {
                         let variant_def_index =
                             this.create_def(v.node.data.id(),
-                                            DefPathData::EnumVariant(v.node.name.name));
+                                            DefPathData::EnumVariant(v.node.name.name.as_str()));
                         this.with_parent(variant_def_index, |this| {
                             for (index, field) in v.node.data.fields().iter().enumerate() {
                                 let name = field.ident.map(|ident| ident.name)
                                     .unwrap_or_else(|| token::intern(&index.to_string()));
-                                this.create_def(field.id, DefPathData::Field(name));
+                                this.create_def(field.id, DefPathData::Field(name.as_str()));
                             }
 
                             if let Some(ref expr) = v.node.disr_expr {
@@ -172,8 +172,8 @@ fn visit_item(&mut self, i: &Item) {
                     }
 
                     for (index, field) in struct_def.fields().iter().enumerate() {
-                        let name = field.ident.map(|ident| ident.name)
-                            .unwrap_or(token::intern(&index.to_string()));
+                        let name = field.ident.map(|ident| ident.name.as_str())
+                            .unwrap_or(token::intern(&index.to_string()).as_str());
                         this.create_def(field.id, DefPathData::Field(name));
                     }
                 }
@@ -184,7 +184,8 @@ fn visit_item(&mut self, i: &Item) {
     }
 
     fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
-        let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.ident.name));
+        let def = self.create_def(foreign_item.id,
+                                  DefPathData::ValueNs(foreign_item.ident.name.as_str()));
 
         self.with_parent(def, |this| {
             visit::walk_foreign_item(this, foreign_item);
@@ -193,7 +194,7 @@ fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
 
     fn visit_generics(&mut self, generics: &Generics) {
         for ty_param in generics.ty_params.iter() {
-            self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name));
+            self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name.as_str()));
         }
 
         visit::walk_generics(self, generics);
@@ -202,9 +203,9 @@ fn visit_generics(&mut self, generics: &Generics) {
     fn visit_trait_item(&mut self, ti: &TraitItem) {
         let def_data = match ti.node {
             TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
-                DefPathData::ValueNs(ti.ident.name),
-            TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name),
-            TraitItemKind::Macro(..) => DefPathData::MacroDef(ti.ident.name),
+                DefPathData::ValueNs(ti.ident.name.as_str()),
+            TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name.as_str()),
+            TraitItemKind::Macro(..) => DefPathData::MacroDef(ti.ident.name.as_str()),
         };
 
         let def = self.create_def(ti.id, def_data);
@@ -220,9 +221,9 @@ fn visit_trait_item(&mut self, ti: &TraitItem) {
     fn visit_impl_item(&mut self, ii: &ImplItem) {
         let def_data = match ii.node {
             ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
-                DefPathData::ValueNs(ii.ident.name),
-            ImplItemKind::Type(..) => DefPathData::TypeNs(ii.ident.name),
-            ImplItemKind::Macro(..) => DefPathData::MacroDef(ii.ident.name),
+                DefPathData::ValueNs(ii.ident.name.as_str()),
+            ImplItemKind::Type(..) => DefPathData::TypeNs(ii.ident.name.as_str()),
+            ImplItemKind::Macro(..) => DefPathData::MacroDef(ii.ident.name.as_str()),
         };
 
         let def = self.create_def(ii.id, def_data);
@@ -239,7 +240,7 @@ fn visit_pat(&mut self, pat: &Pat) {
         let parent_def = self.parent_def;
 
         if let PatKind::Ident(_, id, _) = pat.node {
-            let def = self.create_def(pat.id, DefPathData::Binding(id.node.name));
+            let def = self.create_def(pat.id, DefPathData::Binding(id.node.name.as_str()));
             self.parent_def = Some(def);
         }
 
@@ -271,11 +272,11 @@ fn visit_ty(&mut self, ty: &Ty) {
     }
 
     fn visit_lifetime_def(&mut self, def: &LifetimeDef) {
-        self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name));
+        self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
     }
 
     fn visit_macro_def(&mut self, macro_def: &MacroDef) {
-        self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name));
+        self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
     }
 }
 
@@ -301,9 +302,9 @@ fn visit_item(&mut self, i: &'ast hir::Item) {
             hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemTrait(..) |
             hir::ItemExternCrate(..) | hir::ItemMod(..) | hir::ItemForeignMod(..) |
             hir::ItemTy(..) =>
-                DefPathData::TypeNs(i.name),
+                DefPathData::TypeNs(i.name.as_str()),
             hir::ItemStatic(..) | hir::ItemConst(..) | hir::ItemFn(..) =>
-                DefPathData::ValueNs(i.name),
+                DefPathData::ValueNs(i.name.as_str()),
             hir::ItemUse(..) => DefPathData::Misc,
         };
         let def = self.create_def(i.id, def_data);
@@ -314,12 +315,12 @@ fn visit_item(&mut self, i: &'ast hir::Item) {
                     for v in &enum_definition.variants {
                         let variant_def_index =
                             this.create_def(v.node.data.id(),
-                                            DefPathData::EnumVariant(v.node.name));
+                                            DefPathData::EnumVariant(v.node.name.as_str()));
 
                         this.with_parent(variant_def_index, |this| {
                             for field in v.node.data.fields() {
                                 this.create_def(field.id,
-                                                DefPathData::Field(field.name));
+                                                DefPathData::Field(field.name.as_str()));
                             }
                             if let Some(ref expr) = v.node.disr_expr {
                                 this.visit_hir_const_integer(expr);
@@ -335,7 +336,7 @@ fn visit_item(&mut self, i: &'ast hir::Item) {
                     }
 
                     for field in struct_def.fields() {
-                        this.create_def(field.id, DefPathData::Field(field.name));
+                        this.create_def(field.id, DefPathData::Field(field.name.as_str()));
                     }
                 }
                 _ => {}
@@ -345,7 +346,8 @@ fn visit_item(&mut self, i: &'ast hir::Item) {
     }
 
     fn visit_foreign_item(&mut self, foreign_item: &'ast hir::ForeignItem) {
-        let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.name));
+        let def = self.create_def(foreign_item.id,
+                                  DefPathData::ValueNs(foreign_item.name.as_str()));
 
         self.with_parent(def, |this| {
             intravisit::walk_foreign_item(this, foreign_item);
@@ -354,7 +356,7 @@ fn visit_foreign_item(&mut self, foreign_item: &'ast hir::ForeignItem) {
 
     fn visit_generics(&mut self, generics: &'ast hir::Generics) {
         for ty_param in generics.ty_params.iter() {
-            self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.name));
+            self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.name.as_str()));
         }
 
         intravisit::walk_generics(self, generics);
@@ -363,8 +365,8 @@ fn visit_generics(&mut self, generics: &'ast hir::Generics) {
     fn visit_trait_item(&mut self, ti: &'ast hir::TraitItem) {
         let def_data = match ti.node {
             hir::MethodTraitItem(..) | hir::ConstTraitItem(..) =>
-                DefPathData::ValueNs(ti.name),
-            hir::TypeTraitItem(..) => DefPathData::TypeNs(ti.name),
+                DefPathData::ValueNs(ti.name.as_str()),
+            hir::TypeTraitItem(..) => DefPathData::TypeNs(ti.name.as_str()),
         };
 
         let def = self.create_def(ti.id, def_data);
@@ -380,8 +382,8 @@ fn visit_trait_item(&mut self, ti: &'ast hir::TraitItem) {
     fn visit_impl_item(&mut self, ii: &'ast hir::ImplItem) {
         let def_data = match ii.node {
             hir::ImplItemKind::Method(..) | hir::ImplItemKind::Const(..) =>
-                DefPathData::ValueNs(ii.name),
-            hir::ImplItemKind::Type(..) => DefPathData::TypeNs(ii.name),
+                DefPathData::ValueNs(ii.name.as_str()),
+            hir::ImplItemKind::Type(..) => DefPathData::TypeNs(ii.name.as_str()),
         };
 
         let def = self.create_def(ii.id, def_data);
@@ -398,7 +400,7 @@ fn visit_pat(&mut self, pat: &'ast hir::Pat) {
         let parent_def = self.parent_def;
 
         if let hir::PatKind::Binding(_, name, _) = pat.node {
-            let def = self.create_def(pat.id, DefPathData::Binding(name.node));
+            let def = self.create_def(pat.id, DefPathData::Binding(name.node.as_str()));
             self.parent_def = Some(def);
         }
 
@@ -430,10 +432,10 @@ fn visit_ty(&mut self, ty: &'ast hir::Ty) {
     }
 
     fn visit_lifetime_def(&mut self, def: &'ast hir::LifetimeDef) {
-        self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name));
+        self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
     }
 
     fn visit_macro_def(&mut self, macro_def: &'ast hir::MacroDef) {
-        self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.name));
+        self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.name.as_str()));
     }
 }
index 3317585f820aa60b30647c97d7b33c1561a7b784..e3425d7fa61f5f73d9270f4ed5f4163e49efc59c 100644 (file)
 use hir::def_id::{DefId, DefIndex};
 use hir::map::def_collector::DefCollector;
 use rustc_data_structures::fnv::FnvHashMap;
+use std::fmt::Write;
+use std::hash::{Hash, Hasher, SipHasher};
 use syntax::{ast, visit};
 use syntax::parse::token::InternedString;
+use ty::TyCtxt;
 use util::nodemap::NodeMap;
 
 /// The definition table containing node definitions
@@ -109,6 +112,40 @@ pub fn make<FN>(start_krate: ast::CrateNum,
         data.reverse();
         DefPath { data: data, krate: krate }
     }
+
+    pub fn to_string(&self, tcx: TyCtxt) -> String {
+        let mut s = String::with_capacity(self.data.len() * 16);
+
+        if self.krate == LOCAL_CRATE {
+            s.push_str(&tcx.crate_name(self.krate));
+        } else {
+            s.push_str(&tcx.sess.cstore.original_crate_name(self.krate));
+        }
+        s.push_str("/");
+        s.push_str(&tcx.crate_disambiguator(self.krate));
+
+        for component in &self.data {
+            write!(s,
+                   "::{}[{}]",
+                   component.data.as_interned_str(),
+                   component.disambiguator)
+                .unwrap();
+        }
+
+        s
+    }
+
+    pub fn deterministic_hash(&self, tcx: TyCtxt) -> u64 {
+        let mut state = SipHasher::new();
+        self.deterministic_hash_to(tcx, &mut state);
+        state.finish()
+    }
+
+    pub fn deterministic_hash_to<H: Hasher>(&self, tcx: TyCtxt, state: &mut H) {
+        tcx.crate_name(self.krate).hash(state);
+        tcx.crate_disambiguator(self.krate).hash(state);
+        self.data.hash(state);
+    }
 }
 
 /// Root of an inlined item. We track the `DefPath` of the item within
@@ -153,31 +190,31 @@ pub enum DefPathData {
     /// An impl
     Impl,
     /// Something in the type NS
-    TypeNs(ast::Name),
+    TypeNs(InternedString),
     /// Something in the value NS
-    ValueNs(ast::Name),
+    ValueNs(InternedString),
     /// A module declaration
-    Module(ast::Name),
+    Module(InternedString),
     /// A macro rule
-    MacroDef(ast::Name),
+    MacroDef(InternedString),
     /// A closure expression
     ClosureExpr,
 
     // Subportions of items
     /// A type parameter (generic parameter)
-    TypeParam(ast::Name),
+    TypeParam(InternedString),
     /// A lifetime definition
-    LifetimeDef(ast::Name),
+    LifetimeDef(InternedString),
     /// A variant of a enum
-    EnumVariant(ast::Name),
+    EnumVariant(InternedString),
     /// A struct field
-    Field(ast::Name),
+    Field(InternedString),
     /// Implicit ctor for a tuple-like struct
     StructCtor,
     /// Initializer for a const
     Initializer,
     /// Pattern binding
-    Binding(ast::Name),
+    Binding(InternedString),
 }
 
 impl Definitions {
@@ -291,16 +328,16 @@ impl DefPathData {
     pub fn as_interned_str(&self) -> InternedString {
         use self::DefPathData::*;
         match *self {
-            TypeNs(name) |
-            ValueNs(name) |
-            Module(name) |
-            MacroDef(name) |
-            TypeParam(name) |
-            LifetimeDef(name) |
-            EnumVariant(name) |
-            Binding(name) |
-            Field(name) => {
-                name.as_str()
+            TypeNs(ref name) |
+            ValueNs(ref name) |
+            Module(ref name) |
+            MacroDef(ref name) |
+            TypeParam(ref name) |
+            LifetimeDef(ref name) |
+            EnumVariant(ref name) |
+            Binding(ref name) |
+            Field(ref name) => {
+                name.clone()
             }
 
             Impl => {
index aed3613f44ed489328e3df2f1a6ae1013fafd684..86d29a6fc717fea971bcbd5d5b27c653cd188116 100644 (file)
@@ -204,9 +204,21 @@ pub struct Map<'ast> {
     /// All NodeIds that are numerically greater or equal to this value come
     /// from inlined items.
     local_node_id_watermark: NodeId,
+
+    /// All def-indices that are numerically greater or equal to this value come
+    /// from inlined items.
+    local_def_id_watermark: usize,
 }
 
 impl<'ast> Map<'ast> {
+    pub fn is_inlined_def_id(&self, id: DefId) -> bool {
+        id.is_local() && id.index.as_usize() >= self.local_def_id_watermark
+    }
+
+    pub fn is_inlined_node_id(&self, id: NodeId) -> bool {
+        id >= self.local_node_id_watermark
+    }
+
     /// Registers a read in the dependency graph of the AST node with
     /// the given `id`. This needs to be called each time a public
     /// function returns the HIR for a node -- in other words, when it
@@ -214,60 +226,99 @@ impl<'ast> Map<'ast> {
     /// otherwise have had access to those contents, and hence needs a
     /// read recorded). If the function just returns a DefId or
     /// NodeId, no actual content was returned, so no read is needed.
-    fn read(&self, id: NodeId) {
+    pub fn read(&self, id: NodeId) {
         self.dep_graph.read(self.dep_node(id));
     }
 
     fn dep_node(&self, id0: NodeId) -> DepNode<DefId> {
         let map = self.map.borrow();
         let mut id = id0;
-        loop {
-            match map[id as usize] {
-                EntryItem(_, item) => {
-                    let def_id = self.local_def_id(item.id);
-                    // NB                          ^~~~~~~
-                    //
-                    // You would expect that `item.id == id`, but this
-                    // is not always the case. In particular, for a
-                    // ViewPath item like `use self::{mem, foo}`, we
-                    // map the ids for `mem` and `foo` to the
-                    // enclosing view path item. This seems mega super
-                    // ultra wrong, but then who am I to judge?
-                    // -nmatsakis
-                    return DepNode::Hir(def_id);
-                }
+        if !self.is_inlined_node_id(id) {
+            loop {
+                match map[id as usize] {
+                    EntryItem(_, item) => {
+                        let def_id = self.local_def_id(item.id);
+                        // NB                          ^~~~~~~
+                        //
+                        // You would expect that `item.id == id`, but this
+                        // is not always the case. In particular, for a
+                        // ViewPath item like `use self::{mem, foo}`, we
+                        // map the ids for `mem` and `foo` to the
+                        // enclosing view path item. This seems mega super
+                        // ultra wrong, but then who am I to judge?
+                        // -nmatsakis
+                        assert!(!self.is_inlined_def_id(def_id));
+                        return DepNode::Hir(def_id);
+                    }
 
-                EntryForeignItem(p, _) |
-                EntryTraitItem(p, _) |
-                EntryImplItem(p, _) |
-                EntryVariant(p, _) |
-                EntryExpr(p, _) |
-                EntryStmt(p, _) |
-                EntryLocal(p, _) |
-                EntryPat(p, _) |
-                EntryBlock(p, _) |
-                EntryStructCtor(p, _) |
-                EntryLifetime(p, _) |
-                EntryTyParam(p, _) =>
-                    id = p,
-
-                RootCrate |
-                RootInlinedParent(_) =>
-                    // FIXME(#32015) clarify story about cross-crate dep tracking
-                    return DepNode::Krate,
-
-                NotPresent =>
-                    // Some nodes, notably struct fields, are not
-                    // present in the map for whatever reason, but
-                    // they *do* have def-ids. So if we encounter an
-                    // empty hole, check for that case.
-                    return self.opt_local_def_id(id)
-                               .map(|def_id| DepNode::Hir(def_id))
-                               .unwrap_or_else(|| {
-                                   bug!("Walking parents from `{}` \
-                                         led to `NotPresent` at `{}`",
-                                        id0, id)
-                               }),
+                    EntryForeignItem(p, _) |
+                    EntryTraitItem(p, _) |
+                    EntryImplItem(p, _) |
+                    EntryVariant(p, _) |
+                    EntryExpr(p, _) |
+                    EntryStmt(p, _) |
+                    EntryLocal(p, _) |
+                    EntryPat(p, _) |
+                    EntryBlock(p, _) |
+                    EntryStructCtor(p, _) |
+                    EntryLifetime(p, _) |
+                    EntryTyParam(p, _) =>
+                        id = p,
+
+                    RootCrate =>
+                        return DepNode::Krate,
+
+                    RootInlinedParent(_) =>
+                        bug!("node {} has inlined ancestor but is not inlined", id0),
+
+                    NotPresent =>
+                        // Some nodes, notably struct fields, are not
+                        // present in the map for whatever reason, but
+                        // they *do* have def-ids. So if we encounter an
+                        // empty hole, check for that case.
+                        return self.opt_local_def_id(id)
+                                   .map(|def_id| DepNode::Hir(def_id))
+                                   .unwrap_or_else(|| {
+                                       bug!("Walking parents from `{}` \
+                                             led to `NotPresent` at `{}`",
+                                            id0, id)
+                                   }),
+                }
+            }
+        } else {
+            // reading from an inlined def-id is really a read out of
+            // the metadata from which we loaded the item.
+            loop {
+                match map[id as usize] {
+                    EntryItem(p, _) |
+                    EntryForeignItem(p, _) |
+                    EntryTraitItem(p, _) |
+                    EntryImplItem(p, _) |
+                    EntryVariant(p, _) |
+                    EntryExpr(p, _) |
+                    EntryStmt(p, _) |
+                    EntryLocal(p, _) |
+                    EntryPat(p, _) |
+                    EntryBlock(p, _) |
+                    EntryStructCtor(p, _) |
+                    EntryLifetime(p, _) |
+                    EntryTyParam(p, _) =>
+                        id = p,
+
+                    RootInlinedParent(parent) => match *parent {
+                        InlinedItem::Item(def_id, _) |
+                        InlinedItem::TraitItem(def_id, _) |
+                        InlinedItem::ImplItem(def_id, _) |
+                        InlinedItem::Foreign(def_id, _) =>
+                            return DepNode::MetaData(def_id)
+                    },
+
+                    RootCrate =>
+                        bug!("node {} has crate ancestor but is inlined", id0),
+
+                    NotPresent =>
+                        bug!("node {} is inlined but not present in map", id0),
+                }
             }
         }
     }
@@ -664,10 +715,6 @@ pub fn node_to_string(&self, id: NodeId) -> String {
     pub fn node_to_user_string(&self, id: NodeId) -> String {
         node_id_to_string(self, id, false)
     }
-
-    pub fn is_inlined(&self, id: NodeId) -> bool {
-        id >= self.local_node_id_watermark
-    }
 }
 
 pub struct NodesMatchingSuffix<'a, 'ast:'a> {
@@ -846,13 +893,15 @@ pub fn map_crate<'ast>(forest: &'ast mut Forest,
     }
 
     let local_node_id_watermark = map.len() as NodeId;
+    let local_def_id_watermark = definitions.len();
 
     Map {
         forest: forest,
         dep_graph: forest.dep_graph.clone(),
         map: RefCell::new(map),
         definitions: RefCell::new(definitions),
-        local_node_id_watermark: local_node_id_watermark
+        local_node_id_watermark: local_node_id_watermark,
+        local_def_id_watermark: local_def_id_watermark,
     }
 }
 
@@ -866,7 +915,8 @@ pub fn map_decoded_item<'ast, F: FoldOps>(map: &Map<'ast>,
                                           -> &'ast InlinedItem {
     let mut fld = IdAndSpanUpdater::new(fold_ops);
     let ii = match ii {
-        II::Item(i) => II::Item(i.map(|i| fld.fold_item(i))),
+        II::Item(d, i) => II::Item(fld.fold_ops.new_def_id(d),
+                                   i.map(|i| fld.fold_item(i))),
         II::TraitItem(d, ti) => {
             II::TraitItem(fld.fold_ops.new_def_id(d),
                           ti.map(|ti| fld.fold_trait_item(ti)))
@@ -875,7 +925,8 @@ pub fn map_decoded_item<'ast, F: FoldOps>(map: &Map<'ast>,
             II::ImplItem(fld.fold_ops.new_def_id(d),
                          ii.map(|ii| fld.fold_impl_item(ii)))
         }
-        II::Foreign(i) => II::Foreign(i.map(|i| fld.fold_foreign_item(i)))
+        II::Foreign(d, i) => II::Foreign(fld.fold_ops.new_def_id(d),
+                                         i.map(|i| fld.fold_foreign_item(i)))
     };
 
     let ii = map.forest.inlined_items.alloc(ii);
index 511cc32d2e1e6cfeb47d00fa65076fba5445de64..3ad5ef1e0ff6d438720898027ddbed945692dcde 100644 (file)
@@ -94,7 +94,7 @@
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax_pos::{self, Pos, Span};
-use errors::{DiagnosticBuilder, check_old_school};
+use errors::DiagnosticBuilder;
 
 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     pub fn note_and_explain_region(self,
@@ -541,25 +541,19 @@ pub fn note_type_err(&self,
 
         let span = origin.span();
 
-        let mut is_simple_error = false;
-
         if let Some((expected, found)) = expected_found {
-            is_simple_error = if let &TypeError::Sorts(ref values) = terr {
+            let is_simple_error = if let &TypeError::Sorts(ref values) = terr {
                 values.expected.is_primitive() && values.found.is_primitive()
             } else {
                 false
             };
 
-            if !is_simple_error || check_old_school() {
+            if !is_simple_error {
                 diag.note_expected_found(&"type", &expected, &found);
             }
         }
 
-        if !is_simple_error && check_old_school() {
-            diag.span_note(span, &format!("{}", terr));
-        } else {
-            diag.span_label(span, &terr);
-        }
+        diag.span_label(span, &terr);
 
         self.note_error_origin(diag, &origin);
         self.check_and_note_conflicting_crates(diag, terr, span);
index 32344a7b9c8deb1485d2d5ac8e27c8102144c0c3..f1bb3a37e3c273c9ebbfbf39850cff2cd8a3b1f6 100644 (file)
@@ -94,19 +94,19 @@ pub enum DefLike {
 /// that we trans.
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum InlinedItem {
-    Item(P<hir::Item>),
+    Item(DefId /* def-id in source crate */, P<hir::Item>),
     TraitItem(DefId /* impl id */, P<hir::TraitItem>),
     ImplItem(DefId /* impl id */, P<hir::ImplItem>),
-    Foreign(P<hir::ForeignItem>),
+    Foreign(DefId /* extern item */, P<hir::ForeignItem>),
 }
 
 /// A borrowed version of `hir::InlinedItem`.
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 pub enum InlinedItemRef<'a> {
-    Item(&'a hir::Item),
+    Item(DefId, &'a hir::Item),
     TraitItem(DefId, &'a hir::TraitItem),
     ImplItem(DefId, &'a hir::ImplItem),
-    Foreign(&'a hir::ForeignItem)
+    Foreign(DefId, &'a hir::ForeignItem)
 }
 
 /// Item definitions in the currently-compiled crate would have the CrateNum
@@ -283,8 +283,8 @@ pub fn visit<'ast,V>(&'ast self, visitor: &mut V)
         where V: Visitor<'ast>
     {
         match *self {
-            InlinedItem::Item(ref i) => visitor.visit_item(&i),
-            InlinedItem::Foreign(ref i) => visitor.visit_foreign_item(&i),
+            InlinedItem::Item(_, ref i) => visitor.visit_item(&i),
+            InlinedItem::Foreign(_, ref i) => visitor.visit_foreign_item(&i),
             InlinedItem::TraitItem(_, ref ti) => visitor.visit_trait_item(ti),
             InlinedItem::ImplItem(_, ref ii) => visitor.visit_impl_item(ii),
         }
index 1a34699aff491754aea088576fdefa5c47f00106..92de65798d3cbb18db8277631034ce72439f3f6d 100644 (file)
@@ -8,9 +8,31 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use util::nodemap::NodeMap;
+use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
+use hir::def_id::DefId;
 use mir::repr::Mir;
+use std::marker::PhantomData;
 
 pub struct MirMap<'tcx> {
-    pub map: NodeMap<Mir<'tcx>>,
+    pub map: DepTrackingMap<MirMapConfig<'tcx>>,
+}
+
+impl<'tcx> MirMap<'tcx> {
+    pub fn new(graph: DepGraph) -> Self {
+        MirMap {
+            map: DepTrackingMap::new(graph)
+        }
+    }
+}
+
+pub struct MirMapConfig<'tcx> {
+    data: PhantomData<&'tcx ()>
+}
+
+impl<'tcx> DepTrackingMapConfig for MirMapConfig<'tcx> {
+    type Key = DefId;
+    type Value = Mir<'tcx>;
+    fn to_dep_node(key: &DefId) -> DepNode<DefId> {
+        DepNode::Mir(*key)
+    }
 }
index 4ca3907d4e602bcf94c9945818b91eed0b6eab13..57601e67504328f7f422a631be2c174c61852781 100644 (file)
@@ -11,7 +11,6 @@
 use dep_graph::DepNode;
 use hir;
 use hir::map::DefPathData;
-use hir::def_id::DefId;
 use mir::mir_map::MirMap;
 use mir::repr::{Mir, Promoted};
 use ty::TyCtxt;
@@ -73,9 +72,6 @@ pub fn item_id(&self) -> NodeId {
 /// Various information about pass.
 pub trait Pass {
     // fn should_run(Session) to check if pass should run?
-    fn dep_node(&self, def_id: DefId) -> DepNode<DefId> {
-        DepNode::MirPass(def_id)
-    }
     fn name(&self) -> &str {
         let name = unsafe { ::std::intrinsics::type_name::<Self>() };
         if let Some(tail) = name.rfind(":") {
@@ -119,10 +115,11 @@ fn run_pass<'a>(&mut self,
                     map: &mut MirMap<'tcx>,
                     hooks: &mut [Box<for<'s> MirPassHook<'s>>])
     {
-        for (&id, mir) in &mut map.map {
-            let def_id = tcx.map.local_def_id(id);
-            let _task = tcx.dep_graph.in_task(self.dep_node(def_id));
-
+        let def_ids = map.map.keys();
+        for def_id in def_ids {
+            let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+            let mir = map.map.get_mut(&def_id).unwrap();
+            let id = tcx.map.as_local_node_id(def_id).unwrap();
             let src = MirSource::from_node(tcx, id);
 
             for hook in &mut *hooks {
index 6ed91cdbe18bc3c2551c7dbee48803ba11eabb61..0fb4d0f8fea5e63f171c5ddbc642505775b894f8 100644 (file)
@@ -176,6 +176,10 @@ pub enum PrintRequest {
     CrateName,
     Cfg,
     TargetList,
+    TargetCPUs,
+    TargetFeatures,
+    RelocationModels,
+    CodeModels,
 }
 
 pub enum Input {
@@ -629,9 +633,9 @@ fn parse_panic_strategy(slot: &mut PanicStrategy, v: Option<&str>) -> bool {
     lto: bool = (false, parse_bool,
         "perform LLVM link-time optimizations"),
     target_cpu: Option<String> = (None, parse_opt_string,
-        "select target processor (llc -mcpu=help for details)"),
+        "select target processor (rustc --print target-cpus for details)"),
     target_feature: String = ("".to_string(), parse_string,
-        "target specific attributes (llc -mattr=help for details)"),
+        "target specific attributes (rustc --print target-features for details)"),
     passes: Vec<String> = (Vec::new(), parse_list,
         "a list of extra LLVM passes to run (space separated)"),
     llvm_args: Vec<String> = (Vec::new(), parse_list,
@@ -655,9 +659,9 @@ fn parse_panic_strategy(slot: &mut PanicStrategy, v: Option<&str>) -> bool {
     no_redzone: Option<bool> = (None, parse_opt_bool,
         "disable the use of the redzone"),
     relocation_model: Option<String> = (None, parse_opt_string,
-         "choose the relocation model to use (llc -relocation-model for details)"),
+         "choose the relocation model to use (rustc --print relocation-models for details)"),
     code_model: Option<String> = (None, parse_opt_string,
-         "choose the code model to use (llc -code-model for details)"),
+         "choose the code model to use (rustc --print code-models for details)"),
     metadata: Vec<String> = (Vec::new(), parse_list,
          "metadata to mangle symbol names with"),
     extra_filename: String = ("".to_string(), parse_string,
@@ -745,6 +749,8 @@ fn parse_panic_strategy(slot: &mut PanicStrategy, v: Option<&str>) -> bool {
           "attempt to recover from parse errors (experimental)"),
     incremental: Option<String> = (None, parse_opt_string,
           "enable incremental compilation (experimental)"),
+    incremental_info: bool = (false, parse_bool,
+        "print high-level information about incremental reuse (or the lack thereof)"),
     dump_dep_graph: bool = (false, parse_bool,
           "dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
     query_dep_graph: bool = (false, parse_bool,
@@ -1022,7 +1028,8 @@ pub fn rustc_short_optgroups() -> Vec<RustcOptGroup> {
                  "[asm|llvm-bc|llvm-ir|obj|link|dep-info]"),
         opt::multi_s("", "print", "Comma separated list of compiler information to \
                                print on stdout",
-                 "[crate-name|file-names|sysroot|cfg|target-list]"),
+                 "[crate-name|file-names|sysroot|cfg|target-list|target-cpus|\
+                   target-features|relocation-models|code-models]"),
         opt::flagmulti_s("g",  "",  "Equivalent to -C debuginfo=2"),
         opt::flagmulti_s("O", "", "Equivalent to -C opt-level=2"),
         opt::opt_s("o", "", "Write output to <filename>", "FILENAME"),
@@ -1055,7 +1062,7 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
                      "NAME=PATH"),
         opt::opt_s("", "sysroot", "Override the system root", "PATH"),
         opt::multi_ubnr("Z", "", "Set internal debugging options", "FLAG"),
-        opt::opt_ubnr("", "error-format",
+        opt::opt_s("", "error-format",
                       "How errors and other messages are produced",
                       "human|json"),
         opt::opt_s("", "color", "Configure coloring of output:
@@ -1236,6 +1243,24 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
         early_error(error_format, "Value for codegen units must be a positive nonzero integer");
     }
 
+    let mut prints = Vec::<PrintRequest>::new();
+    if cg.target_cpu.as_ref().map_or(false, |s| s == "help") {
+        prints.push(PrintRequest::TargetCPUs);
+        cg.target_cpu = None;
+    };
+    if cg.target_feature == "help" {
+        prints.push(PrintRequest::TargetFeatures);
+        cg.target_feature = "".to_string();
+    }
+    if cg.relocation_model.as_ref().map_or(false, |s| s == "help") {
+        prints.push(PrintRequest::RelocationModels);
+        cg.relocation_model = None;
+    }
+    if cg.code_model.as_ref().map_or(false, |s| s == "help") {
+        prints.push(PrintRequest::CodeModels);
+        cg.code_model = None;
+    }
+
     let cg = cg;
 
     let sysroot_opt = matches.opt_str("sysroot").map(|m| PathBuf::from(&m));
@@ -1313,18 +1338,22 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
     let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
     let test = matches.opt_present("test");
 
-    let prints = matches.opt_strs("print").into_iter().map(|s| {
+    prints.extend(matches.opt_strs("print").into_iter().map(|s| {
         match &*s {
             "crate-name" => PrintRequest::CrateName,
             "file-names" => PrintRequest::FileNames,
             "sysroot" => PrintRequest::Sysroot,
             "cfg" => PrintRequest::Cfg,
             "target-list" => PrintRequest::TargetList,
+            "target-cpus" => PrintRequest::TargetCPUs,
+            "target-features" => PrintRequest::TargetFeatures,
+            "relocation-models" => PrintRequest::RelocationModels,
+            "code-models" => PrintRequest::CodeModels,
             req => {
                 early_error(error_format, &format!("unknown print request `{}`", req))
             }
         }
-    }).collect::<Vec<_>>();
+    }));
 
     if !cg.remark.is_empty() && debuginfo == NoDebugInfo {
         early_warn(error_format, "-C remark will not show source locations without \
index cee18232ec98a05b71e2400ffc63b9a30bb833b3..9ab75c8a5a20c8e17a9c47c3d5c70c18d779ccc0 100644 (file)
@@ -23,7 +23,6 @@
 use syntax::ast::{NodeId, Name};
 use errors::{self, DiagnosticBuilder};
 use errors::emitter::{Emitter, EmitterWriter};
-use errors::snippet::FormatMode;
 use syntax::json::JsonEmitter;
 use syntax::feature_gate;
 use syntax::parse;
@@ -369,9 +368,7 @@ pub fn build_session_with_codemap(sopts: config::Options,
     let emitter: Box<Emitter> = match sopts.error_format {
         config::ErrorOutputType::HumanReadable(color_config) => {
             Box::new(EmitterWriter::stderr(color_config,
-                                           Some(registry),
-                                           Some(codemap.clone()),
-                                           errors::snippet::FormatMode::EnvironmentSelected))
+                                           Some(codemap.clone())))
         }
         config::ErrorOutputType::Json => {
             Box::new(JsonEmitter::stderr(Some(registry), codemap.clone()))
@@ -509,9 +506,7 @@ pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
     let emitter: Box<Emitter> = match output {
         config::ErrorOutputType::HumanReadable(color_config) => {
             Box::new(EmitterWriter::stderr(color_config,
-                                           None,
-                                           None,
-                                           FormatMode::EnvironmentSelected))
+                                           None))
         }
         config::ErrorOutputType::Json => Box::new(JsonEmitter::basic()),
     };
@@ -524,9 +519,7 @@ pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
     let emitter: Box<Emitter> = match output {
         config::ErrorOutputType::HumanReadable(color_config) => {
             Box::new(EmitterWriter::stderr(color_config,
-                                           None,
-                                           None,
-                                           FormatMode::EnvironmentSelected))
+                                           None))
         }
         config::ErrorOutputType::Json => Box::new(JsonEmitter::basic()),
     };
index 5444dd9476120ed2f8cb01418c7bb14b81afb83c..a816d26edadf36678d254f83b15f8932e5b4bdc9 100644 (file)
@@ -14,6 +14,7 @@
 use session::Session;
 use middle;
 use middle::cstore::LOCAL_CRATE;
+use hir::TraitMap;
 use hir::def::DefMap;
 use hir::def_id::{DefId, DefIndex};
 use hir::map as ast_map;
@@ -299,8 +300,16 @@ pub struct GlobalCtxt<'tcx> {
     pub types: CommonTypes<'tcx>,
 
     pub sess: &'tcx Session,
+
+    /// Map from path id to the results from resolve; generated
+    /// initially by resolve and updated during typeck in some cases
+    /// (e.g., UFCS paths)
     pub def_map: RefCell<DefMap>,
 
+    /// Map indicating what traits are in scope for places where this
+    /// is relevant; generated by resolve.
+    pub trait_map: TraitMap,
+
     pub named_region_map: resolve_lifetime::NamedRegionMap,
 
     pub region_maps: RegionMaps,
@@ -525,7 +534,7 @@ pub fn def_index_for_def_key(self, krate: ast::CrateNum, key: DefKey)
     }
 
     pub fn retrace_path(self, path: &DefPath) -> Option<DefId> {
-        debug!("retrace_path(path={:?})", path);
+        debug!("retrace_path(path={:?}, krate={:?})", path, self.crate_name(path.krate));
 
         let root_key = DefKey {
             parent: None,
@@ -666,6 +675,7 @@ fn is_global(self) -> bool {
     pub fn create_and_enter<F, R>(s: &'tcx Session,
                                   arenas: &'tcx CtxtArenas<'tcx>,
                                   def_map: DefMap,
+                                  trait_map: TraitMap,
                                   named_region_map: resolve_lifetime::NamedRegionMap,
                                   map: ast_map::Map<'tcx>,
                                   freevars: FreevarMap,
@@ -694,6 +704,7 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             variance_computed: Cell::new(false),
             sess: s,
             def_map: RefCell::new(def_map),
+            trait_map: trait_map,
             tables: RefCell::new(Tables::empty()),
             impl_trait_refs: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
             trait_defs: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
index 88327ab19a5cb66350a06e707fa9954766d62b55..634599406afb2fc49627a734559fccb5e0c4abc1 100644 (file)
@@ -52,8 +52,10 @@ pub fn get(&self, dep_node: DepNode<DefId>) -> Option<Ty<'tcx>> {
         self.untracked_get()
     }
 
+    /// Reads the ivar without registered a dep-graph read. Use with
+    /// caution.
     #[inline]
-    fn untracked_get(&self) -> Option<Ty<'tcx>> {
+    pub fn untracked_get(&self) -> Option<Ty<'tcx>> {
         match self.0.get() {
             None => None,
             // valid because of invariant (A)
index 03e893727d1b58c7d88142ee72bb336dfe509cfd..a7c534198923b7a1183bb7f9b7f0a0d65dcd24bf 100644 (file)
@@ -1757,8 +1757,7 @@ impl<'a, 'gcx, 'tcx, 'container> AdtDefData<'tcx, 'container> {
     /// Due to normalization being eager, this applies even if
     /// the associated type is behind a pointer, e.g. issue #31299.
     pub fn sized_constraint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
-        let dep_node = DepNode::SizedConstraint(self.did);
-        match self.sized_constraint.get(dep_node) {
+        match self.sized_constraint.get(DepNode::SizedConstraint(self.did)) {
             None => {
                 let global_tcx = tcx.global_tcx();
                 let this = global_tcx.lookup_adt_def_master(self.did);
@@ -1786,12 +1785,18 @@ impl<'a, 'tcx> AdtDefData<'tcx, 'tcx> {
     ///       such.
     ///     - a TyError, if a type contained itself. The representability
     ///       check should catch this case.
-    fn calculate_sized_constraint_inner(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    fn calculate_sized_constraint_inner(&'tcx self,
+                                        tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                         stack: &mut Vec<AdtDefMaster<'tcx>>)
     {
-
         let dep_node = || DepNode::SizedConstraint(self.did);
-        if self.sized_constraint.get(dep_node()).is_some() {
+
+        // Follow the memoization pattern: push the computation of
+        // DepNode::SizedConstraint as our current task.
+        let _task = tcx.dep_graph.in_task(dep_node());
+        if self.sized_constraint.untracked_get().is_some() {
+            //                   ---------------
+            // can skip the dep-graph read since we just pushed the task
             return;
         }
 
index 3d24fd8ab67e4a2d4b1bd1d1a25ff95f3336933b..18686e3f1d6489e642f0735755485cba953fbe62 100644 (file)
@@ -94,7 +94,7 @@ fn load_specific(target: &str) -> TargetResult {
         pub fn get_targets() -> Box<Iterator<Item=String>> {
             Box::new(TARGETS.iter().filter_map(|t| -> Option<String> {
                 load_specific(t)
-                    .map(|t| t.llvm_target)
+                    .and(Ok(t.to_string()))
                     .ok()
             }))
         }
index 9115fd42be870e02aec87792b163d02ca5657720..904cffac6b3cd88e06b21391382e9f198f3a22b1 100644 (file)
@@ -168,8 +168,10 @@ fn borrowck_fn(this: &mut BorrowckCtxt,
                attributes: &[ast::Attribute]) {
     debug!("borrowck_fn(id={})", id);
 
+    let def_id = this.tcx.map.local_def_id(id);
+
     if attributes.iter().any(|item| item.check_name("rustc_mir_borrowck")) {
-        let mir = this.mir_map.unwrap().map.get(&id).unwrap();
+        let mir = this.mir_map.unwrap().map.get(&def_id).unwrap();
         this.with_temp_region_map(id, |this| {
             mir::borrowck_mir(this, fk, decl, mir, body, sp, id, attributes)
         });
index 2fe4ae627c1dc7be3ea12168369e8df3cb17939f..366b58e06c4768050a0763d6f1d05b2b7e919b3e 100644 (file)
@@ -235,7 +235,12 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &hir::Expr) {
                 .flat_map(|arm| &arm.0)
                 .map(|pat| vec![wrap_pat(cx, &pat)])
                 .collect();
-            check_exhaustive(cx, ex.span, &matrix, source);
+            let match_span = Span {
+                lo: ex.span.lo,
+                hi: scrut.span.hi,
+                expn_id: ex.span.expn_id
+            };
+            check_exhaustive(cx, match_span, &matrix, source);
         },
         _ => ()
     }
@@ -311,7 +316,10 @@ fn check_arms(cx: &MatchCheckCtxt,
                                 let &(ref first_arm_pats, _) = &arms[0];
                                 let first_pat = &first_arm_pats[0];
                                 let span = first_pat.span;
-                                span_err!(cx.tcx.sess, span, E0162, "irrefutable if-let pattern");
+                                struct_span_err!(cx.tcx.sess, span, E0162,
+                                                "irrefutable if-let pattern")
+                                    .span_label(span, &format!("irrefutable pattern"))
+                                    .emit();
                                 printed_if_let_err = true;
                             }
                         },
index 43d9725baaf004e9e970fb466452d714186a1c87..9c9453ae3d3145019af640b0fb16d909fd9be910 100644 (file)
@@ -44,7 +44,7 @@
 use std::collections::hash_map::Entry::Vacant;
 
 use rustc_const_math::*;
-use rustc_errors::{DiagnosticBuilder, check_old_school};
+use rustc_errors::DiagnosticBuilder;
 
 macro_rules! math {
     ($e:expr, $op:expr) => {
@@ -142,7 +142,7 @@ pub fn lookup_const_by_id<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         }
         let mut used_substs = false;
         let expr_ty = match tcx.sess.cstore.maybe_get_item_ast(tcx, def_id) {
-            Some((&InlinedItem::Item(ref item), _)) => match item.node {
+            Some((&InlinedItem::Item(_, ref item), _)) => match item.node {
                 hir::ItemConst(ref ty, ref const_expr) => {
                     Some((&**const_expr, tcx.ast_ty_to_prim_ty(ty)))
                 },
@@ -198,7 +198,7 @@ fn inline_const_fn_from_external_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     let fn_id = match tcx.sess.cstore.maybe_get_item_ast(tcx, def_id) {
-        Some((&InlinedItem::Item(ref item), _)) => Some(item.id),
+        Some((&InlinedItem::Item(_, ref item), _)) => Some(item.id),
         Some((&InlinedItem::ImplItem(_, ref item), _)) => Some(item.id),
         _ => None
     };
@@ -378,11 +378,7 @@ pub fn note_const_eval_err<'a, 'tcx>(
 {
     match err.description() {
         ConstEvalErrDescription::Simple(message) => {
-            if check_old_school() {
-                diag.note(&message);
-            } else {
-                diag.span_label(err.span, &message);
-            }
+            diag.span_label(err.span, &message);
         }
     }
 
index 2c3a2e8ef6c3b97c9514bdf0cca99b850e3fdedd..0dab230f47a2d036bac2fc9b520417d592c249ef 100644 (file)
@@ -22,6 +22,12 @@ pub fn new(num_bits: usize) -> BitVector {
         BitVector { data: vec![0; num_words] }
     }
 
+    pub fn clear(&mut self) {
+        for p in &mut self.data {
+            *p = 0;
+        }
+    }
+
     pub fn contains(&self, bit: usize) -> bool {
         let (word, mask) = word_mask(bit);
         (self.data[word] & mask) != 0
@@ -118,32 +124,32 @@ fn from_iter<I>(iter: I) -> BitVector where I: IntoIterator<Item=bool> {
     }
 }
 
-/// A "bit matrix" is basically a square matrix of booleans
-/// represented as one gigantic bitvector. In other words, it is as if
-/// you have N bitvectors, each of length N. Note that `elements` here is `N`/
+/// A "bit matrix" is basically a matrix of booleans represented as
+/// one gigantic bitvector. In other words, it is as if you have
+/// `rows` bitvectors, each of length `columns`.
 #[derive(Clone)]
 pub struct BitMatrix {
-    elements: usize,
+    columns: usize,
     vector: Vec<u64>,
 }
 
 impl BitMatrix {
-    // Create a new `elements x elements` matrix, initially empty.
-    pub fn new(elements: usize) -> BitMatrix {
+    // Create a new `rows x columns` matrix, initially empty.
+    pub fn new(rows: usize, columns: usize) -> BitMatrix {
         // For every element, we need one bit for every other
         // element. Round up to an even number of u64s.
-        let u64s_per_elem = u64s(elements);
+        let u64s_per_row = u64s(columns);
         BitMatrix {
-            elements: elements,
-            vector: vec![0; elements * u64s_per_elem],
+            columns: columns,
+            vector: vec![0; rows * u64s_per_row],
         }
     }
 
-    /// The range of bits for a given element.
-    fn range(&self, element: usize) -> (usize, usize) {
-        let u64s_per_elem = u64s(self.elements);
-        let start = element * u64s_per_elem;
-        (start, start + u64s_per_elem)
+    /// The range of bits for a given row.
+    fn range(&self, row: usize) -> (usize, usize) {
+        let u64s_per_row = u64s(self.columns);
+        let start = row * u64s_per_row;
+        (start, start + u64s_per_row)
     }
 
     pub fn add(&mut self, source: usize, target: usize) -> bool {
@@ -173,7 +179,7 @@ pub fn contains(&self, source: usize, target: usize) -> bool {
     pub fn intersection(&self, a: usize, b: usize) -> Vec<usize> {
         let (a_start, a_end) = self.range(a);
         let (b_start, b_end) = self.range(b);
-        let mut result = Vec::with_capacity(self.elements);
+        let mut result = Vec::with_capacity(self.columns);
         for (base, (i, j)) in (a_start..a_end).zip(b_start..b_end).enumerate() {
             let mut v = self.vector[i] & self.vector[j];
             for bit in 0..64 {
@@ -209,6 +215,15 @@ pub fn merge(&mut self, read: usize, write: usize) -> bool {
         }
         changed
     }
+
+    pub fn iter<'a>(&'a self, row: usize) -> BitVectorIter<'a> {
+        let (start, end) = self.range(row);
+        BitVectorIter {
+            iter: self.vector[start..end].iter(),
+            current: 0,
+            idx: 0,
+        }
+    }
 }
 
 fn u64s(elements: usize) -> usize {
@@ -294,7 +309,7 @@ fn grow() {
 
 #[test]
 fn matrix_intersection() {
-    let mut vec1 = BitMatrix::new(200);
+    let mut vec1 = BitMatrix::new(200, 200);
 
     // (*) Elements reachable from both 2 and 65.
 
@@ -322,3 +337,45 @@ fn matrix_intersection() {
     let intersection = vec1.intersection(2, 65);
     assert_eq!(intersection, &[10, 64, 160]);
 }
+
+#[test]
+fn matrix_iter() {
+    let mut matrix = BitMatrix::new(64, 100);
+    matrix.add(3, 22);
+    matrix.add(3, 75);
+    matrix.add(2, 99);
+    matrix.add(4, 0);
+    matrix.merge(3, 5);
+
+    let expected = [99];
+    let mut iter = expected.iter();
+    for i in matrix.iter(2) {
+        let j = *iter.next().unwrap();
+        assert_eq!(i, j);
+    }
+    assert!(iter.next().is_none());
+
+    let expected = [22, 75];
+    let mut iter = expected.iter();
+    for i in matrix.iter(3) {
+        let j = *iter.next().unwrap();
+        assert_eq!(i, j);
+    }
+    assert!(iter.next().is_none());
+
+    let expected = [0];
+    let mut iter = expected.iter();
+    for i in matrix.iter(4) {
+        let j = *iter.next().unwrap();
+        assert_eq!(i, j);
+    }
+    assert!(iter.next().is_none());
+
+    let expected = [22, 75];
+    let mut iter = expected.iter();
+    for i in matrix.iter(5) {
+        let j = *iter.next().unwrap();
+        assert_eq!(i, j);
+    }
+    assert!(iter.next().is_none());
+}
index 731471b0600f3cfe635e813d997d6d5f3223bb52..4561a3d084c2064183aac1d611cf1850d514b608 100644 (file)
@@ -296,12 +296,7 @@ pub fn depth_traverse<'a>(&'a self,
                               start: NodeIndex,
                               direction: Direction)
                               -> DepthFirstTraversal<'a, N, E> {
-        DepthFirstTraversal {
-            graph: self,
-            stack: vec![start],
-            visited: BitVector::new(self.nodes.len()),
-            direction: direction,
-        }
+        DepthFirstTraversal::with_start_node(self, start, direction)
     }
 }
 
@@ -378,26 +373,57 @@ pub struct DepthFirstTraversal<'g, N: 'g, E: 'g> {
     direction: Direction,
 }
 
+impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
+    pub fn new(graph: &'g Graph<N, E>, direction: Direction) -> Self {
+        let visited = BitVector::new(graph.len_nodes());
+        DepthFirstTraversal {
+            graph: graph,
+            stack: vec![],
+            visited: visited,
+            direction: direction
+        }
+    }
+
+    pub fn with_start_node(graph: &'g Graph<N, E>,
+                           start_node: NodeIndex,
+                           direction: Direction)
+                           -> Self {
+        let mut visited = BitVector::new(graph.len_nodes());
+        visited.insert(start_node.node_id());
+        DepthFirstTraversal {
+            graph: graph,
+            stack: vec![start_node],
+            visited: visited,
+            direction: direction
+        }
+    }
+
+    pub fn reset(&mut self, start_node: NodeIndex) {
+        self.stack.truncate(0);
+        self.stack.push(start_node);
+        self.visited.clear();
+        self.visited.insert(start_node.node_id());
+    }
+
+    fn visit(&mut self, node: NodeIndex) {
+        if self.visited.insert(node.node_id()) {
+            self.stack.push(node);
+        }
+    }
+}
+
 impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
     type Item = NodeIndex;
 
     fn next(&mut self) -> Option<NodeIndex> {
-        while let Some(idx) = self.stack.pop() {
-            if !self.visited.insert(idx.node_id()) {
-                continue;
-            }
-
+        let next = self.stack.pop();
+        if let Some(idx) = next {
             for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {
                 let target = edge.source_or_target(self.direction);
-                if !self.visited.contains(target.node_id()) {
-                    self.stack.push(target);
-                }
+                self.visit(target);
             }
-
-            return Some(idx);
         }
-
-        return None;
+        next
     }
 }
 
index c3a2f978e1a8a4945f39ed8c091edbe5857ef762..e09e260afc8d99c0e0fddc6d0c3b6a85f26deb49 100644 (file)
@@ -252,7 +252,8 @@ fn with_closure<OP, R>(&self, op: OP) -> R
     }
 
     fn compute_closure(&self) -> BitMatrix {
-        let mut matrix = BitMatrix::new(self.elements.len());
+        let mut matrix = BitMatrix::new(self.elements.len(),
+                                        self.elements.len());
         let mut changed = true;
         while changed {
             changed = false;
index f1f5e194da4d16538418838c23725cad0f45e392..7f50522b2032b0e10f51d9cdb38f20efe7a9a182 100644 (file)
@@ -846,10 +846,10 @@ macro_rules! try_with_f {
 
     let index = stability::Index::new(&hir_map);
 
-    let trait_map = resolutions.trait_map;
     TyCtxt::create_and_enter(sess,
                              arenas,
                              resolutions.def_map,
+                             resolutions.trait_map,
                              named_region_map,
                              hir_map,
                              resolutions.freevars,
@@ -864,7 +864,7 @@ macro_rules! try_with_f {
              || rustc_incremental::load_dep_graph(tcx));
 
         // passes are timed inside typeck
-        try_with_f!(typeck::check_crate(tcx, trait_map), (tcx, None, analysis));
+        try_with_f!(typeck::check_crate(tcx), (tcx, None, analysis));
 
         time(time_passes,
              "const checking",
index 772c59b34dd02725897eca4d397c6d5bac309aa8..98860c8f900eb397a68851509144182c89bc8364 100644 (file)
@@ -68,6 +68,7 @@
 use rustc_resolve as resolve;
 use rustc_save_analysis as save;
 use rustc_trans::back::link;
+use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
 use rustc::dep_graph::DepGraph;
 use rustc::session::{self, config, Session, build_session, CompileResult};
 use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType};
 use syntax::parse::{self, PResult};
 use syntax_pos::MultiSpan;
 use errors::emitter::Emitter;
-use errors::snippet::FormatMode;
 
 #[cfg(test)]
 pub mod test;
@@ -141,9 +141,7 @@ pub fn run(args: Vec<String>) -> isize {
                     None => {
                         let emitter =
                             errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
-                                                                   None,
-                                                                   None,
-                                                                   FormatMode::EnvironmentSelected);
+                                                                   None);
                         let handler = errors::Handler::with_emitter(true, false, Box::new(emitter));
                         handler.emit(&MultiSpan::new(),
                                      &abort_msg(err_count),
@@ -381,10 +379,7 @@ fn check_cfg(sopts: &config::Options,
              output: ErrorOutputType) {
     let emitter: Box<Emitter> = match output {
         config::ErrorOutputType::HumanReadable(color_config) => {
-            Box::new(errors::emitter::EmitterWriter::stderr(color_config,
-                                                            None,
-                                                            None,
-                                                            FormatMode::EnvironmentSelected))
+            Box::new(errors::emitter::EmitterWriter::stderr(color_config, None))
         }
         config::ErrorOutputType::Json => Box::new(json::JsonEmitter::basic()),
     };
@@ -660,6 +655,28 @@ fn print_crate_info(sess: &Session,
                         }
                     }
                 }
+                PrintRequest::TargetCPUs => {
+                    let tm = create_target_machine(sess);
+                    unsafe { llvm::LLVMRustPrintTargetCPUs(tm); }
+                }
+                PrintRequest::TargetFeatures => {
+                    let tm = create_target_machine(sess);
+                    unsafe { llvm::LLVMRustPrintTargetFeatures(tm); }
+                }
+                PrintRequest::RelocationModels => {
+                    println!("Available relocation models:");
+                    for &(name, _) in RELOC_MODEL_ARGS.iter() {
+                        println!("    {}", name);
+                    }
+                    println!("");
+                }
+                PrintRequest::CodeModels => {
+                    println!("Available code models:");
+                    for &(name, _) in CODE_GEN_MODEL_ARGS.iter(){
+                        println!("    {}", name);
+                    }
+                    println!("");
+                }
             }
         }
         return Compilation::Stop;
@@ -1050,10 +1067,7 @@ fn flush(&mut self) -> io::Result<()> {
         // Thread panicked without emitting a fatal diagnostic
         if !value.is::<errors::FatalError>() {
             let emitter =
-                Box::new(errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
-                                                       None,
-                                                       None,
-                                                       FormatMode::EnvironmentSelected));
+                Box::new(errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto, None));
             let handler = errors::Handler::with_emitter(true, false, emitter);
 
             // a .span_bug or .bug call has already printed what
index 14476cc997ff3eb35968e2540a7a99902d81205c..e3e06963ad43bf59f242c1025decbd173fe729ff 100644 (file)
@@ -956,20 +956,24 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session,
             PpmMir | PpmMirCFG => {
                 if let Some(mir_map) = mir_map {
                     if let Some(nodeid) = nodeid {
-                        let mir = mir_map.map.get(&nodeid).unwrap_or_else(|| {
-                            sess.fatal(&format!("no MIR map entry for node {}", nodeid))
-                        });
+                        let def_id = tcx.map.local_def_id(nodeid);
                         match ppm {
-                            PpmMir => write_mir_pretty(tcx, iter::once((&nodeid, mir)), &mut out),
+                            PpmMir => write_mir_pretty(tcx, iter::once(def_id), &mir_map, &mut out),
                             PpmMirCFG => {
-                                write_mir_graphviz(tcx, iter::once((&nodeid, mir)), &mut out)
+                                write_mir_graphviz(tcx, iter::once(def_id), &mir_map, &mut out)
                             }
                             _ => unreachable!(),
                         }?;
                     } else {
                         match ppm {
-                            PpmMir => write_mir_pretty(tcx, mir_map.map.iter(), &mut out),
-                            PpmMirCFG => write_mir_graphviz(tcx, mir_map.map.iter(), &mut out),
+                            PpmMir => write_mir_pretty(tcx,
+                                                       mir_map.map.keys().into_iter(),
+                                                       &mir_map,
+                                                       &mut out),
+                            PpmMirCFG => write_mir_graphviz(tcx,
+                                                            mir_map.map.keys().into_iter(),
+                                                            &mir_map,
+                                                            &mut out),
                             _ => unreachable!(),
                         }?;
                     }
index 39763bfa0eb61c6d279054166cf57eaeab03de87..2b0bd14d83fa0bb35ff36d65a6dace4442b3c223 100644 (file)
@@ -131,6 +131,7 @@ fn test_env<F>(source_string: &str,
     TyCtxt::create_and_enter(&sess,
                              &arenas,
                              resolutions.def_map,
+                             resolutions.trait_map,
                              named_region_map.unwrap(),
                              ast_map,
                              resolutions.freevars,
index 893f8a6e4ddb01b4a09963ec5afe445964d81cc1..981729ddb839581ceecabe70f30093b388313de1 100644 (file)
 
 use self::Destination::*;
 
-use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, FileMap, Span, MultiSpan, LineInfo, CharPos};
-use registry;
+use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, FileMap, Span, MultiSpan, CharPos};
 
-use check_old_school;
 use {Level, CodeSuggestion, DiagnosticBuilder, CodeMapper};
 use RenderSpan::*;
-use snippet::{StyledString, Style, FormatMode, Annotation, Line};
+use snippet::{StyledString, Style, Annotation, Line};
 use styled_buffer::StyledBuffer;
 
-use std::cmp;
 use std::io::prelude::*;
 use std::io;
 use std::rc::Rc;
@@ -33,18 +30,7 @@ pub trait Emitter {
 
 impl Emitter for EmitterWriter {
     fn emit(&mut self, db: &DiagnosticBuilder) {
-        // Pick old school mode either from env or let the test dictate the format
-        let old_school = match self.format_mode {
-            FormatMode::NewErrorFormat => false,
-            FormatMode::OriginalErrorFormat => true,
-            FormatMode::EnvironmentSelected => check_old_school()
-        };
-
-        if old_school {
-            self.emit_messages_old_school(db);
-        } else {
-            self.emit_messages_default(db);
-        }
+        self.emit_messages_default(db);
     }
 }
 
@@ -70,11 +56,7 @@ fn use_color(&self) -> bool {
 
 pub struct EmitterWriter {
     dst: Destination,
-    registry: Option<registry::Registry>,
     cm: Option<Rc<CodeMapper>>,
-
-    // For now, allow an old-school mode while we transition
-    format_mode: FormatMode
 }
 
 struct FileWithAnnotatedLines {
@@ -99,33 +81,23 @@ macro_rules! println_maybe_styled {
 
 impl EmitterWriter {
     pub fn stderr(color_config: ColorConfig,
-                  registry: Option<registry::Registry>,
-                  code_map: Option<Rc<CodeMapper>>,
-                  format_mode: FormatMode)
+                  code_map: Option<Rc<CodeMapper>>)
                   -> EmitterWriter {
         if color_config.use_color() {
             let dst = Destination::from_stderr();
             EmitterWriter { dst: dst,
-                            registry: registry,
-                            cm: code_map,
-                            format_mode: format_mode.clone() }
+                            cm: code_map}
         } else {
             EmitterWriter { dst: Raw(Box::new(io::stderr())),
-                            registry: registry,
-                            cm: code_map,
-                            format_mode: format_mode.clone() }
+                            cm: code_map}
         }
     }
 
     pub fn new(dst: Box<Write + Send>,
-               registry: Option<registry::Registry>,
-               code_map: Option<Rc<CodeMapper>>,
-               format_mode: FormatMode)
+               code_map: Option<Rc<CodeMapper>>)
                -> EmitterWriter {
         EmitterWriter { dst: Raw(dst),
-                        registry: registry,
-                        cm: code_map,
-                        format_mode: format_mode.clone() }
+                        cm: code_map}
     }
 
     fn preprocess_annotations(&self, msp: &MultiSpan) -> Vec<FileWithAnnotatedLines> {
@@ -668,240 +640,6 @@ fn emit_messages_default(&mut self, db: &DiagnosticBuilder) {
             _ => ()
         }
     }
-    fn emit_message_old_school(&mut self,
-                               msp: &MultiSpan,
-                               msg: &str,
-                               code: &Option<String>,
-                               level: &Level,
-                               show_snippet: bool)
-                               -> io::Result<()> {
-        let mut buffer = StyledBuffer::new();
-
-        let loc = match msp.primary_span() {
-            Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(),
-            Some(ps) => if let Some(ref cm) = self.cm {
-                cm.span_to_string(ps)
-            } else {
-                "".to_string()
-            },
-            None => {
-                "".to_string()
-            }
-        };
-        if loc != "" {
-            buffer.append(0, &loc, Style::NoStyle);
-            buffer.append(0, " ", Style::NoStyle);
-        }
-        buffer.append(0, &level.to_string(), Style::Level(level.clone()));
-        buffer.append(0, ": ", Style::HeaderMsg);
-        buffer.append(0, msg, Style::HeaderMsg);
-        buffer.append(0, " ", Style::NoStyle);
-        match code {
-            &Some(ref code) => {
-                buffer.append(0, "[", Style::ErrorCode);
-                buffer.append(0, &code, Style::ErrorCode);
-                buffer.append(0, "]", Style::ErrorCode);
-            }
-            _ => {}
-        }
-
-        if !show_snippet {
-            emit_to_destination(&buffer.render(), level, &mut self.dst)?;
-            return Ok(());
-        }
-
-        // Watch out for various nasty special spans; don't try to
-        // print any filename or anything for those.
-        match msp.primary_span() {
-            Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => {
-                emit_to_destination(&buffer.render(), level, &mut self.dst)?;
-                return Ok(());
-            }
-            _ => { }
-        }
-
-        let annotated_files = self.preprocess_annotations(msp);
-
-        if let (Some(ref cm), Some(ann_file), Some(ref primary_span)) =
-            (self.cm.as_ref(), annotated_files.first(), msp.primary_span().as_ref()) {
-
-            // Next, print the source line and its squiggle
-            // for old school mode, we will render them to the buffer, then insert the file loc
-            // (or space the same amount) in front of the line and the squiggle
-            let source_string = ann_file.file.get_line(ann_file.lines[0].line_index - 1)
-                .unwrap_or("");
-
-            let line_offset = buffer.num_lines();
-
-            let lo = cm.lookup_char_pos(primary_span.lo);
-            //Before each secondary line in old skool-mode, print the label
-            //as an old-style note
-            let file_pos = format!("{}:{} ", lo.file.name.clone(), lo.line);
-            let file_pos_len = file_pos.len();
-
-            // First create the source line we will highlight.
-            buffer.puts(line_offset, 0, &file_pos, Style::FileNameStyle);
-            buffer.puts(line_offset, file_pos_len, &source_string, Style::Quotation);
-            // Sort the annotations by (start, end col)
-            let annotations = ann_file.lines[0].annotations.clone();
-
-            // Next, create the highlight line.
-            for annotation in &annotations {
-                for p in annotation.start_col..annotation.end_col {
-                    if p == annotation.start_col {
-                        buffer.putc(line_offset + 1,
-                                    file_pos_len + p,
-                                    '^',
-                                    if annotation.is_primary {
-                                        Style::UnderlinePrimary
-                                    } else {
-                                        Style::OldSchoolNote
-                                    });
-                    } else {
-                        buffer.putc(line_offset + 1,
-                                    file_pos_len + p,
-                                    '~',
-                                    if annotation.is_primary {
-                                        Style::UnderlinePrimary
-                                    } else {
-                                        Style::OldSchoolNote
-                                    });
-                    }
-                }
-            }
-        }
-        if let Some(ref primary_span) = msp.primary_span().as_ref() {
-            self.render_macro_backtrace_old_school(primary_span, &mut buffer)?;
-        }
-
-        match code {
-            &Some(ref code) if self.registry.as_ref()
-                                           .and_then(|registry| registry.find_description(code))
-                                           .is_some() => {
-                let msg = "run `rustc --explain ".to_string() + &code.to_string() +
-                    "` to see a detailed explanation";
-
-                let line_offset = buffer.num_lines();
-                buffer.append(line_offset, &loc, Style::NoStyle);
-                buffer.append(line_offset, " ", Style::NoStyle);
-                buffer.append(line_offset, &Level::Help.to_string(), Style::Level(Level::Help));
-                buffer.append(line_offset, ": ", Style::HeaderMsg);
-                buffer.append(line_offset, &msg, Style::HeaderMsg);
-            }
-            _ => ()
-        }
-
-        // final step: take our styled buffer, render it, then output it
-        emit_to_destination(&buffer.render(), level, &mut self.dst)?;
-        Ok(())
-    }
-    fn emit_suggestion_old_school(&mut self,
-                                  suggestion: &CodeSuggestion,
-                                  level: &Level,
-                                  msg: &str)
-                                  -> io::Result<()> {
-        use std::borrow::Borrow;
-
-        let primary_span = suggestion.msp.primary_span().unwrap();
-        if let Some(ref cm) = self.cm {
-            let mut buffer = StyledBuffer::new();
-
-            let loc = cm.span_to_string(primary_span);
-
-            if loc != "" {
-                buffer.append(0, &loc, Style::NoStyle);
-                buffer.append(0, " ", Style::NoStyle);
-            }
-
-            buffer.append(0, &level.to_string(), Style::Level(level.clone()));
-            buffer.append(0, ": ", Style::HeaderMsg);
-            buffer.append(0, msg, Style::HeaderMsg);
-
-            let lines = cm.span_to_lines(primary_span).unwrap();
-
-            assert!(!lines.lines.is_empty());
-
-            let complete = suggestion.splice_lines(cm.borrow());
-            let line_count = cmp::min(lines.lines.len(), MAX_HIGHLIGHT_LINES);
-            let display_lines = &lines.lines[..line_count];
-
-            let fm = &*lines.file;
-            // Calculate the widest number to format evenly
-            let max_digits = line_num_max_digits(display_lines.last().unwrap());
-
-            // print the suggestion without any line numbers, but leave
-            // space for them. This helps with lining up with previous
-            // snippets from the actual error being reported.
-            let mut lines = complete.lines();
-            let mut row_num = 1;
-            for line in lines.by_ref().take(MAX_HIGHLIGHT_LINES) {
-                buffer.append(row_num, &fm.name, Style::FileNameStyle);
-                for _ in 0..max_digits+2 {
-                    buffer.append(row_num, &" ", Style::NoStyle);
-                }
-                buffer.append(row_num, line, Style::NoStyle);
-                row_num += 1;
-            }
-
-            // if we elided some lines, add an ellipsis
-            if let Some(_) = lines.next() {
-                buffer.append(row_num, "...", Style::NoStyle);
-            }
-            emit_to_destination(&buffer.render(), level, &mut self.dst)?;
-        }
-        Ok(())
-    }
-
-    fn emit_messages_old_school(&mut self, db: &DiagnosticBuilder) {
-        match self.emit_message_old_school(&db.span,
-                                           &db.message,
-                                           &db.code,
-                                           &db.level,
-                                           true) {
-            Ok(()) => {
-                for child in &db.children {
-                    let (span, show_snippet) = if child.span.primary_spans().is_empty() {
-                        (db.span.clone(), false)
-                    } else {
-                        (child.span.clone(), true)
-                    };
-
-                    match child.render_span {
-                        Some(FullSpan(_)) => {
-                            match self.emit_message_old_school(&span,
-                                                               &child.message,
-                                                               &None,
-                                                               &child.level,
-                                                               show_snippet) {
-                                Err(e) => panic!("failed to emit error: {}", e),
-                                _ => ()
-                            }
-                        },
-                        Some(Suggestion(ref cs)) => {
-                            match self.emit_suggestion_old_school(cs,
-                                                                  &child.level,
-                                                                  &child.message) {
-                                Err(e) => panic!("failed to emit error: {}", e),
-                                _ => ()
-                            }
-                        },
-                        None => {
-                            match self.emit_message_old_school(&span,
-                                                               &child.message,
-                                                               &None,
-                                                               &child.level,
-                                                               show_snippet) {
-                                Err(e) => panic!("failed to emit error: {}", e),
-                                _ => ()
-                            }
-                        }
-                    }
-                }
-            }
-            Err(e) => panic!("failed to emit error: {}", e)
-        }
-    }
-
     fn render_macro_backtrace_old_school(&mut self,
                                          sp: &Span,
                                          buffer: &mut StyledBuffer) -> io::Result<()> {
@@ -958,16 +696,6 @@ fn emit_to_destination(rendered_buffer: &Vec<Vec<StyledString>>,
     Ok(())
 }
 
-fn line_num_max_digits(line: &LineInfo) -> usize {
-    let mut max_line_num = line.line_index + 1;
-    let mut digits = 0;
-    while max_line_num > 0 {
-        max_line_num /= 10;
-        digits += 1;
-    }
-    digits
-}
-
 #[cfg(unix)]
 fn stderr_isatty() -> bool {
     use libc;
index 610e5647d6d12893547923c65d361474d0914c00..172e27d56d4852b83bb80be3e0ff00112a0c1500 100644 (file)
@@ -420,13 +420,11 @@ pub struct Handler {
 
 impl Handler {
     pub fn with_tty_emitter(color_config: ColorConfig,
-                            registry: Option<registry::Registry>,
                             can_emit_warnings: bool,
                             treat_err_as_bug: bool,
                             cm: Option<Rc<CodeMapper>>)
                             -> Handler {
-        let emitter = Box::new(EmitterWriter::stderr(color_config, registry, cm,
-                               snippet::FormatMode::EnvironmentSelected));
+        let emitter = Box::new(EmitterWriter::stderr(color_config, cm));
         Handler::with_emitter(can_emit_warnings, treat_err_as_bug, emitter)
     }
 
@@ -750,21 +748,4 @@ pub fn expect<T, M>(diag: &Handler, opt: Option<T>, msg: M) -> T where
         Some(t) => t,
         None => diag.bug(&msg()),
     }
-}
-
-/// True if we should use the old-skool error format style. This is
-/// the default setting until the new errors are deemed stable enough
-/// for general use.
-///
-/// FIXME(#33240)
-#[cfg(not(test))]
-pub fn check_old_school() -> bool {
-    use std::env;
-    env::var("RUST_NEW_ERROR_FORMAT").is_err()
-}
-
-/// For unit tests, use the new format.
-#[cfg(test)]
-pub fn check_old_school() -> bool {
-    false
-}
+}
\ No newline at end of file
index 2f94a7f6832fe34401b3a52b6d54cb7d610023ef..5ade8cd9bad82be4f2c34b6ffe98fe5d7494e02c 100644 (file)
 use std::rc::Rc;
 use {Level};
 
-#[derive(Clone)]
-pub enum FormatMode {
-    NewErrorFormat,
-    OriginalErrorFormat,
-    EnvironmentSelected
-}
-
 #[derive(Clone)]
 pub struct SnippetData {
     codemap: Rc<CodeMapper>,
-    files: Vec<FileInfo>,
-    format_mode: FormatMode,
+    files: Vec<FileInfo>
 }
 
 #[derive(Clone)]
@@ -41,10 +33,6 @@ pub struct FileInfo {
     primary_span: Option<Span>,
 
     lines: Vec<Line>,
-
-    /// The type of error format to render.  We keep it here so that
-    /// it's easy to configure for both tests and regular usage
-    format_mode: FormatMode,
 }
 
 #[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
index 774c5ca6d6b239a75b8a72ad583f23248e698280..420c88e89be0d0373aff440bc5bbc9173a1543b8 100644 (file)
@@ -71,6 +71,13 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
         dump_graph(tcx);
     }
 
+    // if the `rustc_attrs` feature is not enabled, then the
+    // attributes we are interested in cannot be present anyway, so
+    // skip the walk.
+    if !tcx.sess.features.borrow().rustc_attrs {
+        return;
+    }
+
     // Find annotations supplied by user (if any).
     let (if_this_changed, then_this_would_need) = {
         let mut visitor = IfThisChanged { tcx: tcx,
diff --git a/src/librustc_incremental/calculate_svh.rs b/src/librustc_incremental/calculate_svh.rs
deleted file mode 100644 (file)
index bea6b7e..0000000
+++ /dev/null
@@ -1,454 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Calculation of a Strict Version Hash for crates.  For a length
-//! comment explaining the general idea, see `librustc/middle/svh.rs`.
-
-use syntax::attr::AttributeMethods;
-use std::hash::{Hash, SipHasher, Hasher};
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
-use rustc::hir::svh::Svh;
-use rustc::ty::TyCtxt;
-use rustc::hir::intravisit::{self, Visitor};
-
-use self::svh_visitor::StrictVersionHashVisitor;
-
-pub trait SvhCalculate {
-    /// Calculate the SVH for an entire krate.
-    fn calculate_krate_hash(self) -> Svh;
-
-    /// Calculate the SVH for a particular item.
-    fn calculate_item_hash(self, def_id: DefId) -> u64;
-}
-
-impl<'a, 'tcx> SvhCalculate for TyCtxt<'a, 'tcx, 'tcx> {
-    fn calculate_krate_hash(self) -> Svh {
-        // FIXME (#14132): This is better than it used to be, but it still not
-        // ideal. We now attempt to hash only the relevant portions of the
-        // Crate AST as well as the top-level crate attributes. (However,
-        // the hashing of the crate attributes should be double-checked
-        // to ensure it is not incorporating implementation artifacts into
-        // the hash that are not otherwise visible.)
-
-        let crate_disambiguator = self.sess.local_crate_disambiguator();
-        let krate = self.map.krate();
-
-        // FIXME: this should use SHA1, not SipHash. SipHash is not built to
-        //        avoid collisions.
-        let mut state = SipHasher::new();
-        debug!("state: {:?}", state);
-
-        // FIXME(#32753) -- at (*) we `to_le` for endianness, but is
-        // this enough, and does it matter anyway?
-        "crate_disambiguator".hash(&mut state);
-        crate_disambiguator.len().to_le().hash(&mut state); // (*)
-        crate_disambiguator.hash(&mut state);
-
-        debug!("crate_disambiguator: {:?}", crate_disambiguator);
-        debug!("state: {:?}", state);
-
-        {
-            let mut visit = StrictVersionHashVisitor::new(&mut state, self);
-            krate.visit_all_items(&mut visit);
-        }
-
-        // FIXME (#14132): This hash is still sensitive to e.g. the
-        // spans of the crate Attributes and their underlying
-        // MetaItems; we should make ContentHashable impl for those
-        // types and then use hash_content.  But, since all crate
-        // attributes should appear near beginning of the file, it is
-        // not such a big deal to be sensitive to their spans for now.
-        //
-        // We hash only the MetaItems instead of the entire Attribute
-        // to avoid hashing the AttrId
-        for attr in &krate.attrs {
-            debug!("krate attr {:?}", attr);
-            attr.meta().hash(&mut state);
-        }
-
-        Svh::new(state.finish())
-    }
-
-    fn calculate_item_hash(self, def_id: DefId) -> u64 {
-        assert!(def_id.is_local());
-
-        debug!("calculate_item_hash(def_id={:?})", def_id);
-
-        let mut state = SipHasher::new();
-
-        {
-            let mut visit = StrictVersionHashVisitor::new(&mut state, self);
-            if def_id.index == CRATE_DEF_INDEX {
-                // the crate root itself is not registered in the map
-                // as an item, so we have to fetch it this way
-                let krate = self.map.krate();
-                intravisit::walk_crate(&mut visit, krate);
-            } else {
-                let node_id = self.map.as_local_node_id(def_id).unwrap();
-                let item = self.map.expect_item(node_id);
-                visit.visit_item(item);
-            }
-        }
-
-        let hash = state.finish();
-
-        debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, hash);
-
-        hash
-    }
-}
-
-// FIXME (#14132): Even this SVH computation still has implementation
-// artifacts: namely, the order of item declaration will affect the
-// hash computation, but for many kinds of items the order of
-// declaration should be irrelevant to the ABI.
-
-mod svh_visitor {
-    pub use self::SawExprComponent::*;
-    pub use self::SawStmtComponent::*;
-    use self::SawAbiComponent::*;
-    use syntax::ast::{self, Name, NodeId};
-    use syntax::parse::token;
-    use syntax_pos::Span;
-    use rustc::ty::TyCtxt;
-    use rustc::hir;
-    use rustc::hir::*;
-    use rustc::hir::map::DefPath;
-    use rustc::hir::intravisit as visit;
-    use rustc::hir::intravisit::{Visitor, FnKind};
-
-    use std::hash::{Hash, SipHasher};
-
-    pub struct StrictVersionHashVisitor<'a, 'tcx: 'a> {
-        pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
-        pub st: &'a mut SipHasher,
-    }
-
-    impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
-        pub fn new(st: &'a mut SipHasher,
-                   tcx: TyCtxt<'a, 'tcx, 'tcx>)
-                   -> Self {
-            StrictVersionHashVisitor { st: st, tcx: tcx }
-        }
-
-        fn hash_def_path(&mut self, path: &DefPath) {
-            self.tcx.crate_name(path.krate).hash(self.st);
-            self.tcx.crate_disambiguator(path.krate).hash(self.st);
-            for data in &path.data {
-                data.data.as_interned_str().hash(self.st);
-                data.disambiguator.hash(self.st);
-            }
-        }
-    }
-
-    // To off-load the bulk of the hash-computation on #[derive(Hash)],
-    // we define a set of enums corresponding to the content that our
-    // crate visitor will encounter as it traverses the ast.
-    //
-    // The important invariant is that all of the Saw*Component enums
-    // do not carry any Spans, Names, or Idents.
-    //
-    // Not carrying any Names/Idents is the important fix for problem
-    // noted on PR #13948: using the ident.name as the basis for a
-    // hash leads to unstable SVH, because ident.name is just an index
-    // into intern table (i.e. essentially a random address), not
-    // computed from the name content.
-    //
-    // With the below enums, the SVH computation is not sensitive to
-    // artifacts of how rustc was invoked nor of how the source code
-    // was laid out.  (Or at least it is *less* sensitive.)
-
-    // This enum represents the different potential bits of code the
-    // visitor could encounter that could affect the ABI for the crate,
-    // and assigns each a distinct tag to feed into the hash computation.
-    #[derive(Hash)]
-    enum SawAbiComponent<'a> {
-
-        // FIXME (#14132): should we include (some function of)
-        // ident.ctxt as well?
-        SawIdent(token::InternedString),
-        SawStructDef(token::InternedString),
-
-        SawLifetime(token::InternedString),
-        SawLifetimeDef(token::InternedString),
-
-        SawMod,
-        SawForeignItem,
-        SawItem,
-        SawDecl,
-        SawTy,
-        SawGenerics,
-        SawFn,
-        SawTraitItem,
-        SawImplItem,
-        SawStructField,
-        SawVariant,
-        SawPath,
-        SawBlock,
-        SawPat,
-        SawLocal,
-        SawArm,
-        SawExpr(SawExprComponent<'a>),
-        SawStmt(SawStmtComponent),
-    }
-
-    /// SawExprComponent carries all of the information that we want
-    /// to include in the hash that *won't* be covered by the
-    /// subsequent recursive traversal of the expression's
-    /// substructure by the visitor.
-    ///
-    /// We know every Expr_ variant is covered by a variant because
-    /// `fn saw_expr` maps each to some case below.  Ensuring that
-    /// each variant carries an appropriate payload has to be verified
-    /// by hand.
-    ///
-    /// (However, getting that *exactly* right is not so important
-    /// because the SVH is just a developer convenience; there is no
-    /// guarantee of collision-freedom, hash collisions are just
-    /// (hopefully) unlikely.)
-    #[derive(Hash)]
-    pub enum SawExprComponent<'a> {
-
-        SawExprLoop(Option<token::InternedString>),
-        SawExprField(token::InternedString),
-        SawExprTupField(usize),
-        SawExprBreak(Option<token::InternedString>),
-        SawExprAgain(Option<token::InternedString>),
-
-        SawExprBox,
-        SawExprVec,
-        SawExprCall,
-        SawExprMethodCall,
-        SawExprTup,
-        SawExprBinary(hir::BinOp_),
-        SawExprUnary(hir::UnOp),
-        SawExprLit(ast::LitKind),
-        SawExprCast,
-        SawExprType,
-        SawExprIf,
-        SawExprWhile,
-        SawExprMatch,
-        SawExprClosure,
-        SawExprBlock,
-        SawExprAssign,
-        SawExprAssignOp(hir::BinOp_),
-        SawExprIndex,
-        SawExprPath(Option<usize>),
-        SawExprAddrOf(hir::Mutability),
-        SawExprRet,
-        SawExprInlineAsm(&'a hir::InlineAsm),
-        SawExprStruct,
-        SawExprRepeat,
-    }
-
-    fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> {
-        match *node {
-            ExprBox(..)              => SawExprBox,
-            ExprVec(..)              => SawExprVec,
-            ExprCall(..)             => SawExprCall,
-            ExprMethodCall(..)       => SawExprMethodCall,
-            ExprTup(..)              => SawExprTup,
-            ExprBinary(op, _, _)     => SawExprBinary(op.node),
-            ExprUnary(op, _)         => SawExprUnary(op),
-            ExprLit(ref lit)         => SawExprLit(lit.node.clone()),
-            ExprCast(..)             => SawExprCast,
-            ExprType(..)             => SawExprType,
-            ExprIf(..)               => SawExprIf,
-            ExprWhile(..)            => SawExprWhile,
-            ExprLoop(_, id)          => SawExprLoop(id.map(|id| id.node.as_str())),
-            ExprMatch(..)            => SawExprMatch,
-            ExprClosure(..)          => SawExprClosure,
-            ExprBlock(..)            => SawExprBlock,
-            ExprAssign(..)           => SawExprAssign,
-            ExprAssignOp(op, _, _)   => SawExprAssignOp(op.node),
-            ExprField(_, name)       => SawExprField(name.node.as_str()),
-            ExprTupField(_, id)      => SawExprTupField(id.node),
-            ExprIndex(..)            => SawExprIndex,
-            ExprPath(ref qself, _)   => SawExprPath(qself.as_ref().map(|q| q.position)),
-            ExprAddrOf(m, _)         => SawExprAddrOf(m),
-            ExprBreak(id)            => SawExprBreak(id.map(|id| id.node.as_str())),
-            ExprAgain(id)            => SawExprAgain(id.map(|id| id.node.as_str())),
-            ExprRet(..)              => SawExprRet,
-            ExprInlineAsm(ref a,_,_) => SawExprInlineAsm(a),
-            ExprStruct(..)           => SawExprStruct,
-            ExprRepeat(..)           => SawExprRepeat,
-        }
-    }
-
-    /// SawStmtComponent is analogous to SawExprComponent, but for statements.
-    #[derive(Hash)]
-    pub enum SawStmtComponent {
-        SawStmtDecl,
-        SawStmtExpr,
-        SawStmtSemi,
-    }
-
-    fn saw_stmt(node: &Stmt_) -> SawStmtComponent {
-        match *node {
-            StmtDecl(..) => SawStmtDecl,
-            StmtExpr(..) => SawStmtExpr,
-            StmtSemi(..) => SawStmtSemi,
-        }
-    }
-
-    impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
-        fn visit_nested_item(&mut self, item: ItemId) {
-            let def_path = self.tcx.map.def_path_from_id(item.id).unwrap();
-            debug!("visit_nested_item: def_path={:?} st={:?}", def_path, self.st);
-            self.hash_def_path(&def_path);
-        }
-
-        fn visit_variant_data(&mut self, s: &'a VariantData, name: Name,
-                              g: &'a Generics, _: NodeId, _: Span) {
-            debug!("visit_variant_data: st={:?}", self.st);
-            SawStructDef(name.as_str()).hash(self.st);
-            visit::walk_generics(self, g);
-            visit::walk_struct_def(self, s)
-        }
-
-        fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
-            debug!("visit_variant: st={:?}", self.st);
-            SawVariant.hash(self.st);
-            // walk_variant does not call walk_generics, so do it here.
-            visit::walk_generics(self, g);
-            visit::walk_variant(self, v, g, item_id)
-        }
-
-        // All of the remaining methods just record (in the hash
-        // SipHasher) that the visitor saw that particular variant
-        // (with its payload), and continue walking as the default
-        // visitor would.
-        //
-        // Some of the implementations have some notes as to how one
-        // might try to make their SVH computation less discerning
-        // (e.g. by incorporating reachability analysis).  But
-        // currently all of their implementations are uniform and
-        // uninteresting.
-        //
-        // (If you edit a method such that it deviates from the
-        // pattern, please move that method up above this comment.)
-
-        fn visit_name(&mut self, _: Span, name: Name) {
-            debug!("visit_name: st={:?}", self.st);
-            SawIdent(name.as_str()).hash(self.st);
-        }
-
-        fn visit_lifetime(&mut self, l: &'a Lifetime) {
-            debug!("visit_lifetime: st={:?}", self.st);
-            SawLifetime(l.name.as_str()).hash(self.st);
-        }
-
-        fn visit_lifetime_def(&mut self, l: &'a LifetimeDef) {
-            debug!("visit_lifetime_def: st={:?}", self.st);
-            SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
-        }
-
-        // We do recursively walk the bodies of functions/methods
-        // (rather than omitting their bodies from the hash) since
-        // monomorphization and cross-crate inlining generally implies
-        // that a change to a crate body will require downstream
-        // crates to be recompiled.
-        fn visit_expr(&mut self, ex: &'a Expr) {
-            debug!("visit_expr: st={:?}", self.st);
-            SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
-        }
-
-        fn visit_stmt(&mut self, s: &'a Stmt) {
-            debug!("visit_stmt: st={:?}", self.st);
-            SawStmt(saw_stmt(&s.node)).hash(self.st); visit::walk_stmt(self, s)
-        }
-
-        fn visit_foreign_item(&mut self, i: &'a ForeignItem) {
-            debug!("visit_foreign_item: st={:?}", self.st);
-
-            // FIXME (#14132) ideally we would incorporate privacy (or
-            // perhaps reachability) somewhere here, so foreign items
-            // that do not leak into downstream crates would not be
-            // part of the ABI.
-            SawForeignItem.hash(self.st); visit::walk_foreign_item(self, i)
-        }
-
-        fn visit_item(&mut self, i: &'a Item) {
-            debug!("visit_item: {:?} st={:?}", i, self.st);
-
-            // FIXME (#14132) ideally would incorporate reachability
-            // analysis somewhere here, so items that never leak into
-            // downstream crates (e.g. via monomorphisation or
-            // inlining) would not be part of the ABI.
-            SawItem.hash(self.st); visit::walk_item(self, i)
-        }
-
-        fn visit_mod(&mut self, m: &'a Mod, _s: Span, n: NodeId) {
-            debug!("visit_mod: st={:?}", self.st);
-            SawMod.hash(self.st); visit::walk_mod(self, m, n)
-        }
-
-        fn visit_decl(&mut self, d: &'a Decl) {
-            debug!("visit_decl: st={:?}", self.st);
-            SawDecl.hash(self.st); visit::walk_decl(self, d)
-        }
-
-        fn visit_ty(&mut self, t: &'a Ty) {
-            debug!("visit_ty: st={:?}", self.st);
-            SawTy.hash(self.st); visit::walk_ty(self, t)
-        }
-
-        fn visit_generics(&mut self, g: &'a Generics) {
-            debug!("visit_generics: st={:?}", self.st);
-            SawGenerics.hash(self.st); visit::walk_generics(self, g)
-        }
-
-        fn visit_fn(&mut self, fk: FnKind<'a>, fd: &'a FnDecl,
-                    b: &'a Block, s: Span, n: NodeId) {
-            debug!("visit_fn: st={:?}", self.st);
-            SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s, n)
-        }
-
-        fn visit_trait_item(&mut self, ti: &'a TraitItem) {
-            debug!("visit_trait_item: st={:?}", self.st);
-            SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
-        }
-
-        fn visit_impl_item(&mut self, ii: &'a ImplItem) {
-            debug!("visit_impl_item: st={:?}", self.st);
-            SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
-        }
-
-        fn visit_struct_field(&mut self, s: &'a StructField) {
-            debug!("visit_struct_field: st={:?}", self.st);
-            SawStructField.hash(self.st); visit::walk_struct_field(self, s)
-        }
-
-        fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) {
-            debug!("visit_path: st={:?}", self.st);
-            SawPath.hash(self.st); visit::walk_path(self, path)
-        }
-
-        fn visit_block(&mut self, b: &'a Block) {
-            debug!("visit_block: st={:?}", self.st);
-            SawBlock.hash(self.st); visit::walk_block(self, b)
-        }
-
-        fn visit_pat(&mut self, p: &'a Pat) {
-            debug!("visit_pat: st={:?}", self.st);
-            SawPat.hash(self.st); visit::walk_pat(self, p)
-        }
-
-        fn visit_local(&mut self, l: &'a Local) {
-            debug!("visit_local: st={:?}", self.st);
-            SawLocal.hash(self.st); visit::walk_local(self, l)
-        }
-
-        fn visit_arm(&mut self, a: &'a Arm) {
-            debug!("visit_arm: st={:?}", self.st);
-            SawArm.hash(self.st); visit::walk_arm(self, a)
-        }
-    }
-}
diff --git a/src/librustc_incremental/calculate_svh/mod.rs b/src/librustc_incremental/calculate_svh/mod.rs
new file mode 100644 (file)
index 0000000..d7caf8c
--- /dev/null
@@ -0,0 +1,113 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Calculation of a Strict Version Hash for crates.  For a length
+//! comment explaining the general idea, see `librustc/middle/svh.rs`.
+
+use syntax::attr::AttributeMethods;
+use std::hash::{Hash, SipHasher, Hasher};
+use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::map::{NodeItem, NodeForeignItem};
+use rustc::hir::svh::Svh;
+use rustc::ty::TyCtxt;
+use rustc::hir::intravisit::{self, Visitor};
+
+use self::svh_visitor::StrictVersionHashVisitor;
+
+mod svh_visitor;
+
+pub trait SvhCalculate {
+    /// Calculate the SVH for an entire krate.
+    fn calculate_krate_hash(self) -> Svh;
+
+    /// Calculate the SVH for a particular item.
+    fn calculate_item_hash(self, def_id: DefId) -> u64;
+}
+
+impl<'a, 'tcx> SvhCalculate for TyCtxt<'a, 'tcx, 'tcx> {
+    fn calculate_krate_hash(self) -> Svh {
+        // FIXME (#14132): This is better than it used to be, but it still not
+        // ideal. We now attempt to hash only the relevant portions of the
+        // Crate AST as well as the top-level crate attributes. (However,
+        // the hashing of the crate attributes should be double-checked
+        // to ensure it is not incorporating implementation artifacts into
+        // the hash that are not otherwise visible.)
+
+        let crate_disambiguator = self.sess.local_crate_disambiguator();
+        let krate = self.map.krate();
+
+        // FIXME: this should use SHA1, not SipHash. SipHash is not built to
+        //        avoid collisions.
+        let mut state = SipHasher::new();
+        debug!("state: {:?}", state);
+
+        // FIXME(#32753) -- at (*) we `to_le` for endianness, but is
+        // this enough, and does it matter anyway?
+        "crate_disambiguator".hash(&mut state);
+        crate_disambiguator.len().to_le().hash(&mut state); // (*)
+        crate_disambiguator.hash(&mut state);
+
+        debug!("crate_disambiguator: {:?}", crate_disambiguator);
+        debug!("state: {:?}", state);
+
+        {
+            let mut visit = StrictVersionHashVisitor::new(&mut state, self);
+            krate.visit_all_items(&mut visit);
+        }
+
+        // FIXME (#14132): This hash is still sensitive to e.g. the
+        // spans of the crate Attributes and their underlying
+        // MetaItems; we should make ContentHashable impl for those
+        // types and then use hash_content.  But, since all crate
+        // attributes should appear near beginning of the file, it is
+        // not such a big deal to be sensitive to their spans for now.
+        //
+        // We hash only the MetaItems instead of the entire Attribute
+        // to avoid hashing the AttrId
+        for attr in &krate.attrs {
+            debug!("krate attr {:?}", attr);
+            attr.meta().hash(&mut state);
+        }
+
+        Svh::new(state.finish())
+    }
+
+    fn calculate_item_hash(self, def_id: DefId) -> u64 {
+        assert!(def_id.is_local());
+
+        debug!("calculate_item_hash(def_id={:?})", def_id);
+
+        let mut state = SipHasher::new();
+
+        {
+            let mut visit = StrictVersionHashVisitor::new(&mut state, self);
+            if def_id.index == CRATE_DEF_INDEX {
+                // the crate root itself is not registered in the map
+                // as an item, so we have to fetch it this way
+                let krate = self.map.krate();
+                intravisit::walk_crate(&mut visit, krate);
+            } else {
+                let node_id = self.map.as_local_node_id(def_id).unwrap();
+                match self.map.find(node_id) {
+                    Some(NodeItem(item)) => visit.visit_item(item),
+                    Some(NodeForeignItem(item)) => visit.visit_foreign_item(item),
+                    r => bug!("calculate_item_hash: expected an item for node {} not {:?}",
+                              node_id, r),
+                }
+            }
+        }
+
+        let hash = state.finish();
+
+        debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, hash);
+
+        hash
+    }
+}
diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs
new file mode 100644 (file)
index 0000000..42e7abe
--- /dev/null
@@ -0,0 +1,439 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// FIXME (#14132): Even this SVH computation still has implementation
+// artifacts: namely, the order of item declaration will affect the
+// hash computation, but for many kinds of items the order of
+// declaration should be irrelevant to the ABI.
+
+pub use self::SawExprComponent::*;
+pub use self::SawStmtComponent::*;
+use self::SawAbiComponent::*;
+use syntax::ast::{self, Name, NodeId};
+use syntax::parse::token;
+use syntax_pos::Span;
+use rustc::hir;
+use rustc::hir::*;
+use rustc::hir::def::{Def, PathResolution};
+use rustc::hir::def_id::DefId;
+use rustc::hir::intravisit as visit;
+use rustc::hir::intravisit::{Visitor, FnKind};
+use rustc::hir::map::DefPath;
+use rustc::ty::TyCtxt;
+
+use std::hash::{Hash, SipHasher};
+
+pub struct StrictVersionHashVisitor<'a, 'tcx: 'a> {
+    pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    pub st: &'a mut SipHasher,
+}
+
+impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
+    pub fn new(st: &'a mut SipHasher,
+               tcx: TyCtxt<'a, 'tcx, 'tcx>)
+               -> Self {
+        StrictVersionHashVisitor { st: st, tcx: tcx }
+    }
+
+    fn hash_def_path(&mut self, path: &DefPath) {
+        path.deterministic_hash_to(self.tcx, self.st);
+    }
+}
+
+// To off-load the bulk of the hash-computation on #[derive(Hash)],
+// we define a set of enums corresponding to the content that our
+// crate visitor will encounter as it traverses the ast.
+//
+// The important invariant is that all of the Saw*Component enums
+// do not carry any Spans, Names, or Idents.
+//
+// Not carrying any Names/Idents is the important fix for problem
+// noted on PR #13948: using the ident.name as the basis for a
+// hash leads to unstable SVH, because ident.name is just an index
+// into intern table (i.e. essentially a random address), not
+// computed from the name content.
+//
+// With the below enums, the SVH computation is not sensitive to
+// artifacts of how rustc was invoked nor of how the source code
+// was laid out.  (Or at least it is *less* sensitive.)
+
+// This enum represents the different potential bits of code the
+// visitor could encounter that could affect the ABI for the crate,
+// and assigns each a distinct tag to feed into the hash computation.
+#[derive(Hash)]
+enum SawAbiComponent<'a> {
+
+    // FIXME (#14132): should we include (some function of)
+    // ident.ctxt as well?
+    SawIdent(token::InternedString),
+    SawStructDef(token::InternedString),
+
+    SawLifetime(token::InternedString),
+    SawLifetimeDef(token::InternedString),
+
+    SawMod,
+    SawForeignItem,
+    SawItem,
+    SawTy,
+    SawGenerics,
+    SawFn,
+    SawTraitItem,
+    SawImplItem,
+    SawStructField,
+    SawVariant,
+    SawPath,
+    SawBlock,
+    SawPat,
+    SawLocal,
+    SawArm,
+    SawExpr(SawExprComponent<'a>),
+    SawStmt(SawStmtComponent),
+}
+
+/// SawExprComponent carries all of the information that we want
+/// to include in the hash that *won't* be covered by the
+/// subsequent recursive traversal of the expression's
+/// substructure by the visitor.
+///
+/// We know every Expr_ variant is covered by a variant because
+/// `fn saw_expr` maps each to some case below.  Ensuring that
+/// each variant carries an appropriate payload has to be verified
+/// by hand.
+///
+/// (However, getting that *exactly* right is not so important
+/// because the SVH is just a developer convenience; there is no
+/// guarantee of collision-freedom, hash collisions are just
+/// (hopefully) unlikely.)
+#[derive(Hash)]
+pub enum SawExprComponent<'a> {
+
+    SawExprLoop(Option<token::InternedString>),
+    SawExprField(token::InternedString),
+    SawExprTupField(usize),
+    SawExprBreak(Option<token::InternedString>),
+    SawExprAgain(Option<token::InternedString>),
+
+    SawExprBox,
+    SawExprVec,
+    SawExprCall,
+    SawExprMethodCall,
+    SawExprTup,
+    SawExprBinary(hir::BinOp_),
+    SawExprUnary(hir::UnOp),
+    SawExprLit(ast::LitKind),
+    SawExprCast,
+    SawExprType,
+    SawExprIf,
+    SawExprWhile,
+    SawExprMatch,
+    SawExprClosure,
+    SawExprBlock,
+    SawExprAssign,
+    SawExprAssignOp(hir::BinOp_),
+    SawExprIndex,
+    SawExprPath(Option<usize>),
+    SawExprAddrOf(hir::Mutability),
+    SawExprRet,
+    SawExprInlineAsm(&'a hir::InlineAsm),
+    SawExprStruct,
+    SawExprRepeat,
+}
+
+fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> {
+    match *node {
+        ExprBox(..)              => SawExprBox,
+        ExprVec(..)              => SawExprVec,
+        ExprCall(..)             => SawExprCall,
+        ExprMethodCall(..)       => SawExprMethodCall,
+        ExprTup(..)              => SawExprTup,
+        ExprBinary(op, _, _)     => SawExprBinary(op.node),
+        ExprUnary(op, _)         => SawExprUnary(op),
+        ExprLit(ref lit)         => SawExprLit(lit.node.clone()),
+        ExprCast(..)             => SawExprCast,
+        ExprType(..)             => SawExprType,
+        ExprIf(..)               => SawExprIf,
+        ExprWhile(..)            => SawExprWhile,
+        ExprLoop(_, id)          => SawExprLoop(id.map(|id| id.node.as_str())),
+        ExprMatch(..)            => SawExprMatch,
+        ExprClosure(..)          => SawExprClosure,
+        ExprBlock(..)            => SawExprBlock,
+        ExprAssign(..)           => SawExprAssign,
+        ExprAssignOp(op, _, _)   => SawExprAssignOp(op.node),
+        ExprField(_, name)       => SawExprField(name.node.as_str()),
+        ExprTupField(_, id)      => SawExprTupField(id.node),
+        ExprIndex(..)            => SawExprIndex,
+        ExprPath(ref qself, _)   => SawExprPath(qself.as_ref().map(|q| q.position)),
+        ExprAddrOf(m, _)         => SawExprAddrOf(m),
+        ExprBreak(id)            => SawExprBreak(id.map(|id| id.node.as_str())),
+        ExprAgain(id)            => SawExprAgain(id.map(|id| id.node.as_str())),
+        ExprRet(..)              => SawExprRet,
+        ExprInlineAsm(ref a,_,_) => SawExprInlineAsm(a),
+        ExprStruct(..)           => SawExprStruct,
+        ExprRepeat(..)           => SawExprRepeat,
+    }
+}
+
+/// SawStmtComponent is analogous to SawExprComponent, but for statements.
+#[derive(Hash)]
+pub enum SawStmtComponent {
+    SawStmtExpr,
+    SawStmtSemi,
+}
+
+impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
+    fn visit_nested_item(&mut self, _: ItemId) {
+        // Each item is hashed independently; ignore nested items.
+    }
+
+    fn visit_variant_data(&mut self, s: &'a VariantData, name: Name,
+                          g: &'a Generics, _: NodeId, _: Span) {
+        debug!("visit_variant_data: st={:?}", self.st);
+        SawStructDef(name.as_str()).hash(self.st);
+        visit::walk_generics(self, g);
+        visit::walk_struct_def(self, s)
+    }
+
+    fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
+        debug!("visit_variant: st={:?}", self.st);
+        SawVariant.hash(self.st);
+        // walk_variant does not call walk_generics, so do it here.
+        visit::walk_generics(self, g);
+        visit::walk_variant(self, v, g, item_id)
+    }
+
+    // All of the remaining methods just record (in the hash
+    // SipHasher) that the visitor saw that particular variant
+    // (with its payload), and continue walking as the default
+    // visitor would.
+    //
+    // Some of the implementations have some notes as to how one
+    // might try to make their SVH computation less discerning
+    // (e.g. by incorporating reachability analysis).  But
+    // currently all of their implementations are uniform and
+    // uninteresting.
+    //
+    // (If you edit a method such that it deviates from the
+    // pattern, please move that method up above this comment.)
+
+    fn visit_name(&mut self, _: Span, name: Name) {
+        debug!("visit_name: st={:?}", self.st);
+        SawIdent(name.as_str()).hash(self.st);
+    }
+
+    fn visit_lifetime(&mut self, l: &'a Lifetime) {
+        debug!("visit_lifetime: st={:?}", self.st);
+        SawLifetime(l.name.as_str()).hash(self.st);
+    }
+
+    fn visit_lifetime_def(&mut self, l: &'a LifetimeDef) {
+        debug!("visit_lifetime_def: st={:?}", self.st);
+        SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
+    }
+
+    // We do recursively walk the bodies of functions/methods
+    // (rather than omitting their bodies from the hash) since
+    // monomorphization and cross-crate inlining generally implies
+    // that a change to a crate body will require downstream
+    // crates to be recompiled.
+    fn visit_expr(&mut self, ex: &'a Expr) {
+        debug!("visit_expr: st={:?}", self.st);
+        SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
+    }
+
+    fn visit_stmt(&mut self, s: &'a Stmt) {
+        debug!("visit_stmt: st={:?}", self.st);
+
+        // We don't want to modify the hash for decls, because
+        // they might be item decls (if they are local decls,
+        // we'll hash that fact in visit_local); but we do want to
+        // remember if this was a StmtExpr or StmtSemi (the later
+        // had an explicit semi-colon; this affects the typing
+        // rules).
+        match s.node {
+            StmtDecl(..) => (),
+            StmtExpr(..) => SawStmt(SawStmtExpr).hash(self.st),
+            StmtSemi(..) => SawStmt(SawStmtSemi).hash(self.st),
+        }
+
+        visit::walk_stmt(self, s)
+    }
+
+    fn visit_foreign_item(&mut self, i: &'a ForeignItem) {
+        debug!("visit_foreign_item: st={:?}", self.st);
+
+        // FIXME (#14132) ideally we would incorporate privacy (or
+        // perhaps reachability) somewhere here, so foreign items
+        // that do not leak into downstream crates would not be
+        // part of the ABI.
+        SawForeignItem.hash(self.st); visit::walk_foreign_item(self, i)
+    }
+
+    fn visit_item(&mut self, i: &'a Item) {
+        debug!("visit_item: {:?} st={:?}", i, self.st);
+
+        // FIXME (#14132) ideally would incorporate reachability
+        // analysis somewhere here, so items that never leak into
+        // downstream crates (e.g. via monomorphisation or
+        // inlining) would not be part of the ABI.
+        SawItem.hash(self.st); visit::walk_item(self, i)
+    }
+
+    fn visit_mod(&mut self, m: &'a Mod, _s: Span, n: NodeId) {
+        debug!("visit_mod: st={:?}", self.st);
+        SawMod.hash(self.st); visit::walk_mod(self, m, n)
+    }
+
+    fn visit_ty(&mut self, t: &'a Ty) {
+        debug!("visit_ty: st={:?}", self.st);
+        SawTy.hash(self.st); visit::walk_ty(self, t)
+    }
+
+    fn visit_generics(&mut self, g: &'a Generics) {
+        debug!("visit_generics: st={:?}", self.st);
+        SawGenerics.hash(self.st); visit::walk_generics(self, g)
+    }
+
+    fn visit_fn(&mut self, fk: FnKind<'a>, fd: &'a FnDecl,
+                b: &'a Block, s: Span, n: NodeId) {
+        debug!("visit_fn: st={:?}", self.st);
+        SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s, n)
+    }
+
+    fn visit_trait_item(&mut self, ti: &'a TraitItem) {
+        debug!("visit_trait_item: st={:?}", self.st);
+        SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
+    }
+
+    fn visit_impl_item(&mut self, ii: &'a ImplItem) {
+        debug!("visit_impl_item: st={:?}", self.st);
+        SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
+    }
+
+    fn visit_struct_field(&mut self, s: &'a StructField) {
+        debug!("visit_struct_field: st={:?}", self.st);
+        SawStructField.hash(self.st); visit::walk_struct_field(self, s)
+    }
+
+    fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) {
+        debug!("visit_path: st={:?}", self.st);
+        SawPath.hash(self.st); visit::walk_path(self, path)
+    }
+
+    fn visit_block(&mut self, b: &'a Block) {
+        debug!("visit_block: st={:?}", self.st);
+        SawBlock.hash(self.st); visit::walk_block(self, b)
+    }
+
+    fn visit_pat(&mut self, p: &'a Pat) {
+        debug!("visit_pat: st={:?}", self.st);
+        SawPat.hash(self.st); visit::walk_pat(self, p)
+    }
+
+    fn visit_local(&mut self, l: &'a Local) {
+        debug!("visit_local: st={:?}", self.st);
+        SawLocal.hash(self.st); visit::walk_local(self, l)
+    }
+
+    fn visit_arm(&mut self, a: &'a Arm) {
+        debug!("visit_arm: st={:?}", self.st);
+        SawArm.hash(self.st); visit::walk_arm(self, a)
+    }
+
+    fn visit_id(&mut self, id: NodeId) {
+        debug!("visit_id: id={} st={:?}", id, self.st);
+        self.hash_resolve(id);
+    }
+}
+
+#[derive(Hash)]
+pub enum DefHash {
+    SawDefId,
+    SawLabel,
+    SawPrimTy,
+    SawSelfTy,
+    SawErr,
+}
+
+impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
+    fn hash_resolve(&mut self, id: ast::NodeId) {
+        // Because whether or not a given id has an entry is dependent
+        // solely on expr variant etc, we don't need to hash whether
+        // or not an entry was present (we are already hashing what
+        // variant it is above when we visit the HIR).
+
+        if let Some(def) = self.tcx.def_map.borrow().get(&id) {
+            self.hash_partial_def(def);
+        }
+
+        if let Some(traits) = self.tcx.trait_map.get(&id) {
+            traits.len().hash(self.st);
+            for candidate in traits {
+                self.hash_def_id(candidate.def_id);
+            }
+        }
+    }
+
+    fn hash_def_id(&mut self, def_id: DefId) {
+        let def_path = self.tcx.def_path(def_id);
+        self.hash_def_path(&def_path);
+    }
+
+    fn hash_partial_def(&mut self, def: &PathResolution) {
+        self.hash_def(def.base_def);
+        def.depth.hash(self.st);
+    }
+
+    fn hash_def(&mut self, def: Def) {
+        match def {
+            // Crucial point: for all of these variants, the variant +
+            // add'l data that is added is always the same if the
+            // def-id is the same, so it suffices to hash the def-id
+            Def::Fn(..) |
+            Def::Mod(..) |
+            Def::ForeignMod(..) |
+            Def::Static(..) |
+            Def::Variant(..) |
+            Def::Enum(..) |
+            Def::TyAlias(..) |
+            Def::AssociatedTy(..) |
+            Def::TyParam(..) |
+            Def::Struct(..) |
+            Def::Trait(..) |
+            Def::Method(..) |
+            Def::Const(..) |
+            Def::AssociatedConst(..) |
+            Def::Local(..) |
+            Def::Upvar(..) => {
+                DefHash::SawDefId.hash(self.st);
+                self.hash_def_id(def.def_id());
+            }
+
+            Def::Label(..) => {
+                DefHash::SawLabel.hash(self.st);
+                // we don't encode the `id` because it always refers to something
+                // within this item, so if it changed, there would have to be other
+                // changes too
+            }
+            Def::PrimTy(ref prim_ty) => {
+                DefHash::SawPrimTy.hash(self.st);
+                prim_ty.hash(self.st);
+            }
+            Def::SelfTy(..) => {
+                DefHash::SawSelfTy.hash(self.st);
+                // the meaning of Self is always the same within a
+                // given context, so we don't need to hash the other
+                // fields
+            }
+            Def::Err => {
+                DefHash::SawErr.hash(self.st);
+            }
+        }
+    }
+}
index 95e9a16f29bbe927db40164283e00576231eb789..12f3ed8ae2bd4caefa6e1e32c3695ebd093e890d 100644 (file)
@@ -19,7 +19,6 @@
 /// Data for use when recompiling the **current crate**.
 #[derive(Debug, RustcEncodable, RustcDecodable)]
 pub struct SerializedDepGraph {
-    pub nodes: Vec<DepNode<DefPathIndex>>,
     pub edges: Vec<SerializedEdge>,
 
     /// These are hashes of two things:
@@ -44,15 +43,22 @@ pub struct SerializedDepGraph {
     pub hashes: Vec<SerializedHash>,
 }
 
+/// Represents a "reduced" dependency edge. Unlike the full dep-graph,
+/// the dep-graph we serialize contains only edges `S -> T` where the
+/// source `S` is something hashable (a HIR node or foreign metadata)
+/// and the target `T` is something significant, like a work-product.
+/// Normally, significant nodes are only those that have saved data on
+/// disk, but in unit-testing the set of significant nodes can be
+/// increased.
 pub type SerializedEdge = (DepNode<DefPathIndex>, DepNode<DefPathIndex>);
 
 #[derive(Debug, RustcEncodable, RustcDecodable)]
 pub struct SerializedHash {
-    /// node being hashed; either a Hir or MetaData variant, in
-    /// practice
-    pub node: DepNode<DefPathIndex>,
+    /// def-id of thing being hashed
+    pub dep_node: DepNode<DefPathIndex>,
 
-    /// the hash itself, computed by `calculate_item_hash`
+    /// the hash as of previous compilation, computed by code in
+    /// `hash` module
     pub hash: u64,
 }
 
index 2fd6973909a8ec59c33a146658b60913049be8a2..084b6714b67b95544ff0e98fd5b4b70bef1f74f3 100644 (file)
 use rustc::dep_graph::DepNode;
 use rustc::hir::map::DefPath;
 use rustc::hir::def_id::DefId;
+use rustc::middle::cstore::LOCAL_CRATE;
 use rustc::ty::TyCtxt;
 use rustc::util::nodemap::DefIdMap;
 use std::fmt::{self, Debug};
+use std::iter::once;
+use syntax::ast;
 
 /// Index into the DefIdDirectory
 #[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq,
@@ -31,17 +34,79 @@ pub struct DefPathIndex {
 pub struct DefIdDirectory {
     // N.B. don't use Removable here because these def-ids are loaded
     // directly without remapping, so loading them should not fail.
-    paths: Vec<DefPath>
+    paths: Vec<DefPath>,
+
+    // For each crate, saves the crate-name/disambiguator so that
+    // later we can match crate-numbers up again.
+    krates: Vec<CrateInfo>,
+}
+
+#[derive(Debug, RustcEncodable, RustcDecodable)]
+pub struct CrateInfo {
+    krate: ast::CrateNum,
+    name: String,
+    disambiguator: String,
 }
 
 impl DefIdDirectory {
-    pub fn new() -> DefIdDirectory {
-        DefIdDirectory { paths: vec![] }
+    pub fn new(krates: Vec<CrateInfo>) -> DefIdDirectory {
+        DefIdDirectory { paths: vec![], krates: krates }
+    }
+
+    fn max_current_crate(&self, tcx: TyCtxt) -> ast::CrateNum {
+        tcx.sess.cstore.crates()
+                       .into_iter()
+                       .max()
+                       .unwrap_or(LOCAL_CRATE)
+    }
+
+    /// Returns a string form for `index`; useful for debugging
+    pub fn def_path_string(&self, tcx: TyCtxt, index: DefPathIndex) -> String {
+        let path = &self.paths[index.index as usize];
+        if self.krate_still_valid(tcx, self.max_current_crate(tcx), path.krate) {
+            path.to_string(tcx)
+        } else {
+            format!("<crate {} changed>", path.krate)
+        }
+    }
+
+    pub fn krate_still_valid(&self,
+                             tcx: TyCtxt,
+                             max_current_crate: ast::CrateNum,
+                             krate: ast::CrateNum) -> bool {
+        // Check that the crate-number still matches. For now, if it
+        // doesn't, just return None. We could do better, such as
+        // finding the new number.
+
+        if krate > max_current_crate {
+            false
+        } else {
+            let old_info = &self.krates[krate as usize];
+            assert_eq!(old_info.krate, krate);
+            let old_name: &str = &old_info.name;
+            let old_disambiguator: &str = &old_info.disambiguator;
+            let new_name: &str = &tcx.crate_name(krate);
+            let new_disambiguator: &str = &tcx.crate_disambiguator(krate);
+            old_name == new_name && old_disambiguator == new_disambiguator
+        }
     }
 
     pub fn retrace(&self, tcx: TyCtxt) -> RetracedDefIdDirectory {
+        let max_current_crate = self.max_current_crate(tcx);
+
         let ids = self.paths.iter()
-                            .map(|path| tcx.retrace_path(path))
+                            .map(|path| {
+                                if self.krate_still_valid(tcx, max_current_crate, path.krate) {
+                                    tcx.retrace_path(path)
+                                } else {
+                                    debug!("crate {} changed from {:?} to {:?}/{:?}",
+                                           path.krate,
+                                           self.krates[path.krate as usize],
+                                           tcx.crate_name(path.krate),
+                                           tcx.crate_disambiguator(path.krate));
+                                    None
+                                }
+                            })
                             .collect();
         RetracedDefIdDirectory { ids: ids }
     }
@@ -70,10 +135,26 @@ pub struct DefIdDirectoryBuilder<'a,'tcx:'a> {
 
 impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> {
+        let mut krates: Vec<_> =
+            once(LOCAL_CRATE)
+            .chain(tcx.sess.cstore.crates())
+            .map(|krate| {
+                CrateInfo {
+                    krate: krate,
+                    name: tcx.crate_name(krate).to_string(),
+                    disambiguator: tcx.crate_disambiguator(krate).to_string()
+                }
+            })
+            .collect();
+
+        // the result of crates() is not in order, so sort list of
+        // crates so that we can just index it later
+        krates.sort_by_key(|k| k.krate);
+
         DefIdDirectoryBuilder {
             tcx: tcx,
             hash: DefIdMap(),
-            directory: DefIdDirectory::new()
+            directory: DefIdDirectory::new(krates),
         }
     }
 
@@ -91,12 +172,17 @@ pub fn add(&mut self, def_id: DefId) -> DefPathIndex {
                  .clone()
     }
 
+    pub fn lookup_def_path(&self, id: DefPathIndex) -> &DefPath {
+        &self.directory.paths[id.index as usize]
+    }
+
+
     pub fn map(&mut self, node: &DepNode<DefId>) -> DepNode<DefPathIndex> {
         node.map_def(|&def_id| Some(self.add(def_id))).unwrap()
     }
 
-    pub fn into_directory(self) -> DefIdDirectory {
-        self.directory
+    pub fn directory(&self) -> &DefIdDirectory {
+        &self.directory
     }
 }
 
index dee4d667b8d95c6b1a23b740455cda4e2d58d2b8..3c77cc07d3d89283b37a1feda18444e8309618c7 100644 (file)
 //! Errors are reported if we are in the suitable configuration but
 //! the required condition is not met.
 
+use super::directory::RetracedDefIdDirectory;
+use super::load::DirtyNodes;
 use rustc::dep_graph::{DepGraphQuery, DepNode};
 use rustc::hir;
 use rustc::hir::def_id::DefId;
 use rustc::hir::intravisit::Visitor;
+use rustc_data_structures::fnv::FnvHashSet;
 use syntax::ast::{self, Attribute, MetaItem};
 use syntax::attr::AttrMetaMethods;
 use syntax::parse::token::InternedString;
 const LABEL: &'static str = "label";
 const CFG: &'static str = "cfg";
 
-pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                               dirty_inputs: &DirtyNodes,
+                                               retraced: &RetracedDefIdDirectory) {
+    // can't add `#[rustc_dirty]` etc without opting in to this feature
+    if !tcx.sess.features.borrow().rustc_attrs {
+        return;
+    }
+
     let _ignore = tcx.dep_graph.in_ignore();
+    let dirty_inputs: FnvHashSet<DepNode<DefId>> =
+        dirty_inputs.iter()
+                   .filter_map(|d| retraced.map(d))
+                   .collect();
     let query = tcx.dep_graph.query();
+    debug!("query-nodes: {:?}", query.nodes());
     let krate = tcx.map.krate();
     krate.visit_all_items(&mut DirtyCleanVisitor {
         tcx: tcx,
         query: &query,
+        dirty_inputs: dirty_inputs,
     });
 }
 
 pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     query: &'a DepGraphQuery<DefId>,
+    dirty_inputs: FnvHashSet<DepNode<DefId>>,
 }
 
 impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
@@ -81,10 +98,13 @@ fn check_config(&self, attr: &ast::Attribute) -> bool {
                         return true;
                     }
                 }
+                return false;
             }
         }
-        debug!("check_config: no match found");
-        return false;
+
+        self.tcx.sess.span_fatal(
+            attr.span,
+            &format!("no cfg attribute"));
     }
 
     fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
@@ -105,29 +125,62 @@ fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
         self.tcx.sess.span_fatal(attr.span, "no `label` found");
     }
 
-    fn dep_node_str(&self, dep_node: DepNode<DefId>) -> DepNode<String> {
+    fn dep_node_str(&self, dep_node: &DepNode<DefId>) -> DepNode<String> {
         dep_node.map_def(|&def_id| Some(self.tcx.item_path_str(def_id))).unwrap()
     }
 
     fn assert_dirty(&self, item: &hir::Item, dep_node: DepNode<DefId>) {
         debug!("assert_dirty({:?})", dep_node);
 
-        if self.query.contains_node(&dep_node) {
-            let dep_node_str = self.dep_node_str(dep_node);
-            self.tcx.sess.span_err(
-                item.span,
-                &format!("`{:?}` found in dep graph, but should be dirty", dep_node_str));
+        match dep_node {
+            DepNode::Hir(_) => {
+                // HIR nodes are inputs, so if we are asserting that the HIR node is
+                // dirty, we check the dirty input set.
+                if !self.dirty_inputs.contains(&dep_node) {
+                    let dep_node_str = self.dep_node_str(&dep_node);
+                    self.tcx.sess.span_err(
+                        item.span,
+                        &format!("`{:?}` not found in dirty set, but should be dirty",
+                                 dep_node_str));
+                }
+            }
+            _ => {
+                // Other kinds of nodes would be targets, so check if
+                // the dep-graph contains the node.
+                if self.query.contains_node(&dep_node) {
+                    let dep_node_str = self.dep_node_str(&dep_node);
+                    self.tcx.sess.span_err(
+                        item.span,
+                        &format!("`{:?}` found in dep graph, but should be dirty", dep_node_str));
+                }
+            }
         }
     }
 
     fn assert_clean(&self, item: &hir::Item, dep_node: DepNode<DefId>) {
         debug!("assert_clean({:?})", dep_node);
 
-        if !self.query.contains_node(&dep_node) {
-            let dep_node_str = self.dep_node_str(dep_node);
-            self.tcx.sess.span_err(
-                item.span,
-                &format!("`{:?}` not found in dep graph, but should be clean", dep_node_str));
+        match dep_node {
+            DepNode::Hir(_) => {
+                // For HIR nodes, check the inputs.
+                if self.dirty_inputs.contains(&dep_node) {
+                    let dep_node_str = self.dep_node_str(&dep_node);
+                    self.tcx.sess.span_err(
+                        item.span,
+                        &format!("`{:?}` found in dirty-node set, but should be clean",
+                                 dep_node_str));
+                }
+            }
+            _ => {
+                // Otherwise, check if the dep-node exists.
+                if !self.query.contains_node(&dep_node) {
+                    let dep_node_str = self.dep_node_str(&dep_node);
+                    self.tcx.sess.span_err(
+                        item.span,
+                        &format!("`{:?}` not found in dep graph, but should be clean",
+                                 dep_node_str));
+                }
+            }
         }
     }
 }
index 99119dd184c8b3929045d5e75afa08134e3a776a..344b05f095457ec4d5e2c6eccce7bf0b31afab94 100644 (file)
@@ -39,12 +39,19 @@ pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
         }
     }
 
-    pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<u64> {
+    pub fn is_hashable(dep_node: &DepNode<DefId>) -> bool {
+        match *dep_node {
+            DepNode::Hir(_) => true,
+            DepNode::MetaData(def_id) => !def_id.is_local(),
+            _ => false,
+        }
+    }
+
+    pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<(DefId, u64)> {
         match *dep_node {
             // HIR nodes (which always come from our crate) are an input:
             DepNode::Hir(def_id) => {
-                assert!(def_id.is_local());
-                Some(self.hir_hash(def_id))
+                Some((def_id, self.hir_hash(def_id)))
             }
 
             // MetaData from other crates is an *input* to us.
@@ -52,7 +59,7 @@ pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<u64> {
             // don't hash them, but we do compute a hash for them and
             // save it for others to use.
             DepNode::MetaData(def_id) if !def_id.is_local() => {
-                Some(self.metadata_hash(def_id))
+                Some((def_id, self.metadata_hash(def_id)))
             }
 
             _ => {
@@ -66,7 +73,16 @@ pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<u64> {
     }
 
     fn hir_hash(&mut self, def_id: DefId) -> u64 {
-        assert!(def_id.is_local());
+        assert!(def_id.is_local(),
+                "cannot hash HIR for non-local def-id {:?} => {:?}",
+                def_id,
+                self.tcx.item_path_str(def_id));
+
+        assert!(!self.tcx.map.is_inlined_def_id(def_id),
+                "cannot hash HIR for inlined def-id {:?} => {:?}",
+                def_id,
+                self.tcx.item_path_str(def_id));
+
         // FIXME(#32753) -- should we use a distinct hash here
         self.tcx.calculate_item_hash(def_id)
     }
index 36b6c79c40f5dc9379fac023bb43b567d0334198..79b90b63dc608d8b238eea13d3b84ac3f890c9d8 100644 (file)
@@ -28,7 +28,7 @@
 use super::hash::*;
 use super::util::*;
 
-type DirtyNodes = FnvHashSet<DepNode<DefId>>;
+pub type DirtyNodes = FnvHashSet<DepNode<DefPathIndex>>;
 
 type CleanEdges = Vec<(DepNode<DefId>, DepNode<DefId>)>;
 
@@ -45,7 +45,6 @@ pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
 
     let _ignore = tcx.dep_graph.in_ignore();
     load_dep_graph_if_exists(tcx);
-    dirty_clean::check_dirty_clean_annotations(tcx);
 }
 
 fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
@@ -62,7 +61,7 @@ fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     };
 
     match decode_dep_graph(tcx, &dep_graph_data, &work_products_data) {
-        Ok(()) => return,
+        Ok(dirty_nodes) => dirty_nodes,
         Err(err) => {
             tcx.sess.warn(
                 &format!("decoding error in dep-graph from `{}` and `{}`: {}",
@@ -93,7 +92,6 @@ fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
             None
         }
     }
-
 }
 
 /// Decode the dep graph and load the edges/nodes that are still clean
@@ -108,143 +106,116 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let directory = try!(DefIdDirectory::decode(&mut dep_graph_decoder));
     let serialized_dep_graph = try!(SerializedDepGraph::decode(&mut dep_graph_decoder));
 
-    debug!("decode_dep_graph: directory = {:#?}", directory);
-    debug!("decode_dep_graph: serialized_dep_graph = {:#?}", serialized_dep_graph);
-
     // Retrace the paths in the directory to find their current location (if any).
     let retraced = directory.retrace(tcx);
 
-    debug!("decode_dep_graph: retraced = {:#?}", retraced);
-
-    // Compute the set of Hir nodes whose data has changed.
-    let mut dirty_nodes =
-        initial_dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);
-
-    debug!("decode_dep_graph: initial dirty_nodes = {:#?}", dirty_nodes);
+    // Compute the set of Hir nodes whose data has changed or which
+    // have been removed.  These are "raw" source nodes, which means
+    // that they still use the original `DefPathIndex` values from the
+    // encoding, rather than having been retraced to a `DefId`. The
+    // reason for this is that this way we can include nodes that have
+    // been removed (which no longer have a `DefId` in the current
+    // compilation).
+    let dirty_raw_source_nodes = dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);
+
+    // Create a list of (raw-source-node ->
+    // retracted-target-node) edges. In the process of retracing the
+    // target nodes, we may discover some of them def-paths no longer exist,
+    // in which case there is no need to mark the corresopnding nodes as dirty
+    // (they are just not present). So this list may be smaller than the original.
+    //
+    // Note though that in the common case the target nodes are
+    // `DepNode::WorkProduct` instances, and those don't have a
+    // def-id, so they will never be considered to not exist. Instead,
+    // we do a secondary hashing step (later, in trans) when we know
+    // the set of symbols that go into a work-product: if any symbols
+    // have been removed (or added) the hash will be different and
+    // we'll ignore the work-product then.
+    let retraced_edges: Vec<_> =
+        serialized_dep_graph.edges.iter()
+                                  .filter_map(|&(ref raw_source_node, ref raw_target_node)| {
+                                      retraced.map(raw_target_node)
+                                              .map(|target_node| (raw_source_node, target_node))
+                                  })
+                                  .collect();
+
+    // Compute which work-products have an input that has changed or
+    // been removed. Put the dirty ones into a set.
+    let mut dirty_target_nodes = FnvHashSet();
+    for &(raw_source_node, ref target_node) in &retraced_edges {
+        if dirty_raw_source_nodes.contains(raw_source_node) {
+            if !dirty_target_nodes.contains(target_node) {
+                dirty_target_nodes.insert(target_node.clone());
+
+                if tcx.sess.opts.debugging_opts.incremental_info {
+                    // It'd be nice to pretty-print these paths better than just
+                    // using the `Debug` impls, but wev.
+                    println!("module {:?} is dirty because {:?} changed or was removed",
+                             target_node,
+                             raw_source_node.map_def(|&index| {
+                                 Some(directory.def_path_string(tcx, index))
+                             }).unwrap());
+                }
+            }
+        }
+    }
 
-    // Find all DepNodes reachable from that core set. This loop
-    // iterates repeatedly over the list of edges whose source is not
-    // known to be dirty (`clean_edges`). If it finds an edge whose
-    // source is dirty, it removes it from that list and adds the
-    // target to `dirty_nodes`. It stops when it reaches a fixed
-    // point.
-    let clean_edges = compute_clean_edges(&serialized_dep_graph.edges,
-                                          &retraced,
-                                          &mut dirty_nodes);
+    // For work-products that are still clean, add their deps into the
+    // graph. This is needed because later we will have to save this
+    // back out again!
+    let dep_graph = tcx.dep_graph.clone();
+    for (raw_source_node, target_node) in retraced_edges {
+        if dirty_target_nodes.contains(&target_node) {
+            continue;
+        }
 
-    // Add synthetic `foo->foo` edges for each clean node `foo` that
-    // we had before. This is sort of a hack to create clean nodes in
-    // the graph, since the existence of a node is a signal that the
-    // work it represents need not be repeated.
-    let clean_nodes =
-        serialized_dep_graph.nodes
-                            .iter()
-                            .filter_map(|node| retraced.map(node))
-                            .filter(|node| !dirty_nodes.contains(node))
-                            .map(|node| (node.clone(), node));
+        let source_node = retraced.map(raw_source_node).unwrap();
 
-    // Add nodes and edges that are not dirty into our main graph.
-    let dep_graph = tcx.dep_graph.clone();
-    for (source, target) in clean_edges.into_iter().chain(clean_nodes) {
-        debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source, target);
+        debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source_node, target_node);
 
-        let _task = dep_graph.in_task(target);
-        dep_graph.read(source);
+        let _task = dep_graph.in_task(target_node);
+        dep_graph.read(source_node);
     }
 
     // Add in work-products that are still clean, and delete those that are
     // dirty.
     let mut work_product_decoder = Decoder::new(work_products_data, 0);
     let work_products = try!(<Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder));
-    reconcile_work_products(tcx, work_products, &dirty_nodes);
+    reconcile_work_products(tcx, work_products, &dirty_target_nodes);
+
+    dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_source_nodes, &retraced);
 
     Ok(())
 }
 
-fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                 hashes: &[SerializedHash],
-                                 retraced: &RetracedDefIdDirectory)
-                                 -> DirtyNodes {
+/// Computes which of the original set of def-ids are dirty. Stored in
+/// a bit vector where the index is the DefPathIndex.
+fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                         hashes: &[SerializedHash],
+                         retraced: &RetracedDefIdDirectory)
+                         -> DirtyNodes {
     let mut hcx = HashContext::new(tcx);
-    let mut items_removed = false;
     let mut dirty_nodes = FnvHashSet();
-    for hash in hashes {
-        match hash.node.map_def(|&i| retraced.def_id(i)) {
-            Some(dep_node) => {
-                let current_hash = hcx.hash(&dep_node).unwrap();
-                if current_hash != hash.hash {
-                    debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
-                           dep_node, current_hash, hash.hash);
-                    dirty_nodes.insert(dep_node);
-                }
-            }
-            None => {
-                items_removed = true;
-            }
-        }
-    }
-
-    // If any of the items in the krate have changed, then we consider
-    // the meta-node `Krate` to be dirty, since that means something
-    // which (potentially) read the contents of every single item.
-    if items_removed || !dirty_nodes.is_empty() {
-        dirty_nodes.insert(DepNode::Krate);
-    }
 
-    dirty_nodes
-}
-
-fn compute_clean_edges(serialized_edges: &[(SerializedEdge)],
-                       retraced: &RetracedDefIdDirectory,
-                       dirty_nodes: &mut DirtyNodes)
-                       -> CleanEdges {
-    // Build up an initial list of edges. Include an edge (source,
-    // target) if neither node has been removed. If the source has
-    // been removed, add target to the list of dirty nodes.
-    let mut clean_edges = Vec::with_capacity(serialized_edges.len());
-    for &(ref serialized_source, ref serialized_target) in serialized_edges {
-        if let Some(target) = retraced.map(serialized_target) {
-            if let Some(source) = retraced.map(serialized_source) {
-                clean_edges.push((source, target))
-            } else {
-                // source removed, target must be dirty
-                debug!("compute_clean_edges: {:?} dirty because {:?} no longer exists",
-                       target, serialized_source);
-                dirty_nodes.insert(target);
+    for hash in hashes {
+        if let Some(dep_node) = retraced.map(&hash.dep_node) {
+            let (_, current_hash) = hcx.hash(&dep_node).unwrap();
+            if current_hash == hash.hash {
+                continue;
             }
+            debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
+                   dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
+                   current_hash,
+                   hash.hash);
         } else {
-            // target removed, ignore the edge
+            debug!("initial_dirty_nodes: {:?} is dirty as it was removed",
+                   hash.dep_node);
         }
-    }
 
-    debug!("compute_clean_edges: dirty_nodes={:#?}", dirty_nodes);
-
-    // Propagate dirty marks by iterating repeatedly over
-    // `clean_edges`. If we find an edge `(source, target)` where
-    // `source` is dirty, add `target` to the list of dirty nodes and
-    // remove it. Keep doing this until we find no more dirty nodes.
-    let mut previous_size = 0;
-    while dirty_nodes.len() > previous_size {
-        debug!("compute_clean_edges: previous_size={}", previous_size);
-        previous_size = dirty_nodes.len();
-        let mut i = 0;
-        while i < clean_edges.len() {
-            if dirty_nodes.contains(&clean_edges[i].0) {
-                let (source, target) = clean_edges.swap_remove(i);
-                debug!("compute_clean_edges: dirty source {:?} -> {:?}",
-                       source, target);
-                dirty_nodes.insert(target);
-            } else if dirty_nodes.contains(&clean_edges[i].1) {
-                let (source, target) = clean_edges.swap_remove(i);
-                debug!("compute_clean_edges: dirty target {:?} -> {:?}",
-                       source, target);
-            } else {
-                i += 1;
-            }
-        }
+        dirty_nodes.insert(hash.dep_node.clone());
     }
 
-    clean_edges
+    dirty_nodes
 }
 
 /// Go through the list of work-products produced in the previous run.
@@ -252,11 +223,10 @@ fn compute_clean_edges(serialized_edges: &[(SerializedEdge)],
 /// otherwise no longer applicable.
 fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                      work_products: Vec<SerializedWorkProduct>,
-                                     dirty_nodes: &DirtyNodes) {
+                                     dirty_target_nodes: &FnvHashSet<DepNode<DefId>>) {
     debug!("reconcile_work_products({:?})", work_products);
     for swp in work_products {
-        let dep_node = DepNode::WorkProduct(swp.id.clone());
-        if dirty_nodes.contains(&dep_node) {
+        if dirty_target_nodes.contains(&DepNode::WorkProduct(swp.id.clone())) {
             debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
             delete_dirty_work_product(tcx, swp);
         } else {
index 1157f494ce604e32c2d9fdd5512416f1c8c967fe..4a042497e044114c3d3421ff777cfa906826f548 100644 (file)
@@ -17,6 +17,7 @@
 mod dirty_clean;
 mod hash;
 mod load;
+mod preds;
 mod save;
 mod util;
 mod work_product;
diff --git a/src/librustc_incremental/persist/preds.rs b/src/librustc_incremental/persist/preds.rs
new file mode 100644 (file)
index 0000000..a82951a
--- /dev/null
@@ -0,0 +1,73 @@
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::dep_graph::{DepGraphQuery, DepNode};
+use rustc::hir::def_id::DefId;
+use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::graph::{DepthFirstTraversal, INCOMING, NodeIndex};
+
+use super::hash::*;
+
+/// A data-structure that makes it easy to enumerate the hashable
+/// predecessors of any given dep-node.
+pub struct Predecessors<'query> {
+    // - Keys: dep-nodes that may have work-products, output meta-data
+    //   nodes.
+    // - Values: transitive predecessors of the key that are hashable
+    //   (e.g., HIR nodes, input meta-data nodes)
+    pub inputs: FnvHashMap<&'query DepNode<DefId>, Vec<&'query DepNode<DefId>>>,
+
+    // - Keys: some hashable node
+    // - Values: the hash thereof
+    pub hashes: FnvHashMap<&'query DepNode<DefId>, u64>,
+}
+
+impl<'q> Predecessors<'q> {
+    pub fn new(query: &'q DepGraphQuery<DefId>, hcx: &mut HashContext) -> Self {
+        // Find nodes for which we want to know the full set of preds
+        let mut dfs = DepthFirstTraversal::new(&query.graph, INCOMING);
+        let all_nodes = query.graph.all_nodes();
+        let tcx = hcx.tcx;
+
+        let inputs: FnvHashMap<_, _> = all_nodes.iter()
+            .enumerate()
+            .filter(|&(_, node)| match node.data {
+                DepNode::WorkProduct(_) => true,
+                DepNode::MetaData(ref def_id) => def_id.is_local(),
+
+                // if -Z query-dep-graph is passed, save more extended data
+                // to enable better unit testing
+                DepNode::TypeckItemBody(_) |
+                DepNode::TransCrateItem(_) => tcx.sess.opts.debugging_opts.query_dep_graph,
+
+                _ => false,
+            })
+            .map(|(node_index, node)| {
+                dfs.reset(NodeIndex(node_index));
+                let inputs: Vec<_> = dfs.by_ref()
+                    .map(|i| &all_nodes[i.node_id()].data)
+                    .filter(|d| HashContext::is_hashable(d))
+                    .collect();
+                (&node.data, inputs)
+            })
+            .collect();
+
+        let mut hashes = FnvHashMap();
+        for input in inputs.values().flat_map(|v| v.iter().cloned()) {
+            hashes.entry(input)
+                  .or_insert_with(|| hcx.hash(input).unwrap().1);
+        }
+
+        Predecessors {
+            inputs: inputs,
+            hashes: hashes,
+        }
+    }
+}
index 305250d59623c50d644da6ad3cb3e7de2f538e19..f296cd3172fb0262ea82d7a877a423d012c9260a 100644 (file)
 
 use rbml::opaque::Encoder;
 use rustc::dep_graph::DepNode;
+use rustc::hir::def_id::DefId;
 use rustc::middle::cstore::LOCAL_CRATE;
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
-use rustc_serialize::{Encodable as RustcEncodable};
-use std::hash::{Hasher, SipHasher};
+use rustc_data_structures::fnv::FnvHashMap;
+use rustc_serialize::Encodable as RustcEncodable;
+use std::hash::{Hash, Hasher, SipHasher};
 use std::io::{self, Cursor, Write};
 use std::fs::{self, File};
 use std::path::PathBuf;
 use super::data::*;
 use super::directory::*;
 use super::hash::*;
+use super::preds::*;
 use super::util::*;
 
 pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     debug!("save_dep_graph()");
     let _ignore = tcx.dep_graph.in_ignore();
     let sess = tcx.sess;
+    if sess.opts.incremental.is_none() {
+        return;
+    }
     let mut hcx = HashContext::new(tcx);
-    save_in(sess, dep_graph_path(tcx), |e| encode_dep_graph(&mut hcx, e));
-    save_in(sess, metadata_hash_path(tcx, LOCAL_CRATE), |e| encode_metadata_hashes(&mut hcx, e));
+    let mut builder = DefIdDirectoryBuilder::new(tcx);
+    let query = tcx.dep_graph.query();
+    let preds = Predecessors::new(&query, &mut hcx);
+    save_in(sess,
+            dep_graph_path(tcx),
+            |e| encode_dep_graph(&preds, &mut builder, e));
+    save_in(sess,
+            metadata_hash_path(tcx, LOCAL_CRATE),
+            |e| encode_metadata_hashes(tcx, &preds, &mut builder, e));
 }
 
 pub fn save_work_products(sess: &Session, local_crate_name: &str) {
@@ -40,14 +53,12 @@ pub fn save_work_products(sess: &Session, local_crate_name: &str) {
     save_in(sess, path, |e| encode_work_products(sess, e));
 }
 
-fn save_in<F>(sess: &Session,
-              opt_path_buf: Option<PathBuf>,
-              encode: F)
+fn save_in<F>(sess: &Session, opt_path_buf: Option<PathBuf>, encode: F)
     where F: FnOnce(&mut Encoder) -> io::Result<()>
 {
     let path_buf = match opt_path_buf {
         Some(p) => p,
-        None => return
+        None => return,
     };
 
     // FIXME(#32754) lock file?
@@ -55,11 +66,11 @@ fn save_in<F>(sess: &Session,
     // delete the old dep-graph, if any
     if path_buf.exists() {
         match fs::remove_file(&path_buf) {
-            Ok(()) => { }
+            Ok(()) => {}
             Err(err) => {
-                sess.err(
-                    &format!("unable to delete old dep-graph at `{}`: {}",
-                             path_buf.display(), err));
+                sess.err(&format!("unable to delete old dep-graph at `{}`: {}",
+                                  path_buf.display(),
+                                  err));
                 return;
             }
         }
@@ -68,97 +79,106 @@ fn save_in<F>(sess: &Session,
     // generate the data in a memory buffer
     let mut wr = Cursor::new(Vec::new());
     match encode(&mut Encoder::new(&mut wr)) {
-        Ok(()) => { }
+        Ok(()) => {}
         Err(err) => {
-            sess.err(
-                &format!("could not encode dep-graph to `{}`: {}",
-                         path_buf.display(), err));
+            sess.err(&format!("could not encode dep-graph to `{}`: {}",
+                              path_buf.display(),
+                              err));
             return;
         }
     }
 
     // write the data out
     let data = wr.into_inner();
-    match
-        File::create(&path_buf)
-        .and_then(|mut file| file.write_all(&data))
-    {
-        Ok(_) => { }
+    match File::create(&path_buf).and_then(|mut file| file.write_all(&data)) {
+        Ok(_) => {}
         Err(err) => {
-            sess.err(
-                &format!("failed to write dep-graph to `{}`: {}",
-                         path_buf.display(), err));
+            sess.err(&format!("failed to write dep-graph to `{}`: {}",
+                              path_buf.display(),
+                              err));
             return;
         }
     }
 }
 
-pub fn encode_dep_graph<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
-                                  encoder: &mut Encoder)
-                                  -> io::Result<()>
-{
-    let tcx = hcx.tcx;
-    let query = tcx.dep_graph.query();
-
-    let mut builder = DefIdDirectoryBuilder::new(tcx);
-
-    // Create hashes for inputs.
-    let hashes =
-        query.nodes()
-             .into_iter()
-             .filter_map(|dep_node| {
-                 hcx.hash(&dep_node)
-                    .map(|hash| {
-                        let node = builder.map(dep_node);
-                        SerializedHash { node: node, hash: hash }
-                    })
-             })
-             .collect();
+pub fn encode_dep_graph(preds: &Predecessors,
+                        builder: &mut DefIdDirectoryBuilder,
+                        encoder: &mut Encoder)
+                        -> io::Result<()> {
+    // Create a flat list of (Input, WorkProduct) edges for
+    // serialization.
+    let mut edges = vec![];
+    for (&target, sources) in &preds.inputs {
+        match *target {
+            DepNode::MetaData(ref def_id) => {
+                // Metadata *targets* are always local metadata nodes. We handle
+                // those in `encode_metadata_hashes`, which comes later.
+                assert!(def_id.is_local());
+                continue;
+            }
+            _ => (),
+        }
+        let target = builder.map(target);
+        for &source in sources {
+            let source = builder.map(source);
+            edges.push((source, target.clone()));
+        }
+    }
 
     // Create the serialized dep-graph.
     let graph = SerializedDepGraph {
-        nodes: query.nodes().into_iter()
-                            .map(|node| builder.map(node))
-                            .collect(),
-        edges: query.edges().into_iter()
-                            .map(|(source_node, target_node)| {
-                                let source = builder.map(source_node);
-                                let target = builder.map(target_node);
-                                (source, target)
-                            })
-                            .collect(),
-        hashes: hashes,
+        edges: edges,
+        hashes: preds.hashes
+            .iter()
+            .map(|(&dep_node, &hash)| {
+                SerializedHash {
+                    dep_node: builder.map(dep_node),
+                    hash: hash,
+                }
+            })
+            .collect(),
     };
 
     debug!("graph = {:#?}", graph);
 
     // Encode the directory and then the graph data.
-    let directory = builder.into_directory();
-    try!(directory.encode(encoder));
+    try!(builder.directory().encode(encoder));
     try!(graph.encode(encoder));
 
     Ok(())
 }
 
-pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
-                                        encoder: &mut Encoder)
-                                        -> io::Result<()>
-{
-    let tcx = hcx.tcx;
-    let query = tcx.dep_graph.query();
+pub fn encode_metadata_hashes(tcx: TyCtxt,
+                              preds: &Predecessors,
+                              builder: &mut DefIdDirectoryBuilder,
+                              encoder: &mut Encoder)
+                              -> io::Result<()> {
+    let mut def_id_hashes = FnvHashMap();
+    let mut def_id_hash = |def_id: DefId| -> u64 {
+        *def_id_hashes.entry(def_id)
+            .or_insert_with(|| {
+                let index = builder.add(def_id);
+                let path = builder.lookup_def_path(index);
+                path.deterministic_hash(tcx)
+            })
+    };
 
-    let serialized_hashes = {
-        // Identify the `MetaData(X)` nodes where `X` is local. These are
-        // the metadata items we export. Downstream crates will want to
-        // see a hash that tells them whether we might have changed the
-        // metadata for a given item since they last compiled.
-        let meta_data_def_ids =
-            query.nodes()
-                 .into_iter()
-                 .filter_map(|dep_node| match *dep_node {
-                     DepNode::MetaData(def_id) if def_id.is_local() => Some(def_id),
-                     _ => None,
-                 });
+    // For each `MetaData(X)` node where `X` is local, accumulate a
+    // hash.  These are the metadata items we export. Downstream
+    // crates will want to see a hash that tells them whether we might
+    // have changed the metadata for a given item since they last
+    // compiled.
+    //
+    // (I initially wrote this with an iterator, but it seemed harder to read.)
+    let mut serialized_hashes = SerializedMetadataHashes { hashes: vec![] };
+    for (&target, sources) in &preds.inputs {
+        let def_id = match *target {
+            DepNode::MetaData(def_id) => {
+                assert!(def_id.is_local());
+                def_id
+            }
+            _ => continue,
+        };
 
         // To create the hash for each item `X`, we don't hash the raw
         // bytes of the metadata (though in principle we
@@ -166,34 +186,32 @@ pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
         // from the dep-graph. This corresponds to all the inputs that
         // were read to construct the metadata. To create the hash for
         // the metadata, we hash (the hash of) all of those inputs.
-        let hashes =
-            meta_data_def_ids
-            .map(|def_id| {
-                assert!(def_id.is_local());
-                let dep_node = DepNode::MetaData(def_id);
-                let mut state = SipHasher::new();
-                debug!("save: computing metadata hash for {:?}", dep_node);
-                for node in query.transitive_predecessors(&dep_node) {
-                    if let Some(hash) = hcx.hash(&node) {
-                        debug!("save: predecessor {:?} has hash {}", node, hash);
-                        state.write_u64(hash.to_le());
-                    } else {
-                        debug!("save: predecessor {:?} cannot be hashed", node);
-                    }
-                }
-                let hash = state.finish();
-                debug!("save: metadata hash for {:?} is {}", dep_node, hash);
-                SerializedMetadataHash {
-                    def_index: def_id.index,
-                    hash: hash,
-                }
-            });
-
-        // Collect these up into a vector.
-        SerializedMetadataHashes {
-            hashes: hashes.collect()
-        }
-    };
+        debug!("save: computing metadata hash for {:?}", def_id);
+
+        // Create a vector containing a pair of (source-id, hash).
+        // The source-id is stored as a `DepNode<u64>`, where the u64
+        // is the det. hash of the def-path. This is convenient
+        // because we can sort this to get a stable ordering across
+        // compilations, even if the def-ids themselves have changed.
+        let mut hashes: Vec<(DepNode<u64>, u64)> = sources.iter()
+            .map(|dep_node| {
+                let hash_dep_node = dep_node.map_def(|&def_id| Some(def_id_hash(def_id))).unwrap();
+                let hash = preds.hashes[dep_node];
+                (hash_dep_node, hash)
+            })
+            .collect();
+
+        hashes.sort();
+        let mut state = SipHasher::new();
+        hashes.hash(&mut state);
+        let hash = state.finish();
+
+        debug!("save: metadata hash for {:?} is {}", def_id, hash);
+        serialized_hashes.hashes.push(SerializedMetadataHash {
+            def_index: def_id.index,
+            hash: hash,
+        });
+    }
 
     // Encode everything.
     try!(serialized_hashes.encode(encoder));
@@ -201,21 +219,17 @@ pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
     Ok(())
 }
 
-pub fn encode_work_products(sess: &Session,
-                            encoder: &mut Encoder)
-                            -> io::Result<()>
-{
-    let work_products: Vec<_> =
-        sess.dep_graph.work_products()
-                     .iter()
-                     .map(|(id, work_product)| {
-                         SerializedWorkProduct {
-                             id: id.clone(),
-                             work_product: work_product.clone(),
-                         }
-                     })
-                     .collect();
+pub fn encode_work_products(sess: &Session, encoder: &mut Encoder) -> io::Result<()> {
+    let work_products: Vec<_> = sess.dep_graph
+        .work_products()
+        .iter()
+        .map(|(id, work_product)| {
+            SerializedWorkProduct {
+                id: id.clone(),
+                work_product: work_product.clone(),
+            }
+        })
+        .collect();
 
     work_products.encode(encoder)
 }
-
index b8548aaec5bd74d023d99016d3042f1c220913aa..5f7a0f788ca1289e8da69b9e001289622041af8b 100644 (file)
@@ -112,6 +112,10 @@ fn main() {
         cfg.flag(&flag);
     }
 
+    if env::var_os("LLVM_RUSTLLVM").is_some() {
+        cfg.flag("-DLLVM_RUSTLLVM");
+    }
+
     cfg.file("../rustllvm/PassWrapper.cpp")
        .file("../rustllvm/RustWrapper.cpp")
        .file("../rustllvm/ArchiveWrapper.cpp")
index 6301c57c55540ebe7fe260db96eb82055f33114e..b2ffcac365bad34760351052003f8411d7265fb0 100644 (file)
@@ -1940,6 +1940,9 @@ pub fn LLVMRustDIBuilderCreateDebugLocation(Context: ContextRef,
     pub fn LLVMRustHasFeature(T: TargetMachineRef,
                               s: *const c_char) -> bool;
 
+    pub fn LLVMRustPrintTargetCPUs(T: TargetMachineRef);
+    pub fn LLVMRustPrintTargetFeatures(T: TargetMachineRef);
+
     pub fn LLVMRustCreateTargetMachine(Triple: *const c_char,
                                        CPU: *const c_char,
                                        Features: *const c_char,
index c39ad414492ed637fd0faa29134e7e504ccea3d7..f03c432c0917791c4a9ca8531517090785b32787 100644 (file)
@@ -78,8 +78,8 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext,
                            rbml_w: &mut Encoder,
                            ii: InlinedItemRef) {
     let id = match ii {
-        InlinedItemRef::Item(i) => i.id,
-        InlinedItemRef::Foreign(i) => i.id,
+        InlinedItemRef::Item(_, i) => i.id,
+        InlinedItemRef::Foreign(_, i) => i.id,
         InlinedItemRef::TraitItem(_, ti) => ti.id,
         InlinedItemRef::ImplItem(_, ii) => ii.id,
     };
@@ -146,8 +146,8 @@ pub fn decode_inlined_item<'a, 'tcx>(cdata: &cstore::CrateMetadata,
                                        decode_ast(ast_doc),
                                        dcx);
     let name = match *ii {
-        InlinedItem::Item(ref i) => i.name,
-        InlinedItem::Foreign(ref i) => i.name,
+        InlinedItem::Item(_, ref i) => i.name,
+        InlinedItem::Foreign(_, ref i) => i.name,
         InlinedItem::TraitItem(_, ref ti) => ti.name,
         InlinedItem::ImplItem(_, ref ii) => ii.name
     };
@@ -158,7 +158,7 @@ pub fn decode_inlined_item<'a, 'tcx>(cdata: &cstore::CrateMetadata,
     region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, ii);
     decode_side_tables(dcx, ast_doc);
     copy_item_types(dcx, ii, orig_did);
-    if let InlinedItem::Item(ref i) = *ii {
+    if let InlinedItem::Item(_, ref i) = *ii {
         debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
                ::rustc::hir::print::item_to_string(&i));
     }
@@ -348,8 +348,8 @@ fn simplify_ast(ii: InlinedItemRef) -> (InlinedItem, IdRange) {
 
     let ii = match ii {
         // HACK we're not dropping items.
-        InlinedItemRef::Item(i) => {
-            InlinedItem::Item(P(fold::noop_fold_item(i.clone(), &mut fld)))
+        InlinedItemRef::Item(d, i) => {
+            InlinedItem::Item(d, P(fold::noop_fold_item(i.clone(), &mut fld)))
         }
         InlinedItemRef::TraitItem(d, ti) => {
             InlinedItem::TraitItem(d, P(fold::noop_fold_trait_item(ti.clone(), &mut fld)))
@@ -357,8 +357,8 @@ fn simplify_ast(ii: InlinedItemRef) -> (InlinedItem, IdRange) {
         InlinedItemRef::ImplItem(d, ii) => {
             InlinedItem::ImplItem(d, P(fold::noop_fold_impl_item(ii.clone(), &mut fld)))
         }
-        InlinedItemRef::Foreign(i) => {
-            InlinedItem::Foreign(P(fold::noop_fold_foreign_item(i.clone(), &mut fld)))
+        InlinedItemRef::Foreign(d, i) => {
+            InlinedItem::Foreign(d, P(fold::noop_fold_foreign_item(i.clone(), &mut fld)))
         }
     };
 
@@ -1241,15 +1241,15 @@ fn copy_item_type(dcx: &DecodeContext,
     }
     // copy the entry for the item itself
     let item_node_id = match ii {
-        &InlinedItem::Item(ref i) => i.id,
+        &InlinedItem::Item(_, ref i) => i.id,
         &InlinedItem::TraitItem(_, ref ti) => ti.id,
         &InlinedItem::ImplItem(_, ref ii) => ii.id,
-        &InlinedItem::Foreign(ref fi) => fi.id
+        &InlinedItem::Foreign(_, ref fi) => fi.id
     };
     copy_item_type(dcx, item_node_id, orig_did);
 
     // copy the entries of inner items
-    if let &InlinedItem::Item(ref item) = ii {
+    if let &InlinedItem::Item(_, ref item) = ii {
         match item.node {
             hir::ItemEnum(ref def, _) => {
                 let orig_def = dcx.tcx.lookup_adt_def(orig_did);
@@ -1383,6 +1383,9 @@ fn foo(x: usize, y: usize) -> usize {
 
 #[test]
 fn test_simplification() {
+    use middle::cstore::LOCAL_CRATE;
+    use rustc::hir::def_id::CRATE_DEF_INDEX;
+
     let cx = mk_ctxt();
     let item = quote_item!(&cx,
         fn new_int_alist<B>() -> alist<isize, B> {
@@ -1393,15 +1396,16 @@ fn eq_int(a: isize, b: isize) -> bool { a == b }
     let cx = mk_ctxt();
     with_testing_context(|lcx| {
         let hir_item = lcx.lower_item(&item);
-        let item_in = InlinedItemRef::Item(&hir_item);
+        let def_id = DefId { krate: LOCAL_CRATE, index: CRATE_DEF_INDEX }; // dummy
+        let item_in = InlinedItemRef::Item(def_id, &hir_item);
         let (item_out, _) = simplify_ast(item_in);
-        let item_exp = InlinedItem::Item(P(lcx.lower_item(&quote_item!(&cx,
+        let item_exp = InlinedItem::Item(def_id, P(lcx.lower_item(&quote_item!(&cx,
             fn new_int_alist<B>() -> alist<isize, B> {
                 return alist {eq_fn: eq_int, data: Vec::new()};
             }
         ).unwrap())));
         match (item_out, item_exp) {
-            (InlinedItem::Item(item_out), InlinedItem::Item(item_exp)) => {
+            (InlinedItem::Item(_, item_out), InlinedItem::Item(_, item_exp)) => {
                  assert!(pprust::item_to_string(&item_out) ==
                          pprust::item_to_string(&item_exp));
             }
index 862245b9b78694bcb22cba1271a98ced9ba0c0e2..7ee6e54a666d6d524520ee745440e422a50a56c3 100644 (file)
@@ -546,11 +546,13 @@ fn maybe_get_item_ast<'a>(&'tcx self,
                     .borrow_mut()
                     .insert(def_id, None);
             }
-            decoder::FoundAst::Found(&InlinedItem::Item(ref item)) => {
+            decoder::FoundAst::Found(&InlinedItem::Item(d, ref item)) => {
+                assert_eq!(d, def_id);
                 let inlined_root_node_id = find_inlined_item_root(item.id);
                 cache_inlined_item(def_id, item.id, inlined_root_node_id);
             }
-            decoder::FoundAst::Found(&InlinedItem::Foreign(ref item)) => {
+            decoder::FoundAst::Found(&InlinedItem::Foreign(d, ref item)) => {
+                assert_eq!(d, def_id);
                 let inlined_root_node_id = find_inlined_item_root(item.id);
                 cache_inlined_item(def_id, item.id, inlined_root_node_id);
             }
index d8fd25d62774af11721a3db8b604a6ff6c0f4400..64b614b56e12fdffe444b86545fb03c77226adaa 100644 (file)
@@ -797,7 +797,7 @@ pub fn maybe_get_item_ast<'a, 'tcx>(cdata: Cmd, tcx: TyCtxt<'a, 'tcx, 'tcx>, id:
                                          grandparent_def_id,
                                          ast_doc,
                                          parent_did);
-            if let &InlinedItem::Item(ref i) = ii {
+            if let &InlinedItem::Item(_, ref i) = ii {
                 return FoundAst::FoundParent(parent_did, i);
             }
         }
@@ -1690,7 +1690,7 @@ fn item_def_key(item_doc: rbml::Doc) -> hir_map::DefKey {
             let mut decoder = reader::Decoder::new(def_key_doc);
             let simple_key = def_key::DefKey::decode(&mut decoder).unwrap();
             let name = reader::maybe_get_doc(item_doc, tag_paths_data_name).map(|name| {
-                token::intern(name.as_str_slice())
+                token::intern(name.as_str_slice()).as_str()
             });
             def_key::recover_def_key(simple_key, name)
         }
index 05ad333ed3adcf9513e9340cfcc4820920325a7b..2444d669f7f32345c8c95ec007130f17123a091f 100644 (file)
@@ -10,7 +10,7 @@
 
 use rustc::hir::def_id::DefIndex;
 use rustc::hir::map as hir_map;
-use syntax::ast::Name;
+use syntax::parse::token::InternedString;
 
 #[derive(RustcEncodable, RustcDecodable)]
 pub struct DefKey {
@@ -75,7 +75,7 @@ fn simplify_def_path_data(data: hir_map::DefPathData) -> DefPathData {
     }
 }
 
-pub fn recover_def_key(key: DefKey, name: Option<Name>) -> hir_map::DefKey {
+pub fn recover_def_key(key: DefKey, name: Option<InternedString>) -> hir_map::DefKey {
     let data = hir_map::DisambiguatedDefPathData {
         data: recover_def_path_data(key.disambiguated_data.data, name),
         disambiguator: key.disambiguated_data.disambiguator,
@@ -86,7 +86,7 @@ pub fn recover_def_key(key: DefKey, name: Option<Name>) -> hir_map::DefKey {
     }
 }
 
-fn recover_def_path_data(data: DefPathData, name: Option<Name>) -> hir_map::DefPathData {
+fn recover_def_path_data(data: DefPathData, name: Option<InternedString>) -> hir_map::DefPathData {
     match data {
         DefPathData::CrateRoot => hir_map::DefPathData::CrateRoot,
         DefPathData::Misc => hir_map::DefPathData::Misc,
index 732c256a1910f18ca8e8b629d158d5476512f286..4e754abe2aec04b19341c5f564caa482ecc4b115 100644 (file)
@@ -743,7 +743,8 @@ fn encode_repr_attrs(rbml_w: &mut Encoder,
 }
 
 fn encode_mir(ecx: &EncodeContext, rbml_w: &mut Encoder, node_id: NodeId) {
-    if let Some(mir) = ecx.mir_map.map.get(&node_id) {
+    let def_id = ecx.tcx.map.local_def_id(node_id);
+    if let Some(mir) = ecx.mir_map.map.get(&def_id) {
         rbml_w.start_tag(tag_mir as usize);
         rbml_w.emit_opaque(|opaque_encoder| {
             tls::enter_encoding_context(ecx, opaque_encoder, |_, opaque_encoder| {
@@ -861,7 +862,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
         encode_bounds_and_type_for_item(rbml_w, ecx, index, item.id);
         encode_name(rbml_w, item.name);
         encode_attributes(rbml_w, &item.attrs);
-        encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(item));
+        encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(def_id, item));
         encode_mir(ecx, rbml_w, item.id);
         encode_visibility(rbml_w, vis);
         encode_stability(rbml_w, stab);
@@ -879,7 +880,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
         encode_attributes(rbml_w, &item.attrs);
         let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs);
         if needs_inline || constness == hir::Constness::Const {
-            encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(item));
+            encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(def_id, item));
             encode_mir(ecx, rbml_w, item.id);
         }
         encode_constness(rbml_w, constness);
@@ -942,7 +943,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
         for v in &enum_definition.variants {
             encode_variant_id(rbml_w, ecx.tcx.map.local_def_id(v.node.data.id()));
         }
-        encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(item));
+        encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(def_id, item));
         encode_mir(ecx, rbml_w, item.id);
 
         // Encode inherent implementations for this enumeration.
@@ -989,7 +990,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
         needs to know*/
         encode_struct_fields(rbml_w, variant);
 
-        encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(item));
+        encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(def_id, item));
         encode_mir(ecx, rbml_w, item.id);
 
         // Encode inherent implementations for this structure.
@@ -1311,7 +1312,7 @@ fn encode_info_for_foreign_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
         encode_bounds_and_type_for_item(rbml_w, ecx, index, nitem.id);
         encode_name(rbml_w, nitem.name);
         if abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic {
-            encode_inlined_item(ecx, rbml_w, InlinedItemRef::Foreign(nitem));
+            encode_inlined_item(ecx, rbml_w, InlinedItemRef::Foreign(def_id, nitem));
             encode_mir(ecx, rbml_w, nitem.id);
         }
         encode_attributes(rbml_w, &nitem.attrs);
@@ -1361,7 +1362,7 @@ fn my_visit_expr(expr: &hir::Expr,
             ecx.tcx.closure_kind(def_id).encode(rbml_w).unwrap();
             rbml_w.end_tag();
 
-            assert!(ecx.mir_map.map.contains_key(&expr.id));
+            assert!(ecx.mir_map.map.contains_key(&def_id));
             encode_mir(ecx, rbml_w, expr.id);
 
             rbml_w.end_tag();
index fdfa872b0b698805f140863294837893e86a70e7..d986d88dafc94c8b7c507457e3d74ce6d51fe953 100644 (file)
@@ -9,7 +9,9 @@
 // except according to those terms.
 
 use dot;
+use rustc::hir::def_id::DefId;
 use rustc::mir::repr::*;
+use rustc::mir::mir_map::MirMap;
 use rustc::ty::{self, TyCtxt};
 use std::fmt::Debug;
 use std::io::{self, Write};
 
 /// Write a graphviz DOT graph of a list of MIRs.
 pub fn write_mir_graphviz<'a, 'b, 'tcx, W, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
-                                              iter: I, w: &mut W)
+                                              iter: I,
+                                              mir_map: &MirMap<'tcx>,
+                                              w: &mut W)
                                               -> io::Result<()>
-where W: Write, I: Iterator<Item=(&'a NodeId, &'a Mir<'a>)> {
-    for (&nodeid, mir) in iter {
+    where W: Write, I: Iterator<Item=DefId>
+{
+    for def_id in iter {
+        let nodeid = tcx.map.as_local_node_id(def_id).unwrap();
+        let mir = &mir_map.map[&def_id];
+
         writeln!(w, "digraph Mir_{} {{", nodeid)?;
 
         // Global graph properties
index 2438f43d24e0a42f8e87854af51e0ffe35c90ddb..df1fec75939b5097e8d62ec45e6d481007e92253 100644 (file)
@@ -22,6 +22,7 @@
 use rustc::middle::const_val::ConstVal;
 use rustc_const_eval as const_eval;
 use rustc_data_structures::indexed_vec::Idx;
+use rustc::dep_graph::DepNode;
 use rustc::hir::def_id::DefId;
 use rustc::hir::intravisit::FnKind;
 use rustc::hir::map::blocks::FnLikeNode;
@@ -61,7 +62,17 @@ pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
             MirSource::Promoted(..) => bug!()
         };
 
-        let attrs = infcx.tcx.map.attrs(src.item_id());
+        let src_node_id = src.item_id();
+
+        // We are going to be accessing various tables
+        // generated by TypeckItemBody; we also assume
+        // that the body passes type check. These tables
+        // are not individually tracked, so just register
+        // a read here.
+        let src_def_id = infcx.tcx.map.local_def_id(src_node_id);
+        infcx.tcx.dep_graph.read(DepNode::TypeckItemBody(src_def_id));
+
+        let attrs = infcx.tcx.map.attrs(src_node_id);
 
         // Some functions always have overflow checks enabled,
         // however, they may not get codegen'd, depending on
index 11d6b0779275ee40f6f6a56ae6728fdb9d0c687f..42a643b8af6fa167933e2d7eddb14b77fb7fa029 100644 (file)
@@ -18,6 +18,7 @@
 
 use build;
 use rustc::dep_graph::DepNode;
+use rustc::hir::def_id::DefId;
 use rustc::mir::repr::Mir;
 use rustc::mir::transform::MirSource;
 use rustc::mir::visit::MutVisitor;
@@ -29,7 +30,6 @@
 use rustc::traits::ProjectionMode;
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::subst::Substs;
-use rustc::util::nodemap::NodeMap;
 use rustc::hir;
 use rustc::hir::intravisit::{self, FnKind, Visitor};
 use syntax::ast;
 use std::mem;
 
 pub fn build_mir_for_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MirMap<'tcx> {
-    let mut map = MirMap {
-        map: NodeMap(),
-    };
+    let mut map = MirMap::new(tcx.dep_graph.clone());
     {
         let mut dump = BuildMir {
             tcx: tcx,
             map: &mut map,
         };
-        tcx.visit_all_items_in_krate(DepNode::MirMapConstruction, &mut dump);
+        tcx.visit_all_items_in_krate(DepNode::Mir, &mut dump);
     }
     map
 }
@@ -94,6 +92,7 @@ struct BuildMir<'a, 'tcx: 'a> {
 /// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Cx<'b, 'gcx, 'tcx>).
 struct CxBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     src: MirSource,
+    def_id: DefId,
     infcx: InferCtxtBuilder<'a, 'gcx, 'tcx>,
     map: &'a mut MirMap<'gcx>,
 }
@@ -101,9 +100,11 @@ struct CxBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 impl<'a, 'gcx, 'tcx> BuildMir<'a, 'gcx> {
     fn cx<'b>(&'b mut self, src: MirSource) -> CxBuilder<'b, 'gcx, 'tcx> {
         let param_env = ty::ParameterEnvironment::for_item(self.tcx, src.item_id());
+        let def_id = self.tcx.map.local_def_id(src.item_id());
         CxBuilder {
             src: src,
             infcx: self.tcx.infer_ctxt(None, Some(param_env), ProjectionMode::AnyFinal),
+            def_id: def_id,
             map: self.map
         }
     }
@@ -133,7 +134,7 @@ fn build<F>(&'tcx mut self, f: F)
             mir
         });
 
-        assert!(self.map.map.insert(src.item_id(), mir).is_none())
+        assert!(self.map.map.insert(self.def_id, mir).is_none())
     }
 }
 
index d1b88ddda0c674987bb3e605b315b9cc20330b36..55e7408b0fd5d4ec881c2387fa94023abc2e938b 100644 (file)
@@ -10,7 +10,9 @@
 
 use build::{Location, ScopeAuxiliaryVec, ScopeId};
 use rustc::hir;
+use rustc::hir::def_id::DefId;
 use rustc::mir::repr::*;
+use rustc::mir::mir_map::MirMap;
 use rustc::mir::transform::MirSource;
 use rustc::ty::{self, TyCtxt};
 use rustc_data_structures::fnv::FnvHashMap;
@@ -18,7 +20,6 @@
 use std::fmt::Display;
 use std::fs;
 use std::io::{self, Write};
-use syntax::ast::NodeId;
 use std::path::{PathBuf, Path};
 
 const INDENT: &'static str = "    ";
@@ -89,12 +90,15 @@ pub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 /// Write out a human-readable textual representation for the given MIR.
 pub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
                                          iter: I,
+                                         mir_map: &MirMap<'tcx>,
                                          w: &mut Write)
                                          -> io::Result<()>
-    where I: Iterator<Item=(&'a NodeId, &'a Mir<'tcx>)>, 'tcx: 'a
+    where I: Iterator<Item=DefId>, 'tcx: 'a
 {
     let mut first = true;
-    for (&id, mir) in iter {
+    for def_id in iter {
+        let mir = &mir_map.map[&def_id];
+
         if first {
             first = false;
         } else {
@@ -102,6 +106,7 @@ pub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
             writeln!(w, "")?;
         }
 
+        let id = tcx.map.as_local_node_id(def_id).unwrap();
         let src = MirSource::from_node(tcx, id);
         write_mir_fn(tcx, src, mir, w, None)?;
 
index 5455ca62ea46e6eb3a426af70db3a8b5946881a0..132234c8c6b230a9b5bd6c343ea9653165586e7c 100644 (file)
@@ -16,6 +16,7 @@
 
 use rustc_data_structures::bitvec::BitVector;
 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
+use rustc::dep_graph::DepNode;
 use rustc::hir;
 use rustc::hir::def_id::DefId;
 use rustc::hir::intravisit::FnKind;
@@ -616,9 +617,12 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>) {
                     if !allow {
                         self.add(Qualif::NOT_CONST);
                         if self.mode != Mode::Fn {
-                            span_err!(self.tcx.sess, self.span, E0017,
-                                      "references in {}s may only refer \
-                                       to immutable values", self.mode);
+                            struct_span_err!(self.tcx.sess,  self.span, E0017,
+                                             "references in {}s may only refer \
+                                              to immutable values", self.mode)
+                                .span_label(self.span, &format!("{}s require immutable values",
+                                                                self.mode))
+                                .emit();
                         }
                     }
                 } else {
@@ -883,8 +887,8 @@ fn qualify_const_item_cached<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
     let extern_mir;
     let param_env_and_mir = if def_id.is_local() {
-        let node_id = tcx.map.as_local_node_id(def_id).unwrap();
-        mir_map.and_then(|map| map.map.get(&node_id)).map(|mir| {
+        mir_map.and_then(|map| map.map.get(&def_id)).map(|mir| {
+            let node_id = tcx.map.as_local_node_id(def_id).unwrap();
             (ty::ParameterEnvironment::for_item(tcx, node_id), mir)
         })
     } else if let Some(mir) = tcx.sess.cstore.maybe_get_item_mir(tcx, def_id) {
@@ -919,9 +923,10 @@ fn run_pass<'a>(&mut self,
 
         // First, visit `const` items, potentially recursing, to get
         // accurate MUTABLE_INTERIOR and NEEDS_DROP qualifications.
-        for &id in map.map.keys() {
-            let def_id = tcx.map.local_def_id(id);
-            let _task = tcx.dep_graph.in_task(self.dep_node(def_id));
+        let keys = map.map.keys();
+        for &def_id in &keys {
+            let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+            let id = tcx.map.as_local_node_id(def_id).unwrap();
             let src = MirSource::from_node(tcx, id);
             if let MirSource::Const(_) = src {
                 qualify_const_item_cached(tcx, &mut qualif_map, Some(map), def_id);
@@ -931,9 +936,9 @@ fn run_pass<'a>(&mut self,
         // Then, handle everything else, without recursing,
         // as the MIR map is not shared, since promotion
         // in functions (including `const fn`) mutates it.
-        for (&id, mir) in &mut map.map {
-            let def_id = tcx.map.local_def_id(id);
-            let _task = tcx.dep_graph.in_task(self.dep_node(def_id));
+        for &def_id in &keys {
+            let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+            let id = tcx.map.as_local_node_id(def_id).unwrap();
             let src = MirSource::from_node(tcx, id);
             let mode = match src {
                 MirSource::Fn(_) => {
@@ -950,6 +955,7 @@ fn run_pass<'a>(&mut self,
             };
             let param_env = ty::ParameterEnvironment::for_item(tcx, id);
 
+            let mir = map.map.get_mut(&def_id).unwrap();
             for hook in &mut *hooks {
                 hook.on_mir_pass(tcx, src, mir, self, false);
             }
index db49e1e040791803103f66e7e34742e5d4b60248..26a907920e8dbf528389165d8c70a88d4e9f17cf 100644 (file)
@@ -11,8 +11,6 @@
 //! This pass type-checks the MIR to ensure it is not broken.
 #![allow(unreachable_code)]
 
-use rustc::dep_graph::DepNode;
-use rustc::hir::def_id::DefId;
 use rustc::infer::{self, InferCtxt, InferOk};
 use rustc::traits::{self, ProjectionMode};
 use rustc::ty::fold::TypeFoldable;
@@ -714,7 +712,4 @@ fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
 }
 
 impl Pass for TypeckMir {
-    fn dep_node(&self, def_id: DefId) -> DepNode<DefId> {
-        DepNode::MirTypeck(def_id)
-    }
 }
index 91d2500564fd3b7d246f406fcb8d117fb0fede0b..341c9d820e6519b060445ec621a56a3cc49faee4 100644 (file)
@@ -183,6 +183,7 @@ fn visit_foreign_item(&mut self, fi: &ForeignItem) {
                                                    E0130,
                                                    "patterns aren't allowed in foreign function \
                                                     declarations");
+                    err.span_label(span, &format!("pattern not allowed in foreign function"));
                     if is_recent {
                         err.span_note(span,
                                       "this is a recent error, see issue #35203 for more details");
index 9f1f07004f4aecc9978fefb658e20a85a3b3fbbf..860e569ba7e5e3dc6ed477f30d53d607cd902931 100644 (file)
@@ -1005,7 +1005,7 @@ pub struct Resolver<'a> {
     //
     // There will be an anonymous module created around `g` with the ID of the
     // entry block for `f`.
-    pub module_map: NodeMap<Module<'a>>,
+    module_map: NodeMap<Module<'a>>,
 
     // Whether or not to print error messages. Can be set to true
     // when getting additional info for error message suggestions,
index ebb6e0baf20a00e8c837a18249c5e9853e706caf..5e2c0805c2ea3a6c4bda553e9d773ffbdd58ff91 100644 (file)
 use rustc::ty::item_path::{self, ItemPathBuffer, RootMode};
 use rustc::hir::map::definitions::{DefPath, DefPathData};
 
-use std::fmt::Write;
 use syntax::attr;
 use syntax::parse::token::{self, InternedString};
 use serialize::hex::ToHex;
 
 pub fn def_id_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> String {
     let def_path = tcx.def_path(def_id);
-    def_path_to_string(tcx, &def_path)
-}
-
-fn def_path_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_path: &DefPath) -> String {
-    let mut s = String::with_capacity(def_path.data.len() * 16);
-
-    if def_path.krate == cstore::LOCAL_CRATE {
-        s.push_str(&tcx.crate_name(def_path.krate));
-    } else {
-        s.push_str(&tcx.sess.cstore.original_crate_name(def_path.krate));
-    }
-    s.push_str("/");
-    s.push_str(&tcx.crate_disambiguator(def_path.krate));
-
-    for component in &def_path.data {
-        write!(s,
-               "::{}[{}]",
-               component.data.as_interned_str(),
-               component.disambiguator)
-            .unwrap();
-    }
-
-    s
+    def_path.to_string(tcx)
 }
 
 fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
@@ -167,7 +144,7 @@ fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
     // the main symbol name is not necessarily unique; hash in the
     // compiler's internal def-path, guaranteeing each symbol has a
     // truly unique path
-    hash_state.input_str(&def_path_to_string(tcx, def_path));
+    hash_state.input_str(&def_path.to_string(tcx));
 
     // Include the main item-type. Note that, in this case, the
     // assertions about `needs_subst` may not hold, but this item-type
index 8ce2fa762f9e2606103399f371c6db5afd9a2329..b5c993b86ecb2e0c09d015f9d93117ec02534b60 100644 (file)
 use std::thread;
 use libc::{c_uint, c_void};
 
+pub const RELOC_MODEL_ARGS : [(&'static str, llvm::RelocMode); 4] = [
+    ("pic", llvm::RelocMode::PIC),
+    ("static", llvm::RelocMode::Static),
+    ("default", llvm::RelocMode::Default),
+    ("dynamic-no-pic", llvm::RelocMode::DynamicNoPic),
+];
+
+pub const CODE_GEN_MODEL_ARGS : [(&'static str, llvm::CodeModel); 5] = [
+    ("default", llvm::CodeModel::Default),
+    ("small", llvm::CodeModel::Small),
+    ("kernel", llvm::CodeModel::Kernel),
+    ("medium", llvm::CodeModel::Medium),
+    ("large", llvm::CodeModel::Large),
+];
+
 pub fn llvm_err(handler: &errors::Handler, msg: String) -> ! {
     match llvm::last_error() {
         Some(err) => panic!(handler.fatal(&format!("{}: {}", msg, err))),
@@ -168,12 +183,9 @@ pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
         None => &sess.target.target.options.code_model[..],
     };
 
-    let code_model = match code_model_arg {
-        "default" => llvm::CodeModel::Default,
-        "small" => llvm::CodeModel::Small,
-        "kernel" => llvm::CodeModel::Kernel,
-        "medium" => llvm::CodeModel::Medium,
-        "large" => llvm::CodeModel::Large,
+    let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
+        |&&arg| arg.0 == code_model_arg) {
+        Some(x) => x.1,
         _ => {
             sess.err(&format!("{:?} is not a valid code model",
                              sess.opts
@@ -725,6 +737,10 @@ pub fn run_passes(sess: &Session,
         work_items.push(work);
     }
 
+    if sess.opts.debugging_opts.incremental_info {
+        dump_incremental_data(&trans);
+    }
+
     // Process the work items, optionally using worker threads.
     // NOTE: This code is not really adapted to incremental compilation where
     //       the compiler decides the number of codegen units (and will
@@ -902,6 +918,17 @@ pub fn run_passes(sess: &Session,
     }
 }
 
+fn dump_incremental_data(trans: &CrateTranslation) {
+    let mut reuse = 0;
+    for mtrans in trans.modules.iter() {
+        match mtrans.source {
+            ModuleSource::Preexisting(..) => reuse += 1,
+            ModuleSource::Translated(..) => (),
+        }
+    }
+    println!("incremental: re-using {} out of {} modules", reuse, trans.modules.len());
+}
+
 struct WorkItem {
     mtrans: ModuleTranslation,
     config: ModuleConfig,
index a1783e9c0a3829960035925d7f39d6183b28f4cb..b3729cc23f60fd3582253c9dbec04d102637ce67 100644 (file)
@@ -1244,7 +1244,7 @@ pub fn inlined_variant_def<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         }), ..}) => ty,
         _ => ctor_ty
     }.ty_adt_def().unwrap();
-    let variant_def_id = if ccx.tcx().map.is_inlined(inlined_vid) {
+    let variant_def_id = if ccx.tcx().map.is_inlined_node_id(inlined_vid) {
         ccx.defid_for_inlined_node(inlined_vid).unwrap()
     } else {
         ccx.tcx().map.local_def_id(inlined_vid)
index 7afb5257258ec38f5c5b26100e7ececad4bbff9f..769dd008af8f495c3602737f04e7b99e2187f7ed 100644 (file)
@@ -1026,7 +1026,7 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
                              .get(TransItem::Static(id))
                              .expect("Local statics should always be in the SymbolMap");
                 // Make sure that this is never executed for something inlined.
-                assert!(!ccx.tcx().map.is_inlined(id));
+                assert!(!ccx.tcx().map.is_inlined_node_id(id));
 
                 let defined_in_current_codegen_unit = ccx.codegen_unit()
                                                          .items()
index 166ce990fddfa72303618ca5d0aea7cad9c20dee..65eea1bbb6337cba32e1737be9da86b04a3f6d19 100644 (file)
@@ -81,7 +81,7 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
     check_overflow: bool,
     check_drop_flag_for_sanity: bool,
     mir_map: &'a MirMap<'tcx>,
-    mir_cache: RefCell<DefIdMap<Rc<mir::Mir<'tcx>>>>,
+    mir_cache: RefCell<DepTrackingMap<MirCache<'tcx>>>,
 
     use_dll_storage_attrs: bool,
 
@@ -186,6 +186,19 @@ fn to_dep_node(key: &ty::PolyTraitRef<'tcx>) -> DepNode<DefId> {
     }
 }
 
+// Cache for mir loaded from metadata
+struct MirCache<'tcx> {
+    data: PhantomData<&'tcx ()>
+}
+
+impl<'tcx> DepTrackingMapConfig for MirCache<'tcx> {
+    type Key = DefId;
+    type Value = Rc<mir::Mir<'tcx>>;
+    fn to_dep_node(key: &DefId) -> DepNode<DefId> {
+        DepNode::Mir(*key)
+    }
+}
+
 /// This list owns a number of LocalCrateContexts and binds them to their common
 /// SharedCrateContext. This type just exists as a convenience, something to
 /// pass around all LocalCrateContexts with and get an iterator over them.
@@ -324,16 +337,14 @@ pub fn get_reloc_model(sess: &Session) -> llvm::RelocMode {
         None => &sess.target.target.options.relocation_model[..],
     };
 
-    match reloc_model_arg {
-        "pic" => llvm::RelocMode::PIC,
-        "static" => llvm::RelocMode::Static,
-        "default" => llvm::RelocMode::Default,
-        "dynamic-no-pic" => llvm::RelocMode::DynamicNoPic,
+    match ::back::write::RELOC_MODEL_ARGS.iter().find(
+        |&&arg| arg.0 == reloc_model_arg) {
+        Some(x) => x.1,
         _ => {
             sess.err(&format!("{:?} is not a valid relocation mode",
                              sess.opts
                                  .cg
-                                 .relocation_model));
+                                 .code_model));
             sess.abort_if_errors();
             bug!();
         }
@@ -474,7 +485,7 @@ pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
             symbol_hasher: RefCell::new(symbol_hasher),
             tcx: tcx,
             mir_map: mir_map,
-            mir_cache: RefCell::new(DefIdMap()),
+            mir_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
             stats: Stats {
                 n_glues_created: Cell::new(0),
                 n_null_glues: Cell::new(0),
@@ -538,8 +549,7 @@ pub fn use_dll_storage_attrs(&self) -> bool {
 
     pub fn get_mir(&self, def_id: DefId) -> Option<CachedMir<'b, 'tcx>> {
         if def_id.is_local() {
-            let node_id = self.tcx.map.as_local_node_id(def_id).unwrap();
-            self.mir_map.map.get(&node_id).map(CachedMir::Ref)
+            self.mir_map.map.get(&def_id).map(CachedMir::Ref)
         } else {
             if let Some(mir) = self.mir_cache.borrow().get(&def_id).cloned() {
                 return Some(CachedMir::Owned(mir));
index 8011347d3eb12c76770dd9c447b03147653d5214..ee13af80b2ba0aeff7741bb4a5a687305b0bbb50 100644 (file)
@@ -326,7 +326,7 @@ fn from_def_id_and_substs<'a, 'tcx>(type_map: &mut TypeMap<'tcx>,
             // First, find out the 'real' def_id of the type. Items inlined from
             // other crates have to be mapped back to their source.
             let def_id = if let Some(node_id) = cx.tcx().map.as_local_node_id(def_id) {
-                if cx.tcx().map.is_inlined(node_id) {
+                if cx.tcx().map.is_inlined_node_id(node_id) {
                     // The given def_id identifies the inlined copy of a
                     // type definition, let's take the source of the copy.
                     cx.defid_for_inlined_node(node_id).unwrap()
@@ -1846,7 +1846,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
     // crate should already contain debuginfo for it. More importantly, the
     // global might not even exist in un-inlined form anywhere which would lead
     // to a linker errors.
-    if cx.tcx().map.is_inlined(node_id) {
+    if cx.tcx().map.is_inlined_node_id(node_id) {
         return;
     }
 
index 35bb0481c8e979e6f82248a111fa18254fb078eb..d0e00a6d593a5edf18742ebdae8156e467ac9ea0 100644 (file)
@@ -83,8 +83,6 @@ pub fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
         // that the incoming edges to a particular fn are from a
         // particular set.
 
-        self.register_reads(ccx);
-
         match *self {
             TransItem::Static(node_id) => {
                 let def_id = ccx.tcx().map.local_def_id(node_id);
@@ -120,46 +118,6 @@ pub fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
                ccx.codegen_unit().name());
     }
 
-    /// If necessary, creates a subtask for trans'ing a particular item and registers reads on
-    /// `TypeckItemBody` and `Hir`.
-    fn register_reads(&self, ccx: &CrateContext<'a, 'tcx>) {
-        let tcx = ccx.tcx();
-        let def_id = match *self {
-            TransItem::Static(node_id) => {
-                tcx.map.local_def_id(node_id)
-            }
-            TransItem::Fn(instance) => {
-                if let Some(node) = tcx.map.as_local_node_id(instance.def) {
-                    if let hir_map::Node::NodeItem(_) = tcx.map.get(node) {
-                        // This already is a "real" item
-                        instance.def
-                    } else {
-                        // Get the enclosing item and register a read on it
-                        tcx.map.get_parent_did(node)
-                    }
-                } else {
-                    // Translating an inlined item from another crate? Don't track anything.
-                    return;
-                }
-            }
-            TransItem::DropGlue(_) => {
-                // Nothing to track for drop glue
-                return;
-            }
-        };
-
-        tcx.dep_graph.with_task(DepNode::TransCrateItem(def_id), || {
-            tcx.dep_graph.read(DepNode::Hir(def_id));
-
-            // We are going to be accessing various tables
-            // generated by TypeckItemBody; we also assume
-            // that the body passes type check. These tables
-            // are not individually tracked, so just register
-            // a read here.
-            tcx.dep_graph.read(DepNode::TypeckItemBody(def_id));
-        });
-    }
-
     pub fn predefine(&self,
                      ccx: &CrateContext<'a, 'tcx>,
                      linkage: llvm::Linkage) {
index b4e9fb5c65bb3576417b53469e454c262a249830..07595c5dbe1144cb987de5165a121575e420234a 100644 (file)
@@ -360,8 +360,11 @@ pub fn ast_path_substs_for_ty(&self,
                 self.convert_angle_bracketed_parameters(rscope, span, decl_generics, data)
             }
             hir::ParenthesizedParameters(..) => {
-                span_err!(tcx.sess, span, E0214,
-                          "parenthesized parameters may only be used with a trait");
+                struct_span_err!(tcx.sess, span, E0214,
+                          "parenthesized parameters may only be used with a trait")
+                    .span_label(span, &format!("only traits may use parentheses"))
+                    .emit();
+
                 let ty_param_defs = decl_generics.types.get_slice(TypeSpace);
                 (Substs::empty(),
                  ty_param_defs.iter().map(|_| tcx.types.err).collect(),
@@ -1201,10 +1204,13 @@ fn make_object_type(&self,
         }
 
         for (trait_def_id, name) in associated_types {
-            span_err!(tcx.sess, span, E0191,
+            struct_span_err!(tcx.sess, span, E0191,
                 "the value of the associated type `{}` (from the trait `{}`) must be specified",
                         name,
-                        tcx.item_path_str(trait_def_id));
+                        tcx.item_path_str(trait_def_id))
+                        .span_label(span, &format!(
+                            "missing associated type `{}` value", name))
+                        .emit();
         }
 
         tcx.mk_trait(object.principal, object.bounds)
@@ -1281,10 +1287,12 @@ fn one_bound_for_assoc_type(&self,
         }
 
         if bounds.len() > 1 {
-            let mut err = struct_span_err!(self.tcx().sess, span, E0221,
-                                           "ambiguous associated type `{}` in bounds of `{}`",
-                                           assoc_name,
-                                           ty_param_name);
+            let mut err = struct_span_err!(
+                self.tcx().sess, span, E0221,
+                "ambiguous associated type `{}` in bounds of `{}`",
+                assoc_name,
+                ty_param_name);
+            err.span_label(span, &format!("ambiguous associated type `{}`", assoc_name));
 
             for bound in &bounds {
                 span_note!(&mut err, span,
@@ -1584,9 +1592,11 @@ fn base_def_to_ty(&self,
                 return self.tcx().types.err;
             }
             _ => {
-                span_err!(tcx.sess, span, E0248,
-                          "found value `{}` used as a type",
-                          tcx.item_path_str(def.def_id()));
+                struct_span_err!(tcx.sess, span, E0248,
+                           "found value `{}` used as a type",
+                            tcx.item_path_str(def.def_id()))
+                           .span_label(span, &format!("value used as a type"))
+                           .emit();
                 return self.tcx().types.err;
             }
         }
index fe68690d4e97447efbc826717314234bbffb86ec..5f255cc1fb730112b1df500d28a65b18fec18dfb 100644 (file)
@@ -633,10 +633,23 @@ fn check_pat_tuple_struct(&self,
                 self.check_pat(&subpat, field_ty);
             }
         } else {
-            span_err!(tcx.sess, pat.span, E0023,
-                      "this pattern has {} field{s}, but the corresponding {} has {} field{s}",
-                      subpats.len(), def.kind_name(), variant.fields.len(),
-                      s = if variant.fields.len() == 1 {""} else {"s"});
+            let subpats_ending = if subpats.len() == 1 {
+                ""
+            } else {
+                "s"
+            };
+            let fields_ending = if variant.fields.len() == 1 {
+                ""
+            } else {
+                "s"
+            };
+            struct_span_err!(tcx.sess, pat.span, E0023,
+                             "this pattern has {} field{}, but the corresponding {} has {} field{}",
+                             subpats.len(), subpats_ending, def.kind_name(),
+                             variant.fields.len(),  fields_ending)
+                .span_label(pat.span, &format!("expected {} field{}, found {}",
+                                               variant.fields.len(), fields_ending, subpats.len()))
+                .emit();
             on_error();
         }
     }
@@ -682,10 +695,16 @@ pub fn check_struct_pat_fields(&self,
                     field_map.get(&field.name)
                         .map(|f| self.field_ty(span, f, substs))
                         .unwrap_or_else(|| {
-                            span_err!(tcx.sess, span, E0026,
-                                "struct `{}` does not have a field named `{}`",
-                                tcx.item_path_str(variant.did),
-                                field.name);
+                            struct_span_err!(tcx.sess, span, E0026,
+                                             "struct `{}` does not have a field named `{}`",
+                                             tcx.item_path_str(variant.did),
+                                             field.name)
+                                .span_label(span,
+                                            &format!("struct `{}` does not have field `{}`",
+                                                     tcx.item_path_str(variant.did),
+                                                     field.name))
+                                .emit();
+
                             tcx.types.err
                         })
                 }
index b971ae02cd0bd27a87905137df7540a80142257b..140fabce76b4d5401414e96da5a6755f385147e2 100644 (file)
@@ -14,6 +14,8 @@
 use rustc::traits::{self, ProjectionMode};
 use rustc::ty::error::ExpectedFound;
 use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace};
+use rustc::hir::map::Node;
+use rustc::hir::{ImplItemKind, TraitItem_};
 
 use syntax::ast;
 use syntax_pos::Span;
@@ -461,7 +463,7 @@ pub fn compare_const_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
         // Compute skolemized form of impl and trait const tys.
         let impl_ty = impl_c.ty.subst(tcx, impl_to_skol_substs);
         let trait_ty = trait_c.ty.subst(tcx, &trait_to_skol_substs);
-        let origin = TypeOrigin::Misc(impl_c_span);
+        let mut origin = TypeOrigin::Misc(impl_c_span);
 
         let err = infcx.commit_if_ok(|_| {
             // There is no "body" here, so just pass dummy id.
@@ -496,11 +498,31 @@ pub fn compare_const_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
             debug!("checking associated const for compatibility: impl ty {:?}, trait ty {:?}",
                    impl_ty,
                    trait_ty);
+
+            // Locate the Span containing just the type of the offending impl
+            if let Some(impl_trait_node) = tcx.map.get_if_local(impl_c.def_id) {
+                if let Node::NodeImplItem(impl_trait_item) = impl_trait_node {
+                    if let ImplItemKind::Const(ref ty, _) = impl_trait_item.node {
+                        origin = TypeOrigin::Misc(ty.span);
+                    }
+                }
+            }
+
             let mut diag = struct_span_err!(
                 tcx.sess, origin.span(), E0326,
                 "implemented const `{}` has an incompatible type for trait",
                 trait_c.name
             );
+
+            // Add a label to the Span containing just the type of the item
+            if let Some(orig_trait_node) = tcx.map.get_if_local(trait_c.def_id) {
+                if let Node::NodeTraitItem(orig_trait_item) = orig_trait_node {
+                    if let TraitItem_::ConstTraitItem(ref ty, _) = orig_trait_item.node {
+                        diag.span_label(ty.span, &format!("original trait requirement"));
+                    }
+                }
+            }
+
             infcx.note_type_err(
                 &mut diag, origin,
                 Some(infer::ValuePairs::Types(ExpectedFound {
index 6a1baf13b273d4f2bda0c4f4c3d4cf1b688ca38c..648d1f42fb5208322843c8a3d41791ae357ef2aa 100644 (file)
@@ -580,7 +580,7 @@ fn assemble_extension_candidates_for_traits_in_scope(&mut self,
                                                          -> Result<(), MethodError<'tcx>>
     {
         let mut duplicates = HashSet::new();
-        let opt_applicable_traits = self.ccx.trait_map.get(&expr_id);
+        let opt_applicable_traits = self.tcx.trait_map.get(&expr_id);
         if let Some(applicable_traits) = opt_applicable_traits {
             for trait_candidate in applicable_traits {
                 let trait_did = trait_candidate.def_id;
index 346449d0a51331465bde2b99b5eec3048bb74e7c..54521782474fe05ac1e8b15ae5e0fec19a193def 100644 (file)
@@ -162,26 +162,34 @@ pub fn report_method_error(&self,
                     },
                     rcvr_ty);
 
-                // If the item has the name of a field, give a help note
-                if let (&ty::TyStruct(def, substs), Some(expr)) = (&rcvr_ty.sty, rcvr_expr) {
-                    if let Some(field) = def.struct_variant().find_field_named(item_name) {
-                        let expr_string = match tcx.sess.codemap().span_to_snippet(expr.span) {
-                            Ok(expr_string) => expr_string,
-                            _ => "s".into() // Default to a generic placeholder for the
-                                            // expression when we can't generate a string
-                                            // snippet
-                        };
-
-                        let field_ty = field.ty(tcx, substs);
-
-                        if self.is_fn_ty(&field_ty, span) {
-                            err.span_note(span,
-                                          &format!("use `({0}.{1})(...)` if you meant to call \
-                                                   the function stored in the `{1}` field",
-                                                   expr_string, item_name));
-                        } else {
-                            err.span_note(span, &format!("did you mean to write `{0}.{1}`?",
-                                                         expr_string, item_name));
+                // If the method name is the name of a field with a function or closure type,
+                // give a helping note that it has to be called as (x.f)(...).
+                if let Some(expr) = rcvr_expr {
+                    for (ty, _) in self.autoderef(span, rcvr_ty) {
+                        if let ty::TyStruct(def, substs) = ty.sty {
+                            if let Some(field) = def.struct_variant().find_field_named(item_name) {
+                                let snippet = tcx.sess.codemap().span_to_snippet(expr.span);
+                                let expr_string = match snippet {
+                                    Ok(expr_string) => expr_string,
+                                    _ => "s".into() // Default to a generic placeholder for the
+                                                    // expression when we can't generate a
+                                                    // string snippet
+                                };
+
+                                let field_ty = field.ty(tcx, substs);
+
+                                if self.is_fn_ty(&field_ty, span) {
+                                    err.span_note(span, &format!(
+                                        "use `({0}.{1})(...)` if you meant to call the function \
+                                         stored in the `{1}` field",
+                                        expr_string, item_name));
+                                } else {
+                                    err.span_note(span, &format!(
+                                        "did you mean to write `{0}.{1}`?",
+                                        expr_string, item_name));
+                                }
+                                break;
+                            }
                         }
                     }
                 }
index b1362e1d0b8238f3068df3d27b0623ac6ba01073..36fdba3706109682a0f4998040f50e00cd53c906 100644 (file)
@@ -1272,13 +1272,21 @@ pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
 
             // Check for duplicate discriminant values
             if let Some(i) = disr_vals.iter().position(|&x| x == current_disr_val) {
-                let mut err = struct_span_err!(ccx.tcx.sess, v.span, E0081,
-                    "discriminant value `{}` already exists", disr_vals[i]);
                 let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
-                err.span_label(ccx.tcx.map.span(variant_i_node_id),
-                               &format!("first use of `{}`", disr_vals[i]));
-                err.span_label(v.span , &format!("enum already has `{}`", disr_vals[i]));
-                err.emit();
+                let variant_i = ccx.tcx.map.expect_variant(variant_i_node_id);
+                let i_span = match variant_i.node.disr_expr {
+                    Some(ref expr) => expr.span,
+                    None => ccx.tcx.map.span(variant_i_node_id)
+                };
+                let span = match v.node.disr_expr {
+                    Some(ref expr) => expr.span,
+                    None => v.span
+                };
+                struct_span_err!(ccx.tcx.sess, span, E0081,
+                                 "discriminant value `{}` already exists", disr_vals[i])
+                    .span_label(i_span, &format!("first use of `{}`", disr_vals[i]))
+                    .span_label(span , &format!("enum already has `{}`", disr_vals[i]))
+                    .emit();
             }
             disr_vals.push(current_disr_val);
         }
@@ -4372,14 +4380,17 @@ fn push_explicit_angle_bracketed_parameters_from_segment_to_substs(&self,
                 if i < type_count {
                     substs.types.push(space, t);
                 } else if i == type_count {
-                    span_err!(self.tcx.sess, typ.span, E0087,
-                        "too many type parameters provided: \
-                         expected at most {} parameter{}, \
-                         found {} parameter{}",
-                         type_count,
-                         if type_count == 1 {""} else {"s"},
-                         data.types.len(),
-                         if data.types.len() == 1 {""} else {"s"});
+                    struct_span_err!(self.tcx.sess, typ.span, E0087,
+                                     "too many type parameters provided: \
+                                      expected at most {} parameter{}, \
+                                      found {} parameter{}",
+                                     type_count,
+                                     if type_count == 1 {""} else {"s"},
+                                     data.types.len(),
+                                     if data.types.len() == 1 {""} else {"s"})
+                        .span_label(typ.span , &format!("expected {} parameter{}",
+                                    type_count,
+                                    if type_count == 1 {""} else {"s"})).emit();
                     substs.types.truncate(space, 0);
                     break;
                 }
index 939d81bf8477b730ff8d5b2e0cb44a03fe6dafbe..7a923cd29d0fb9cde009f0a63416fe959fab9cb1 100644 (file)
@@ -321,19 +321,31 @@ fn check_implementations_of_copy(&self) {
 
                 }
                 Err(CopyImplementationError::InfrigingVariant(name)) => {
-                       struct_span_err!(tcx.sess, span, E0205,
-                                 "the trait `Copy` may not be \
-                                          implemented for this type")
-                           .span_label(span, &format!("variant \
-                                          `{}` does not implement `Copy`",
-                                         name))
-                           .emit()
+                    let item = tcx.map.expect_item(impl_node_id);
+                    let span = if let ItemImpl(_, _, _, Some(ref tr), _, _) = item.node {
+                        tr.path.span
+                    } else {
+                        span
+                    };
+
+                    struct_span_err!(tcx.sess, span, E0205,
+                                     "the trait `Copy` may not be implemented for this type")
+                        .span_label(span, &format!("variant `{}` does not implement `Copy`",
+                                                   name))
+                        .emit()
                 }
                 Err(CopyImplementationError::NotAnAdt) => {
-                       span_err!(tcx.sess, span, E0206,
-                                 "the trait `Copy` may not be implemented \
-                                  for this type; type is not a structure or \
-                                  enumeration")
+                    let item = tcx.map.expect_item(impl_node_id);
+                    let span = if let ItemImpl(_, _, _, _, ref ty, _) = item.node {
+                        ty.span
+                    } else {
+                        span
+                    };
+
+                    struct_span_err!(tcx.sess, span, E0206,
+                                     "the trait `Copy` may not be implemented for this type")
+                        .span_label(span, &format!("type is not a structure or enumeration"))
+                        .emit();
                 }
                 Err(CopyImplementationError::HasDestructor) => {
                     span_err!(tcx.sess, span, E0184,
index 9b1a6c78335f189470f69c1d31b59706e37c21b3..f0ce4f6d2ec4225f9a022543cb8ca9ff6959cf6d 100644 (file)
@@ -1442,6 +1442,7 @@ fn type_scheme_of_item<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
         // NB. Since the `memoized` function enters a new task, and we
         // are giving this task access to the item `item`, we must
         // register a read.
+        assert!(!ccx.tcx.map.is_inlined_def_id(item_def_id));
         ccx.tcx.dep_graph.read(DepNode::Hir(item_def_id));
         compute_type_scheme_of_item(ccx, item)
     })
@@ -1569,6 +1570,7 @@ fn type_scheme_of_foreign_item<'a, 'tcx>(
         // NB. Since the `memoized` function enters a new task, and we
         // are giving this task access to the item `item`, we must
         // register a read.
+        assert!(!ccx.tcx.map.is_inlined_def_id(item_def_id));
         ccx.tcx.dep_graph.read(DepNode::Hir(item_def_id));
         compute_type_scheme_of_foreign_item(ccx, item, abi)
     })
index 65e00705121a7e6dcc5745d3034f10d5c0550053..5e733389e24c8b8e520eb61e5319e871d907d76a 100644 (file)
@@ -139,9 +139,6 @@ pub struct TypeAndSubsts<'tcx> {
 pub struct CrateCtxt<'a, 'tcx: 'a> {
     ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
 
-    /// A mapping from method call sites to traits that have that method.
-    pub trait_map: hir::TraitMap,
-
     /// A vector of every trait accessible in the whole crate
     /// (i.e. including those from subcrates). This is used only for
     /// error reporting, and so is lazily initialised and generally
@@ -265,9 +262,10 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
                     match it.node {
                         hir::ItemFn(_,_,_,_,ref ps,_)
                         if ps.is_parameterized() => {
-                            struct_span_err!(tcx.sess, start_span, E0132,
+                            let sp = if let Some(sp) = ps.span() { sp } else { start_span };
+                            struct_span_err!(tcx.sess, sp, E0132,
                                 "start function is not allowed to have type parameters")
-                                .span_label(ps.span().unwrap(),
+                                .span_label(sp,
                                             &format!("start function cannot have type parameters"))
                                 .emit();
                             return;
@@ -321,13 +319,11 @@ fn check_for_entry_fn(ccx: &CrateCtxt) {
     }
 }
 
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                             trait_map: hir::TraitMap)
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
                              -> CompileResult {
     let time_passes = tcx.sess.time_passes();
     let ccx = CrateCtxt {
         ast_ty_to_ty_cache: RefCell::new(NodeMap()),
-        trait_map: trait_map,
         all_traits: RefCell::new(None),
         stack: RefCell::new(Vec::new()),
         tcx: tcx
index fc9ae73f5ce7e2ec96a4c584c4e8f652c9b38746..7d8ebc403b08f1c5cc59f050a0df042ce42a5650 100644 (file)
@@ -128,7 +128,6 @@ pub fn run_core(search_paths: SearchPaths,
 
     let codemap = Rc::new(codemap::CodeMap::new());
     let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
-                                                               None,
                                                                true,
                                                                false,
                                                                Some(codemap.clone()));
index f9d0df9981a1d73bc9e796746aaca850cda0736d..5f1d28c8d316ad35014bd2f4a56f124449a6fdfc 100644 (file)
@@ -74,7 +74,6 @@ pub fn run(input: &str,
 
     let codemap = Rc::new(CodeMap::new());
     let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
-                                                               None,
                                                                true,
                                                                false,
                                                                Some(codemap.clone()));
@@ -228,9 +227,7 @@ fn drop(&mut self) {
     let data = Arc::new(Mutex::new(Vec::new()));
     let codemap = Rc::new(CodeMap::new());
     let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()),
-                                                None,
-                                                Some(codemap.clone()),
-                                                errors::snippet::FormatMode::EnvironmentSelected);
+                                                      Some(codemap.clone()));
     let old = io::set_panic(box Sink(data.clone()));
     let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout()));
 
index fd7b0a2e6bbf6d9b0d8834c911e7dca320c7c298..8039421ae77306418d2895f00284e6dad9e6c030 100644 (file)
@@ -199,13 +199,12 @@ fn test_resize_policy() {
 /// A hash map implementation which uses linear probing with Robin
 /// Hood bucket stealing.
 ///
-/// The hashes are all keyed by the thread-local random number generator
-/// on creation by default. This means that the ordering of the keys is
-/// randomized, but makes the tables more resistant to
-/// denial-of-service attacks (Hash DoS). No guarantees are made to the
-/// quality of the random data. The implementation uses the best available
-/// random data from your platform at the time of creation. This behavior
-/// can be overridden with one of the constructors.
+/// By default, HashMap uses a somewhat slow hashing algorithm which can provide resistance
+/// to DoS attacks. Rust makes a best attempt at acquiring random numbers without IO
+/// blocking from your system. Because of this, HashMap is not guaranteed to provide
+/// DoS resistance since the numbers generated might not be truly random. If you do
+/// require this behavior you can create your own hashing function using
+/// [BuildHasherDefault](../hash/struct.BuildHasherDefault.html).
 ///
 /// It is required that the keys implement the `Eq` and `Hash` traits, although
 /// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
index 1459420cdc00ce0b4f446ee5169ba36808f27070..914599271aca279044667c6ff0cb572d0a7d2ce2 100644 (file)
@@ -49,6 +49,7 @@
 
 use any::TypeId;
 use boxed::Box;
+use cell;
 use char;
 use fmt::{self, Debug, Display};
 use marker::{Send, Sync, Reflect};
@@ -289,6 +290,20 @@ fn description(&self) -> &str {
     }
 }
 
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized + Reflect> Error for cell::BorrowError<'a, T> {
+    fn description(&self) -> &str {
+        "already mutably borrowed"
+    }
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized + Reflect> Error for cell::BorrowMutError<'a, T> {
+    fn description(&self) -> &str {
+        "already borrowed"
+    }
+}
+
 // copied from any.rs
 impl Error + 'static {
     /// Returns true if the boxed type is the same as `T`
index e0501f9cc61d24c70781bab1cf2645d2ed143ffe..77b90c0846bbe44f2349b7a674223d6f47ea59f9 100644 (file)
@@ -99,11 +99,9 @@ pub struct CString {
 ///
 /// extern { fn my_string() -> *const c_char; }
 ///
-/// fn main() {
-///     unsafe {
-///         let slice = CStr::from_ptr(my_string());
-///         println!("string length: {}", slice.to_bytes().len());
-///     }
+/// unsafe {
+///     let slice = CStr::from_ptr(my_string());
+///     println!("string length: {}", slice.to_bytes().len());
 /// }
 /// ```
 ///
@@ -119,10 +117,8 @@ pub struct CString {
 ///     unsafe { work_with(data.as_ptr()) }
 /// }
 ///
-/// fn main() {
-///     let s = CString::new("data data data data").unwrap();
-///     work(&s);
-/// }
+/// let s = CString::new("data data data data").unwrap();
+/// work(&s);
 /// ```
 ///
 /// Converting a foreign C string into a Rust `String`
@@ -139,9 +135,7 @@ pub struct CString {
 ///     }
 /// }
 ///
-/// fn main() {
-///     println!("string: {}", my_string_safe());
-/// }
+/// println!("string: {}", my_string_safe());
 /// ```
 #[derive(Hash)]
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -188,11 +182,9 @@ impl CString {
     ///
     /// extern { fn puts(s: *const c_char); }
     ///
-    /// fn main() {
-    ///     let to_print = CString::new("Hello!").unwrap();
-    ///     unsafe {
-    ///         puts(to_print.as_ptr());
-    ///     }
+    /// let to_print = CString::new("Hello!").unwrap();
+    /// unsafe {
+    ///     puts(to_print.as_ptr());
     /// }
     /// ```
     ///
index 865d067cdb6dced4522f9873b87575ecfbb4aa0f..c05e0c3ca68dfeea25c7ba5c56a57fd65c00fed1 100644 (file)
 #![feature(str_utf16)]
 #![feature(test, rustc_private)]
 #![feature(thread_local)]
+#![feature(try_borrow)]
 #![feature(try_from)]
 #![feature(unboxed_closures)]
 #![feature(unicode)]
 #![feature(unwind_attributes)]
 #![feature(vec_push_all)]
 #![feature(zero_one)]
+#![cfg_attr(test, feature(update_panic_count))]
 
 // Issue# 30592: Systematically use alloc_system during stage0 since jemalloc
 // might be unavailable or disabled
index ba18d15f5c4e384a8be0eeb0c0d83f7442d5ba01..2f67081e0d71093c29625a4de7c292112cca4dad 100644 (file)
@@ -340,5 +340,5 @@ pub fn catch_unwind<F: FnOnce() -> R + UnwindSafe, R>(f: F) -> Result<R> {
 /// ```
 #[stable(feature = "resume_unwind", since = "1.9.0")]
 pub fn resume_unwind(payload: Box<Any + Send>) -> ! {
-    panicking::rust_panic(payload)
+    panicking::update_count_then_panic(payload)
 }
index 57a4c3df70a476eeeb680233db6ab390a4c95490..8c1567939fb37caea977653bf5e6e4148ea3662b 100644 (file)
@@ -21,7 +21,6 @@
 use io::prelude::*;
 
 use any::Any;
-use cell::Cell;
 use cell::RefCell;
 use fmt;
 use intrinsics;
@@ -39,8 +38,6 @@
     }
 }
 
-thread_local! { pub static PANIC_COUNT: Cell<usize> = Cell::new(0) }
-
 // Binary interface to the panic runtime that the standard library depends on.
 //
 // The standard library is tagged with `#![needs_panic_runtime]` (introduced in
@@ -187,7 +184,7 @@ fn default_hook(info: &PanicInfo) {
     // for this panic. Otherwise only print it if logging is enabled.
     #[cfg(any(not(cargobuild), feature = "backtrace"))]
     let log_backtrace = {
-        let panics = PANIC_COUNT.with(|c| c.get());
+        let panics = update_panic_count(0);
 
         panics >= 2 || backtrace::log_enabled()
     };
@@ -238,14 +235,31 @@ fn default_hook(info: &PanicInfo) {
     }
 }
 
+
+#[cfg(not(test))]
+#[doc(hidden)]
+#[unstable(feature = "update_panic_count", issue = "0")]
+pub fn update_panic_count(amt: isize) -> usize {
+    use cell::Cell;
+    thread_local! { static PANIC_COUNT: Cell<usize> = Cell::new(0) }
+
+    PANIC_COUNT.with(|c| {
+        let next = (c.get() as isize + amt) as usize;
+        c.set(next);
+        return next
+    })
+}
+
+#[cfg(test)]
+pub use realstd::rt::update_panic_count;
+
 /// Invoke a closure, capturing the cause of an unwinding panic if one occurs.
 pub unsafe fn try<R, F: FnOnce() -> R>(f: F) -> Result<R, Box<Any + Send>> {
     let mut slot = None;
     let mut f = Some(f);
-    let ret = PANIC_COUNT.with(|s| {
-        let prev = s.get();
-        s.set(0);
+    let ret;
 
+    {
         let mut to_run = || {
             slot = Some(f.take().unwrap()());
         };
@@ -258,18 +272,18 @@ pub unsafe fn try<R, F: FnOnce() -> R>(f: F) -> Result<R, Box<Any + Send>> {
                                          dataptr,
                                          &mut any_data,
                                          &mut any_vtable);
-        s.set(prev);
-
         if r == 0 {
-            Ok(())
+            ret = Ok(());
         } else {
-            Err(mem::transmute(raw::TraitObject {
+            update_panic_count(-1);
+            ret = Err(mem::transmute(raw::TraitObject {
                 data: any_data as *mut _,
                 vtable: any_vtable as *mut _,
-            }))
+            }));
         }
-    });
+    }
 
+    debug_assert!(update_panic_count(0) == 0);
     return ret.map(|()| {
         slot.take().unwrap()
     });
@@ -285,7 +299,7 @@ fn call<F: FnMut()>(f: &mut F) {
 
 /// Determines whether the current thread is unwinding because of panic.
 pub fn panicking() -> bool {
-    PANIC_COUNT.with(|c| c.get() != 0)
+    update_panic_count(0) != 0
 }
 
 /// Entry point of panic from the libcore crate.
@@ -350,18 +364,14 @@ fn rust_panic_with_hook(msg: Box<Any + Send>,
                         file_line: &(&'static str, u32)) -> ! {
     let (file, line) = *file_line;
 
-    let panics = PANIC_COUNT.with(|c| {
-        let prev = c.get();
-        c.set(prev + 1);
-        prev
-    });
+    let panics = update_panic_count(1);
 
     // If this is the third nested call (e.g. panics == 2, this is 0-indexed),
     // the panic hook probably triggered the last panic, otherwise the
     // double-panic check would have aborted the process. In this case abort the
     // process real quickly as we don't want to try calling it again as it'll
     // probably just panic again.
-    if panics > 1 {
+    if panics > 2 {
         util::dumb_print(format_args!("thread panicked while processing \
                                        panic. aborting.\n"));
         unsafe { intrinsics::abort() }
@@ -383,7 +393,7 @@ fn rust_panic_with_hook(msg: Box<Any + Send>,
         HOOK_LOCK.read_unlock();
     }
 
-    if panics > 0 {
+    if panics > 1 {
         // If a thread panics while it's already unwinding then we
         // have limited options. Currently our preference is to
         // just abort. In the future we may consider resuming
@@ -396,6 +406,12 @@ fn rust_panic_with_hook(msg: Box<Any + Send>,
     rust_panic(msg)
 }
 
+/// Shim around rust_panic. Called by resume_unwind.
+pub fn update_count_then_panic(msg: Box<Any + Send>) -> ! {
+    update_panic_count(1);
+    rust_panic(msg)
+}
+
 /// A private no-mangle function on which to slap yer breakpoints.
 #[no_mangle]
 #[allow(private_no_mangle_fns)] // yes we get it, but we like breakpoints
index 5a7c0fe4816c981334849c6fd2d5a91602fe08f2..a3d9e4db7d19a4b0927b5e30356b813c412438d7 100644 (file)
@@ -25,7 +25,7 @@
 
 
 // Reexport some of our utilities which are expected by other crates.
-pub use panicking::{begin_panic, begin_panic_fmt};
+pub use panicking::{begin_panic, begin_panic_fmt, update_panic_count};
 
 #[cfg(not(test))]
 #[lang = "start"]
index 33c526532c7a842896f3b897510a22d7716fc5b6..9c57f25dfcce8eb65585d51fa42d06459f114f94 100644 (file)
@@ -21,6 +21,7 @@
 use io;
 use iter;
 use libc::{self, c_int, c_char, c_void};
+use marker::PhantomData;
 use mem;
 use memchr;
 use path::{self, PathBuf};
@@ -305,7 +306,7 @@ pub fn current_exe() -> io::Result<PathBuf> {
 
 pub struct Args {
     iter: vec::IntoIter<OsString>,
-    _dont_send_or_sync_me: *mut (),
+    _dont_send_or_sync_me: PhantomData<*mut ()>,
 }
 
 impl Iterator for Args {
@@ -343,7 +344,7 @@ pub fn args() -> Args {
     };
     Args {
         iter: vec.into_iter(),
-        _dont_send_or_sync_me: ptr::null_mut(),
+        _dont_send_or_sync_me: PhantomData,
     }
 }
 
@@ -400,7 +401,7 @@ pub fn args() -> Args {
         }
     }
 
-    Args { iter: res.into_iter(), _dont_send_or_sync_me: ptr::null_mut() }
+    Args { iter: res.into_iter(), _dont_send_or_sync_me: PhantomData }
 }
 
 #[cfg(any(target_os = "linux",
@@ -419,12 +420,12 @@ pub fn args() -> Args {
     let v: Vec<OsString> = bytes.into_iter().map(|v| {
         OsStringExt::from_vec(v)
     }).collect();
-    Args { iter: v.into_iter(), _dont_send_or_sync_me: ptr::null_mut() }
+    Args { iter: v.into_iter(), _dont_send_or_sync_me: PhantomData }
 }
 
 pub struct Env {
     iter: vec::IntoIter<(OsString, OsString)>,
-    _dont_send_or_sync_me: *mut (),
+    _dont_send_or_sync_me: PhantomData<*mut ()>,
 }
 
 impl Iterator for Env {
@@ -465,7 +466,7 @@ pub fn env() -> Env {
         }
         let ret = Env {
             iter: result.into_iter(),
-            _dont_send_or_sync_me: ptr::null_mut(),
+            _dont_send_or_sync_me: PhantomData,
         };
         ENV_LOCK.unlock();
         return ret
index 5ea1d6be9fec9903b3155b7faf60cafe2efe240d..9e9ea09646088bd2b2726eab8058939a45e6bc44 100644 (file)
@@ -1685,9 +1685,7 @@ mod tests {
     fn mk_sh(cm: Rc<CodeMap>) -> errors::Handler {
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
-                                                None,
-                                                Some(cm),
-                                                errors::snippet::FormatMode::EnvironmentSelected);
+                                                Some(cm));
         errors::Handler::with_emitter(true, false, Box::new(emitter))
     }
 
index 7b28952aff6b6576b740ab809ed75fd1a15e77a6..cd1fdcfe9d130ce94a4e428a9c6d1255222c8190 100644 (file)
@@ -51,7 +51,6 @@ impl ParseSess {
     pub fn new() -> ParseSess {
         let cm = Rc::new(CodeMap::new());
         let handler = Handler::with_tty_emitter(ColorConfig::Auto,
-                                                None,
                                                 true,
                                                 false,
                                                 Some(cm.clone()));
index 248f6f98650a526778ae67e84d66f4eb2d3bea6b..64515b900d5b188e5b364319ecdd2d40ebd2ce90 100644 (file)
@@ -42,6 +42,7 @@
 #![feature(staged_api)]
 #![feature(question_mark)]
 #![feature(panic_unwind)]
+#![feature(mpsc_recv_timeout)]
 
 extern crate getopts;
 extern crate term;
@@ -73,6 +74,8 @@
 use std::thread;
 use std::time::{Instant, Duration};
 
+const TEST_WARN_TIMEOUT_S: u64 = 60;
+
 // to be used by rustc to compile tests in libtest
 pub mod test {
     pub use {Bencher, TestName, TestResult, TestDesc, TestDescAndFn, TestOpts, TrFailed,
@@ -592,6 +595,12 @@ pub fn write_result(&mut self, result: &TestResult) -> io::Result<()> {
         }
     }
 
+    pub fn write_timeout(&mut self, desc: &TestDesc) -> io::Result<()> {
+        self.write_plain(&format!("test {} has been running for over {} seconds\n",
+                                  desc.name,
+                                  TEST_WARN_TIMEOUT_S))
+    }
+
     pub fn write_log(&mut self, test: &TestDesc, result: &TestResult) -> io::Result<()> {
         match self.log_out {
             None => Ok(()),
@@ -709,6 +718,7 @@ fn callback<T: Write>(event: &TestEvent, st: &mut ConsoleTestState<T>) -> io::Re
         match (*event).clone() {
             TeFiltered(ref filtered_tests) => st.write_run_start(filtered_tests.len()),
             TeWait(ref test, padding) => st.write_test_start(test, padding),
+            TeTimeout(ref test) => st.write_timeout(test),
             TeResult(test, result, stdout) => {
                 st.write_log(&test, &result)?;
                 st.write_result(&result)?;
@@ -830,6 +840,7 @@ enum TestEvent {
     TeFiltered(Vec<TestDesc>),
     TeWait(TestDesc, NamePadding),
     TeResult(TestDesc, TestResult, Vec<u8>),
+    TeTimeout(TestDesc),
 }
 
 pub type MonitorMsg = (TestDesc, TestResult, Vec<u8>);
@@ -838,6 +849,9 @@ enum TestEvent {
 fn run_tests<F>(opts: &TestOpts, tests: Vec<TestDescAndFn>, mut callback: F) -> io::Result<()>
     where F: FnMut(TestEvent) -> io::Result<()>
 {
+    use std::collections::HashMap;
+    use std::sync::mpsc::RecvTimeoutError;
+
     let mut filtered_tests = filter_tests(opts, tests);
     if !opts.bench_benchmarks {
         filtered_tests = convert_benchmarks_to_tests(filtered_tests);
@@ -867,6 +881,29 @@ fn run_tests<F>(opts: &TestOpts, tests: Vec<TestDescAndFn>, mut callback: F) ->
 
     let (tx, rx) = channel::<MonitorMsg>();
 
+    let mut running_tests: HashMap<TestDesc, Instant> = HashMap::new();
+
+    fn get_timed_out_tests(running_tests: &mut HashMap<TestDesc, Instant>) -> Vec<TestDesc> {
+        let now = Instant::now();
+        let timed_out = running_tests.iter()
+            .filter_map(|(desc, timeout)| if &now >= timeout { Some(desc.clone())} else { None })
+            .collect();
+        for test in &timed_out {
+            running_tests.remove(test);
+        }
+        timed_out
+    };
+
+    fn calc_timeout(running_tests: &HashMap<TestDesc, Instant>) -> Option<Duration> {
+        running_tests.values().min().map(|next_timeout| {
+            let now = Instant::now();
+            if *next_timeout >= now {
+                *next_timeout - now
+            } else {
+                Duration::new(0, 0)
+            }})
+    };
+
     while pending > 0 || !remaining.is_empty() {
         while pending < concurrency && !remaining.is_empty() {
             let test = remaining.pop().unwrap();
@@ -876,11 +913,31 @@ fn run_tests<F>(opts: &TestOpts, tests: Vec<TestDescAndFn>, mut callback: F) ->
                 // that hang forever.
                 callback(TeWait(test.desc.clone(), test.testfn.padding()))?;
             }
+            let timeout = Instant::now() + Duration::from_secs(TEST_WARN_TIMEOUT_S);
+            running_tests.insert(test.desc.clone(), timeout);
             run_test(opts, !opts.run_tests, test, tx.clone());
             pending += 1;
         }
 
-        let (desc, result, stdout) = rx.recv().unwrap();
+        let mut res;
+        loop {
+            if let Some(timeout) = calc_timeout(&running_tests) {
+                res = rx.recv_timeout(timeout);
+                for test in get_timed_out_tests(&mut running_tests) {
+                    callback(TeTimeout(test))?;
+                }
+                if res != Err(RecvTimeoutError::Timeout) {
+                    break;
+                }
+            } else {
+                res = rx.recv().map_err(|_| RecvTimeoutError::Disconnected);
+                break;
+            }
+        }
+
+        let (desc, result, stdout) = res.unwrap();
+        running_tests.remove(&desc);
+
         if concurrency != 1 {
             callback(TeWait(desc.clone(), PadNone))?;
         }
index d1cc48989b13780f21c408fef17dceb104a09c9d..786aad117be48547f4ca50fae84c4879fa992d4d 160000 (submodule)
--- a/src/llvm
+++ b/src/llvm
@@ -1 +1 @@
-Subproject commit d1cc48989b13780f21c408fef17dceb104a09c9d
+Subproject commit 786aad117be48547f4ca50fae84c4879fa992d4d
index 3a20bb2714ece5b7967f9ee0eb698cd46d3a11dd..0555a96ff24ce900d6b6f53f56021a6c02496a91 100644 (file)
@@ -226,6 +226,58 @@ from_rust(LLVMRustCodeGenOptLevel level)
   }
 }
 
+#if LLVM_RUSTLLVM
+/// getLongestEntryLength - Return the length of the longest entry in the table.
+///
+static size_t getLongestEntryLength(ArrayRef<SubtargetFeatureKV> Table) {
+  size_t MaxLen = 0;
+  for (auto &I : Table)
+    MaxLen = std::max(MaxLen, std::strlen(I.Key));
+  return MaxLen;
+}
+
+extern "C" void
+LLVMRustPrintTargetCPUs(LLVMTargetMachineRef TM) {
+    const TargetMachine *Target = unwrap(TM);
+    const MCSubtargetInfo *MCInfo = Target->getMCSubtargetInfo();
+    const ArrayRef<SubtargetFeatureKV> CPUTable = MCInfo->getCPUTable();
+    unsigned MaxCPULen = getLongestEntryLength(CPUTable);
+
+    printf("Available CPUs for this target:\n");
+    for (auto &CPU : CPUTable)
+        printf("    %-*s - %s.\n", MaxCPULen, CPU.Key, CPU.Desc);
+    printf("\n");
+}
+
+extern "C" void
+LLVMRustPrintTargetFeatures(LLVMTargetMachineRef TM) {
+    const TargetMachine *Target = unwrap(TM);
+    const MCSubtargetInfo *MCInfo = Target->getMCSubtargetInfo();
+    const ArrayRef<SubtargetFeatureKV> FeatTable = MCInfo->getFeatureTable();
+    unsigned MaxFeatLen = getLongestEntryLength(FeatTable);
+
+    printf("Available features for this target:\n");
+    for (auto &Feature : FeatTable)
+        printf("    %-*s - %s.\n", MaxFeatLen, Feature.Key, Feature.Desc);
+    printf("\n");
+
+    printf("Use +feature to enable a feature, or -feature to disable it.\n"
+            "For example, rustc -C target-cpu=mycpu -C target-feature=+feature1,-feature2\n\n");
+}
+
+#else
+
+extern "C" void
+LLVMRustPrintTargetCPUs(LLVMTargetMachineRef) {
+    printf("Target CPU help is not supported by this LLVM version.\n\n");
+}
+
+extern "C" void
+LLVMRustPrintTargetFeatures(LLVMTargetMachineRef) {
+    printf("Target features help is not supported by this LLVM version.\n\n");
+}
+#endif
+
 extern "C" LLVMTargetMachineRef
 LLVMRustCreateTargetMachine(const char *triple,
                             const char *cpu,
index e871763a48dc3fafb34f81ee2d10ad2c6cb4402c..378810a8b89fc9f0caad49ae7101c62d4241e8a0 100644 (file)
@@ -1,4 +1,4 @@
 # If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
 # The actual contents of this file do not matter, but to trigger a change on the
 # build bots then the contents should be changed so git updates the mtime.
-2016-07-25b
+2016-08-07
index 13f2c23d8c4a9912624fc6a67505262d3a963769..1223a01cbcb6f29fc0d14d04215597b02c5cd58b 100644 (file)
 const C: i32 = 2;
 
 const CR: &'static mut i32 = &mut C; //~ ERROR E0017
+                                     //~| NOTE constants require immutable values
                                      //~| ERROR E0017
+                                     //~| NOTE constants require immutable values
 static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
+                                              //~| NOTE statics require immutable values
                                               //~| ERROR E0017
+                                              //~| NOTE statics require immutable values
                                               //~| ERROR E0388
 static CONST_REF: &'static mut i32 = &mut C; //~ ERROR E0017
+                                             //~| NOTE statics require immutable values
                                              //~| ERROR E0017
-
+                                             //~| NOTE statics require immutable values
 fn main() {}
index 05f126baf9a7036764b1f1ea4a6fc72969d18831..c3623e3177b56207606de2bf18b45f8e38d45aa3 100644 (file)
@@ -13,10 +13,15 @@ enum Fruit {
     Pear(u32),
 }
 
+
 fn main() {
     let x = Fruit::Apple(String::new(), String::new());
     match x {
         Fruit::Apple(a) => {}, //~ ERROR E0023
+                               //~| NOTE expected 2 fields, found 1
         Fruit::Apple(a, b, c) => {}, //~ ERROR E0023
+                                     //~| NOTE expected 2 fields, found 3
+        Fruit::Pear(1, 2) => {}, //~ ERROR E0023
+                                 //~| NOTE expected 1 field, found 2
     }
 }
index 359c2a822a243d40fca9275d401f0bff8eace55a..ac609da4cbdde72a105871b8e8e74a1d6edf4845 100644 (file)
@@ -16,6 +16,8 @@ struct Thing {
 fn main() {
     let thing = Thing { x: 0, y: 0 };
     match thing {
-        Thing { x, y, z } => {} //~ ERROR E0026
+        Thing { x, y, z } => {}
+        //~^ ERROR struct `Thing` does not have a field named `z` [E0026]
+        //~| NOTE struct `Thing` does not have field `z`
     }
 }
index b63265564b334674ff9a86000f065cc92a10aef7..9911e093a898036d5fcae44fe656f6add46c2225 100644 (file)
@@ -9,8 +9,10 @@
 // except according to those terms.
 
 enum Enum {
-    P = 3,
-    X = 3, //~ ERROR E0081
+    P = 3, //~ NOTE first use of `3isize`
+    X = 3,
+    //~^ ERROR discriminant value `3isize` already exists
+    //~| NOTE enum already has `3isize`
     Y = 5
 }
 
index ec559fc8389d25dd99d8865a8eb8ff12f9f49b79..437ad3698a20664a2f4792e0e28cb67e97a5fc2e 100644 (file)
@@ -12,4 +12,5 @@ fn foo<T>() {}
 
 fn main() {
     foo::<f64, bool>(); //~ ERROR E0087
+    //~^ NOTE expected
 }
index ef5961e133894d37c90103238f9cbb280d64fd58..e9e027fd1dc1972aea32aabfc7005274dd2c4f76 100644 (file)
@@ -9,7 +9,9 @@
 // except according to those terms.
 
 extern {
-    fn foo((a, b): (u32, u32)); //~ ERROR E0130
+    fn foo((a, b): (u32, u32));
+    //~^ ERROR E0130
+    //~| NOTE pattern not allowed in foreign function
 }
 
 fn main() {
index e13b0af6f7977436c43684f1ee62b89eb1582cea..0b63d7c3f85c71873a0a405258513b483ac72483 100644 (file)
@@ -13,6 +13,7 @@
 fn main() {
     let irr = Irrefutable(0);
     if let Irrefutable(x) = irr { //~ ERROR E0162
+        //~| NOTE irrefutable pattern
         println!("{}", x);
     }
 }
index 489ebb033f84e8133c98961750ff7530ba4878dc..dcfe441ab0d00ca0287fde3100f1a3ea3cd4b19c 100644 (file)
@@ -13,6 +13,7 @@ trait Trait {
 }
 
 type Foo = Trait; //~ ERROR E0191
+                  //~| NOTE missing associated type `Bar` value
 
 fn main() {
 }
index 37ac57af524a6d1f5ba06d7752019b94e5e613e7..c73e7534301055dd55729582ad1229c5646e1be5 100644 (file)
@@ -14,11 +14,11 @@ enum Foo {
 }
 
 impl Copy for Foo { }
-//~^ ERROR E0205
+//~^ ERROR the trait `Copy` may not be implemented for this type
 //~| NOTE variant `Bar` does not implement `Copy`
 
 #[derive(Copy)]
-//~^ ERROR E0205
+//~^ ERROR the trait `Copy` may not be implemented for this type
 //~| NOTE variant `Bar` does not implement `Copy`
 //~| NOTE in this expansion of #[derive(Copy)]
 enum Foo2<'a> {
index 31b01da3d75b566e75f87e498eb1a893c91a926b..888e42ed3a18cea72870207b7b9cbc459fea7417 100644 (file)
 
 type Foo = i32;
 
-impl Copy for Foo { } //~ ERROR E0206
-                      //~^ ERROR E0117
+impl Copy for Foo { }
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
+//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
+//~| NOTE impl doesn't use types inside crate
+//~| NOTE the impl does not reference any types defined in this crate
 
 #[derive(Copy, Clone)]
 struct Bar;
 
-impl Copy for &'static Bar { } //~ ERROR E0206
+impl Copy for &'static Bar { }
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
 
 fn main() {
 }
index 59609345ee5235f65ea67e669a3daa4d5489c37a..e9c3cb72c11b07a7075c5a905ea4f120b2a77a0b 100644 (file)
@@ -9,5 +9,7 @@
 // except according to those terms.
 
 fn main() {
-    let v: Vec(&str) = vec!["foo"]; //~ ERROR E0214
+    let v: Vec(&str) = vec!["foo"];
+    //~^ ERROR E0214
+    //~| NOTE only traits may use parentheses
 }
index fdfd41a456bf601316e05cf46fc78eb086697ab8..25568a323e161d42a1a6a8d219c76dae51f345dd 100644 (file)
@@ -13,6 +13,6 @@ enum Foo {
 }
 
 fn do_something(x: Foo::Bar) { } //~ ERROR E0248
-
+                //~| NOTE value used as a type
 fn main() {
 }
index 95508a31044b87797dff021bc5a6a07b23cd2ff9..b3776091682da18b3f149c01f8786e830657df6d 100644 (file)
@@ -11,7 +11,7 @@
 #![feature(associated_consts)]
 
 trait Foo {
-    const BAR: u32;
+    const BAR: u32; //~ NOTE original trait requirement
 }
 
 struct SignedBar;
@@ -19,7 +19,7 @@ trait Foo {
 impl Foo for SignedBar {
     const BAR: i32 = -1;
     //~^ ERROR implemented const `BAR` has an incompatible type for trait [E0326]
-    //~| expected u32, found i32
+    //~| NOTE expected u32, found i32
 }
 
 fn main() {}
index 2b34fcab24c04728918afa4130549f1f2b393a0c..a2a11c62bb83245a851c7158f7dea07901bcb2b9 100644 (file)
@@ -28,6 +28,7 @@ pub trait BoxCar : Box + Vehicle {
 
 fn dent<C:BoxCar>(c: C, color: C::Color) {
     //~^ ERROR ambiguous associated type `Color` in bounds of `C`
+    //~| NOTE ambiguous associated type `Color`
     //~| NOTE could derive from `Vehicle`
     //~| NOTE could derive from `Box`
 }
@@ -35,12 +36,15 @@ fn dent<C:BoxCar>(c: C, color: C::Color) {
 fn dent_object<COLOR>(c: BoxCar<Color=COLOR>) {
     //~^ ERROR ambiguous associated type
     //~| ERROR the value of the associated type `Color` (from the trait `Vehicle`) must be specified
+    //~| NOTE ambiguous associated type `Color`
     //~| NOTE could derive from `Vehicle`
     //~| NOTE could derive from `Box`
+    //~| NOTE missing associated type `Color` value
 }
 
 fn paint<C:BoxCar>(c: C, d: C::Color) {
     //~^ ERROR ambiguous associated type `Color` in bounds of `C`
+    //~| NOTE ambiguous associated type `Color`
     //~| NOTE could derive from `Vehicle`
     //~| NOTE could derive from `Box`
 }
index 9c210c132a3131084733aead1eabdf7b34870d8c..f686a146042cee4351a2c1874605d7a3ab7e000a 100644 (file)
@@ -27,23 +27,34 @@ impl Clone for TestE { fn clone(&self) -> Self { *self } }
 impl Copy for MyType {}
 
 impl Copy for &'static mut MyType {}
-//~^ ERROR E0206
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
 impl Clone for MyType { fn clone(&self) -> Self { *self } }
 
 impl Copy for (MyType, MyType) {}
-//~^ ERROR E0206
-//~| ERROR E0117
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
+//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
+//~| NOTE impl doesn't use types inside crate
+//~| NOTE the impl does not reference any types defined in this crate
 
 impl Copy for &'static NotSync {}
-//~^ ERROR E0206
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
 
 impl Copy for [MyType] {}
-//~^ ERROR E0206
-//~| ERROR E0117
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
+//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
+//~| NOTE impl doesn't use types inside crate
+//~| NOTE the impl does not reference any types defined in this crate
 
 impl Copy for &'static [NotSync] {}
-//~^ ERROR E0206
-//~| ERROR E0117
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
+//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
+//~| NOTE impl doesn't use types inside crate
+//~| NOTE the impl does not reference any types defined in this crate
 
 fn main() {
 }
index 3d6f224c249040a1c1d168d0b5357ba9af7c06de..658a0c1546b9f337b27704fa31d060075e91bdfc 100644 (file)
 
 enum Foo {
     A = 1,
-    //~^ NOTE first use
-    //~| NOTE first use
-    //~| NOTE first use
-    B = 1, //~ ERROR discriminant value
-    //~^ NOTE enum already
+    //~^ NOTE first use of `1isize`
+    //~| NOTE first use of `1isize`
+    //~| NOTE first use of `1isize`
+    B = 1,
+    //~^ ERROR discriminant value `1isize` already exists
+    //~| NOTE enum already has `1isize`
     C = 0,
-    D, //~ ERROR discriminant value
-    //~^ NOTE enum already
+    D,
+    //~^ ERROR discriminant value `1isize` already exists
+    //~| NOTE enum already has `1isize`
 
-    E = N, //~ ERROR discriminant value
-    //~^ NOTE enum already
+    E = N,
+    //~^ ERROR discriminant value `1isize` already exists
+    //~| NOTE enum already has `1isize`
 
 }
 
index 50f1323d39c557bc0ef7494eb6d9651d7d6d8821..e266f004317e7f46d1adb52b8bb96498bc56acad 100644 (file)
@@ -19,10 +19,5 @@ fn h(x:i32) -> i32 {3*x}
     println!("{:?}",(vfnfer[0] as Fn)(3));
     //~^ ERROR the precise format of `Fn`-family traits'
     //~| ERROR E0243
-    //~| NOTE expected 1 type arguments, found 0
     //~| ERROR the value of the associated type `Output` (from the trait `std::ops::FnOnce`)
-    //~| NOTE in this expansion of println!
-    //~| NOTE in this expansion of println!
-    //~| NOTE in this expansion of println!
-    //~| NOTE in this expansion of println!
 }
diff --git a/src/test/compile-fail/issue-33784.rs b/src/test/compile-fail/issue-33784.rs
new file mode 100644 (file)
index 0000000..4229be2
--- /dev/null
@@ -0,0 +1,46 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::ops::Deref;
+
+struct Obj<F> where F: FnMut() -> u32 {
+    fn_ptr: fn() -> (),
+    closure: F,
+}
+
+struct C {
+    c_fn_ptr: fn() -> (),
+}
+
+struct D(C);
+
+impl Deref for D {
+    type Target = C;
+    fn deref(&self) -> &C {
+        &self.0
+    }
+}
+
+
+fn empty() {}
+
+fn main() {
+    let o = Obj { fn_ptr: empty, closure: || 42 };
+    let p = &o;
+    p.closure(); //~ ERROR no method named `closure` found
+    //~^ NOTE use `(p.closure)(...)` if you meant to call the function stored in the `closure` field
+    let q = &p;
+    q.fn_ptr(); //~ ERROR no method named `fn_ptr` found
+    //~^ NOTE use `(q.fn_ptr)(...)` if you meant to call the function stored in the `fn_ptr` field
+    let r = D(C { c_fn_ptr: empty });
+    let s = &r;
+    s.c_fn_ptr(); //~ ERROR no method named `c_fn_ptr` found
+    //~^ NOTE use `(s.c_fn_ptr)(...)` if you meant to call the function stored in the `c_fn_ptr`
+}
index 3edbdf4ebc9584ce7d5770bd15064dd56fedf64f..b0278476998dd54b42a96a1a9b6089bb1a9db957 100644 (file)
 
 extern {
     fn f1(mut arg: u8); //~ ERROR patterns aren't allowed in foreign function declarations
-                        //~^ NOTE this is a recent error
+                        //~^ NOTE pattern not allowed in foreign function
+                        //~| NOTE this is a recent error
     fn f2(&arg: u8); //~ ERROR patterns aren't allowed in foreign function declarations
+                     //~^ NOTE pattern not allowed in foreign function
     fn f3(arg @ _: u8); //~ ERROR patterns aren't allowed in foreign function declarations
-                        //~^ NOTE this is a recent error
+                        //~^ NOTE pattern not allowed in foreign function
+                        //~| NOTE this is a recent error
     fn g1(arg: u8); // OK
     fn g2(_: u8); // OK
     // fn g3(u8); // Not yet
index e81f828beb19fd4d3fd9b25588de8c382c6400ba..e8b187b5454f6dbe88cafef8dac49cc7b8b03d98 100644 (file)
@@ -10,6 +10,7 @@
 
 // aux-build:a.rs
 // revisions:rpass1 rpass2
+// compile-flags:-Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index 9a3097831c5631712d47788d7a6a8160c7c3f40e..64b7f2951d274595005c79abcd047ae80c409d89 100644 (file)
@@ -9,6 +9,7 @@
 // except according to those terms.
 
 // revisions: rpass1 cfail2
+// compile-flags: -Z query-dep-graph
 
 #![allow(warnings)]
 #![feature(rustc_attrs)]
diff --git a/src/test/incremental/foreign.rs b/src/test/incremental/foreign.rs
new file mode 100644 (file)
index 0000000..dbdebef
--- /dev/null
@@ -0,0 +1,45 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test what happens when we save incremental compilation state that makes
+// use of foreign items. This used to ICE (#34991).
+
+// revisions: rpass1
+
+#![feature(libc)]
+
+extern crate libc;
+
+use std::ffi::CString;
+
+mod mlibc {
+    use libc::{c_char, c_long, c_longlong};
+
+    extern {
+        pub fn atol(x: *const c_char) -> c_long;
+        pub fn atoll(x: *const c_char) -> c_longlong;
+    }
+}
+
+fn atol(s: String) -> isize {
+    let c = CString::new(s).unwrap();
+    unsafe { mlibc::atol(c.as_ptr()) as isize }
+}
+
+fn atoll(s: String) -> i64 {
+    let c = CString::new(s).unwrap();
+    unsafe { mlibc::atoll(c.as_ptr()) as i64 }
+}
+
+pub fn main() {
+    assert_eq!(atol("1024".to_string()) * 10, atol("10240".to_string()));
+    assert_eq!((atoll("11111111111111111".to_string()) * 10),
+             atoll("111111111111111110".to_string()));
+}
index f98ae188bad9f2647bfb6aeb066caac4eaa01ca2..a06c25ac055c77b6ce313f8d345c5be8156af290 100644 (file)
@@ -9,6 +9,7 @@
 // except according to those terms.
 
 // revisions: rpass1 rpass2
+// compile-flags: -Z query-dep-graph
 
 #![allow(warnings)]
 #![feature(rustc_attrs)]
diff --git a/src/test/incremental/ich_method_call_trait_scope.rs b/src/test/incremental/ich_method_call_trait_scope.rs
new file mode 100644 (file)
index 0000000..f28ecf7
--- /dev/null
@@ -0,0 +1,60 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that the hash for a method call is sensitive to the traits in
+// scope.
+
+// revisions: rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+fn test<T>() { }
+
+trait Trait1 {
+    fn method(&self) { }
+}
+
+impl Trait1 for () { }
+
+trait Trait2 {
+    fn method(&self) { }
+}
+
+impl Trait2 for () { }
+
+#[cfg(rpass1)]
+mod mod3 {
+    use Trait1;
+
+    fn bar() {
+        ().method();
+    }
+
+    fn baz() {
+        22; // no method call, traits in scope don't matter
+    }
+}
+
+#[cfg(rpass2)]
+mod mod3 {
+    use Trait2;
+
+    #[rustc_dirty(label="Hir", cfg="rpass2")]
+    fn bar() {
+        ().method();
+    }
+
+    #[rustc_clean(label="Hir", cfg="rpass2")]
+    fn baz() {
+        22; // no method call, traits in scope don't matter
+    }
+}
+
+fn main() { }
diff --git a/src/test/incremental/ich_nested_items.rs b/src/test/incremental/ich_nested_items.rs
new file mode 100644 (file)
index 0000000..4466cfb
--- /dev/null
@@ -0,0 +1,36 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that the hash of `foo` doesn't change just because we ordered
+// the nested items (or even added new ones).
+
+// revisions: rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+#[cfg(rpass1)]
+fn foo() {
+    fn bar() { }
+    fn baz() { }
+}
+
+#[cfg(rpass2)]
+#[rustc_clean(label="Hir", cfg="rpass2")]
+fn foo() {
+    #[rustc_clean(label="Hir", cfg="rpass2")]
+    fn baz() { } // order is different...
+
+    #[rustc_clean(label="Hir", cfg="rpass2")]
+    fn bar() { } // but that doesn't matter.
+
+    fn bap() { } // neither does adding a new item
+}
+
+fn main() { }
diff --git a/src/test/incremental/ich_resolve_results.rs b/src/test/incremental/ich_resolve_results.rs
new file mode 100644 (file)
index 0000000..680a91d
--- /dev/null
@@ -0,0 +1,74 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that the hash for `mod3::bar` changes when we change the
+// `use` to something different.
+
+// revisions: rpass1 rpass2 rpass3
+
+#![feature(rustc_attrs)]
+
+fn test<T>() { }
+
+mod mod1 {
+    pub struct Foo(pub u32);
+}
+
+mod mod2 {
+    pub struct Foo(pub i64);
+}
+
+#[cfg(rpass1)]
+mod mod3 {
+    use test;
+    use mod1::Foo;
+
+    fn in_expr() {
+        Foo(0);
+    }
+
+    fn in_type() {
+        test::<Foo>();
+    }
+}
+
+#[cfg(rpass2)]
+mod mod3 {
+    use mod1::Foo; // <-- Nothing changed, but reordered!
+    use test;
+
+    #[rustc_clean(label="Hir", cfg="rpass2")]
+    fn in_expr() {
+        Foo(0);
+    }
+
+    #[rustc_clean(label="Hir", cfg="rpass2")]
+    fn in_type() {
+        test::<Foo>();
+    }
+}
+
+#[cfg(rpass3)]
+mod mod3 {
+    use test;
+    use mod2::Foo; // <-- This changed!
+
+    #[rustc_dirty(label="Hir", cfg="rpass3")]
+    fn in_expr() {
+        Foo(0);
+    }
+
+    #[rustc_dirty(label="Hir", cfg="rpass3")]
+    fn in_type() {
+        test::<Foo>();
+    }
+}
+
+fn main() { }
diff --git a/src/test/incremental/inlined_hir_34991/main.rs b/src/test/incremental/inlined_hir_34991/main.rs
new file mode 100644 (file)
index 0000000..a150a8c
--- /dev/null
@@ -0,0 +1,33 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #34991: an ICE occurred here because we inline
+// some of the vector routines and give them a local def-id `X`. This
+// got hashed after trans (`Hir(X)`). When we load back up, we get an
+// error because the `X` is remapped to the original def-id (in
+// libstd), and we can't hash a HIR node from std.
+
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+use std::vec::Vec;
+
+pub fn foo() -> Vec<i32> {
+    vec![1, 2, 3]
+}
+
+pub fn bar() {
+    foo();
+}
+
+pub fn main() {
+    bar();
+}
diff --git a/src/test/incremental/krate_reassign_34991/auxiliary/a.rs b/src/test/incremental/krate_reassign_34991/auxiliary/a.rs
new file mode 100644 (file)
index 0000000..8651560
--- /dev/null
@@ -0,0 +1,14 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type="rlib"]
+
+pub type X = u32;
+
diff --git a/src/test/incremental/krate_reassign_34991/main.rs b/src/test/incremental/krate_reassign_34991/main.rs
new file mode 100644 (file)
index 0000000..1c80705
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:a.rs
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+#[cfg(rpass1)]
+extern crate a;
+
+#[cfg(rpass1)]
+pub fn use_X() -> u32 {
+    let x: a::X = 22;
+    x as u32
+}
+
+#[cfg(rpass2)]
+pub fn use_X() -> u32 {
+    22
+}
+
+pub fn main() { }
index 55398370425a3bcecedd4b3d0e12a8bee55d73a2..21b654bdf584bcda118c948979e413395621a0e7 100644 (file)
@@ -16,7 +16,7 @@
 // aux-build:a.rs
 // revisions:rpass1 rpass2 rpass3
 // no-prefer-dynamic
-
+// compile-flags: -Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index 68af20d41915159dcc778d9e028c50e1589567e5..257699cd3fce15b53a2d61ec02271eb1584858d8 100644 (file)
@@ -35,14 +35,10 @@ fn make() -> X {
         X { x: 11, y: 11 }
     }
 
-    #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
-    #[rustc_clean(label="ItemSignature", cfg="rpass2")]
     pub fn new() -> X {
         make()
     }
 
-    #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
-    #[rustc_clean(label="ItemSignature", cfg="rpass2")]
     pub fn sum(x: &X) -> u32 {
         x.x + x.y
     }
@@ -51,7 +47,6 @@ pub fn sum(x: &X) -> u32 {
 mod y {
     use x;
 
-    #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
     pub fn assert_sum() -> bool {
         let x = x::new();
         x::sum(&x) == 22
index 0aa728b94dda9634e818fdc6909f14a72454d52c..72072248ec05f2990c2bfe1e026e882319b32b6c 100644 (file)
@@ -9,6 +9,7 @@
 // except according to those terms.
 
 // revisions: rpass1 rpass2
+// compile-flags: -Z query-dep-graph
 
 #![allow(warnings)]
 #![feature(rustc_attrs)]
@@ -40,8 +41,10 @@ pub fn x() {
 mod y {
     use x;
 
-    #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
-    #[rustc_clean(label="TransCrateItem", cfg="rpass2")]
+    // FIXME(#35078) -- when body of `x` changes, we treat it as
+    // though signature changed.
+    #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+    #[rustc_dirty(label="TransCrateItem", cfg="rpass2")]
     pub fn y() {
         x::x();
     }
index cc8ef8aedd77bbcf4472c882ba6f45a9f830e525..da1b32cd73d6ebd2905d98625961c4fae4212b02 100644 (file)
@@ -12,6 +12,7 @@
 // in between revisions (hashing should be stable).
 
 // revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index fe29ad66b5fd8da5b8f4ee5621ef99cbfa251b17..ba469c62002e4da2f372eee3d77129507cc9fce4 100644 (file)
@@ -12,6 +12,7 @@
 // in between revisions (hashing should be stable).
 
 // revisions:rpass1 cfail2
+// compile-flags: -Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index 1a50d515db6d07c08d568c23494b7279e2f9a19c..65f3b1b4f368f54cdf32ad748c437d4ab408999e 100644 (file)
@@ -12,6 +12,7 @@
 // in between revisions (hashing should be stable).
 
 // revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index 7a4900d1d9a903c5aedcd996ae4a13bdebaf87f6..95e15d0b7f9a070f1ea618e766f278dfd678548c 100644 (file)
@@ -10,6 +10,7 @@
 
 // aux-build:a.rs
 // revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index 8095e1ecd84a095cc1aca02506ed707981c23530..2bc636153f73522a823b38d144cd61eb7b4fe388 100644 (file)
@@ -12,6 +12,7 @@
 // in between revisions (hashing should be stable).
 
 // revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index ae6399463b81b138ebb25adb4221b9875aaa48da..a7ed79d1a5a35036eef6e822ccd64ede592eb9b2 100644 (file)
@@ -12,6 +12,7 @@
 // in between revisions (hashing should be stable).
 
 // revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index c5421fcbf5cb2310362327ad5ffe0f9e1e8b470e..09d4db331980dcaadd6961e24fd57b77b24c61e5 100644 (file)
@@ -10,6 +10,7 @@
 
 // aux-build:a.rs
 // revisions:rpass1 rpass2 rpass3
+// compile-flags: -Z query-dep-graph
 
 #![feature(rustc_attrs)]
 
index 46be2385636952ef58cff581a3ef23a08286cae3..24ff20ea89242ef7f54009eec003a83d967f2de5 100644 (file)
@@ -2,4 +2,4 @@
 
 all:
        $(RUSTC) foo.rs --crate-type staticlib
-       $(RUSTC) bar.rs 2>&1 | grep "error: found staticlib"
+       $(RUSTC) bar.rs 2>&1 | grep "found staticlib"
index 0da4af34ef03d9ae30dd28a9200174f47f21ee55..239b689b52645b8a00fa63e798a4b13c67babc86 100644 (file)
@@ -27,7 +27,7 @@ all:
        mv $(TMPDIR)/$(call RLIB_GLOB,crateA) $(A3)
        # Ensure crateC fails to compile since A1 is "missing" and A2/A3 hashes do not match
        $(RUSTC) -L $(A2) -L $(A3) crateC.rs >$(LOG) 2>&1 || true
-       grep "error: found possibly newer version of crate \`crateA\` which \`crateB\` depends on" $(LOG)
+       grep "found possibly newer version of crate \`crateA\` which \`crateB\` depends on" $(LOG)
        grep "note: perhaps that crate needs to be recompiled?" $(LOG)
        grep "note: crate \`crateA\` path #1:" $(LOG)
        grep "note: crate \`crateA\` path #2:" $(LOG)
index 275658047d3cec690de04572d4da06eb3b09ef8c..4275c9b3f9fa341e30f06bdf76f9debda6a71eaa 100644 (file)
@@ -6,4 +6,4 @@ all:
        $(call REMOVE_RLIBS,crateA)
        # Ensure crateC fails to compile since dependency crateA is missing
        $(RUSTC) crateC.rs 2>&1 | \
-               grep "error: can't find crate for \`crateA\` which \`crateB\` depends on"
+               grep "can't find crate for \`crateA\` which \`crateB\` depends on"
diff --git a/src/test/run-make/unicode-input/Makefile b/src/test/run-make/unicode-input/Makefile
deleted file mode 100644 (file)
index f834a85..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
--include ../tools.mk
-
-# This test attempts to run rustc itself from the compiled binary; but
-# that means that you need to set the LD_LIBRARY_PATH for rustc itself
-# while running multiple_files, and that won't work for stage1.
-
-# FIXME ignore windows
-ifndef IS_WINDOWS
-ifeq ($(RUST_BUILD_STAGE),1)
-DOTEST=
-else
-DOTEST=dotest
-endif
-endif
-
-all: $(DOTEST)
-
-dotest:
-       # check that we don't ICE on unicode input, issue #11178
-       $(RUSTC) multiple_files.rs
-       $(call RUN,multiple_files)  "$(RUSTC)" "$(TMPDIR)"
-
-       # check that our multibyte-ident spans are (approximately) the
-       # correct length. issue #8706
-       $(RUSTC) span_length.rs
-       $(call RUN,span_length) "$(RUSTC)" "$(TMPDIR)"
diff --git a/src/test/run-make/unicode-input/multiple_files.rs b/src/test/run-make/unicode-input/multiple_files.rs
deleted file mode 100644 (file)
index b1fe938..0000000
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(rand)]
-
-use std::fs::File;
-use std::io::prelude::*;
-use std::path::Path;
-use std::process::Command;
-use std::__rand::{thread_rng, Rng};
-use std::{char, env};
-
-// creates unicode_input_multiple_files_{main,chars}.rs, where the
-// former imports the latter. `_chars` just contains an identifier
-// made up of random characters, because will emit an error message
-// about the ident being in the wrong place, with a span (and creating
-// this span used to upset the compiler).
-
-fn random_char() -> char {
-    let mut rng = thread_rng();
-    // a subset of the XID_start Unicode table (ensuring that the
-    // compiler doesn't fail with an "unrecognised token" error)
-    let (lo, hi): (u32, u32) = match rng.gen_range(1u32, 4u32 + 1) {
-        1 => (0x41, 0x5a),
-        2 => (0xf8, 0x1ba),
-        3 => (0x1401, 0x166c),
-        _ => (0x10400, 0x1044f)
-    };
-
-    char::from_u32(rng.gen_range(lo, hi + 1)).unwrap()
-}
-
-fn main() {
-    let args: Vec<String> = env::args().collect();
-    let rustc = &args[1];
-    let tmpdir = Path::new(&args[2]);
-
-    let main_file = tmpdir.join("unicode_input_multiple_files_main.rs");
-    {
-        let _ = File::create(&main_file).unwrap()
-            .write_all(b"mod unicode_input_multiple_files_chars;").unwrap();
-    }
-
-    for _ in 0..100 {
-        {
-            let randoms = tmpdir.join("unicode_input_multiple_files_chars.rs");
-            let mut w = File::create(&randoms).unwrap();
-            for _ in 0..30 {
-                write!(&mut w, "{}", random_char()).unwrap();
-            }
-        }
-
-        // rustc is passed to us with --out-dir and -L etc., so we
-        // can't exec it directly
-        let result = Command::new("sh")
-                             .arg("-c")
-                             .arg(&format!("{} {}",
-                                           rustc,
-                                           main_file.display()))
-                             .output().unwrap();
-        let err = String::from_utf8_lossy(&result.stderr);
-
-        // positive test so that this test will be updated when the
-        // compiler changes.
-        assert!(err.contains("expected item, found"))
-    }
-}
diff --git a/src/test/run-make/unicode-input/span_length.rs b/src/test/run-make/unicode-input/span_length.rs
deleted file mode 100644 (file)
index da8769e..0000000
+++ /dev/null
@@ -1,130 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(rand)]
-
-use std::fs::File;
-use std::io::prelude::*;
-use std::iter::repeat;
-use std::path::Path;
-use std::process::Command;
-use std::__rand::{thread_rng, Rng};
-use std::{char, env};
-
-pub fn check_old_skool() -> bool {
-    use std::env;
-    env::var("RUST_NEW_ERROR_FORMAT").is_err()
-}
-
-// creates a file with `fn main() { <random ident> }` and checks the
-// compiler emits a span of the appropriate length (for the
-// "unresolved name" message); currently just using the number of code
-// points, but should be the number of graphemes (FIXME #7043)
-
-fn random_char() -> char {
-    let mut rng = thread_rng();
-    // a subset of the XID_start Unicode table (ensuring that the
-    // compiler doesn't fail with an "unrecognised token" error)
-    let (lo, hi): (u32, u32) = match rng.gen_range(1u32, 4u32 + 1) {
-        1 => (0x41, 0x5a),
-        2 => (0xf8, 0x1ba),
-        3 => (0x1401, 0x166c),
-        _ => (0x10400, 0x1044f)
-    };
-
-    char::from_u32(rng.gen_range(lo, hi + 1)).unwrap()
-}
-
-fn main() {
-    let args: Vec<String> = env::args().collect();
-    let rustc = &args[1];
-    let tmpdir = Path::new(&args[2]);
-    let main_file = tmpdir.join("span_main.rs");
-
-    for _ in 0..100 {
-        let n = thread_rng().gen_range(3, 20);
-
-        {
-            let _ = write!(&mut File::create(&main_file).unwrap(),
-                           "#![feature(non_ascii_idents)] fn main() {{ {} }}",
-                           // random string of length n
-                           (0..n).map(|_| random_char()).collect::<String>());
-        }
-
-        // rustc is passed to us with --out-dir and -L etc., so we
-        // can't exec it directly
-        let result = Command::new("sh")
-                             .arg("-c")
-                             .arg(&format!("{} {}",
-                                           rustc,
-                                           main_file.to_str()
-                                                    .unwrap()))
-                             .output().unwrap();
-
-        let err = String::from_utf8_lossy(&result.stderr);
-
-        if check_old_skool() {
-            // the span should end the line (e.g no extra ~'s)
-            let expected_span = format!("^{}\n", repeat("~").take(n - 1)
-                                                            .collect::<String>());
-            assert!(err.contains(&expected_span));
-        } else {
-            // the span should end the line (e.g no extra ~'s)
-            let expected_span = format!("^{}\n", repeat("^").take(n - 1)
-                                                            .collect::<String>());
-            assert!(err.contains(&expected_span));
-        }
-    }
-
-    // Test multi-column characters and tabs
-    {
-        let _ = write!(&mut File::create(&main_file).unwrap(),
-                       r#"extern "路濫狼á́́" fn foo() {{}} extern "路濫狼á́" fn bar() {{}}"#);
-    }
-
-    let result = Command::new("sh")
-                         .arg("-c")
-                         .arg(format!("{} {}",
-                                      rustc,
-                                      main_file.display()))
-                         .output().unwrap();
-
-    let err = String::from_utf8_lossy(&result.stderr);
-
-    // Test both the length of the snake and the leading spaces up to it
-
-    if check_old_skool() {
-        // Extra characters. Every line is preceded by `filename:lineno <actual code>`
-        let offset = main_file.to_str().unwrap().len() + 3;
-
-        // First snake is 8 ~s long, with 7 preceding spaces (excluding file name/line offset)
-        let expected_span = format!("\n{}^{}\n",
-                                    repeat(" ").take(offset + 7).collect::<String>(),
-                                    repeat("~").take(8).collect::<String>());
-        assert!(err.contains(&expected_span));
-        // Second snake is only 7 ~s long, with 36 preceding spaces,
-        // because rustc counts chars() now rather than width(). This
-        // is because width() functions are to be removed from
-        // librustc_unicode
-        let expected_span = format!("\n{}^{}\n",
-                                    repeat(" ").take(offset + 36).collect::<String>(),
-                                    repeat("~").take(7).collect::<String>());
-        assert!(err.contains(&expected_span));
-    } else {
-        let expected_span = format!("\n  |>{}{}\n",
-                                    repeat(" ").take(8).collect::<String>(),
-                                    repeat("^").take(9).collect::<String>());
-        assert!(err.contains(&expected_span));
-        let expected_span = format!("\n  |>{}{}\n",
-                                    repeat(" ").take(37).collect::<String>(),
-                                    repeat("^").take(8).collect::<String>());
-        assert!(err.contains(&expected_span));
-    }
-}
diff --git a/src/test/run-pass/issue-29053.rs b/src/test/run-pass/issue-29053.rs
new file mode 100644 (file)
index 0000000..7265507
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+    let x: &'static str = "x";
+
+    {
+        let y = "y".to_string();
+        let ref mut x = &*x;
+        *x = &*y;
+    }
+
+    assert_eq!(x, "x");
+}
diff --git a/src/test/run-pass/issue-33498.rs b/src/test/run-pass/issue-33498.rs
new file mode 100644 (file)
index 0000000..9b4e191
--- /dev/null
@@ -0,0 +1,19 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub fn main() {
+    let x = (0, 2);
+
+    match x {
+        (0, ref y) => {}
+        (y, 0) => {}
+        _ => (),
+    }
+}
diff --git a/src/test/run-pass/slice_binary_search.rs b/src/test/run-pass/slice_binary_search.rs
new file mode 100644 (file)
index 0000000..80b370d
--- /dev/null
@@ -0,0 +1,29 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test binary_search_by_key lifetime. Issue #34683
+
+#[derive(Debug)]
+struct Assignment {
+    topic: String,
+    partition: i32,
+}
+
+fn main() {
+    let xs = vec![
+        Assignment { topic: "abc".into(), partition: 1 },
+        Assignment { topic: "def".into(), partition: 2 },
+        Assignment { topic: "ghi".into(), partition: 3 },
+    ];
+
+    let key: &str = "def";
+    let r = xs.binary_search_by_key(&key, |e| &e.topic);
+    assert_eq!(Ok(1), r.map(|i| i));
+}
index 72df09b7669f686ed4303f3fab6f0d93bc564fa9..acf12dab16aff4d86b8a940cf84f7af88464cc2d 100644 (file)
@@ -24,7 +24,7 @@ struct Test {
 const TEST_REPOS: &'static [Test] = &[Test {
                                           name: "cargo",
                                           repo: "https://github.com/rust-lang/cargo",
-                                          sha: "fd90fd642d404d8c66505ca8db742c664ea352f2",
+                                          sha: "2d85908217f99a30aa5f68e05a8980704bb71fad",
                                           lock: None,
                                       },
                                       Test {
index 2a35fab9676a71b9f45bea98a44a6409553df18a..6090cb4f52725784789fb5904c7ffc850e334ef1 100644 (file)
@@ -36,22 +36,22 @@ impl FromStr for Mode {
     type Err = ();
     fn from_str(s: &str) -> Result<Mode, ()> {
         match s {
-          "compile-fail" => Ok(CompileFail),
-          "parse-fail" => Ok(ParseFail),
-          "run-fail" => Ok(RunFail),
-          "run-pass" => Ok(RunPass),
-          "run-pass-valgrind" => Ok(RunPassValgrind),
-          "pretty" => Ok(Pretty),
-          "debuginfo-lldb" => Ok(DebugInfoLldb),
-          "debuginfo-gdb" => Ok(DebugInfoGdb),
-          "codegen" => Ok(Codegen),
-          "rustdoc" => Ok(Rustdoc),
-          "codegen-units" => Ok(CodegenUnits),
-          "incremental" => Ok(Incremental),
-          "run-make" => Ok(RunMake),
-          "ui" => Ok(Ui),
-          "mir-opt" => Ok(MirOpt),
-          _ => Err(()),
+            "compile-fail" => Ok(CompileFail),
+            "parse-fail" => Ok(ParseFail),
+            "run-fail" => Ok(RunFail),
+            "run-pass" => Ok(RunPass),
+            "run-pass-valgrind" => Ok(RunPassValgrind),
+            "pretty" => Ok(Pretty),
+            "debuginfo-lldb" => Ok(DebugInfoLldb),
+            "debuginfo-gdb" => Ok(DebugInfoGdb),
+            "codegen" => Ok(Codegen),
+            "rustdoc" => Ok(Rustdoc),
+            "codegen-units" => Ok(CodegenUnits),
+            "incremental" => Ok(Incremental),
+            "run-make" => Ok(RunMake),
+            "ui" => Ok(Ui),
+            "mir-opt" => Ok(MirOpt),
+            _ => Err(()),
         }
     }
 }
@@ -59,22 +59,23 @@ fn from_str(s: &str) -> Result<Mode, ()> {
 impl fmt::Display for Mode {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Display::fmt(match *self {
-            CompileFail => "compile-fail",
-            ParseFail => "parse-fail",
-            RunFail => "run-fail",
-            RunPass => "run-pass",
-            RunPassValgrind => "run-pass-valgrind",
-            Pretty => "pretty",
-            DebugInfoGdb => "debuginfo-gdb",
-            DebugInfoLldb => "debuginfo-lldb",
-            Codegen => "codegen",
-            Rustdoc => "rustdoc",
-            CodegenUnits => "codegen-units",
-            Incremental => "incremental",
-            RunMake => "run-make",
-            Ui => "ui",
-            MirOpt => "mir-opt",
-        }, f)
+                              CompileFail => "compile-fail",
+                              ParseFail => "parse-fail",
+                              RunFail => "run-fail",
+                              RunPass => "run-pass",
+                              RunPassValgrind => "run-pass-valgrind",
+                              Pretty => "pretty",
+                              DebugInfoGdb => "debuginfo-gdb",
+                              DebugInfoLldb => "debuginfo-lldb",
+                              Codegen => "codegen",
+                              Rustdoc => "rustdoc",
+                              CodegenUnits => "codegen-units",
+                              Incremental => "incremental",
+                              RunMake => "run-make",
+                              Ui => "ui",
+                              MirOpt => "mir-opt",
+                          },
+                          f)
     }
 }
 
index c3da891933f6df1bb7fcf054063a6afa036d1032..29ca54fda8db9521eeb68b74686094bf84770a1b 100644 (file)
@@ -64,7 +64,11 @@ pub struct Error {
 }
 
 #[derive(PartialEq, Debug)]
-enum WhichLine { ThisLine, FollowPrevious(usize), AdjustBackward(usize) }
+enum WhichLine {
+    ThisLine,
+    FollowPrevious(usize),
+    AdjustBackward(usize),
+}
 
 /// Looks for either "//~| KIND MESSAGE" or "//~^^... KIND MESSAGE"
 /// The former is a "follow" that inherits its target from the preceding line;
@@ -91,25 +95,22 @@ pub fn load_errors(testfile: &Path, cfg: Option<&str>) -> Vec<Error> {
 
     let tag = match cfg {
         Some(rev) => format!("//[{}]~", rev),
-        None => format!("//~")
+        None => format!("//~"),
     };
 
     rdr.lines()
-       .enumerate()
-       .filter_map(|(line_num, line)| {
-           parse_expected(last_nonfollow_error,
-                          line_num + 1,
-                          &line.unwrap(),
-                          &tag)
-               .map(|(which, error)| {
-                   match which {
-                       FollowPrevious(_) => {}
-                       _ => last_nonfollow_error = Some(error.line_num),
-                   }
-                   error
-               })
-       })
-       .collect()
+        .enumerate()
+        .filter_map(|(line_num, line)| {
+            parse_expected(last_nonfollow_error, line_num + 1, &line.unwrap(), &tag)
+                .map(|(which, error)| {
+                    match which {
+                        FollowPrevious(_) => {}
+                        _ => last_nonfollow_error = Some(error.line_num),
+                    }
+                    error
+                })
+        })
+        .collect()
 }
 
 fn parse_expected(last_nonfollow_error: Option<usize>,
@@ -117,7 +118,10 @@ fn parse_expected(last_nonfollow_error: Option<usize>,
                   line: &str,
                   tag: &str)
                   -> Option<(WhichLine, Error)> {
-    let start = match line.find(tag) { Some(i) => i, None => return None };
+    let start = match line.find(tag) {
+        Some(i) => i,
+        None => return None,
+    };
     let (follow, adjusts) = if line[start + tag.len()..].chars().next().unwrap() == '|' {
         (true, 0)
     } else {
@@ -125,26 +129,25 @@ fn parse_expected(last_nonfollow_error: Option<usize>,
     };
     let kind_start = start + tag.len() + adjusts + (follow as usize);
     let (kind, msg);
-    match
-        line[kind_start..].split_whitespace()
-                          .next()
-                          .expect("Encountered unexpected empty comment")
-                          .parse::<ErrorKind>()
-    {
+    match line[kind_start..]
+        .split_whitespace()
+        .next()
+        .expect("Encountered unexpected empty comment")
+        .parse::<ErrorKind>() {
         Ok(k) => {
             // If we find `//~ ERROR foo` or something like that:
             kind = Some(k);
             let letters = line[kind_start..].chars();
             msg = letters.skip_while(|c| c.is_whitespace())
-                         .skip_while(|c| !c.is_whitespace())
-                         .collect::<String>();
+                .skip_while(|c| !c.is_whitespace())
+                .collect::<String>();
         }
         Err(_) => {
             // Otherwise we found `//~ foo`:
             kind = None;
             let letters = line[kind_start..].chars();
             msg = letters.skip_while(|c| c.is_whitespace())
-                         .collect::<String>();
+                .collect::<String>();
         }
     }
     let msg = msg.trim().to_owned();
@@ -155,15 +158,25 @@ fn parse_expected(last_nonfollow_error: Option<usize>,
                                                     preceding //~^ line.");
         (FollowPrevious(line_num), line_num)
     } else {
-        let which =
-            if adjusts > 0 { AdjustBackward(adjusts) } else { ThisLine };
+        let which = if adjusts > 0 {
+            AdjustBackward(adjusts)
+        } else {
+            ThisLine
+        };
         let line_num = line_num - adjusts;
         (which, line_num)
     };
 
     debug!("line={} tag={:?} which={:?} kind={:?} msg={:?}",
-           line_num, tag, which, kind, msg);
-    Some((which, Error { line_num: line_num,
-                         kind: kind,
-                         msg: msg, }))
+           line_num,
+           tag,
+           which,
+           kind,
+           msg);
+    Some((which,
+          Error {
+        line_num: line_num,
+        kind: kind,
+        msg: msg,
+    }))
 }
index 7593033ffe3991f9c9791aedae294056a9f335bf..af33d76be1b0db0f430b4dcc5a4ee774a2c0d08d 100644 (file)
@@ -32,24 +32,21 @@ pub fn from_file(config: &Config, testfile: &Path) -> Self {
             should_fail: false,
         };
 
-        iter_header(testfile, None, &mut |ln| {
+        iter_header(testfile,
+                    None,
+                    &mut |ln| {
             props.ignore =
-                props.ignore ||
-                parse_name_directive(ln, "ignore-test") ||
+                props.ignore || parse_name_directive(ln, "ignore-test") ||
                 parse_name_directive(ln, &ignore_target(config)) ||
                 parse_name_directive(ln, &ignore_architecture(config)) ||
                 parse_name_directive(ln, &ignore_stage(config)) ||
                 parse_name_directive(ln, &ignore_env(config)) ||
-                (config.mode == common::Pretty &&
-                 parse_name_directive(ln, "ignore-pretty")) ||
+                (config.mode == common::Pretty && parse_name_directive(ln, "ignore-pretty")) ||
                 (config.target != config.host &&
                  parse_name_directive(ln, "ignore-cross-compile")) ||
-                ignore_gdb(config, ln) ||
-                ignore_lldb(config, ln);
+                ignore_gdb(config, ln) || ignore_lldb(config, ln);
 
-            props.should_fail =
-                props.should_fail ||
-                parse_name_directive(ln, "should-fail");
+            props.should_fail = props.should_fail || parse_name_directive(ln, "should-fail");
         });
 
         return props;
@@ -61,11 +58,11 @@ fn ignore_architecture(config: &Config) -> String {
             format!("ignore-{}", util::get_arch(&config.target))
         }
         fn ignore_stage(config: &Config) -> String {
-            format!("ignore-{}",
-                    config.stage_id.split('-').next().unwrap())
+            format!("ignore-{}", config.stage_id.split('-').next().unwrap())
         }
         fn ignore_env(config: &Config) -> String {
-            format!("ignore-{}", util::get_env(&config.target).unwrap_or("<unknown>"))
+            format!("ignore-{}",
+                    util::get_env(&config.target).unwrap_or("<unknown>"))
         }
         fn ignore_gdb(config: &Config, line: &str) -> bool {
             if config.mode != common::DebugInfoGdb {
@@ -79,13 +76,12 @@ fn ignore_gdb(config: &Config, line: &str) -> bool {
             if let Some(ref actual_version) = config.gdb_version {
                 if line.contains("min-gdb-version") {
                     let min_version = line.trim()
-                                          .split(' ')
-                                          .last()
-                                          .expect("Malformed GDB version directive");
+                        .split(' ')
+                        .last()
+                        .expect("Malformed GDB version directive");
                     // Ignore if actual version is smaller the minimum required
                     // version
-                    gdb_version_to_int(actual_version) <
-                        gdb_version_to_int(min_version)
+                    gdb_version_to_int(actual_version) < gdb_version_to_int(min_version)
                 } else {
                     false
                 }
@@ -106,13 +102,12 @@ fn ignore_lldb(config: &Config, line: &str) -> bool {
             if let Some(ref actual_version) = config.lldb_version {
                 if line.contains("min-lldb-version") {
                     let min_version = line.trim()
-                                          .split(' ')
-                                          .last()
-                                          .expect("Malformed lldb version directive");
+                        .split(' ')
+                        .last()
+                        .expect("Malformed lldb version directive");
                     // Ignore if actual version is smaller the minimum required
                     // version
-                    lldb_version_to_int(actual_version) <
-                        lldb_version_to_int(min_version)
+                    lldb_version_to_int(actual_version) < lldb_version_to_int(min_version)
                 } else {
                     false
                 }
@@ -126,7 +121,7 @@ fn ignore_lldb(config: &Config, line: &str) -> bool {
 #[derive(Clone, Debug)]
 pub struct TestProps {
     // Lines that should be expected, in order, on standard out
-    pub error_patterns: Vec<String> ,
+    pub error_patterns: Vec<String>,
     // Extra flags to pass to the compiler
     pub compile_flags: Vec<String>,
     // Extra flags to pass when the compiled code is run (such as --bench)
@@ -137,13 +132,13 @@ pub struct TestProps {
     // Other crates that should be compiled (typically from the same
     // directory as the test, but for backwards compatibility reasons
     // we also check the auxiliary directory)
-    pub aux_builds: Vec<String> ,
+    pub aux_builds: Vec<String>,
     // Environment settings to use for compiling
-    pub rustc_env: Vec<(String,String)> ,
+    pub rustc_env: Vec<(String, String)>,
     // Environment settings to use during execution
-    pub exec_env: Vec<(String,String)> ,
+    pub exec_env: Vec<(String, String)>,
     // Lines to check if they appear in the expected debugger output
-    pub check_lines: Vec<String> ,
+    pub check_lines: Vec<String>,
     // Build documentation for all specified aux-builds as well
     pub build_aux_docs: bool,
     // Flag to force a crate to be built with the host architecture
@@ -226,17 +221,17 @@ pub fn from_file(testfile: &Path) -> Self {
     /// tied to a particular revision `foo` (indicated by writing
     /// `//[foo]`), then the property is ignored unless `cfg` is
     /// `Some("foo")`.
-    pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>)  {
-        iter_header(testfile, cfg, &mut |ln| {
+    pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>) {
+        iter_header(testfile,
+                    cfg,
+                    &mut |ln| {
             if let Some(ep) = parse_error_pattern(ln) {
                 self.error_patterns.push(ep);
             }
 
             if let Some(flags) = parse_compile_flags(ln) {
-                self.compile_flags.extend(
-                    flags
-                        .split_whitespace()
-                        .map(|s| s.to_owned()));
+                self.compile_flags.extend(flags.split_whitespace()
+                    .map(|s| s.to_owned()));
             }
 
             if let Some(r) = parse_revisions(ln) {
@@ -279,7 +274,7 @@ pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>)  {
                 self.pretty_compare_only = parse_pretty_compare_only(ln);
             }
 
-            if let  Some(ab) = parse_aux_build(ln) {
+            if let Some(ab) = parse_aux_build(ln) {
                 self.aux_builds.push(ab);
             }
 
@@ -291,7 +286,7 @@ pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>)  {
                 self.rustc_env.push(ee);
             }
 
-            if let Some(cl) =  parse_check_line(ln) {
+            if let Some(cl) = parse_check_line(ln) {
                 self.check_lines.push(cl);
             }
 
@@ -302,21 +297,20 @@ pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>)  {
 
         for key in vec!["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
             match env::var(key) {
-                Ok(val) =>
+                Ok(val) => {
                     if self.exec_env.iter().find(|&&(ref x, _)| *x == key).is_none() {
                         self.exec_env.push((key.to_owned(), val))
-                    },
+                    }
+                }
                 Err(..) => {}
             }
         }
     }
 }
 
-fn iter_header(testfile: &Path,
-               cfg: Option<&str>,
-               it: &mut FnMut(&str)) {
+fn iter_header(testfile: &Path, cfg: Option<&str>, it: &mut FnMut(&str)) {
     if testfile.is_dir() {
-        return
+        return;
     }
     let rdr = BufReader::new(File::open(testfile).unwrap());
     for ln in rdr.lines() {
@@ -336,7 +330,7 @@ fn iter_header(testfile: &Path,
                     None => false,
                 };
                 if matches {
-                    it(&ln[close_brace+1..]);
+                    it(&ln[close_brace + 1..]);
                 }
             } else {
                 panic!("malformed condition directive: expected `//[foo]`, found `{}`",
@@ -409,18 +403,17 @@ fn parse_pretty_compare_only(line: &str) -> bool {
 fn parse_env(line: &str, name: &str) -> Option<(String, String)> {
     parse_name_value_directive(line, name).map(|nv| {
         // nv is either FOO or FOO=BAR
-        let mut strs: Vec<String> = nv
-                                      .splitn(2, '=')
-                                      .map(str::to_owned)
-                                      .collect();
+        let mut strs: Vec<String> = nv.splitn(2, '=')
+            .map(str::to_owned)
+            .collect();
 
         match strs.len() {
-          1 => (strs.pop().unwrap(), "".to_owned()),
-          2 => {
-              let end = strs.pop().unwrap();
-              (strs.pop().unwrap(), end)
-          }
-          n => panic!("Expected 1 or 2 strings, not {}", n)
+            1 => (strs.pop().unwrap(), "".to_owned()),
+            2 => {
+                let end = strs.pop().unwrap();
+                (strs.pop().unwrap(), end)
+            }
+            n => panic!("Expected 1 or 2 strings, not {}", n),
         }
     })
 }
@@ -442,11 +435,10 @@ fn parse_name_directive(line: &str, directive: &str) -> bool {
     line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
 }
 
-pub fn parse_name_value_directive(line: &str, directive: &str)
-                                  -> Option<String> {
+pub fn parse_name_value_directive(line: &str, directive: &str) -> Option<String> {
     let keycolon = format!("{}:", directive);
     if let Some(colon) = line.find(&keycolon) {
-        let value = line[(colon + keycolon.len()) .. line.len()].to_owned();
+        let value = line[(colon + keycolon.len())..line.len()].to_owned();
         debug!("{}: {}", directive, value);
         Some(value)
     } else {
@@ -455,9 +447,8 @@ pub fn parse_name_value_directive(line: &str, directive: &str)
 }
 
 pub fn gdb_version_to_int(version_string: &str) -> isize {
-    let error_string = format!(
-        "Encountered GDB version string with unexpected format: {}",
-        version_string);
+    let error_string = format!("Encountered GDB version string with unexpected format: {}",
+                               version_string);
     let error_string = error_string;
 
     let components: Vec<&str> = version_string.trim().split('.').collect();
@@ -473,9 +464,8 @@ pub fn gdb_version_to_int(version_string: &str) -> isize {
 }
 
 pub fn lldb_version_to_int(version_string: &str) -> isize {
-    let error_string = format!(
-        "Encountered LLDB version string with unexpected format: {}",
-        version_string);
+    let error_string = format!("Encountered LLDB version string with unexpected format: {}",
+                               version_string);
     let error_string = error_string;
     let major: isize = version_string.parse().ok().expect(&error_string);
     return major;
index e5b628bb0029545022dab31220dc8e89ef668470..d9da1bdc3485837d8024036f23ff5be81f5e7c71 100644 (file)
@@ -12,7 +12,7 @@
 use rustc_serialize::json;
 use std::str::FromStr;
 use std::path::Path;
-use runtest::{ProcRes};
+use runtest::ProcRes;
 
 // These structs are a subset of the ones found in
 // `syntax::json`.
@@ -58,8 +58,8 @@ struct DiagnosticCode {
 
 pub fn parse_output(file_name: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
     output.lines()
-          .flat_map(|line| parse_line(file_name, line, output, proc_res))
-          .collect()
+        .flat_map(|line| parse_line(file_name, line, output, proc_res))
+        .collect()
 }
 
 fn parse_line(file_name: &str, line: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
@@ -73,9 +73,11 @@ fn parse_line(file_name: &str, line: &str, output: &str, proc_res: &ProcRes) ->
                 expected_errors
             }
             Err(error) => {
-                proc_res.fatal(Some(&format!(
-                    "failed to decode compiler output as json: `{}`\noutput: {}\nline: {}",
-                    error, line, output)));
+                proc_res.fatal(Some(&format!("failed to decode compiler output as json: \
+                                              `{}`\noutput: {}\nline: {}",
+                                             error,
+                                             line,
+                                             output)));
             }
         }
     } else {
@@ -87,16 +89,15 @@ fn push_expected_errors(expected_errors: &mut Vec<Error>,
                         diagnostic: &Diagnostic,
                         default_spans: &[&DiagnosticSpan],
                         file_name: &str) {
-    let spans_in_this_file: Vec<_> =
-        diagnostic.spans.iter()
-                        .filter(|span| Path::new(&span.file_name) == Path::new(&file_name))
-                        .collect();
-
-    let primary_spans: Vec<_> =
-        spans_in_this_file.iter()
-                          .cloned()
-                          .filter(|span| span.is_primary)
-                          .collect();
+    let spans_in_this_file: Vec<_> = diagnostic.spans
+        .iter()
+        .filter(|span| Path::new(&span.file_name) == Path::new(&file_name))
+        .collect();
+
+    let primary_spans: Vec<_> = spans_in_this_file.iter()
+        .cloned()
+        .filter(|span| span.is_primary)
+        .collect();
     let primary_spans = if primary_spans.is_empty() {
         // subdiagnostics often don't have a span of their own;
         // inherit the span from the parent in that case
@@ -144,24 +145,20 @@ fn push_expected_errors(expected_errors: &mut Vec<Error>,
         for span in primary_spans {
             let msg = with_code(span, first_line);
             let kind = ErrorKind::from_str(&diagnostic.level).ok();
-            expected_errors.push(
-                Error {
-                    line_num: span.line_start,
-                    kind: kind,
-                    msg: msg,
-                }
-            );
+            expected_errors.push(Error {
+                line_num: span.line_start,
+                kind: kind,
+                msg: msg,
+            });
         }
     }
     for next_line in message_lines {
         for span in primary_spans {
-            expected_errors.push(
-                Error {
-                    line_num: span.line_start,
-                    kind: None,
-                    msg: with_code(span, next_line),
-                }
-            );
+            expected_errors.push(Error {
+                line_num: span.line_start,
+                kind: None,
+                msg: with_code(span, next_line),
+            });
         }
     }
 
@@ -170,33 +167,28 @@ fn push_expected_errors(expected_errors: &mut Vec<Error>,
         let start_line = primary_spans.iter().map(|s| s.line_start).min().expect("\
             every suggestion should have at least one span");
         for (index, line) in rendered.lines().enumerate() {
-            expected_errors.push(
-                Error {
-                    line_num: start_line + index,
-                    kind: Some(ErrorKind::Suggestion),
-                    msg: line.to_string()
-                }
-            );
+            expected_errors.push(Error {
+                line_num: start_line + index,
+                kind: Some(ErrorKind::Suggestion),
+                msg: line.to_string(),
+            });
         }
     }
 
     // Add notes for the backtrace
     for span in primary_spans {
         for frame in &span.expansion {
-            push_backtrace(expected_errors,
-                           frame,
-                           file_name);
+            push_backtrace(expected_errors, frame, file_name);
         }
     }
 
     // Add notes for any labels that appear in the message.
     for span in spans_in_this_file.iter()
-                                  .filter(|span| span.label.is_some())
-    {
+        .filter(|span| span.label.is_some()) {
         expected_errors.push(Error {
             line_num: span.line_start,
             kind: Some(ErrorKind::Note),
-            msg: span.label.clone().unwrap()
+            msg: span.label.clone().unwrap(),
         });
     }
 
@@ -210,13 +202,11 @@ fn push_backtrace(expected_errors: &mut Vec<Error>,
                   expansion: &DiagnosticSpanMacroExpansion,
                   file_name: &str) {
     if Path::new(&expansion.span.file_name) == Path::new(&file_name) {
-        expected_errors.push(
-            Error {
-                line_num: expansion.span.line_start,
-                kind: Some(ErrorKind::Note),
-                msg: format!("in this expansion of {}", expansion.macro_decl_name),
-            }
-        );
+        expected_errors.push(Error {
+            line_num: expansion.span.line_start,
+            kind: Some(ErrorKind::Note),
+            msg: format!("in this expansion of {}", expansion.macro_decl_name),
+        });
     }
 
     for previous_expansion in &expansion.span.expansion {
index 53b7cd059be275f8ccd64db3811a422ec8dd5ab6..ed690c08a1ed27e08dc0c6e3ae334d933012fe0b 100644 (file)
@@ -12,7 +12,7 @@
 use std::ffi::OsString;
 use std::io::prelude::*;
 use std::path::PathBuf;
-use std::process::{ExitStatus, Command, Child, Output, Stdio};
+use std::process::{Child, Command, ExitStatus, Output, Stdio};
 
 pub fn dylib_env_var() -> &'static str {
     if cfg!(windows) {
@@ -29,7 +29,7 @@ fn add_target_env(cmd: &mut Command, lib_path: &str, aux_path: Option<&str>) {
     // search path for the child.
     let var = dylib_env_var();
     let mut path = env::split_paths(&env::var_os(var).unwrap_or(OsString::new()))
-                       .collect::<Vec<_>>();
+        .collect::<Vec<_>>();
     if let Some(p) = aux_path {
         path.insert(0, PathBuf::from(p))
     }
@@ -40,20 +40,25 @@ fn add_target_env(cmd: &mut Command, lib_path: &str, aux_path: Option<&str>) {
     cmd.env(var, newpath);
 }
 
-pub struct Result {pub status: ExitStatus, pub out: String, pub err: String}
+pub struct Result {
+    pub status: ExitStatus,
+    pub out: String,
+    pub err: String,
+}
 
 pub fn run(lib_path: &str,
            prog: &str,
            aux_path: Option<&str>,
            args: &[String],
-           env: Vec<(String, String)> ,
-           input: Option<String>) -> Option<Result> {
+           env: Vec<(String, String)>,
+           input: Option<String>)
+           -> Option<Result> {
 
     let mut cmd = Command::new(prog);
     cmd.args(args)
-       .stdin(Stdio::piped())
-       .stdout(Stdio::piped())
-       .stderr(Stdio::piped());
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped());
     add_target_env(&mut cmd, lib_path, aux_path);
     for (key, val) in env {
         cmd.env(&key, &val);
@@ -64,31 +69,31 @@ pub fn run(lib_path: &str,
             if let Some(input) = input {
                 process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
             }
-            let Output { status, stdout, stderr } =
-                process.wait_with_output().unwrap();
+            let Output { status, stdout, stderr } = process.wait_with_output().unwrap();
 
             Some(Result {
                 status: status,
                 out: String::from_utf8(stdout).unwrap(),
-                err: String::from_utf8(stderr).unwrap()
+                err: String::from_utf8(stderr).unwrap(),
             })
-        },
-        Err(..) => None
+        }
+        Err(..) => None,
     }
 }
 
 pub fn run_background(lib_path: &str,
-           prog: &str,
-           aux_path: Option<&str>,
-           args: &[String],
-           env: Vec<(String, String)> ,
-           input: Option<String>) -> Option<Child> {
+                      prog: &str,
+                      aux_path: Option<&str>,
+                      args: &[String],
+                      env: Vec<(String, String)>,
+                      input: Option<String>)
+                      -> Option<Child> {
 
     let mut cmd = Command::new(prog);
     cmd.args(args)
-       .stdin(Stdio::piped())
-       .stdout(Stdio::piped())
-       .stderr(Stdio::piped());
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped());
     add_target_env(&mut cmd, lib_path, aux_path);
     for (key, val) in env {
         cmd.env(&key, &val);
@@ -101,7 +106,7 @@ pub fn run_background(lib_path: &str,
             }
 
             Some(process)
-        },
-        Err(..) => None
+        }
+        Err(..) => None,
     }
 }
index 0cf90ec95f38e3c838e40e08ce6e2f57580cb080..e2629ffd8f54a3e122b373462ed6b711cbb27661 100644 (file)
@@ -34,14 +34,21 @@ pub unsafe fn raise_fd_limit() {
     let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC];
     let mut maxfiles: libc::c_int = 0;
     let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
-    if libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size,
-              null_mut(), 0) != 0 {
+    if libc::sysctl(&mut mib[0],
+                    2,
+                    &mut maxfiles as *mut _ as *mut _,
+                    &mut size,
+                    null_mut(),
+                    0) != 0 {
         let err = io::Error::last_os_error();
         panic!("raise_fd_limit: error calling sysctl: {}", err);
     }
 
     // Fetch the current resource limits
-    let mut rlim = libc::rlimit{rlim_cur: 0, rlim_max: 0};
+    let mut rlim = libc::rlimit {
+        rlim_cur: 0,
+        rlim_max: 0,
+    };
     if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 {
         let err = io::Error::last_os_error();
         panic!("raise_fd_limit: error calling getrlimit: {}", err);
index 6647a1a0a933d41046dc22ad79e0709a52393967..ae8e82e4e2f60e36639358d9fa062381431fbe75 100644 (file)
@@ -1330,9 +1330,7 @@ fn make_compile_args(&self,
                 // patterns still match the raw compiler output.
                 if self.props.error_patterns.is_empty() {
                     args.extend(["--error-format",
-                                 "json",
-                                 "-Z",
-                                 "unstable-options"]
+                                 "json"]
                                 .iter()
                                 .map(|s| s.to_string()));
                 }
index 66573393971c4c6692ce2549017f28e7680b19db..fca01029c44652ba87d542ed43e478f5a2d6313a 100644 (file)
 
 pub fn diff_lines(actual: &str, expected: &str) -> Vec<String> {
     // mega simplistic diff algorithm that just prints the things added/removed
-    zip_all(actual.lines(), expected.lines()).enumerate().filter_map(|(i, (a,e))| {
-        match (a, e) {
-            (Some(a), Some(e)) => {
-                if lines_match(e, a) {
-                    None
-                } else {
-                    Some(format!("{:3} - |{}|\n    + |{}|\n", i, e, a))
+    zip_all(actual.lines(), expected.lines())
+        .enumerate()
+        .filter_map(|(i, (a, e))| {
+            match (a, e) {
+                (Some(a), Some(e)) => {
+                    if lines_match(e, a) {
+                        None
+                    } else {
+                        Some(format!("{:3} - |{}|\n    + |{}|\n", i, e, a))
+                    }
                 }
-            },
-            (Some(a), None) => {
-                Some(format!("{:3} -\n    + |{}|\n", i, a))
-            },
-            (None, Some(e)) => {
-                Some(format!("{:3} - |{}|\n    +\n", i, e))
-            },
-            (None, None) => panic!("Cannot get here")
-        }
-    }).collect()
+                (Some(a), None) => Some(format!("{:3} -\n    + |{}|\n", i, a)),
+                (None, Some(e)) => Some(format!("{:3} - |{}|\n    +\n", i, e)),
+                (None, None) => panic!("Cannot get here"),
+            }
+        })
+        .collect()
 }
 
 fn lines_match(expected: &str, mut actual: &str) -> bool {
@@ -38,13 +37,11 @@ fn lines_match(expected: &str, mut actual: &str) -> bool {
         match actual.find(part) {
             Some(j) => {
                 if i == 0 && j != 0 {
-                    return false
+                    return false;
                 }
                 actual = &actual[j + part.len()..];
             }
-            None => {
-                return false
-            }
+            None => return false,
         }
     }
     actual.is_empty() || expected.ends_with("[..]")
@@ -55,7 +52,7 @@ struct ZipAll<I1: Iterator, I2: Iterator> {
     second: I2,
 }
 
-impl<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>> Iterator for ZipAll<I1, I2> {
+impl<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>> Iterator for ZipAll<I1, I2> {
     type Item = (Option<T>, Option<T>);
     fn next(&mut self) -> Option<(Option<T>, Option<T>)> {
         let first = self.first.next();
@@ -63,12 +60,12 @@ fn next(&mut self) -> Option<(Option<T>, Option<T>)> {
 
         match (first, second) {
             (None, None) => None,
-            (a, b) => Some((a, b))
+            (a, b) => Some((a, b)),
         }
     }
 }
 
-fn zip_all<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
+fn zip_all<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
     ZipAll {
         first: a,
         second: b,
index 69b839c5b7d9d0e94cdf58623574ef3ecd8e9dc3..d2872a0a2b7caa18ef6d42a7c879a4297fde8018 100644 (file)
 use common::Config;
 
 /// Conversion table from triple OS name to Rust SYSNAME
-const OS_TABLE: &'static [(&'static str, &'static str)] = &[
-    ("android", "android"),
-    ("bitrig", "bitrig"),
-    ("darwin", "macos"),
-    ("dragonfly", "dragonfly"),
-    ("freebsd", "freebsd"),
-    ("ios", "ios"),
-    ("linux", "linux"),
-    ("mingw32", "windows"),
-    ("netbsd", "netbsd"),
-    ("openbsd", "openbsd"),
-    ("win32", "windows"),
-    ("windows", "windows"),
-    ("solaris", "solaris"),
-    ("emscripten", "emscripten"),
-];
+const OS_TABLE: &'static [(&'static str, &'static str)] = &[("android", "android"),
+                                                            ("bitrig", "bitrig"),
+                                                            ("darwin", "macos"),
+                                                            ("dragonfly", "dragonfly"),
+                                                            ("freebsd", "freebsd"),
+                                                            ("ios", "ios"),
+                                                            ("linux", "linux"),
+                                                            ("mingw32", "windows"),
+                                                            ("netbsd", "netbsd"),
+                                                            ("openbsd", "openbsd"),
+                                                            ("win32", "windows"),
+                                                            ("windows", "windows"),
+                                                            ("solaris", "solaris"),
+                                                            ("emscripten", "emscripten")];
 
-const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[
-    ("aarch64", "aarch64"),
-    ("amd64", "x86_64"),
-    ("arm", "arm"),
-    ("arm64", "aarch64"),
-    ("hexagon", "hexagon"),
-    ("i386", "x86"),
-    ("i686", "x86"),
-    ("mips", "mips"),
-    ("msp430", "msp430"),
-    ("powerpc", "powerpc"),
-    ("powerpc64", "powerpc64"),
-    ("s390x", "systemz"),
-    ("sparc", "sparc"),
-    ("x86_64", "x86_64"),
-    ("xcore", "xcore"),
-    ("asmjs", "asmjs"),
-];
+const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[("aarch64", "aarch64"),
+                                                              ("amd64", "x86_64"),
+                                                              ("arm", "arm"),
+                                                              ("arm64", "aarch64"),
+                                                              ("hexagon", "hexagon"),
+                                                              ("i386", "x86"),
+                                                              ("i686", "x86"),
+                                                              ("mips", "mips"),
+                                                              ("msp430", "msp430"),
+                                                              ("powerpc", "powerpc"),
+                                                              ("powerpc64", "powerpc64"),
+                                                              ("s390x", "systemz"),
+                                                              ("sparc", "sparc"),
+                                                              ("x86_64", "x86_64"),
+                                                              ("xcore", "xcore"),
+                                                              ("asmjs", "asmjs")];
 
 pub fn get_os(triple: &str) -> &'static str {
     for &(triple_os, os) in OS_TABLE {
         if triple.contains(triple_os) {
-            return os
+            return os;
         }
     }
     panic!("Cannot determine OS from triple");
@@ -59,7 +55,7 @@ pub fn get_os(triple: &str) -> &'static str {
 pub fn get_arch(triple: &str) -> &'static str {
     for &(triple_arch, arch) in ARCH_TABLE {
         if triple.contains(triple_arch) {
-            return arch
+            return arch;
         }
     }
     panic!("Cannot determine Architecture from triple");
@@ -74,17 +70,21 @@ pub fn make_new_path(path: &str) -> String {
     // Windows just uses PATH as the library search path, so we have to
     // maintain the current value while adding our own
     match env::var(lib_path_env_var()) {
-        Ok(curr) => {
-            format!("{}{}{}", path, path_div(), curr)
-        }
-        Err(..) => path.to_owned()
+        Ok(curr) => format!("{}{}{}", path, path_div(), curr),
+        Err(..) => path.to_owned(),
     }
 }
 
-pub fn lib_path_env_var() -> &'static str { "PATH" }
-fn path_div() -> &'static str { ";" }
+pub fn lib_path_env_var() -> &'static str {
+    "PATH"
+}
+fn path_div() -> &'static str {
+    ";"
+}
 
 pub fn logv(config: &Config, s: String) {
     debug!("{}", s);
-    if config.verbose { println!("{}", s); }
+    if config.verbose {
+        println!("{}", s);
+    }
 }