.project
.settings/
.valgrindrc
+.vscode/
/*-*-*-*/
/*-*-*/
/Makefile
$$(call CFG_CC_INCLUDE_$(1),$$(S)src/rustllvm/include)
RUSTLLVM_OBJS_OBJS_$(1) := $$(RUSTLLVM_OBJS_CS_$(1):rustllvm/%.cpp=$(1)/rustllvm/%.o)
+# Flag that we are building with Rust's llvm fork
+ifeq ($(CFG_LLVM_ROOT),)
+RUSTLLVM_CXXFLAGS_$(1) := -DLLVM_RUSTLLVM
+endif
+
# Note that we appease `cl.exe` and its need for some sort of exception
# handling flag with the `EHsc` argument here as well.
ifeq ($$(findstring msvc,$(1)),msvc)
$$(Q)$$(call CFG_COMPILE_CXX_$(1), $$@,) \
$$(subst /,//,$$(LLVM_CXXFLAGS_$(1))) \
$$(RUSTLLVM_COMPONENTS_$(1)) \
+ $$(RUSTLLVM_CXXFLAGS_$(1)) \
$$(EXTRA_RUSTLLVM_CXXFLAGS_$(1)) \
$$(RUSTLLVM_INCS_$(1)) \
$$<
if !build.unstable_features {
cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
}
+ // Flag that rust llvm is in use
+ if build.is_rust_llvm(target) {
+ cargo.env("LLVM_RUSTLLVM", "1");
+ }
cargo.env("LLVM_CONFIG", build.llvm_config(target));
if build.config.llvm_static_stdcpp {
cargo.env("LLVM_STATIC_STDCPP",
self.out.join(target).join("llvm")
}
+ /// Returns true if no custom `llvm-config` is set for the specified target.
+ ///
+ /// If no custom `llvm-config` was specified then Rust's llvm will be used.
+ fn is_rust_llvm(&self, target: &str) -> bool {
+ match self.config.target_config.get(target) {
+ Some(ref c) => c.llvm_config.is_none(),
+ None => true
+ }
+ }
+
/// Returns the path to `llvm-config` for the specified target.
///
/// If a custom `llvm-config` was specified for target then that's returned
in different languages. To keep things simple, we’ll stick to ‘greetings’ and
‘farewells’ as two kinds of phrases, and use English and Japanese (日本語) as
two languages for those phrases to be in. We’ll use this module layout:
+
```text
+-----------+
+---| greetings |
y: i32,
}
-let origin = Point { x: 0, y: 0 };
+let point = Point { x: 2, y: 3 };
-match origin {
+match point {
Point { x, .. } => println!("x is {}", x),
}
```
-This prints `x is 0`.
+This prints `x is 2`.
You can do this kind of match on any member, not only the first:
y: i32,
}
-let origin = Point { x: 0, y: 0 };
+let point = Point { x: 2, y: 3 };
-match origin {
+match point {
Point { y, .. } => println!("y is {}", y),
}
```
-This prints `y is 0`.
+This prints `y is 3`.
This ‘destructuring’ behavior works on any compound data type, like
[tuples][tuples] or [enums][enums].
strong to weak:
```{.text .precedence}
-as
+as :
* / %
+ -
<< >>
&&
||
.. ...
+<-
=
```
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
- pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
- where F: FnMut(&T) -> Ordering
+ pub fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
+ where F: FnMut(&'a T) -> Ordering
{
core_slice::SliceExt::binary_search_by(self, f)
}
/// ```
#[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
#[inline]
- pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
- where F: FnMut(&T) -> B,
+ pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize>
+ where F: FnMut(&'a T) -> B,
B: Ord
{
core_slice::SliceExt::binary_search_by_key(self, b, f)
}
}
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a> FromIterator<char> for Cow<'a, str> {
+ fn from_iter<I: IntoIterator<Item = char>>(it: I) -> Cow<'a, str> {
+ Cow::Owned(FromIterator::from_iter(it))
+ }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a, 'b> FromIterator<&'b str> for Cow<'a, str> {
+ fn from_iter<I: IntoIterator<Item = &'b str>>(it: I) -> Cow<'a, str> {
+ Cow::Owned(FromIterator::from_iter(it))
+ }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a> FromIterator<String> for Cow<'a, str> {
+ fn from_iter<I: IntoIterator<Item = String>>(it: I) -> Cow<'a, str> {
+ Cow::Owned(FromIterator::from_iter(it))
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl Into<Vec<u8>> for String {
fn into(self) -> Vec<u8> {
use clone::Clone;
use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering};
use default::Default;
-use marker::{Copy, Send, Sync, Sized, Unsize};
+use fmt::{self, Debug, Display};
+use marker::{Copy, PhantomData, Send, Sync, Sized, Unsize};
use ops::{Deref, DerefMut, Drop, FnOnce, CoerceUnsized};
use option::Option;
use option::Option::{None, Some};
+use result::Result;
+use result::Result::{Ok, Err};
/// A mutable memory location that admits only `Copy` data.
///
Unused,
}
+/// An error returned by [`RefCell::try_borrow`](struct.RefCell.html#method.try_borrow).
+#[unstable(feature = "try_borrow", issue = "35070")]
+pub struct BorrowError<'a, T: 'a + ?Sized> {
+ marker: PhantomData<&'a RefCell<T>>,
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized> Debug for BorrowError<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("BorrowError").finish()
+ }
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized> Display for BorrowError<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt("already mutably borrowed", f)
+ }
+}
+
+/// An error returned by [`RefCell::try_borrow_mut`](struct.RefCell.html#method.try_borrow_mut).
+#[unstable(feature = "try_borrow", issue = "35070")]
+pub struct BorrowMutError<'a, T: 'a + ?Sized> {
+ marker: PhantomData<&'a RefCell<T>>,
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized> Debug for BorrowMutError<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("BorrowMutError").finish()
+ }
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized> Display for BorrowMutError<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt("already borrowed", f)
+ }
+}
+
// Values [1, MAX-1] represent the number of `Ref` active
// (will not outgrow its range since `usize` is the size of the address space)
type BorrowFlag = usize;
///
/// # Panics
///
- /// Panics if the value is currently mutably borrowed.
+ /// Panics if the value is currently mutably borrowed. For a non-panicking variant, use
+ /// [`try_borrow`](#method.try_borrow).
///
/// # Examples
///
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn borrow(&self) -> Ref<T> {
+ self.try_borrow().expect("already mutably borrowed")
+ }
+
+ /// Immutably borrows the wrapped value, returning an error if the value is currently mutably
+ /// borrowed.
+ ///
+ /// The borrow lasts until the returned `Ref` exits scope. Multiple immutable borrows can be
+ /// taken out at the same time.
+ ///
+ /// This is the non-panicking variant of [`borrow`](#method.borrow).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_borrow)]
+ ///
+ /// use std::cell::RefCell;
+ ///
+ /// let c = RefCell::new(5);
+ ///
+ /// {
+ /// let m = c.borrow_mut();
+ /// assert!(c.try_borrow().is_err());
+ /// }
+ ///
+ /// {
+ /// let m = c.borrow();
+ /// assert!(c.try_borrow().is_ok());
+ /// }
+ /// ```
+ #[unstable(feature = "try_borrow", issue = "35070")]
+ #[inline]
+ pub fn try_borrow(&self) -> Result<Ref<T>, BorrowError<T>> {
match BorrowRef::new(&self.borrow) {
- Some(b) => Ref {
+ Some(b) => Ok(Ref {
value: unsafe { &*self.value.get() },
borrow: b,
- },
- None => panic!("RefCell<T> already mutably borrowed"),
+ }),
+ None => Err(BorrowError { marker: PhantomData }),
}
}
///
/// # Panics
///
- /// Panics if the value is currently borrowed.
+ /// Panics if the value is currently borrowed. For a non-panicking variant, use
+ /// [`try_borrow_mut`](#method.try_borrow_mut).
///
/// # Examples
///
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn borrow_mut(&self) -> RefMut<T> {
+ self.try_borrow_mut().expect("already borrowed")
+ }
+
+ /// Mutably borrows the wrapped value, returning an error if the value is currently borrowed.
+ ///
+ /// The borrow lasts until the returned `RefMut` exits scope. The value cannot be borrowed
+ /// while this borrow is active.
+ ///
+ /// This is the non-panicking variant of [`borrow_mut`](#method.borrow_mut).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_borrow)]
+ ///
+ /// use std::cell::RefCell;
+ ///
+ /// let c = RefCell::new(5);
+ ///
+ /// {
+ /// let m = c.borrow();
+ /// assert!(c.try_borrow_mut().is_err());
+ /// }
+ ///
+ /// assert!(c.try_borrow_mut().is_ok());
+ /// ```
+ #[unstable(feature = "try_borrow", issue = "35070")]
+ #[inline]
+ pub fn try_borrow_mut(&self) -> Result<RefMut<T>, BorrowMutError<T>> {
match BorrowRefMut::new(&self.borrow) {
- Some(b) => RefMut {
+ Some(b) => Ok(RefMut {
value: unsafe { &mut *self.value.get() },
borrow: b,
- },
- None => panic!("RefCell<T> already borrowed"),
+ }),
+ None => Err(BorrowMutError { marker: PhantomData }),
}
}
})
}
-/// Use the `format!` syntax to write data into a buffer.
+/// Write formatted data into a buffer
///
-/// This macro is typically used with a buffer of `&mut `[`Write`][write].
+/// This macro accepts any value with a `write_fmt` method as a writer, a format string, and a
+/// list of arguments to format.
+///
+/// The `write_fmt` method usually comes from an implementation of the
+/// [`std::fmt::Write`][fmt_write] or [`std::io::Write`][io_write] traits. These are sometimes
+/// called 'writers'.
+///
+/// Passed arguments will be formatted according to the specified format string and the resulting
+/// string will be passed to the writer.
///
/// See [`std::fmt`][fmt] for more information on format syntax.
///
+/// The return value is completely dependent on the `write_fmt` method.
+///
+/// Common return values include [`Result`][enum_result] and [`io::Result`][type_result].
+///
/// [fmt]: ../std/fmt/index.html
-/// [write]: ../std/io/trait.Write.html
+/// [fmt_write]: ../std/fmt/trait.Write.html
+/// [io_write]: ../std/io/trait.Write.html
+/// [enum_result]: ../std/result/enum.Result.html
+/// [type_result]: ../std/io/type.Result.html
///
/// # Examples
///
($dst:expr, $($arg:tt)*) => ($dst.write_fmt(format_args!($($arg)*)))
}
-/// Use the `format!` syntax to write data into a buffer, appending a newline.
-/// On all platforms, the newline is the LINE FEED character (`\n`/`U+000A`)
-/// alone (no additional CARRIAGE RETURN (`\r`/`U+000D`).
+/// Write formatted data into a buffer, appending a newline.
+///
+/// On all platforms, the newline is the LINE FEED character (`\n`/`U+000A`) alone
+/// (no additional CARRIAGE RETURN (`\r`/`U+000D`)).
///
-/// This macro is typically used with a buffer of `&mut `[`Write`][write].
+/// This macro accepts any value with a `write_fmt` method as a writer, a format string, and a
+/// list of arguments to format.
+///
+/// The `write_fmt` method usually comes from an implementation of the
+/// [`std::fmt::Write`][fmt_write] or [`std::io::Write`][io_write] traits. These are sometimes
+/// called 'writers'.
+///
+/// Passed arguments will be formatted according to the specified format string and the resulting
+/// string will be passed to the writer.
///
/// See [`std::fmt`][fmt] for more information on format syntax.
///
+/// The return value is completely dependent on the `write_fmt` method.
+///
+/// Common return values include [`Result`][enum_result] and [`io::Result`][type_result].
+///
/// [fmt]: ../std/fmt/index.html
-/// [write]: ../std/io/trait.Write.html
+/// [fmt_write]: ../std/fmt/trait.Write.html
+/// [io_write]: ../std/io/trait.Write.html
+/// [enum_result]: ../std/result/enum.Result.html
+/// [type_result]: ../std/io/type.Result.html
///
/// # Examples
///
fn binary_search(&self, x: &Self::Item) -> Result<usize, usize>
where Self::Item: Ord;
#[stable(feature = "core", since = "1.6.0")]
- fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
- where F: FnMut(&Self::Item) -> Ordering;
+ fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
+ where F: FnMut(&'a Self::Item) -> Ordering;
#[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
- fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
- where F: FnMut(&Self::Item) -> B,
+ fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize>
+ where F: FnMut(&'a Self::Item) -> B,
B: Ord;
#[stable(feature = "core", since = "1.6.0")]
fn len(&self) -> usize;
self as *const [T] as *const T
}
- fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize> where
- F: FnMut(&T) -> Ordering
+ fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
+ where F: FnMut(&'a T) -> Ordering
{
let mut base = 0usize;
let mut s = self;
}
#[inline]
- fn binary_search_by_key<B, F>(&self, b: &B, mut f: F) -> Result<usize, usize>
- where F: FnMut(&Self::Item) -> B,
+ fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
+ where F: FnMut(&'a Self::Item) -> B,
B: Ord
{
self.binary_search_by(|k| f(k).cmp(b))
(which is a `DepTrackingMap`, described below) automatically invokes
`dep_graph.read(ItemSignature(X))`.
+**Note:** adding `Hir` nodes requires a bit of caution due to the
+"inlining" that old trans and constant evaluation still use. See the
+section on inlining below.
+
To make this strategy work, a certain amount of indirection is
required. For example, modules in the HIR do not have direct pointers
to the items that they contain. Rather, they contain node-ids -- one
This will dump out all the nodes that lead from `Hir(foo)` to
`TypeckItemBody(bar)`, from which you can (hopefully) see the source
of the erroneous edge.
+
+### Inlining of HIR nodes
+
+For the time being, at least, we still sometimes "inline" HIR nodes
+from other crates into the current HIR map. This creates a weird
+scenario where the same logical item (let's call it `X`) has two
+def-ids: the original def-id `X` and a new, inlined one `X'`. `X'` is
+in the current crate, but it's not like other HIR nodes: in
+particular, when we restart compilation, it will not be available to
+hash. Therefore, we do not want `Hir(X')` nodes appearing in our
+graph. Instead, we want a "read" of `Hir(X')` to be represented as a
+read of `MetaData(X)`, since the metadata for `X` is where the inlined
+representation originated in the first place.
+
+To achieve this, the HIR map will detect if the def-id originates in
+an inlined node and add a dependency to a suitable `MetaData` node
+instead. If you are reading a HIR node and are not sure if it may be
+inlined or not, you can use `tcx.map.read(node_id)` and it will detect
+whether the node is inlined or not and do the right thing. You can
+also use `tcx.map.is_inlined_def_id()` and
+`tcx.map.is_inlined_node_id()` to test.
)
}
-#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub enum DepNode<D: Clone + Debug> {
// The `D` type is "how definitions are identified".
// During compilation, it is always `DefId`, but when serializing
Privacy,
IntrinsicCheck(D),
MatchCheck(D),
- MirMapConstruction(D),
- MirPass(D),
- MirTypeck(D),
+
+ // Represents the MIR for a fn; also used as the task node for
+ // things read/modify that MIR.
+ Mir(D),
+
BorrowCheck(D),
RvalueCheck(D),
Reachability,
check! {
CollectItem,
BorrowCheck,
+ Hir,
TransCrateItem,
TypeckItemType,
TypeckItemBody,
CheckConst(ref d) => op(d).map(CheckConst),
IntrinsicCheck(ref d) => op(d).map(IntrinsicCheck),
MatchCheck(ref d) => op(d).map(MatchCheck),
- MirMapConstruction(ref d) => op(d).map(MirMapConstruction),
- MirPass(ref d) => op(d).map(MirPass),
- MirTypeck(ref d) => op(d).map(MirTypeck),
+ Mir(ref d) => op(d).map(Mir),
BorrowCheck(ref d) => op(d).map(BorrowCheck),
RvalueCheck(ref d) => op(d).map(RvalueCheck),
TransCrateItem(ref d) => op(d).map(TransCrateItem),
/// some independent path or string that persists between runs without
/// the need to be mapped or unmapped. (This ensures we can serialize
/// them even in the absence of a tcx.)
-#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct WorkProductId(pub String);
self.map.get(k)
}
+ pub fn get_mut(&mut self, k: &M::Key) -> Option<&mut M::Value> {
+ self.read(k);
+ self.write(k);
+ self.map.get_mut(k)
+ }
+
pub fn insert(&mut self, k: M::Key, v: M::Value) -> Option<M::Value> {
self.write(&k);
self.map.insert(k, v)
self.read(k);
self.map.contains_key(k)
}
+
+ pub fn keys(&self) -> Vec<M::Key> {
+ self.map.keys().cloned().collect()
+ }
}
impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> {
let task_id = (self.dep_node_fn)(item_def_id);
let _task = self.tcx.dep_graph.in_task(task_id.clone());
debug!("Started task {:?}", task_id);
+ assert!(!self.tcx.map.is_inlined_def_id(item_def_id));
self.tcx.dep_graph.read(DepNode::Hir(item_def_id));
self.visitor.visit_item(i);
debug!("Ended task {:?}", task_id);
let parent_def = self.parent_def;
let def = self.resolver.definitions().map(|defs| {
- let def_path_data = DefPathData::Binding(name);
+ let def_path_data = DefPathData::Binding(name.as_str());
let def_index = defs.create_def_with_parent(parent_def, pat.id, def_path_data);
Def::Local(DefId::local(def_index), pat.id)
}).unwrap_or(Def::Err);
DefPathData::Impl,
ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Trait(..) |
ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) =>
- DefPathData::TypeNs(i.ident.name),
- ItemKind::Mod(..) => DefPathData::Module(i.ident.name),
+ DefPathData::TypeNs(i.ident.name.as_str()),
+ ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_str()),
ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
- DefPathData::ValueNs(i.ident.name),
- ItemKind::Mac(..) => DefPathData::MacroDef(i.ident.name),
+ DefPathData::ValueNs(i.ident.name.as_str()),
+ ItemKind::Mac(..) => DefPathData::MacroDef(i.ident.name.as_str()),
ItemKind::Use(..) => DefPathData::Misc,
};
let def = self.create_def(i.id, def_data);
for v in &enum_definition.variants {
let variant_def_index =
this.create_def(v.node.data.id(),
- DefPathData::EnumVariant(v.node.name.name));
+ DefPathData::EnumVariant(v.node.name.name.as_str()));
this.with_parent(variant_def_index, |this| {
for (index, field) in v.node.data.fields().iter().enumerate() {
let name = field.ident.map(|ident| ident.name)
.unwrap_or_else(|| token::intern(&index.to_string()));
- this.create_def(field.id, DefPathData::Field(name));
+ this.create_def(field.id, DefPathData::Field(name.as_str()));
}
if let Some(ref expr) = v.node.disr_expr {
}
for (index, field) in struct_def.fields().iter().enumerate() {
- let name = field.ident.map(|ident| ident.name)
- .unwrap_or(token::intern(&index.to_string()));
+ let name = field.ident.map(|ident| ident.name.as_str())
+ .unwrap_or(token::intern(&index.to_string()).as_str());
this.create_def(field.id, DefPathData::Field(name));
}
}
}
fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
- let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.ident.name));
+ let def = self.create_def(foreign_item.id,
+ DefPathData::ValueNs(foreign_item.ident.name.as_str()));
self.with_parent(def, |this| {
visit::walk_foreign_item(this, foreign_item);
fn visit_generics(&mut self, generics: &Generics) {
for ty_param in generics.ty_params.iter() {
- self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name));
+ self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name.as_str()));
}
visit::walk_generics(self, generics);
fn visit_trait_item(&mut self, ti: &TraitItem) {
let def_data = match ti.node {
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
- DefPathData::ValueNs(ti.ident.name),
- TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name),
- TraitItemKind::Macro(..) => DefPathData::MacroDef(ti.ident.name),
+ DefPathData::ValueNs(ti.ident.name.as_str()),
+ TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name.as_str()),
+ TraitItemKind::Macro(..) => DefPathData::MacroDef(ti.ident.name.as_str()),
};
let def = self.create_def(ti.id, def_data);
fn visit_impl_item(&mut self, ii: &ImplItem) {
let def_data = match ii.node {
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
- DefPathData::ValueNs(ii.ident.name),
- ImplItemKind::Type(..) => DefPathData::TypeNs(ii.ident.name),
- ImplItemKind::Macro(..) => DefPathData::MacroDef(ii.ident.name),
+ DefPathData::ValueNs(ii.ident.name.as_str()),
+ ImplItemKind::Type(..) => DefPathData::TypeNs(ii.ident.name.as_str()),
+ ImplItemKind::Macro(..) => DefPathData::MacroDef(ii.ident.name.as_str()),
};
let def = self.create_def(ii.id, def_data);
let parent_def = self.parent_def;
if let PatKind::Ident(_, id, _) = pat.node {
- let def = self.create_def(pat.id, DefPathData::Binding(id.node.name));
+ let def = self.create_def(pat.id, DefPathData::Binding(id.node.name.as_str()));
self.parent_def = Some(def);
}
}
fn visit_lifetime_def(&mut self, def: &LifetimeDef) {
- self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name));
+ self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
}
fn visit_macro_def(&mut self, macro_def: &MacroDef) {
- self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name));
+ self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
}
}
hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemTrait(..) |
hir::ItemExternCrate(..) | hir::ItemMod(..) | hir::ItemForeignMod(..) |
hir::ItemTy(..) =>
- DefPathData::TypeNs(i.name),
+ DefPathData::TypeNs(i.name.as_str()),
hir::ItemStatic(..) | hir::ItemConst(..) | hir::ItemFn(..) =>
- DefPathData::ValueNs(i.name),
+ DefPathData::ValueNs(i.name.as_str()),
hir::ItemUse(..) => DefPathData::Misc,
};
let def = self.create_def(i.id, def_data);
for v in &enum_definition.variants {
let variant_def_index =
this.create_def(v.node.data.id(),
- DefPathData::EnumVariant(v.node.name));
+ DefPathData::EnumVariant(v.node.name.as_str()));
this.with_parent(variant_def_index, |this| {
for field in v.node.data.fields() {
this.create_def(field.id,
- DefPathData::Field(field.name));
+ DefPathData::Field(field.name.as_str()));
}
if let Some(ref expr) = v.node.disr_expr {
this.visit_hir_const_integer(expr);
}
for field in struct_def.fields() {
- this.create_def(field.id, DefPathData::Field(field.name));
+ this.create_def(field.id, DefPathData::Field(field.name.as_str()));
}
}
_ => {}
}
fn visit_foreign_item(&mut self, foreign_item: &'ast hir::ForeignItem) {
- let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.name));
+ let def = self.create_def(foreign_item.id,
+ DefPathData::ValueNs(foreign_item.name.as_str()));
self.with_parent(def, |this| {
intravisit::walk_foreign_item(this, foreign_item);
fn visit_generics(&mut self, generics: &'ast hir::Generics) {
for ty_param in generics.ty_params.iter() {
- self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.name));
+ self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.name.as_str()));
}
intravisit::walk_generics(self, generics);
fn visit_trait_item(&mut self, ti: &'ast hir::TraitItem) {
let def_data = match ti.node {
hir::MethodTraitItem(..) | hir::ConstTraitItem(..) =>
- DefPathData::ValueNs(ti.name),
- hir::TypeTraitItem(..) => DefPathData::TypeNs(ti.name),
+ DefPathData::ValueNs(ti.name.as_str()),
+ hir::TypeTraitItem(..) => DefPathData::TypeNs(ti.name.as_str()),
};
let def = self.create_def(ti.id, def_data);
fn visit_impl_item(&mut self, ii: &'ast hir::ImplItem) {
let def_data = match ii.node {
hir::ImplItemKind::Method(..) | hir::ImplItemKind::Const(..) =>
- DefPathData::ValueNs(ii.name),
- hir::ImplItemKind::Type(..) => DefPathData::TypeNs(ii.name),
+ DefPathData::ValueNs(ii.name.as_str()),
+ hir::ImplItemKind::Type(..) => DefPathData::TypeNs(ii.name.as_str()),
};
let def = self.create_def(ii.id, def_data);
let parent_def = self.parent_def;
if let hir::PatKind::Binding(_, name, _) = pat.node {
- let def = self.create_def(pat.id, DefPathData::Binding(name.node));
+ let def = self.create_def(pat.id, DefPathData::Binding(name.node.as_str()));
self.parent_def = Some(def);
}
}
fn visit_lifetime_def(&mut self, def: &'ast hir::LifetimeDef) {
- self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name));
+ self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
}
fn visit_macro_def(&mut self, macro_def: &'ast hir::MacroDef) {
- self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.name));
+ self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.name.as_str()));
}
}
use hir::def_id::{DefId, DefIndex};
use hir::map::def_collector::DefCollector;
use rustc_data_structures::fnv::FnvHashMap;
+use std::fmt::Write;
+use std::hash::{Hash, Hasher, SipHasher};
use syntax::{ast, visit};
use syntax::parse::token::InternedString;
+use ty::TyCtxt;
use util::nodemap::NodeMap;
/// The definition table containing node definitions
data.reverse();
DefPath { data: data, krate: krate }
}
+
+ pub fn to_string(&self, tcx: TyCtxt) -> String {
+ let mut s = String::with_capacity(self.data.len() * 16);
+
+ if self.krate == LOCAL_CRATE {
+ s.push_str(&tcx.crate_name(self.krate));
+ } else {
+ s.push_str(&tcx.sess.cstore.original_crate_name(self.krate));
+ }
+ s.push_str("/");
+ s.push_str(&tcx.crate_disambiguator(self.krate));
+
+ for component in &self.data {
+ write!(s,
+ "::{}[{}]",
+ component.data.as_interned_str(),
+ component.disambiguator)
+ .unwrap();
+ }
+
+ s
+ }
+
+ pub fn deterministic_hash(&self, tcx: TyCtxt) -> u64 {
+ let mut state = SipHasher::new();
+ self.deterministic_hash_to(tcx, &mut state);
+ state.finish()
+ }
+
+ pub fn deterministic_hash_to<H: Hasher>(&self, tcx: TyCtxt, state: &mut H) {
+ tcx.crate_name(self.krate).hash(state);
+ tcx.crate_disambiguator(self.krate).hash(state);
+ self.data.hash(state);
+ }
}
/// Root of an inlined item. We track the `DefPath` of the item within
/// An impl
Impl,
/// Something in the type NS
- TypeNs(ast::Name),
+ TypeNs(InternedString),
/// Something in the value NS
- ValueNs(ast::Name),
+ ValueNs(InternedString),
/// A module declaration
- Module(ast::Name),
+ Module(InternedString),
/// A macro rule
- MacroDef(ast::Name),
+ MacroDef(InternedString),
/// A closure expression
ClosureExpr,
// Subportions of items
/// A type parameter (generic parameter)
- TypeParam(ast::Name),
+ TypeParam(InternedString),
/// A lifetime definition
- LifetimeDef(ast::Name),
+ LifetimeDef(InternedString),
/// A variant of a enum
- EnumVariant(ast::Name),
+ EnumVariant(InternedString),
/// A struct field
- Field(ast::Name),
+ Field(InternedString),
/// Implicit ctor for a tuple-like struct
StructCtor,
/// Initializer for a const
Initializer,
/// Pattern binding
- Binding(ast::Name),
+ Binding(InternedString),
}
impl Definitions {
pub fn as_interned_str(&self) -> InternedString {
use self::DefPathData::*;
match *self {
- TypeNs(name) |
- ValueNs(name) |
- Module(name) |
- MacroDef(name) |
- TypeParam(name) |
- LifetimeDef(name) |
- EnumVariant(name) |
- Binding(name) |
- Field(name) => {
- name.as_str()
+ TypeNs(ref name) |
+ ValueNs(ref name) |
+ Module(ref name) |
+ MacroDef(ref name) |
+ TypeParam(ref name) |
+ LifetimeDef(ref name) |
+ EnumVariant(ref name) |
+ Binding(ref name) |
+ Field(ref name) => {
+ name.clone()
}
Impl => {
/// All NodeIds that are numerically greater or equal to this value come
/// from inlined items.
local_node_id_watermark: NodeId,
+
+ /// All def-indices that are numerically greater or equal to this value come
+ /// from inlined items.
+ local_def_id_watermark: usize,
}
impl<'ast> Map<'ast> {
+ pub fn is_inlined_def_id(&self, id: DefId) -> bool {
+ id.is_local() && id.index.as_usize() >= self.local_def_id_watermark
+ }
+
+ pub fn is_inlined_node_id(&self, id: NodeId) -> bool {
+ id >= self.local_node_id_watermark
+ }
+
/// Registers a read in the dependency graph of the AST node with
/// the given `id`. This needs to be called each time a public
/// function returns the HIR for a node -- in other words, when it
/// otherwise have had access to those contents, and hence needs a
/// read recorded). If the function just returns a DefId or
/// NodeId, no actual content was returned, so no read is needed.
- fn read(&self, id: NodeId) {
+ pub fn read(&self, id: NodeId) {
self.dep_graph.read(self.dep_node(id));
}
fn dep_node(&self, id0: NodeId) -> DepNode<DefId> {
let map = self.map.borrow();
let mut id = id0;
- loop {
- match map[id as usize] {
- EntryItem(_, item) => {
- let def_id = self.local_def_id(item.id);
- // NB ^~~~~~~
- //
- // You would expect that `item.id == id`, but this
- // is not always the case. In particular, for a
- // ViewPath item like `use self::{mem, foo}`, we
- // map the ids for `mem` and `foo` to the
- // enclosing view path item. This seems mega super
- // ultra wrong, but then who am I to judge?
- // -nmatsakis
- return DepNode::Hir(def_id);
- }
+ if !self.is_inlined_node_id(id) {
+ loop {
+ match map[id as usize] {
+ EntryItem(_, item) => {
+ let def_id = self.local_def_id(item.id);
+ // NB ^~~~~~~
+ //
+ // You would expect that `item.id == id`, but this
+ // is not always the case. In particular, for a
+ // ViewPath item like `use self::{mem, foo}`, we
+ // map the ids for `mem` and `foo` to the
+ // enclosing view path item. This seems mega super
+ // ultra wrong, but then who am I to judge?
+ // -nmatsakis
+ assert!(!self.is_inlined_def_id(def_id));
+ return DepNode::Hir(def_id);
+ }
- EntryForeignItem(p, _) |
- EntryTraitItem(p, _) |
- EntryImplItem(p, _) |
- EntryVariant(p, _) |
- EntryExpr(p, _) |
- EntryStmt(p, _) |
- EntryLocal(p, _) |
- EntryPat(p, _) |
- EntryBlock(p, _) |
- EntryStructCtor(p, _) |
- EntryLifetime(p, _) |
- EntryTyParam(p, _) =>
- id = p,
-
- RootCrate |
- RootInlinedParent(_) =>
- // FIXME(#32015) clarify story about cross-crate dep tracking
- return DepNode::Krate,
-
- NotPresent =>
- // Some nodes, notably struct fields, are not
- // present in the map for whatever reason, but
- // they *do* have def-ids. So if we encounter an
- // empty hole, check for that case.
- return self.opt_local_def_id(id)
- .map(|def_id| DepNode::Hir(def_id))
- .unwrap_or_else(|| {
- bug!("Walking parents from `{}` \
- led to `NotPresent` at `{}`",
- id0, id)
- }),
+ EntryForeignItem(p, _) |
+ EntryTraitItem(p, _) |
+ EntryImplItem(p, _) |
+ EntryVariant(p, _) |
+ EntryExpr(p, _) |
+ EntryStmt(p, _) |
+ EntryLocal(p, _) |
+ EntryPat(p, _) |
+ EntryBlock(p, _) |
+ EntryStructCtor(p, _) |
+ EntryLifetime(p, _) |
+ EntryTyParam(p, _) =>
+ id = p,
+
+ RootCrate =>
+ return DepNode::Krate,
+
+ RootInlinedParent(_) =>
+ bug!("node {} has inlined ancestor but is not inlined", id0),
+
+ NotPresent =>
+ // Some nodes, notably struct fields, are not
+ // present in the map for whatever reason, but
+ // they *do* have def-ids. So if we encounter an
+ // empty hole, check for that case.
+ return self.opt_local_def_id(id)
+ .map(|def_id| DepNode::Hir(def_id))
+ .unwrap_or_else(|| {
+ bug!("Walking parents from `{}` \
+ led to `NotPresent` at `{}`",
+ id0, id)
+ }),
+ }
+ }
+ } else {
+ // reading from an inlined def-id is really a read out of
+ // the metadata from which we loaded the item.
+ loop {
+ match map[id as usize] {
+ EntryItem(p, _) |
+ EntryForeignItem(p, _) |
+ EntryTraitItem(p, _) |
+ EntryImplItem(p, _) |
+ EntryVariant(p, _) |
+ EntryExpr(p, _) |
+ EntryStmt(p, _) |
+ EntryLocal(p, _) |
+ EntryPat(p, _) |
+ EntryBlock(p, _) |
+ EntryStructCtor(p, _) |
+ EntryLifetime(p, _) |
+ EntryTyParam(p, _) =>
+ id = p,
+
+ RootInlinedParent(parent) => match *parent {
+ InlinedItem::Item(def_id, _) |
+ InlinedItem::TraitItem(def_id, _) |
+ InlinedItem::ImplItem(def_id, _) |
+ InlinedItem::Foreign(def_id, _) =>
+ return DepNode::MetaData(def_id)
+ },
+
+ RootCrate =>
+ bug!("node {} has crate ancestor but is inlined", id0),
+
+ NotPresent =>
+ bug!("node {} is inlined but not present in map", id0),
+ }
}
}
}
pub fn node_to_user_string(&self, id: NodeId) -> String {
node_id_to_string(self, id, false)
}
-
- pub fn is_inlined(&self, id: NodeId) -> bool {
- id >= self.local_node_id_watermark
- }
}
pub struct NodesMatchingSuffix<'a, 'ast:'a> {
}
let local_node_id_watermark = map.len() as NodeId;
+ let local_def_id_watermark = definitions.len();
Map {
forest: forest,
dep_graph: forest.dep_graph.clone(),
map: RefCell::new(map),
definitions: RefCell::new(definitions),
- local_node_id_watermark: local_node_id_watermark
+ local_node_id_watermark: local_node_id_watermark,
+ local_def_id_watermark: local_def_id_watermark,
}
}
-> &'ast InlinedItem {
let mut fld = IdAndSpanUpdater::new(fold_ops);
let ii = match ii {
- II::Item(i) => II::Item(i.map(|i| fld.fold_item(i))),
+ II::Item(d, i) => II::Item(fld.fold_ops.new_def_id(d),
+ i.map(|i| fld.fold_item(i))),
II::TraitItem(d, ti) => {
II::TraitItem(fld.fold_ops.new_def_id(d),
ti.map(|ti| fld.fold_trait_item(ti)))
II::ImplItem(fld.fold_ops.new_def_id(d),
ii.map(|ii| fld.fold_impl_item(ii)))
}
- II::Foreign(i) => II::Foreign(i.map(|i| fld.fold_foreign_item(i)))
+ II::Foreign(d, i) => II::Foreign(fld.fold_ops.new_def_id(d),
+ i.map(|i| fld.fold_foreign_item(i)))
};
let ii = map.forest.inlined_items.alloc(ii);
use syntax::parse::token;
use syntax::ptr::P;
use syntax_pos::{self, Pos, Span};
-use errors::{DiagnosticBuilder, check_old_school};
+use errors::DiagnosticBuilder;
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn note_and_explain_region(self,
let span = origin.span();
- let mut is_simple_error = false;
-
if let Some((expected, found)) = expected_found {
- is_simple_error = if let &TypeError::Sorts(ref values) = terr {
+ let is_simple_error = if let &TypeError::Sorts(ref values) = terr {
values.expected.is_primitive() && values.found.is_primitive()
} else {
false
};
- if !is_simple_error || check_old_school() {
+ if !is_simple_error {
diag.note_expected_found(&"type", &expected, &found);
}
}
- if !is_simple_error && check_old_school() {
- diag.span_note(span, &format!("{}", terr));
- } else {
- diag.span_label(span, &terr);
- }
+ diag.span_label(span, &terr);
self.note_error_origin(diag, &origin);
self.check_and_note_conflicting_crates(diag, terr, span);
/// that we trans.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum InlinedItem {
- Item(P<hir::Item>),
+ Item(DefId /* def-id in source crate */, P<hir::Item>),
TraitItem(DefId /* impl id */, P<hir::TraitItem>),
ImplItem(DefId /* impl id */, P<hir::ImplItem>),
- Foreign(P<hir::ForeignItem>),
+ Foreign(DefId /* extern item */, P<hir::ForeignItem>),
}
/// A borrowed version of `hir::InlinedItem`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum InlinedItemRef<'a> {
- Item(&'a hir::Item),
+ Item(DefId, &'a hir::Item),
TraitItem(DefId, &'a hir::TraitItem),
ImplItem(DefId, &'a hir::ImplItem),
- Foreign(&'a hir::ForeignItem)
+ Foreign(DefId, &'a hir::ForeignItem)
}
/// Item definitions in the currently-compiled crate would have the CrateNum
where V: Visitor<'ast>
{
match *self {
- InlinedItem::Item(ref i) => visitor.visit_item(&i),
- InlinedItem::Foreign(ref i) => visitor.visit_foreign_item(&i),
+ InlinedItem::Item(_, ref i) => visitor.visit_item(&i),
+ InlinedItem::Foreign(_, ref i) => visitor.visit_foreign_item(&i),
InlinedItem::TraitItem(_, ref ti) => visitor.visit_trait_item(ti),
InlinedItem::ImplItem(_, ref ii) => visitor.visit_impl_item(ii),
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use util::nodemap::NodeMap;
+use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
+use hir::def_id::DefId;
use mir::repr::Mir;
+use std::marker::PhantomData;
pub struct MirMap<'tcx> {
- pub map: NodeMap<Mir<'tcx>>,
+ pub map: DepTrackingMap<MirMapConfig<'tcx>>,
+}
+
+impl<'tcx> MirMap<'tcx> {
+ pub fn new(graph: DepGraph) -> Self {
+ MirMap {
+ map: DepTrackingMap::new(graph)
+ }
+ }
+}
+
+pub struct MirMapConfig<'tcx> {
+ data: PhantomData<&'tcx ()>
+}
+
+impl<'tcx> DepTrackingMapConfig for MirMapConfig<'tcx> {
+ type Key = DefId;
+ type Value = Mir<'tcx>;
+ fn to_dep_node(key: &DefId) -> DepNode<DefId> {
+ DepNode::Mir(*key)
+ }
}
use dep_graph::DepNode;
use hir;
use hir::map::DefPathData;
-use hir::def_id::DefId;
use mir::mir_map::MirMap;
use mir::repr::{Mir, Promoted};
use ty::TyCtxt;
/// Various information about pass.
pub trait Pass {
// fn should_run(Session) to check if pass should run?
- fn dep_node(&self, def_id: DefId) -> DepNode<DefId> {
- DepNode::MirPass(def_id)
- }
fn name(&self) -> &str {
let name = unsafe { ::std::intrinsics::type_name::<Self>() };
if let Some(tail) = name.rfind(":") {
map: &mut MirMap<'tcx>,
hooks: &mut [Box<for<'s> MirPassHook<'s>>])
{
- for (&id, mir) in &mut map.map {
- let def_id = tcx.map.local_def_id(id);
- let _task = tcx.dep_graph.in_task(self.dep_node(def_id));
-
+ let def_ids = map.map.keys();
+ for def_id in def_ids {
+ let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+ let mir = map.map.get_mut(&def_id).unwrap();
+ let id = tcx.map.as_local_node_id(def_id).unwrap();
let src = MirSource::from_node(tcx, id);
for hook in &mut *hooks {
CrateName,
Cfg,
TargetList,
+ TargetCPUs,
+ TargetFeatures,
+ RelocationModels,
+ CodeModels,
}
pub enum Input {
lto: bool = (false, parse_bool,
"perform LLVM link-time optimizations"),
target_cpu: Option<String> = (None, parse_opt_string,
- "select target processor (llc -mcpu=help for details)"),
+ "select target processor (rustc --print target-cpus for details)"),
target_feature: String = ("".to_string(), parse_string,
- "target specific attributes (llc -mattr=help for details)"),
+ "target specific attributes (rustc --print target-features for details)"),
passes: Vec<String> = (Vec::new(), parse_list,
"a list of extra LLVM passes to run (space separated)"),
llvm_args: Vec<String> = (Vec::new(), parse_list,
no_redzone: Option<bool> = (None, parse_opt_bool,
"disable the use of the redzone"),
relocation_model: Option<String> = (None, parse_opt_string,
- "choose the relocation model to use (llc -relocation-model for details)"),
+ "choose the relocation model to use (rustc --print relocation-models for details)"),
code_model: Option<String> = (None, parse_opt_string,
- "choose the code model to use (llc -code-model for details)"),
+ "choose the code model to use (rustc --print code-models for details)"),
metadata: Vec<String> = (Vec::new(), parse_list,
"metadata to mangle symbol names with"),
extra_filename: String = ("".to_string(), parse_string,
"attempt to recover from parse errors (experimental)"),
incremental: Option<String> = (None, parse_opt_string,
"enable incremental compilation (experimental)"),
+ incremental_info: bool = (false, parse_bool,
+ "print high-level information about incremental reuse (or the lack thereof)"),
dump_dep_graph: bool = (false, parse_bool,
"dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
query_dep_graph: bool = (false, parse_bool,
"[asm|llvm-bc|llvm-ir|obj|link|dep-info]"),
opt::multi_s("", "print", "Comma separated list of compiler information to \
print on stdout",
- "[crate-name|file-names|sysroot|cfg|target-list]"),
+ "[crate-name|file-names|sysroot|cfg|target-list|target-cpus|\
+ target-features|relocation-models|code-models]"),
opt::flagmulti_s("g", "", "Equivalent to -C debuginfo=2"),
opt::flagmulti_s("O", "", "Equivalent to -C opt-level=2"),
opt::opt_s("o", "", "Write output to <filename>", "FILENAME"),
"NAME=PATH"),
opt::opt_s("", "sysroot", "Override the system root", "PATH"),
opt::multi_ubnr("Z", "", "Set internal debugging options", "FLAG"),
- opt::opt_ubnr("", "error-format",
+ opt::opt_s("", "error-format",
"How errors and other messages are produced",
"human|json"),
opt::opt_s("", "color", "Configure coloring of output:
early_error(error_format, "Value for codegen units must be a positive nonzero integer");
}
+ let mut prints = Vec::<PrintRequest>::new();
+ if cg.target_cpu.as_ref().map_or(false, |s| s == "help") {
+ prints.push(PrintRequest::TargetCPUs);
+ cg.target_cpu = None;
+ };
+ if cg.target_feature == "help" {
+ prints.push(PrintRequest::TargetFeatures);
+ cg.target_feature = "".to_string();
+ }
+ if cg.relocation_model.as_ref().map_or(false, |s| s == "help") {
+ prints.push(PrintRequest::RelocationModels);
+ cg.relocation_model = None;
+ }
+ if cg.code_model.as_ref().map_or(false, |s| s == "help") {
+ prints.push(PrintRequest::CodeModels);
+ cg.code_model = None;
+ }
+
let cg = cg;
let sysroot_opt = matches.opt_str("sysroot").map(|m| PathBuf::from(&m));
let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
let test = matches.opt_present("test");
- let prints = matches.opt_strs("print").into_iter().map(|s| {
+ prints.extend(matches.opt_strs("print").into_iter().map(|s| {
match &*s {
"crate-name" => PrintRequest::CrateName,
"file-names" => PrintRequest::FileNames,
"sysroot" => PrintRequest::Sysroot,
"cfg" => PrintRequest::Cfg,
"target-list" => PrintRequest::TargetList,
+ "target-cpus" => PrintRequest::TargetCPUs,
+ "target-features" => PrintRequest::TargetFeatures,
+ "relocation-models" => PrintRequest::RelocationModels,
+ "code-models" => PrintRequest::CodeModels,
req => {
early_error(error_format, &format!("unknown print request `{}`", req))
}
}
- }).collect::<Vec<_>>();
+ }));
if !cg.remark.is_empty() && debuginfo == NoDebugInfo {
early_warn(error_format, "-C remark will not show source locations without \
use syntax::ast::{NodeId, Name};
use errors::{self, DiagnosticBuilder};
use errors::emitter::{Emitter, EmitterWriter};
-use errors::snippet::FormatMode;
use syntax::json::JsonEmitter;
use syntax::feature_gate;
use syntax::parse;
let emitter: Box<Emitter> = match sopts.error_format {
config::ErrorOutputType::HumanReadable(color_config) => {
Box::new(EmitterWriter::stderr(color_config,
- Some(registry),
- Some(codemap.clone()),
- errors::snippet::FormatMode::EnvironmentSelected))
+ Some(codemap.clone())))
}
config::ErrorOutputType::Json => {
Box::new(JsonEmitter::stderr(Some(registry), codemap.clone()))
let emitter: Box<Emitter> = match output {
config::ErrorOutputType::HumanReadable(color_config) => {
Box::new(EmitterWriter::stderr(color_config,
- None,
- None,
- FormatMode::EnvironmentSelected))
+ None))
}
config::ErrorOutputType::Json => Box::new(JsonEmitter::basic()),
};
let emitter: Box<Emitter> = match output {
config::ErrorOutputType::HumanReadable(color_config) => {
Box::new(EmitterWriter::stderr(color_config,
- None,
- None,
- FormatMode::EnvironmentSelected))
+ None))
}
config::ErrorOutputType::Json => Box::new(JsonEmitter::basic()),
};
use session::Session;
use middle;
use middle::cstore::LOCAL_CRATE;
+use hir::TraitMap;
use hir::def::DefMap;
use hir::def_id::{DefId, DefIndex};
use hir::map as ast_map;
pub types: CommonTypes<'tcx>,
pub sess: &'tcx Session,
+
+ /// Map from path id to the results from resolve; generated
+ /// initially by resolve and updated during typeck in some cases
+ /// (e.g., UFCS paths)
pub def_map: RefCell<DefMap>,
+ /// Map indicating what traits are in scope for places where this
+ /// is relevant; generated by resolve.
+ pub trait_map: TraitMap,
+
pub named_region_map: resolve_lifetime::NamedRegionMap,
pub region_maps: RegionMaps,
}
pub fn retrace_path(self, path: &DefPath) -> Option<DefId> {
- debug!("retrace_path(path={:?})", path);
+ debug!("retrace_path(path={:?}, krate={:?})", path, self.crate_name(path.krate));
let root_key = DefKey {
parent: None,
pub fn create_and_enter<F, R>(s: &'tcx Session,
arenas: &'tcx CtxtArenas<'tcx>,
def_map: DefMap,
+ trait_map: TraitMap,
named_region_map: resolve_lifetime::NamedRegionMap,
map: ast_map::Map<'tcx>,
freevars: FreevarMap,
variance_computed: Cell::new(false),
sess: s,
def_map: RefCell::new(def_map),
+ trait_map: trait_map,
tables: RefCell::new(Tables::empty()),
impl_trait_refs: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
trait_defs: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
self.untracked_get()
}
+ /// Reads the ivar without registering a dep-graph read. Use with
+ /// caution.
#[inline]
- fn untracked_get(&self) -> Option<Ty<'tcx>> {
+ pub fn untracked_get(&self) -> Option<Ty<'tcx>> {
match self.0.get() {
None => None,
// valid because of invariant (A)
/// Due to normalization being eager, this applies even if
/// the associated type is behind a pointer, e.g. issue #31299.
pub fn sized_constraint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
- let dep_node = DepNode::SizedConstraint(self.did);
- match self.sized_constraint.get(dep_node) {
+ match self.sized_constraint.get(DepNode::SizedConstraint(self.did)) {
None => {
let global_tcx = tcx.global_tcx();
let this = global_tcx.lookup_adt_def_master(self.did);
/// such.
/// - a TyError, if a type contained itself. The representability
/// check should catch this case.
- fn calculate_sized_constraint_inner(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ fn calculate_sized_constraint_inner(&'tcx self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
stack: &mut Vec<AdtDefMaster<'tcx>>)
{
-
let dep_node = || DepNode::SizedConstraint(self.did);
- if self.sized_constraint.get(dep_node()).is_some() {
+
+ // Follow the memoization pattern: push the computation of
+ // DepNode::SizedConstraint as our current task.
+ let _task = tcx.dep_graph.in_task(dep_node());
+ if self.sized_constraint.untracked_get().is_some() {
+ // ---------------
+ // can skip the dep-graph read since we just pushed the task
return;
}
pub fn get_targets() -> Box<Iterator<Item=String>> {
Box::new(TARGETS.iter().filter_map(|t| -> Option<String> {
load_specific(t)
- .map(|t| t.llvm_target)
+ .and(Ok(t.to_string()))
.ok()
}))
}
attributes: &[ast::Attribute]) {
debug!("borrowck_fn(id={})", id);
+ let def_id = this.tcx.map.local_def_id(id);
+
if attributes.iter().any(|item| item.check_name("rustc_mir_borrowck")) {
- let mir = this.mir_map.unwrap().map.get(&id).unwrap();
+ let mir = this.mir_map.unwrap().map.get(&def_id).unwrap();
this.with_temp_region_map(id, |this| {
mir::borrowck_mir(this, fk, decl, mir, body, sp, id, attributes)
});
.flat_map(|arm| &arm.0)
.map(|pat| vec![wrap_pat(cx, &pat)])
.collect();
- check_exhaustive(cx, ex.span, &matrix, source);
+ let match_span = Span {
+ lo: ex.span.lo,
+ hi: scrut.span.hi,
+ expn_id: ex.span.expn_id
+ };
+ check_exhaustive(cx, match_span, &matrix, source);
},
_ => ()
}
let &(ref first_arm_pats, _) = &arms[0];
let first_pat = &first_arm_pats[0];
let span = first_pat.span;
- span_err!(cx.tcx.sess, span, E0162, "irrefutable if-let pattern");
+ struct_span_err!(cx.tcx.sess, span, E0162,
+ "irrefutable if-let pattern")
+ .span_label(span, &format!("irrefutable pattern"))
+ .emit();
printed_if_let_err = true;
}
},
use std::collections::hash_map::Entry::Vacant;
use rustc_const_math::*;
-use rustc_errors::{DiagnosticBuilder, check_old_school};
+use rustc_errors::DiagnosticBuilder;
macro_rules! math {
($e:expr, $op:expr) => {
}
let mut used_substs = false;
let expr_ty = match tcx.sess.cstore.maybe_get_item_ast(tcx, def_id) {
- Some((&InlinedItem::Item(ref item), _)) => match item.node {
+ Some((&InlinedItem::Item(_, ref item), _)) => match item.node {
hir::ItemConst(ref ty, ref const_expr) => {
Some((&**const_expr, tcx.ast_ty_to_prim_ty(ty)))
},
}
let fn_id = match tcx.sess.cstore.maybe_get_item_ast(tcx, def_id) {
- Some((&InlinedItem::Item(ref item), _)) => Some(item.id),
+ Some((&InlinedItem::Item(_, ref item), _)) => Some(item.id),
Some((&InlinedItem::ImplItem(_, ref item), _)) => Some(item.id),
_ => None
};
{
match err.description() {
ConstEvalErrDescription::Simple(message) => {
- if check_old_school() {
- diag.note(&message);
- } else {
- diag.span_label(err.span, &message);
- }
+ diag.span_label(err.span, &message);
}
}
BitVector { data: vec![0; num_words] }
}
+ pub fn clear(&mut self) {
+ for p in &mut self.data {
+ *p = 0;
+ }
+ }
+
pub fn contains(&self, bit: usize) -> bool {
let (word, mask) = word_mask(bit);
(self.data[word] & mask) != 0
}
}
-/// A "bit matrix" is basically a square matrix of booleans
-/// represented as one gigantic bitvector. In other words, it is as if
-/// you have N bitvectors, each of length N. Note that `elements` here is `N`/
+/// A "bit matrix" is basically a matrix of booleans represented as
+/// one gigantic bitvector. In other words, it is as if you have
+/// `rows` bitvectors, each of length `columns`.
#[derive(Clone)]
pub struct BitMatrix {
- elements: usize,
+ columns: usize,
vector: Vec<u64>,
}
impl BitMatrix {
- // Create a new `elements x elements` matrix, initially empty.
- pub fn new(elements: usize) -> BitMatrix {
+ // Create a new `rows x columns` matrix, initially empty.
+ pub fn new(rows: usize, columns: usize) -> BitMatrix {
// For every element, we need one bit for every other
// element. Round up to an even number of u64s.
- let u64s_per_elem = u64s(elements);
+ let u64s_per_row = u64s(columns);
BitMatrix {
- elements: elements,
- vector: vec![0; elements * u64s_per_elem],
+ columns: columns,
+ vector: vec![0; rows * u64s_per_row],
}
}
- /// The range of bits for a given element.
- fn range(&self, element: usize) -> (usize, usize) {
- let u64s_per_elem = u64s(self.elements);
- let start = element * u64s_per_elem;
- (start, start + u64s_per_elem)
+ /// The range of bits for a given row.
+ fn range(&self, row: usize) -> (usize, usize) {
+ let u64s_per_row = u64s(self.columns);
+ let start = row * u64s_per_row;
+ (start, start + u64s_per_row)
}
pub fn add(&mut self, source: usize, target: usize) -> bool {
pub fn intersection(&self, a: usize, b: usize) -> Vec<usize> {
let (a_start, a_end) = self.range(a);
let (b_start, b_end) = self.range(b);
- let mut result = Vec::with_capacity(self.elements);
+ let mut result = Vec::with_capacity(self.columns);
for (base, (i, j)) in (a_start..a_end).zip(b_start..b_end).enumerate() {
let mut v = self.vector[i] & self.vector[j];
for bit in 0..64 {
}
changed
}
+
+ pub fn iter<'a>(&'a self, row: usize) -> BitVectorIter<'a> {
+ let (start, end) = self.range(row);
+ BitVectorIter {
+ iter: self.vector[start..end].iter(),
+ current: 0,
+ idx: 0,
+ }
+ }
}
fn u64s(elements: usize) -> usize {
#[test]
fn matrix_intersection() {
- let mut vec1 = BitMatrix::new(200);
+ let mut vec1 = BitMatrix::new(200, 200);
// (*) Elements reachable from both 2 and 65.
let intersection = vec1.intersection(2, 65);
assert_eq!(intersection, &[10, 64, 160]);
}
+
+#[test]
+fn matrix_iter() {
+ let mut matrix = BitMatrix::new(64, 100);
+ matrix.add(3, 22);
+ matrix.add(3, 75);
+ matrix.add(2, 99);
+ matrix.add(4, 0);
+ matrix.merge(3, 5);
+
+ let expected = [99];
+ let mut iter = expected.iter();
+ for i in matrix.iter(2) {
+ let j = *iter.next().unwrap();
+ assert_eq!(i, j);
+ }
+ assert!(iter.next().is_none());
+
+ let expected = [22, 75];
+ let mut iter = expected.iter();
+ for i in matrix.iter(3) {
+ let j = *iter.next().unwrap();
+ assert_eq!(i, j);
+ }
+ assert!(iter.next().is_none());
+
+ let expected = [0];
+ let mut iter = expected.iter();
+ for i in matrix.iter(4) {
+ let j = *iter.next().unwrap();
+ assert_eq!(i, j);
+ }
+ assert!(iter.next().is_none());
+
+ let expected = [22, 75];
+ let mut iter = expected.iter();
+ for i in matrix.iter(5) {
+ let j = *iter.next().unwrap();
+ assert_eq!(i, j);
+ }
+ assert!(iter.next().is_none());
+}
start: NodeIndex,
direction: Direction)
-> DepthFirstTraversal<'a, N, E> {
- DepthFirstTraversal {
- graph: self,
- stack: vec![start],
- visited: BitVector::new(self.nodes.len()),
- direction: direction,
- }
+ DepthFirstTraversal::with_start_node(self, start, direction)
}
}
direction: Direction,
}
+impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
+ pub fn new(graph: &'g Graph<N, E>, direction: Direction) -> Self {
+ let visited = BitVector::new(graph.len_nodes());
+ DepthFirstTraversal {
+ graph: graph,
+ stack: vec![],
+ visited: visited,
+ direction: direction
+ }
+ }
+
+ pub fn with_start_node(graph: &'g Graph<N, E>,
+ start_node: NodeIndex,
+ direction: Direction)
+ -> Self {
+ let mut visited = BitVector::new(graph.len_nodes());
+ visited.insert(start_node.node_id());
+ DepthFirstTraversal {
+ graph: graph,
+ stack: vec![start_node],
+ visited: visited,
+ direction: direction
+ }
+ }
+
+ pub fn reset(&mut self, start_node: NodeIndex) {
+ self.stack.truncate(0);
+ self.stack.push(start_node);
+ self.visited.clear();
+ self.visited.insert(start_node.node_id());
+ }
+
+ fn visit(&mut self, node: NodeIndex) {
+ if self.visited.insert(node.node_id()) {
+ self.stack.push(node);
+ }
+ }
+}
+
impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
type Item = NodeIndex;
fn next(&mut self) -> Option<NodeIndex> {
- while let Some(idx) = self.stack.pop() {
- if !self.visited.insert(idx.node_id()) {
- continue;
- }
-
+ let next = self.stack.pop();
+ if let Some(idx) = next {
for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {
let target = edge.source_or_target(self.direction);
- if !self.visited.contains(target.node_id()) {
- self.stack.push(target);
- }
+ self.visit(target);
}
-
- return Some(idx);
}
-
- return None;
+ next
}
}
}
fn compute_closure(&self) -> BitMatrix {
- let mut matrix = BitMatrix::new(self.elements.len());
+ let mut matrix = BitMatrix::new(self.elements.len(),
+ self.elements.len());
let mut changed = true;
while changed {
changed = false;
let index = stability::Index::new(&hir_map);
- let trait_map = resolutions.trait_map;
TyCtxt::create_and_enter(sess,
arenas,
resolutions.def_map,
+ resolutions.trait_map,
named_region_map,
hir_map,
resolutions.freevars,
|| rustc_incremental::load_dep_graph(tcx));
// passes are timed inside typeck
- try_with_f!(typeck::check_crate(tcx, trait_map), (tcx, None, analysis));
+ try_with_f!(typeck::check_crate(tcx), (tcx, None, analysis));
time(time_passes,
"const checking",
use rustc_resolve as resolve;
use rustc_save_analysis as save;
use rustc_trans::back::link;
+use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
use rustc::dep_graph::DepGraph;
use rustc::session::{self, config, Session, build_session, CompileResult};
use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType};
use syntax::parse::{self, PResult};
use syntax_pos::MultiSpan;
use errors::emitter::Emitter;
-use errors::snippet::FormatMode;
#[cfg(test)]
pub mod test;
None => {
let emitter =
errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
- None,
- None,
- FormatMode::EnvironmentSelected);
+ None);
let handler = errors::Handler::with_emitter(true, false, Box::new(emitter));
handler.emit(&MultiSpan::new(),
&abort_msg(err_count),
output: ErrorOutputType) {
let emitter: Box<Emitter> = match output {
config::ErrorOutputType::HumanReadable(color_config) => {
- Box::new(errors::emitter::EmitterWriter::stderr(color_config,
- None,
- None,
- FormatMode::EnvironmentSelected))
+ Box::new(errors::emitter::EmitterWriter::stderr(color_config, None))
}
config::ErrorOutputType::Json => Box::new(json::JsonEmitter::basic()),
};
}
}
}
+ PrintRequest::TargetCPUs => {
+ let tm = create_target_machine(sess);
+ unsafe { llvm::LLVMRustPrintTargetCPUs(tm); }
+ }
+ PrintRequest::TargetFeatures => {
+ let tm = create_target_machine(sess);
+ unsafe { llvm::LLVMRustPrintTargetFeatures(tm); }
+ }
+ PrintRequest::RelocationModels => {
+ println!("Available relocation models:");
+ for &(name, _) in RELOC_MODEL_ARGS.iter() {
+ println!(" {}", name);
+ }
+ println!("");
+ }
+ PrintRequest::CodeModels => {
+ println!("Available code models:");
+ for &(name, _) in CODE_GEN_MODEL_ARGS.iter(){
+ println!(" {}", name);
+ }
+ println!("");
+ }
}
}
return Compilation::Stop;
// Thread panicked without emitting a fatal diagnostic
if !value.is::<errors::FatalError>() {
let emitter =
- Box::new(errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
- None,
- None,
- FormatMode::EnvironmentSelected));
+ Box::new(errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto, None));
let handler = errors::Handler::with_emitter(true, false, emitter);
// a .span_bug or .bug call has already printed what
PpmMir | PpmMirCFG => {
if let Some(mir_map) = mir_map {
if let Some(nodeid) = nodeid {
- let mir = mir_map.map.get(&nodeid).unwrap_or_else(|| {
- sess.fatal(&format!("no MIR map entry for node {}", nodeid))
- });
+ let def_id = tcx.map.local_def_id(nodeid);
match ppm {
- PpmMir => write_mir_pretty(tcx, iter::once((&nodeid, mir)), &mut out),
+ PpmMir => write_mir_pretty(tcx, iter::once(def_id), &mir_map, &mut out),
PpmMirCFG => {
- write_mir_graphviz(tcx, iter::once((&nodeid, mir)), &mut out)
+ write_mir_graphviz(tcx, iter::once(def_id), &mir_map, &mut out)
}
_ => unreachable!(),
}?;
} else {
match ppm {
- PpmMir => write_mir_pretty(tcx, mir_map.map.iter(), &mut out),
- PpmMirCFG => write_mir_graphviz(tcx, mir_map.map.iter(), &mut out),
+ PpmMir => write_mir_pretty(tcx,
+ mir_map.map.keys().into_iter(),
+ &mir_map,
+ &mut out),
+ PpmMirCFG => write_mir_graphviz(tcx,
+ mir_map.map.keys().into_iter(),
+ &mir_map,
+ &mut out),
_ => unreachable!(),
}?;
}
TyCtxt::create_and_enter(&sess,
&arenas,
resolutions.def_map,
+ resolutions.trait_map,
named_region_map.unwrap(),
ast_map,
resolutions.freevars,
use self::Destination::*;
-use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, FileMap, Span, MultiSpan, LineInfo, CharPos};
-use registry;
+use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, FileMap, Span, MultiSpan, CharPos};
-use check_old_school;
use {Level, CodeSuggestion, DiagnosticBuilder, CodeMapper};
use RenderSpan::*;
-use snippet::{StyledString, Style, FormatMode, Annotation, Line};
+use snippet::{StyledString, Style, Annotation, Line};
use styled_buffer::StyledBuffer;
-use std::cmp;
use std::io::prelude::*;
use std::io;
use std::rc::Rc;
impl Emitter for EmitterWriter {
fn emit(&mut self, db: &DiagnosticBuilder) {
- // Pick old school mode either from env or let the test dictate the format
- let old_school = match self.format_mode {
- FormatMode::NewErrorFormat => false,
- FormatMode::OriginalErrorFormat => true,
- FormatMode::EnvironmentSelected => check_old_school()
- };
-
- if old_school {
- self.emit_messages_old_school(db);
- } else {
- self.emit_messages_default(db);
- }
+ self.emit_messages_default(db);
}
}
pub struct EmitterWriter {
dst: Destination,
- registry: Option<registry::Registry>,
cm: Option<Rc<CodeMapper>>,
-
- // For now, allow an old-school mode while we transition
- format_mode: FormatMode
}
struct FileWithAnnotatedLines {
impl EmitterWriter {
pub fn stderr(color_config: ColorConfig,
- registry: Option<registry::Registry>,
- code_map: Option<Rc<CodeMapper>>,
- format_mode: FormatMode)
+ code_map: Option<Rc<CodeMapper>>)
-> EmitterWriter {
if color_config.use_color() {
let dst = Destination::from_stderr();
EmitterWriter { dst: dst,
- registry: registry,
- cm: code_map,
- format_mode: format_mode.clone() }
+ cm: code_map}
} else {
EmitterWriter { dst: Raw(Box::new(io::stderr())),
- registry: registry,
- cm: code_map,
- format_mode: format_mode.clone() }
+ cm: code_map}
}
}
pub fn new(dst: Box<Write + Send>,
- registry: Option<registry::Registry>,
- code_map: Option<Rc<CodeMapper>>,
- format_mode: FormatMode)
+ code_map: Option<Rc<CodeMapper>>)
-> EmitterWriter {
EmitterWriter { dst: Raw(dst),
- registry: registry,
- cm: code_map,
- format_mode: format_mode.clone() }
+ cm: code_map}
}
fn preprocess_annotations(&self, msp: &MultiSpan) -> Vec<FileWithAnnotatedLines> {
_ => ()
}
}
- fn emit_message_old_school(&mut self,
- msp: &MultiSpan,
- msg: &str,
- code: &Option<String>,
- level: &Level,
- show_snippet: bool)
- -> io::Result<()> {
- let mut buffer = StyledBuffer::new();
-
- let loc = match msp.primary_span() {
- Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(),
- Some(ps) => if let Some(ref cm) = self.cm {
- cm.span_to_string(ps)
- } else {
- "".to_string()
- },
- None => {
- "".to_string()
- }
- };
- if loc != "" {
- buffer.append(0, &loc, Style::NoStyle);
- buffer.append(0, " ", Style::NoStyle);
- }
- buffer.append(0, &level.to_string(), Style::Level(level.clone()));
- buffer.append(0, ": ", Style::HeaderMsg);
- buffer.append(0, msg, Style::HeaderMsg);
- buffer.append(0, " ", Style::NoStyle);
- match code {
- &Some(ref code) => {
- buffer.append(0, "[", Style::ErrorCode);
- buffer.append(0, &code, Style::ErrorCode);
- buffer.append(0, "]", Style::ErrorCode);
- }
- _ => {}
- }
-
- if !show_snippet {
- emit_to_destination(&buffer.render(), level, &mut self.dst)?;
- return Ok(());
- }
-
- // Watch out for various nasty special spans; don't try to
- // print any filename or anything for those.
- match msp.primary_span() {
- Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => {
- emit_to_destination(&buffer.render(), level, &mut self.dst)?;
- return Ok(());
- }
- _ => { }
- }
-
- let annotated_files = self.preprocess_annotations(msp);
-
- if let (Some(ref cm), Some(ann_file), Some(ref primary_span)) =
- (self.cm.as_ref(), annotated_files.first(), msp.primary_span().as_ref()) {
-
- // Next, print the source line and its squiggle
- // for old school mode, we will render them to the buffer, then insert the file loc
- // (or space the same amount) in front of the line and the squiggle
- let source_string = ann_file.file.get_line(ann_file.lines[0].line_index - 1)
- .unwrap_or("");
-
- let line_offset = buffer.num_lines();
-
- let lo = cm.lookup_char_pos(primary_span.lo);
- //Before each secondary line in old skool-mode, print the label
- //as an old-style note
- let file_pos = format!("{}:{} ", lo.file.name.clone(), lo.line);
- let file_pos_len = file_pos.len();
-
- // First create the source line we will highlight.
- buffer.puts(line_offset, 0, &file_pos, Style::FileNameStyle);
- buffer.puts(line_offset, file_pos_len, &source_string, Style::Quotation);
- // Sort the annotations by (start, end col)
- let annotations = ann_file.lines[0].annotations.clone();
-
- // Next, create the highlight line.
- for annotation in &annotations {
- for p in annotation.start_col..annotation.end_col {
- if p == annotation.start_col {
- buffer.putc(line_offset + 1,
- file_pos_len + p,
- '^',
- if annotation.is_primary {
- Style::UnderlinePrimary
- } else {
- Style::OldSchoolNote
- });
- } else {
- buffer.putc(line_offset + 1,
- file_pos_len + p,
- '~',
- if annotation.is_primary {
- Style::UnderlinePrimary
- } else {
- Style::OldSchoolNote
- });
- }
- }
- }
- }
- if let Some(ref primary_span) = msp.primary_span().as_ref() {
- self.render_macro_backtrace_old_school(primary_span, &mut buffer)?;
- }
-
- match code {
- &Some(ref code) if self.registry.as_ref()
- .and_then(|registry| registry.find_description(code))
- .is_some() => {
- let msg = "run `rustc --explain ".to_string() + &code.to_string() +
- "` to see a detailed explanation";
-
- let line_offset = buffer.num_lines();
- buffer.append(line_offset, &loc, Style::NoStyle);
- buffer.append(line_offset, " ", Style::NoStyle);
- buffer.append(line_offset, &Level::Help.to_string(), Style::Level(Level::Help));
- buffer.append(line_offset, ": ", Style::HeaderMsg);
- buffer.append(line_offset, &msg, Style::HeaderMsg);
- }
- _ => ()
- }
-
- // final step: take our styled buffer, render it, then output it
- emit_to_destination(&buffer.render(), level, &mut self.dst)?;
- Ok(())
- }
- fn emit_suggestion_old_school(&mut self,
- suggestion: &CodeSuggestion,
- level: &Level,
- msg: &str)
- -> io::Result<()> {
- use std::borrow::Borrow;
-
- let primary_span = suggestion.msp.primary_span().unwrap();
- if let Some(ref cm) = self.cm {
- let mut buffer = StyledBuffer::new();
-
- let loc = cm.span_to_string(primary_span);
-
- if loc != "" {
- buffer.append(0, &loc, Style::NoStyle);
- buffer.append(0, " ", Style::NoStyle);
- }
-
- buffer.append(0, &level.to_string(), Style::Level(level.clone()));
- buffer.append(0, ": ", Style::HeaderMsg);
- buffer.append(0, msg, Style::HeaderMsg);
-
- let lines = cm.span_to_lines(primary_span).unwrap();
-
- assert!(!lines.lines.is_empty());
-
- let complete = suggestion.splice_lines(cm.borrow());
- let line_count = cmp::min(lines.lines.len(), MAX_HIGHLIGHT_LINES);
- let display_lines = &lines.lines[..line_count];
-
- let fm = &*lines.file;
- // Calculate the widest number to format evenly
- let max_digits = line_num_max_digits(display_lines.last().unwrap());
-
- // print the suggestion without any line numbers, but leave
- // space for them. This helps with lining up with previous
- // snippets from the actual error being reported.
- let mut lines = complete.lines();
- let mut row_num = 1;
- for line in lines.by_ref().take(MAX_HIGHLIGHT_LINES) {
- buffer.append(row_num, &fm.name, Style::FileNameStyle);
- for _ in 0..max_digits+2 {
- buffer.append(row_num, &" ", Style::NoStyle);
- }
- buffer.append(row_num, line, Style::NoStyle);
- row_num += 1;
- }
-
- // if we elided some lines, add an ellipsis
- if let Some(_) = lines.next() {
- buffer.append(row_num, "...", Style::NoStyle);
- }
- emit_to_destination(&buffer.render(), level, &mut self.dst)?;
- }
- Ok(())
- }
-
- fn emit_messages_old_school(&mut self, db: &DiagnosticBuilder) {
- match self.emit_message_old_school(&db.span,
- &db.message,
- &db.code,
- &db.level,
- true) {
- Ok(()) => {
- for child in &db.children {
- let (span, show_snippet) = if child.span.primary_spans().is_empty() {
- (db.span.clone(), false)
- } else {
- (child.span.clone(), true)
- };
-
- match child.render_span {
- Some(FullSpan(_)) => {
- match self.emit_message_old_school(&span,
- &child.message,
- &None,
- &child.level,
- show_snippet) {
- Err(e) => panic!("failed to emit error: {}", e),
- _ => ()
- }
- },
- Some(Suggestion(ref cs)) => {
- match self.emit_suggestion_old_school(cs,
- &child.level,
- &child.message) {
- Err(e) => panic!("failed to emit error: {}", e),
- _ => ()
- }
- },
- None => {
- match self.emit_message_old_school(&span,
- &child.message,
- &None,
- &child.level,
- show_snippet) {
- Err(e) => panic!("failed to emit error: {}", e),
- _ => ()
- }
- }
- }
- }
- }
- Err(e) => panic!("failed to emit error: {}", e)
- }
- }
-
fn render_macro_backtrace_old_school(&mut self,
sp: &Span,
buffer: &mut StyledBuffer) -> io::Result<()> {
Ok(())
}
-fn line_num_max_digits(line: &LineInfo) -> usize {
- let mut max_line_num = line.line_index + 1;
- let mut digits = 0;
- while max_line_num > 0 {
- max_line_num /= 10;
- digits += 1;
- }
- digits
-}
-
#[cfg(unix)]
fn stderr_isatty() -> bool {
use libc;
impl Handler {
pub fn with_tty_emitter(color_config: ColorConfig,
- registry: Option<registry::Registry>,
can_emit_warnings: bool,
treat_err_as_bug: bool,
cm: Option<Rc<CodeMapper>>)
-> Handler {
- let emitter = Box::new(EmitterWriter::stderr(color_config, registry, cm,
- snippet::FormatMode::EnvironmentSelected));
+ let emitter = Box::new(EmitterWriter::stderr(color_config, cm));
Handler::with_emitter(can_emit_warnings, treat_err_as_bug, emitter)
}
Some(t) => t,
None => diag.bug(&msg()),
}
-}
-
-/// True if we should use the old-skool error format style. This is
-/// the default setting until the new errors are deemed stable enough
-/// for general use.
-///
-/// FIXME(#33240)
-#[cfg(not(test))]
-pub fn check_old_school() -> bool {
- use std::env;
- env::var("RUST_NEW_ERROR_FORMAT").is_err()
-}
-
-/// For unit tests, use the new format.
-#[cfg(test)]
-pub fn check_old_school() -> bool {
- false
-}
+}
\ No newline at end of file
use std::rc::Rc;
use {Level};
-#[derive(Clone)]
-pub enum FormatMode {
- NewErrorFormat,
- OriginalErrorFormat,
- EnvironmentSelected
-}
-
#[derive(Clone)]
pub struct SnippetData {
codemap: Rc<CodeMapper>,
- files: Vec<FileInfo>,
- format_mode: FormatMode,
+ files: Vec<FileInfo>
}
#[derive(Clone)]
primary_span: Option<Span>,
lines: Vec<Line>,
-
- /// The type of error format to render. We keep it here so that
- /// it's easy to configure for both tests and regular usage
- format_mode: FormatMode,
}
#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
dump_graph(tcx);
}
+ // if the `rustc_attrs` feature is not enabled, then the
+ // attributes we are interested in cannot be present anyway, so
+ // skip the walk.
+ if !tcx.sess.features.borrow().rustc_attrs {
+ return;
+ }
+
// Find annotations supplied by user (if any).
let (if_this_changed, then_this_would_need) = {
let mut visitor = IfThisChanged { tcx: tcx,
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Calculation of a Strict Version Hash for crates. For a length
-//! comment explaining the general idea, see `librustc/middle/svh.rs`.
-
-use syntax::attr::AttributeMethods;
-use std::hash::{Hash, SipHasher, Hasher};
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
-use rustc::hir::svh::Svh;
-use rustc::ty::TyCtxt;
-use rustc::hir::intravisit::{self, Visitor};
-
-use self::svh_visitor::StrictVersionHashVisitor;
-
-pub trait SvhCalculate {
- /// Calculate the SVH for an entire krate.
- fn calculate_krate_hash(self) -> Svh;
-
- /// Calculate the SVH for a particular item.
- fn calculate_item_hash(self, def_id: DefId) -> u64;
-}
-
-impl<'a, 'tcx> SvhCalculate for TyCtxt<'a, 'tcx, 'tcx> {
- fn calculate_krate_hash(self) -> Svh {
- // FIXME (#14132): This is better than it used to be, but it still not
- // ideal. We now attempt to hash only the relevant portions of the
- // Crate AST as well as the top-level crate attributes. (However,
- // the hashing of the crate attributes should be double-checked
- // to ensure it is not incorporating implementation artifacts into
- // the hash that are not otherwise visible.)
-
- let crate_disambiguator = self.sess.local_crate_disambiguator();
- let krate = self.map.krate();
-
- // FIXME: this should use SHA1, not SipHash. SipHash is not built to
- // avoid collisions.
- let mut state = SipHasher::new();
- debug!("state: {:?}", state);
-
- // FIXME(#32753) -- at (*) we `to_le` for endianness, but is
- // this enough, and does it matter anyway?
- "crate_disambiguator".hash(&mut state);
- crate_disambiguator.len().to_le().hash(&mut state); // (*)
- crate_disambiguator.hash(&mut state);
-
- debug!("crate_disambiguator: {:?}", crate_disambiguator);
- debug!("state: {:?}", state);
-
- {
- let mut visit = StrictVersionHashVisitor::new(&mut state, self);
- krate.visit_all_items(&mut visit);
- }
-
- // FIXME (#14132): This hash is still sensitive to e.g. the
- // spans of the crate Attributes and their underlying
- // MetaItems; we should make ContentHashable impl for those
- // types and then use hash_content. But, since all crate
- // attributes should appear near beginning of the file, it is
- // not such a big deal to be sensitive to their spans for now.
- //
- // We hash only the MetaItems instead of the entire Attribute
- // to avoid hashing the AttrId
- for attr in &krate.attrs {
- debug!("krate attr {:?}", attr);
- attr.meta().hash(&mut state);
- }
-
- Svh::new(state.finish())
- }
-
- fn calculate_item_hash(self, def_id: DefId) -> u64 {
- assert!(def_id.is_local());
-
- debug!("calculate_item_hash(def_id={:?})", def_id);
-
- let mut state = SipHasher::new();
-
- {
- let mut visit = StrictVersionHashVisitor::new(&mut state, self);
- if def_id.index == CRATE_DEF_INDEX {
- // the crate root itself is not registered in the map
- // as an item, so we have to fetch it this way
- let krate = self.map.krate();
- intravisit::walk_crate(&mut visit, krate);
- } else {
- let node_id = self.map.as_local_node_id(def_id).unwrap();
- let item = self.map.expect_item(node_id);
- visit.visit_item(item);
- }
- }
-
- let hash = state.finish();
-
- debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, hash);
-
- hash
- }
-}
-
-// FIXME (#14132): Even this SVH computation still has implementation
-// artifacts: namely, the order of item declaration will affect the
-// hash computation, but for many kinds of items the order of
-// declaration should be irrelevant to the ABI.
-
-mod svh_visitor {
- pub use self::SawExprComponent::*;
- pub use self::SawStmtComponent::*;
- use self::SawAbiComponent::*;
- use syntax::ast::{self, Name, NodeId};
- use syntax::parse::token;
- use syntax_pos::Span;
- use rustc::ty::TyCtxt;
- use rustc::hir;
- use rustc::hir::*;
- use rustc::hir::map::DefPath;
- use rustc::hir::intravisit as visit;
- use rustc::hir::intravisit::{Visitor, FnKind};
-
- use std::hash::{Hash, SipHasher};
-
- pub struct StrictVersionHashVisitor<'a, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pub st: &'a mut SipHasher,
- }
-
- impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
- pub fn new(st: &'a mut SipHasher,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Self {
- StrictVersionHashVisitor { st: st, tcx: tcx }
- }
-
- fn hash_def_path(&mut self, path: &DefPath) {
- self.tcx.crate_name(path.krate).hash(self.st);
- self.tcx.crate_disambiguator(path.krate).hash(self.st);
- for data in &path.data {
- data.data.as_interned_str().hash(self.st);
- data.disambiguator.hash(self.st);
- }
- }
- }
-
- // To off-load the bulk of the hash-computation on #[derive(Hash)],
- // we define a set of enums corresponding to the content that our
- // crate visitor will encounter as it traverses the ast.
- //
- // The important invariant is that all of the Saw*Component enums
- // do not carry any Spans, Names, or Idents.
- //
- // Not carrying any Names/Idents is the important fix for problem
- // noted on PR #13948: using the ident.name as the basis for a
- // hash leads to unstable SVH, because ident.name is just an index
- // into intern table (i.e. essentially a random address), not
- // computed from the name content.
- //
- // With the below enums, the SVH computation is not sensitive to
- // artifacts of how rustc was invoked nor of how the source code
- // was laid out. (Or at least it is *less* sensitive.)
-
- // This enum represents the different potential bits of code the
- // visitor could encounter that could affect the ABI for the crate,
- // and assigns each a distinct tag to feed into the hash computation.
- #[derive(Hash)]
- enum SawAbiComponent<'a> {
-
- // FIXME (#14132): should we include (some function of)
- // ident.ctxt as well?
- SawIdent(token::InternedString),
- SawStructDef(token::InternedString),
-
- SawLifetime(token::InternedString),
- SawLifetimeDef(token::InternedString),
-
- SawMod,
- SawForeignItem,
- SawItem,
- SawDecl,
- SawTy,
- SawGenerics,
- SawFn,
- SawTraitItem,
- SawImplItem,
- SawStructField,
- SawVariant,
- SawPath,
- SawBlock,
- SawPat,
- SawLocal,
- SawArm,
- SawExpr(SawExprComponent<'a>),
- SawStmt(SawStmtComponent),
- }
-
- /// SawExprComponent carries all of the information that we want
- /// to include in the hash that *won't* be covered by the
- /// subsequent recursive traversal of the expression's
- /// substructure by the visitor.
- ///
- /// We know every Expr_ variant is covered by a variant because
- /// `fn saw_expr` maps each to some case below. Ensuring that
- /// each variant carries an appropriate payload has to be verified
- /// by hand.
- ///
- /// (However, getting that *exactly* right is not so important
- /// because the SVH is just a developer convenience; there is no
- /// guarantee of collision-freedom, hash collisions are just
- /// (hopefully) unlikely.)
- #[derive(Hash)]
- pub enum SawExprComponent<'a> {
-
- SawExprLoop(Option<token::InternedString>),
- SawExprField(token::InternedString),
- SawExprTupField(usize),
- SawExprBreak(Option<token::InternedString>),
- SawExprAgain(Option<token::InternedString>),
-
- SawExprBox,
- SawExprVec,
- SawExprCall,
- SawExprMethodCall,
- SawExprTup,
- SawExprBinary(hir::BinOp_),
- SawExprUnary(hir::UnOp),
- SawExprLit(ast::LitKind),
- SawExprCast,
- SawExprType,
- SawExprIf,
- SawExprWhile,
- SawExprMatch,
- SawExprClosure,
- SawExprBlock,
- SawExprAssign,
- SawExprAssignOp(hir::BinOp_),
- SawExprIndex,
- SawExprPath(Option<usize>),
- SawExprAddrOf(hir::Mutability),
- SawExprRet,
- SawExprInlineAsm(&'a hir::InlineAsm),
- SawExprStruct,
- SawExprRepeat,
- }
-
- fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> {
- match *node {
- ExprBox(..) => SawExprBox,
- ExprVec(..) => SawExprVec,
- ExprCall(..) => SawExprCall,
- ExprMethodCall(..) => SawExprMethodCall,
- ExprTup(..) => SawExprTup,
- ExprBinary(op, _, _) => SawExprBinary(op.node),
- ExprUnary(op, _) => SawExprUnary(op),
- ExprLit(ref lit) => SawExprLit(lit.node.clone()),
- ExprCast(..) => SawExprCast,
- ExprType(..) => SawExprType,
- ExprIf(..) => SawExprIf,
- ExprWhile(..) => SawExprWhile,
- ExprLoop(_, id) => SawExprLoop(id.map(|id| id.node.as_str())),
- ExprMatch(..) => SawExprMatch,
- ExprClosure(..) => SawExprClosure,
- ExprBlock(..) => SawExprBlock,
- ExprAssign(..) => SawExprAssign,
- ExprAssignOp(op, _, _) => SawExprAssignOp(op.node),
- ExprField(_, name) => SawExprField(name.node.as_str()),
- ExprTupField(_, id) => SawExprTupField(id.node),
- ExprIndex(..) => SawExprIndex,
- ExprPath(ref qself, _) => SawExprPath(qself.as_ref().map(|q| q.position)),
- ExprAddrOf(m, _) => SawExprAddrOf(m),
- ExprBreak(id) => SawExprBreak(id.map(|id| id.node.as_str())),
- ExprAgain(id) => SawExprAgain(id.map(|id| id.node.as_str())),
- ExprRet(..) => SawExprRet,
- ExprInlineAsm(ref a,_,_) => SawExprInlineAsm(a),
- ExprStruct(..) => SawExprStruct,
- ExprRepeat(..) => SawExprRepeat,
- }
- }
-
- /// SawStmtComponent is analogous to SawExprComponent, but for statements.
- #[derive(Hash)]
- pub enum SawStmtComponent {
- SawStmtDecl,
- SawStmtExpr,
- SawStmtSemi,
- }
-
- fn saw_stmt(node: &Stmt_) -> SawStmtComponent {
- match *node {
- StmtDecl(..) => SawStmtDecl,
- StmtExpr(..) => SawStmtExpr,
- StmtSemi(..) => SawStmtSemi,
- }
- }
-
- impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
- fn visit_nested_item(&mut self, item: ItemId) {
- let def_path = self.tcx.map.def_path_from_id(item.id).unwrap();
- debug!("visit_nested_item: def_path={:?} st={:?}", def_path, self.st);
- self.hash_def_path(&def_path);
- }
-
- fn visit_variant_data(&mut self, s: &'a VariantData, name: Name,
- g: &'a Generics, _: NodeId, _: Span) {
- debug!("visit_variant_data: st={:?}", self.st);
- SawStructDef(name.as_str()).hash(self.st);
- visit::walk_generics(self, g);
- visit::walk_struct_def(self, s)
- }
-
- fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
- debug!("visit_variant: st={:?}", self.st);
- SawVariant.hash(self.st);
- // walk_variant does not call walk_generics, so do it here.
- visit::walk_generics(self, g);
- visit::walk_variant(self, v, g, item_id)
- }
-
- // All of the remaining methods just record (in the hash
- // SipHasher) that the visitor saw that particular variant
- // (with its payload), and continue walking as the default
- // visitor would.
- //
- // Some of the implementations have some notes as to how one
- // might try to make their SVH computation less discerning
- // (e.g. by incorporating reachability analysis). But
- // currently all of their implementations are uniform and
- // uninteresting.
- //
- // (If you edit a method such that it deviates from the
- // pattern, please move that method up above this comment.)
-
- fn visit_name(&mut self, _: Span, name: Name) {
- debug!("visit_name: st={:?}", self.st);
- SawIdent(name.as_str()).hash(self.st);
- }
-
- fn visit_lifetime(&mut self, l: &'a Lifetime) {
- debug!("visit_lifetime: st={:?}", self.st);
- SawLifetime(l.name.as_str()).hash(self.st);
- }
-
- fn visit_lifetime_def(&mut self, l: &'a LifetimeDef) {
- debug!("visit_lifetime_def: st={:?}", self.st);
- SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
- }
-
- // We do recursively walk the bodies of functions/methods
- // (rather than omitting their bodies from the hash) since
- // monomorphization and cross-crate inlining generally implies
- // that a change to a crate body will require downstream
- // crates to be recompiled.
- fn visit_expr(&mut self, ex: &'a Expr) {
- debug!("visit_expr: st={:?}", self.st);
- SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
- }
-
- fn visit_stmt(&mut self, s: &'a Stmt) {
- debug!("visit_stmt: st={:?}", self.st);
- SawStmt(saw_stmt(&s.node)).hash(self.st); visit::walk_stmt(self, s)
- }
-
- fn visit_foreign_item(&mut self, i: &'a ForeignItem) {
- debug!("visit_foreign_item: st={:?}", self.st);
-
- // FIXME (#14132) ideally we would incorporate privacy (or
- // perhaps reachability) somewhere here, so foreign items
- // that do not leak into downstream crates would not be
- // part of the ABI.
- SawForeignItem.hash(self.st); visit::walk_foreign_item(self, i)
- }
-
- fn visit_item(&mut self, i: &'a Item) {
- debug!("visit_item: {:?} st={:?}", i, self.st);
-
- // FIXME (#14132) ideally would incorporate reachability
- // analysis somewhere here, so items that never leak into
- // downstream crates (e.g. via monomorphisation or
- // inlining) would not be part of the ABI.
- SawItem.hash(self.st); visit::walk_item(self, i)
- }
-
- fn visit_mod(&mut self, m: &'a Mod, _s: Span, n: NodeId) {
- debug!("visit_mod: st={:?}", self.st);
- SawMod.hash(self.st); visit::walk_mod(self, m, n)
- }
-
- fn visit_decl(&mut self, d: &'a Decl) {
- debug!("visit_decl: st={:?}", self.st);
- SawDecl.hash(self.st); visit::walk_decl(self, d)
- }
-
- fn visit_ty(&mut self, t: &'a Ty) {
- debug!("visit_ty: st={:?}", self.st);
- SawTy.hash(self.st); visit::walk_ty(self, t)
- }
-
- fn visit_generics(&mut self, g: &'a Generics) {
- debug!("visit_generics: st={:?}", self.st);
- SawGenerics.hash(self.st); visit::walk_generics(self, g)
- }
-
- fn visit_fn(&mut self, fk: FnKind<'a>, fd: &'a FnDecl,
- b: &'a Block, s: Span, n: NodeId) {
- debug!("visit_fn: st={:?}", self.st);
- SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s, n)
- }
-
- fn visit_trait_item(&mut self, ti: &'a TraitItem) {
- debug!("visit_trait_item: st={:?}", self.st);
- SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
- }
-
- fn visit_impl_item(&mut self, ii: &'a ImplItem) {
- debug!("visit_impl_item: st={:?}", self.st);
- SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
- }
-
- fn visit_struct_field(&mut self, s: &'a StructField) {
- debug!("visit_struct_field: st={:?}", self.st);
- SawStructField.hash(self.st); visit::walk_struct_field(self, s)
- }
-
- fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) {
- debug!("visit_path: st={:?}", self.st);
- SawPath.hash(self.st); visit::walk_path(self, path)
- }
-
- fn visit_block(&mut self, b: &'a Block) {
- debug!("visit_block: st={:?}", self.st);
- SawBlock.hash(self.st); visit::walk_block(self, b)
- }
-
- fn visit_pat(&mut self, p: &'a Pat) {
- debug!("visit_pat: st={:?}", self.st);
- SawPat.hash(self.st); visit::walk_pat(self, p)
- }
-
- fn visit_local(&mut self, l: &'a Local) {
- debug!("visit_local: st={:?}", self.st);
- SawLocal.hash(self.st); visit::walk_local(self, l)
- }
-
- fn visit_arm(&mut self, a: &'a Arm) {
- debug!("visit_arm: st={:?}", self.st);
- SawArm.hash(self.st); visit::walk_arm(self, a)
- }
- }
-}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Calculation of a Strict Version Hash for crates. For a lengthy
+//! comment explaining the general idea, see `librustc/middle/svh.rs`.
+
+use syntax::attr::AttributeMethods;
+use std::hash::{Hash, SipHasher, Hasher};
+use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::map::{NodeItem, NodeForeignItem};
+use rustc::hir::svh::Svh;
+use rustc::ty::TyCtxt;
+use rustc::hir::intravisit::{self, Visitor};
+
+use self::svh_visitor::StrictVersionHashVisitor;
+
+mod svh_visitor;
+
+pub trait SvhCalculate {
+ /// Calculate the SVH for an entire krate.
+ fn calculate_krate_hash(self) -> Svh;
+
+ /// Calculate the SVH for a particular item.
+ fn calculate_item_hash(self, def_id: DefId) -> u64;
+}
+
+impl<'a, 'tcx> SvhCalculate for TyCtxt<'a, 'tcx, 'tcx> {
+ fn calculate_krate_hash(self) -> Svh {
+ // FIXME (#14132): This is better than it used to be, but it still not
+ // ideal. We now attempt to hash only the relevant portions of the
+ // Crate AST as well as the top-level crate attributes. (However,
+ // the hashing of the crate attributes should be double-checked
+ // to ensure it is not incorporating implementation artifacts into
+ // the hash that are not otherwise visible.)
+
+ let crate_disambiguator = self.sess.local_crate_disambiguator();
+ let krate = self.map.krate();
+
+ // FIXME: this should use SHA1, not SipHash. SipHash is not built to
+ // avoid collisions.
+ let mut state = SipHasher::new();
+ debug!("state: {:?}", state);
+
+ // FIXME(#32753) -- at (*) we `to_le` for endianness, but is
+ // this enough, and does it matter anyway?
+ "crate_disambiguator".hash(&mut state);
+ crate_disambiguator.len().to_le().hash(&mut state); // (*)
+ crate_disambiguator.hash(&mut state);
+
+ debug!("crate_disambiguator: {:?}", crate_disambiguator);
+ debug!("state: {:?}", state);
+
+ {
+ let mut visit = StrictVersionHashVisitor::new(&mut state, self);
+ krate.visit_all_items(&mut visit);
+ }
+
+ // FIXME (#14132): This hash is still sensitive to e.g. the
+ // spans of the crate Attributes and their underlying
+ // MetaItems; we should make ContentHashable impl for those
+ // types and then use hash_content. But, since all crate
+ // attributes should appear near beginning of the file, it is
+ // not such a big deal to be sensitive to their spans for now.
+ //
+ // We hash only the MetaItems instead of the entire Attribute
+ // to avoid hashing the AttrId
+ for attr in &krate.attrs {
+ debug!("krate attr {:?}", attr);
+ attr.meta().hash(&mut state);
+ }
+
+ Svh::new(state.finish())
+ }
+
+ fn calculate_item_hash(self, def_id: DefId) -> u64 {
+ assert!(def_id.is_local());
+
+ debug!("calculate_item_hash(def_id={:?})", def_id);
+
+ let mut state = SipHasher::new();
+
+ {
+ let mut visit = StrictVersionHashVisitor::new(&mut state, self);
+ if def_id.index == CRATE_DEF_INDEX {
+ // the crate root itself is not registered in the map
+ // as an item, so we have to fetch it this way
+ let krate = self.map.krate();
+ intravisit::walk_crate(&mut visit, krate);
+ } else {
+ let node_id = self.map.as_local_node_id(def_id).unwrap();
+ match self.map.find(node_id) {
+ Some(NodeItem(item)) => visit.visit_item(item),
+ Some(NodeForeignItem(item)) => visit.visit_foreign_item(item),
+ r => bug!("calculate_item_hash: expected an item for node {} not {:?}",
+ node_id, r),
+ }
+ }
+ }
+
+ let hash = state.finish();
+
+ debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, hash);
+
+ hash
+ }
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// FIXME (#14132): Even this SVH computation still has implementation
+// artifacts: namely, the order of item declaration will affect the
+// hash computation, but for many kinds of items the order of
+// declaration should be irrelevant to the ABI.
+
+pub use self::SawExprComponent::*;
+pub use self::SawStmtComponent::*;
+use self::SawAbiComponent::*;
+use syntax::ast::{self, Name, NodeId};
+use syntax::parse::token;
+use syntax_pos::Span;
+use rustc::hir;
+use rustc::hir::*;
+use rustc::hir::def::{Def, PathResolution};
+use rustc::hir::def_id::DefId;
+use rustc::hir::intravisit as visit;
+use rustc::hir::intravisit::{Visitor, FnKind};
+use rustc::hir::map::DefPath;
+use rustc::ty::TyCtxt;
+
+use std::hash::{Hash, SipHasher};
+
+pub struct StrictVersionHashVisitor<'a, 'tcx: 'a> {
+ pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub st: &'a mut SipHasher,
+}
+
+impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
+ pub fn new(st: &'a mut SipHasher,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> Self {
+ StrictVersionHashVisitor { st: st, tcx: tcx }
+ }
+
+ fn hash_def_path(&mut self, path: &DefPath) {
+ path.deterministic_hash_to(self.tcx, self.st);
+ }
+}
+
+// To off-load the bulk of the hash-computation on #[derive(Hash)],
+// we define a set of enums corresponding to the content that our
+// crate visitor will encounter as it traverses the ast.
+//
+// The important invariant is that all of the Saw*Component enums
+// do not carry any Spans, Names, or Idents.
+//
+// Not carrying any Names/Idents is the important fix for problem
+// noted on PR #13948: using the ident.name as the basis for a
+// hash leads to unstable SVH, because ident.name is just an index
+// into intern table (i.e. essentially a random address), not
+// computed from the name content.
+//
+// With the below enums, the SVH computation is not sensitive to
+// artifacts of how rustc was invoked nor of how the source code
+// was laid out. (Or at least it is *less* sensitive.)
+
+// This enum represents the different potential bits of code the
+// visitor could encounter that could affect the ABI for the crate,
+// and assigns each a distinct tag to feed into the hash computation.
+#[derive(Hash)]
+enum SawAbiComponent<'a> {
+
+ // FIXME (#14132): should we include (some function of)
+ // ident.ctxt as well?
+ SawIdent(token::InternedString),
+ SawStructDef(token::InternedString),
+
+ SawLifetime(token::InternedString),
+ SawLifetimeDef(token::InternedString),
+
+ SawMod,
+ SawForeignItem,
+ SawItem,
+ SawTy,
+ SawGenerics,
+ SawFn,
+ SawTraitItem,
+ SawImplItem,
+ SawStructField,
+ SawVariant,
+ SawPath,
+ SawBlock,
+ SawPat,
+ SawLocal,
+ SawArm,
+ SawExpr(SawExprComponent<'a>),
+ SawStmt(SawStmtComponent),
+}
+
+/// SawExprComponent carries all of the information that we want
+/// to include in the hash that *won't* be covered by the
+/// subsequent recursive traversal of the expression's
+/// substructure by the visitor.
+///
+/// We know every Expr_ variant is covered by a variant because
+/// `fn saw_expr` maps each to some case below. Ensuring that
+/// each variant carries an appropriate payload has to be verified
+/// by hand.
+///
+/// (However, getting that *exactly* right is not so important
+/// because the SVH is just a developer convenience; there is no
+/// guarantee of collision-freedom, hash collisions are just
+/// (hopefully) unlikely.)
+#[derive(Hash)]
+pub enum SawExprComponent<'a> {
+
+ SawExprLoop(Option<token::InternedString>),
+ SawExprField(token::InternedString),
+ SawExprTupField(usize),
+ SawExprBreak(Option<token::InternedString>),
+ SawExprAgain(Option<token::InternedString>),
+
+ SawExprBox,
+ SawExprVec,
+ SawExprCall,
+ SawExprMethodCall,
+ SawExprTup,
+ SawExprBinary(hir::BinOp_),
+ SawExprUnary(hir::UnOp),
+ SawExprLit(ast::LitKind),
+ SawExprCast,
+ SawExprType,
+ SawExprIf,
+ SawExprWhile,
+ SawExprMatch,
+ SawExprClosure,
+ SawExprBlock,
+ SawExprAssign,
+ SawExprAssignOp(hir::BinOp_),
+ SawExprIndex,
+ SawExprPath(Option<usize>),
+ SawExprAddrOf(hir::Mutability),
+ SawExprRet,
+ SawExprInlineAsm(&'a hir::InlineAsm),
+ SawExprStruct,
+ SawExprRepeat,
+}
+
+fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> {
+ match *node {
+ ExprBox(..) => SawExprBox,
+ ExprVec(..) => SawExprVec,
+ ExprCall(..) => SawExprCall,
+ ExprMethodCall(..) => SawExprMethodCall,
+ ExprTup(..) => SawExprTup,
+ ExprBinary(op, _, _) => SawExprBinary(op.node),
+ ExprUnary(op, _) => SawExprUnary(op),
+ ExprLit(ref lit) => SawExprLit(lit.node.clone()),
+ ExprCast(..) => SawExprCast,
+ ExprType(..) => SawExprType,
+ ExprIf(..) => SawExprIf,
+ ExprWhile(..) => SawExprWhile,
+ ExprLoop(_, id) => SawExprLoop(id.map(|id| id.node.as_str())),
+ ExprMatch(..) => SawExprMatch,
+ ExprClosure(..) => SawExprClosure,
+ ExprBlock(..) => SawExprBlock,
+ ExprAssign(..) => SawExprAssign,
+ ExprAssignOp(op, _, _) => SawExprAssignOp(op.node),
+ ExprField(_, name) => SawExprField(name.node.as_str()),
+ ExprTupField(_, id) => SawExprTupField(id.node),
+ ExprIndex(..) => SawExprIndex,
+ ExprPath(ref qself, _) => SawExprPath(qself.as_ref().map(|q| q.position)),
+ ExprAddrOf(m, _) => SawExprAddrOf(m),
+ ExprBreak(id) => SawExprBreak(id.map(|id| id.node.as_str())),
+ ExprAgain(id) => SawExprAgain(id.map(|id| id.node.as_str())),
+ ExprRet(..) => SawExprRet,
+ ExprInlineAsm(ref a,_,_) => SawExprInlineAsm(a),
+ ExprStruct(..) => SawExprStruct,
+ ExprRepeat(..) => SawExprRepeat,
+ }
+}
+
+/// SawStmtComponent is analogous to SawExprComponent, but for statements.
+#[derive(Hash)]
+pub enum SawStmtComponent {
+ SawStmtExpr,
+ SawStmtSemi,
+}
+
+impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
+ fn visit_nested_item(&mut self, _: ItemId) {
+ // Each item is hashed independently; ignore nested items.
+ }
+
+ fn visit_variant_data(&mut self, s: &'a VariantData, name: Name,
+ g: &'a Generics, _: NodeId, _: Span) {
+ debug!("visit_variant_data: st={:?}", self.st);
+ SawStructDef(name.as_str()).hash(self.st);
+ visit::walk_generics(self, g);
+ visit::walk_struct_def(self, s)
+ }
+
+ fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
+ debug!("visit_variant: st={:?}", self.st);
+ SawVariant.hash(self.st);
+ // walk_variant does not call walk_generics, so do it here.
+ visit::walk_generics(self, g);
+ visit::walk_variant(self, v, g, item_id)
+ }
+
+ // All of the remaining methods just record (in the hash
+ // SipHasher) that the visitor saw that particular variant
+ // (with its payload), and continue walking as the default
+ // visitor would.
+ //
+ // Some of the implementations have some notes as to how one
+ // might try to make their SVH computation less discerning
+ // (e.g. by incorporating reachability analysis). But
+ // currently all of their implementations are uniform and
+ // uninteresting.
+ //
+ // (If you edit a method such that it deviates from the
+ // pattern, please move that method up above this comment.)
+
+ fn visit_name(&mut self, _: Span, name: Name) {
+ debug!("visit_name: st={:?}", self.st);
+ SawIdent(name.as_str()).hash(self.st);
+ }
+
+ fn visit_lifetime(&mut self, l: &'a Lifetime) {
+ debug!("visit_lifetime: st={:?}", self.st);
+ SawLifetime(l.name.as_str()).hash(self.st);
+ }
+
+ fn visit_lifetime_def(&mut self, l: &'a LifetimeDef) {
+ debug!("visit_lifetime_def: st={:?}", self.st);
+ SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
+ }
+
+ // We do recursively walk the bodies of functions/methods
+ // (rather than omitting their bodies from the hash) since
+ // monomorphization and cross-crate inlining generally implies
+ // that a change to a crate body will require downstream
+ // crates to be recompiled.
+ fn visit_expr(&mut self, ex: &'a Expr) {
+ debug!("visit_expr: st={:?}", self.st);
+ SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
+ }
+
+ fn visit_stmt(&mut self, s: &'a Stmt) {
+ debug!("visit_stmt: st={:?}", self.st);
+
+ // We don't want to modify the hash for decls, because
+ // they might be item decls (if they are local decls,
+ // we'll hash that fact in visit_local); but we do want to
+        // remember if this was a StmtExpr or StmtSemi (the latter
+ // had an explicit semi-colon; this affects the typing
+ // rules).
+ match s.node {
+ StmtDecl(..) => (),
+ StmtExpr(..) => SawStmt(SawStmtExpr).hash(self.st),
+ StmtSemi(..) => SawStmt(SawStmtSemi).hash(self.st),
+ }
+
+ visit::walk_stmt(self, s)
+ }
+
+ fn visit_foreign_item(&mut self, i: &'a ForeignItem) {
+ debug!("visit_foreign_item: st={:?}", self.st);
+
+ // FIXME (#14132) ideally we would incorporate privacy (or
+ // perhaps reachability) somewhere here, so foreign items
+ // that do not leak into downstream crates would not be
+ // part of the ABI.
+ SawForeignItem.hash(self.st); visit::walk_foreign_item(self, i)
+ }
+
+ fn visit_item(&mut self, i: &'a Item) {
+ debug!("visit_item: {:?} st={:?}", i, self.st);
+
+ // FIXME (#14132) ideally would incorporate reachability
+ // analysis somewhere here, so items that never leak into
+ // downstream crates (e.g. via monomorphisation or
+ // inlining) would not be part of the ABI.
+ SawItem.hash(self.st); visit::walk_item(self, i)
+ }
+
+ fn visit_mod(&mut self, m: &'a Mod, _s: Span, n: NodeId) {
+ debug!("visit_mod: st={:?}", self.st);
+ SawMod.hash(self.st); visit::walk_mod(self, m, n)
+ }
+
+ fn visit_ty(&mut self, t: &'a Ty) {
+ debug!("visit_ty: st={:?}", self.st);
+ SawTy.hash(self.st); visit::walk_ty(self, t)
+ }
+
+ fn visit_generics(&mut self, g: &'a Generics) {
+ debug!("visit_generics: st={:?}", self.st);
+ SawGenerics.hash(self.st); visit::walk_generics(self, g)
+ }
+
+ fn visit_fn(&mut self, fk: FnKind<'a>, fd: &'a FnDecl,
+ b: &'a Block, s: Span, n: NodeId) {
+ debug!("visit_fn: st={:?}", self.st);
+ SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s, n)
+ }
+
+ fn visit_trait_item(&mut self, ti: &'a TraitItem) {
+ debug!("visit_trait_item: st={:?}", self.st);
+ SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
+ }
+
+ fn visit_impl_item(&mut self, ii: &'a ImplItem) {
+ debug!("visit_impl_item: st={:?}", self.st);
+ SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
+ }
+
+ fn visit_struct_field(&mut self, s: &'a StructField) {
+ debug!("visit_struct_field: st={:?}", self.st);
+ SawStructField.hash(self.st); visit::walk_struct_field(self, s)
+ }
+
+ fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) {
+ debug!("visit_path: st={:?}", self.st);
+ SawPath.hash(self.st); visit::walk_path(self, path)
+ }
+
+ fn visit_block(&mut self, b: &'a Block) {
+ debug!("visit_block: st={:?}", self.st);
+ SawBlock.hash(self.st); visit::walk_block(self, b)
+ }
+
+ fn visit_pat(&mut self, p: &'a Pat) {
+ debug!("visit_pat: st={:?}", self.st);
+ SawPat.hash(self.st); visit::walk_pat(self, p)
+ }
+
+ fn visit_local(&mut self, l: &'a Local) {
+ debug!("visit_local: st={:?}", self.st);
+ SawLocal.hash(self.st); visit::walk_local(self, l)
+ }
+
+ fn visit_arm(&mut self, a: &'a Arm) {
+ debug!("visit_arm: st={:?}", self.st);
+ SawArm.hash(self.st); visit::walk_arm(self, a)
+ }
+
+ fn visit_id(&mut self, id: NodeId) {
+ debug!("visit_id: id={} st={:?}", id, self.st);
+ self.hash_resolve(id);
+ }
+}
+
+#[derive(Hash)]
+pub enum DefHash {
+ SawDefId,
+ SawLabel,
+ SawPrimTy,
+ SawSelfTy,
+ SawErr,
+}
+
+impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
+ fn hash_resolve(&mut self, id: ast::NodeId) {
+ // Because whether or not a given id has an entry is dependent
+ // solely on expr variant etc, we don't need to hash whether
+ // or not an entry was present (we are already hashing what
+ // variant it is above when we visit the HIR).
+
+ if let Some(def) = self.tcx.def_map.borrow().get(&id) {
+ self.hash_partial_def(def);
+ }
+
+ if let Some(traits) = self.tcx.trait_map.get(&id) {
+ traits.len().hash(self.st);
+ for candidate in traits {
+ self.hash_def_id(candidate.def_id);
+ }
+ }
+ }
+
+ fn hash_def_id(&mut self, def_id: DefId) {
+ let def_path = self.tcx.def_path(def_id);
+ self.hash_def_path(&def_path);
+ }
+
+ fn hash_partial_def(&mut self, def: &PathResolution) {
+ self.hash_def(def.base_def);
+ def.depth.hash(self.st);
+ }
+
+ fn hash_def(&mut self, def: Def) {
+ match def {
+ // Crucial point: for all of these variants, the variant +
+ // add'l data that is added is always the same if the
+ // def-id is the same, so it suffices to hash the def-id
+ Def::Fn(..) |
+ Def::Mod(..) |
+ Def::ForeignMod(..) |
+ Def::Static(..) |
+ Def::Variant(..) |
+ Def::Enum(..) |
+ Def::TyAlias(..) |
+ Def::AssociatedTy(..) |
+ Def::TyParam(..) |
+ Def::Struct(..) |
+ Def::Trait(..) |
+ Def::Method(..) |
+ Def::Const(..) |
+ Def::AssociatedConst(..) |
+ Def::Local(..) |
+ Def::Upvar(..) => {
+ DefHash::SawDefId.hash(self.st);
+ self.hash_def_id(def.def_id());
+ }
+
+ Def::Label(..) => {
+ DefHash::SawLabel.hash(self.st);
+ // we don't encode the `id` because it always refers to something
+ // within this item, so if it changed, there would have to be other
+ // changes too
+ }
+ Def::PrimTy(ref prim_ty) => {
+ DefHash::SawPrimTy.hash(self.st);
+ prim_ty.hash(self.st);
+ }
+ Def::SelfTy(..) => {
+ DefHash::SawSelfTy.hash(self.st);
+ // the meaning of Self is always the same within a
+ // given context, so we don't need to hash the other
+ // fields
+ }
+ Def::Err => {
+ DefHash::SawErr.hash(self.st);
+ }
+ }
+ }
+}
/// Data for use when recompiling the **current crate**.
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedDepGraph {
- pub nodes: Vec<DepNode<DefPathIndex>>,
pub edges: Vec<SerializedEdge>,
/// These are hashes of two things:
pub hashes: Vec<SerializedHash>,
}
+/// Represents a "reduced" dependency edge. Unlike the full dep-graph,
+/// the dep-graph we serialize contains only edges `S -> T` where the
+/// source `S` is something hashable (a HIR node or foreign metadata)
+/// and the target `T` is something significant, like a work-product.
+/// Normally, significant nodes are only those that have saved data on
+/// disk, but in unit-testing the set of significant nodes can be
+/// increased.
pub type SerializedEdge = (DepNode<DefPathIndex>, DepNode<DefPathIndex>);
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedHash {
- /// node being hashed; either a Hir or MetaData variant, in
- /// practice
- pub node: DepNode<DefPathIndex>,
+ /// def-id of thing being hashed
+ pub dep_node: DepNode<DefPathIndex>,
- /// the hash itself, computed by `calculate_item_hash`
+ /// the hash as of previous compilation, computed by code in
+ /// `hash` module
pub hash: u64,
}
use rustc::dep_graph::DepNode;
use rustc::hir::map::DefPath;
use rustc::hir::def_id::DefId;
+use rustc::middle::cstore::LOCAL_CRATE;
use rustc::ty::TyCtxt;
use rustc::util::nodemap::DefIdMap;
use std::fmt::{self, Debug};
+use std::iter::once;
+use syntax::ast;
/// Index into the DefIdDirectory
#[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq,
pub struct DefIdDirectory {
// N.B. don't use Removable here because these def-ids are loaded
// directly without remapping, so loading them should not fail.
- paths: Vec<DefPath>
+ paths: Vec<DefPath>,
+
+ // For each crate, saves the crate-name/disambiguator so that
+ // later we can match crate-numbers up again.
+ krates: Vec<CrateInfo>,
+}
+
+/// Saved identity of a crate (number, name, and disambiguator) from a
+/// previous compilation; used by `krate_still_valid` to check whether
+/// a saved crate-number still refers to the same crate.
+#[derive(Debug, RustcEncodable, RustcDecodable)]
+pub struct CrateInfo {
+    krate: ast::CrateNum,
+    name: String,
+    disambiguator: String,
}
impl DefIdDirectory {
- pub fn new() -> DefIdDirectory {
- DefIdDirectory { paths: vec![] }
+ pub fn new(krates: Vec<CrateInfo>) -> DefIdDirectory {
+ DefIdDirectory { paths: vec![], krates: krates }
+ }
+
+    /// Returns the highest crate-number among the currently loaded
+    /// crates, or `LOCAL_CRATE` if there are no extern crates.
+    fn max_current_crate(&self, tcx: TyCtxt) -> ast::CrateNum {
+        tcx.sess.cstore.crates()
+            .into_iter()
+            .max()
+            .unwrap_or(LOCAL_CRATE)
+    }
+
+    /// Returns a string form for `index`; useful for debugging
+    pub fn def_path_string(&self, tcx: TyCtxt, index: DefPathIndex) -> String {
+        let path = &self.paths[index.index as usize];
+        if self.krate_still_valid(tcx, self.max_current_crate(tcx), path.krate) {
+            path.to_string(tcx)
+        } else {
+            format!("<crate {} changed>", path.krate)
+        }
+    }
+
+    /// Checks that the saved crate-number `krate` still refers to the
+    /// same crate in the current compilation: its number must not
+    /// exceed `max_current_crate`, and the name/disambiguator recorded
+    /// in `self.krates` must match the current ones.
+    pub fn krate_still_valid(&self,
+                             tcx: TyCtxt,
+                             max_current_crate: ast::CrateNum,
+                             krate: ast::CrateNum) -> bool {
+        // Check that the crate-number still matches. For now, if it
+        // doesn't, just return false. We could do better, such as
+        // finding the new number.
+
+        if krate > max_current_crate {
+            false
+        } else {
+            let old_info = &self.krates[krate as usize];
+            assert_eq!(old_info.krate, krate);
+            let old_name: &str = &old_info.name;
+            let old_disambiguator: &str = &old_info.disambiguator;
+            let new_name: &str = &tcx.crate_name(krate);
+            let new_disambiguator: &str = &tcx.crate_disambiguator(krate);
+            old_name == new_name && old_disambiguator == new_disambiguator
+        }
+    }
    }
pub fn retrace(&self, tcx: TyCtxt) -> RetracedDefIdDirectory {
+ let max_current_crate = self.max_current_crate(tcx);
+
let ids = self.paths.iter()
- .map(|path| tcx.retrace_path(path))
+ .map(|path| {
+ if self.krate_still_valid(tcx, max_current_crate, path.krate) {
+ tcx.retrace_path(path)
+ } else {
+ debug!("crate {} changed from {:?} to {:?}/{:?}",
+ path.krate,
+ self.krates[path.krate as usize],
+ tcx.crate_name(path.krate),
+ tcx.crate_disambiguator(path.krate));
+ None
+ }
+ })
.collect();
RetracedDefIdDirectory { ids: ids }
}
impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> {
+ let mut krates: Vec<_> =
+ once(LOCAL_CRATE)
+ .chain(tcx.sess.cstore.crates())
+ .map(|krate| {
+ CrateInfo {
+ krate: krate,
+ name: tcx.crate_name(krate).to_string(),
+ disambiguator: tcx.crate_disambiguator(krate).to_string()
+ }
+ })
+ .collect();
+
+    // the result of crates() is not in order, so sort the list of
+    // crates so that we can index it directly later
+ krates.sort_by_key(|k| k.krate);
+
DefIdDirectoryBuilder {
tcx: tcx,
hash: DefIdMap(),
- directory: DefIdDirectory::new()
+ directory: DefIdDirectory::new(krates),
}
}
.clone()
}
+    /// Returns the `DefPath` stored at directory index `id`.
+    pub fn lookup_def_path(&self, id: DefPathIndex) -> &DefPath {
+        &self.directory.paths[id.index as usize]
+    }
+
+
pub fn map(&mut self, node: &DepNode<DefId>) -> DepNode<DefPathIndex> {
node.map_def(|&def_id| Some(self.add(def_id))).unwrap()
}
- pub fn into_directory(self) -> DefIdDirectory {
- self.directory
+ pub fn directory(&self) -> &DefIdDirectory {
+ &self.directory
}
}
//! Errors are reported if we are in the suitable configuration but
//! the required condition is not met.
+use super::directory::RetracedDefIdDirectory;
+use super::load::DirtyNodes;
use rustc::dep_graph::{DepGraphQuery, DepNode};
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::hir::intravisit::Visitor;
+use rustc_data_structures::fnv::FnvHashSet;
use syntax::ast::{self, Attribute, MetaItem};
use syntax::attr::AttrMetaMethods;
use syntax::parse::token::InternedString;
const LABEL: &'static str = "label";
const CFG: &'static str = "cfg";
-pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ dirty_inputs: &DirtyNodes,
+ retraced: &RetracedDefIdDirectory) {
+ // can't add `#[rustc_dirty]` etc without opting in to this feature
+ if !tcx.sess.features.borrow().rustc_attrs {
+ return;
+ }
+
let _ignore = tcx.dep_graph.in_ignore();
+ let dirty_inputs: FnvHashSet<DepNode<DefId>> =
+ dirty_inputs.iter()
+ .filter_map(|d| retraced.map(d))
+ .collect();
let query = tcx.dep_graph.query();
+ debug!("query-nodes: {:?}", query.nodes());
let krate = tcx.map.krate();
krate.visit_all_items(&mut DirtyCleanVisitor {
tcx: tcx,
query: &query,
+ dirty_inputs: dirty_inputs,
});
}
pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
query: &'a DepGraphQuery<DefId>,
+ dirty_inputs: FnvHashSet<DepNode<DefId>>,
}
impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
return true;
}
}
+ return false;
}
}
- debug!("check_config: no match found");
- return false;
+
+ self.tcx.sess.span_fatal(
+ attr.span,
+ &format!("no cfg attribute"));
}
fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
self.tcx.sess.span_fatal(attr.span, "no `label` found");
}
- fn dep_node_str(&self, dep_node: DepNode<DefId>) -> DepNode<String> {
+ fn dep_node_str(&self, dep_node: &DepNode<DefId>) -> DepNode<String> {
dep_node.map_def(|&def_id| Some(self.tcx.item_path_str(def_id))).unwrap()
}
fn assert_dirty(&self, item: &hir::Item, dep_node: DepNode<DefId>) {
debug!("assert_dirty({:?})", dep_node);
- if self.query.contains_node(&dep_node) {
- let dep_node_str = self.dep_node_str(dep_node);
- self.tcx.sess.span_err(
- item.span,
- &format!("`{:?}` found in dep graph, but should be dirty", dep_node_str));
+ match dep_node {
+ DepNode::Hir(_) => {
+ // HIR nodes are inputs, so if we are asserting that the HIR node is
+ // dirty, we check the dirty input set.
+ if !self.dirty_inputs.contains(&dep_node) {
+ let dep_node_str = self.dep_node_str(&dep_node);
+ self.tcx.sess.span_err(
+ item.span,
+ &format!("`{:?}` not found in dirty set, but should be dirty",
+ dep_node_str));
+ }
+ }
+ _ => {
+ // Other kinds of nodes would be targets, so check if
+ // the dep-graph contains the node.
+ if self.query.contains_node(&dep_node) {
+ let dep_node_str = self.dep_node_str(&dep_node);
+ self.tcx.sess.span_err(
+ item.span,
+ &format!("`{:?}` found in dep graph, but should be dirty", dep_node_str));
+ }
+ }
}
}
fn assert_clean(&self, item: &hir::Item, dep_node: DepNode<DefId>) {
debug!("assert_clean({:?})", dep_node);
- if !self.query.contains_node(&dep_node) {
- let dep_node_str = self.dep_node_str(dep_node);
- self.tcx.sess.span_err(
- item.span,
- &format!("`{:?}` not found in dep graph, but should be clean", dep_node_str));
+ match dep_node {
+ DepNode::Hir(_) => {
+ // For HIR nodes, check the inputs.
+ if self.dirty_inputs.contains(&dep_node) {
+ let dep_node_str = self.dep_node_str(&dep_node);
+ self.tcx.sess.span_err(
+ item.span,
+ &format!("`{:?}` found in dirty-node set, but should be clean",
+ dep_node_str));
+ }
+ }
+ _ => {
+ // Otherwise, check if the dep-node exists.
+ if !self.query.contains_node(&dep_node) {
+ let dep_node_str = self.dep_node_str(&dep_node);
+ self.tcx.sess.span_err(
+ item.span,
+ &format!("`{:?}` not found in dep graph, but should be clean",
+ dep_node_str));
+ }
+ }
}
}
}
}
}
- pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<u64> {
+    /// True if `hash` can produce a hash for this dep-node: HIR nodes
+    /// (inputs from the local crate) and metadata imported from other
+    /// crates.
+    pub fn is_hashable(dep_node: &DepNode<DefId>) -> bool {
+        match *dep_node {
+            DepNode::Hir(_) => true,
+            DepNode::MetaData(def_id) => !def_id.is_local(),
+            _ => false,
+        }
+    }
+
+ pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<(DefId, u64)> {
match *dep_node {
// HIR nodes (which always come from our crate) are an input:
DepNode::Hir(def_id) => {
- assert!(def_id.is_local());
- Some(self.hir_hash(def_id))
+ Some((def_id, self.hir_hash(def_id)))
}
// MetaData from other crates is an *input* to us.
// don't hash them, but we do compute a hash for them and
// save it for others to use.
DepNode::MetaData(def_id) if !def_id.is_local() => {
- Some(self.metadata_hash(def_id))
+ Some((def_id, self.metadata_hash(def_id)))
}
_ => {
}
fn hir_hash(&mut self, def_id: DefId) -> u64 {
- assert!(def_id.is_local());
+ assert!(def_id.is_local(),
+ "cannot hash HIR for non-local def-id {:?} => {:?}",
+ def_id,
+ self.tcx.item_path_str(def_id));
+
+ assert!(!self.tcx.map.is_inlined_def_id(def_id),
+ "cannot hash HIR for inlined def-id {:?} => {:?}",
+ def_id,
+ self.tcx.item_path_str(def_id));
+
// FIXME(#32753) -- should we use a distinct hash here
self.tcx.calculate_item_hash(def_id)
}
use super::hash::*;
use super::util::*;
-type DirtyNodes = FnvHashSet<DepNode<DefId>>;
+pub type DirtyNodes = FnvHashSet<DepNode<DefPathIndex>>;
type CleanEdges = Vec<(DepNode<DefId>, DepNode<DefId>)>;
let _ignore = tcx.dep_graph.in_ignore();
load_dep_graph_if_exists(tcx);
- dirty_clean::check_dirty_clean_annotations(tcx);
}
fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
};
match decode_dep_graph(tcx, &dep_graph_data, &work_products_data) {
- Ok(()) => return,
+ Ok(dirty_nodes) => dirty_nodes,
Err(err) => {
tcx.sess.warn(
&format!("decoding error in dep-graph from `{}` and `{}`: {}",
None
}
}
-
}
/// Decode the dep graph and load the edges/nodes that are still clean
let directory = try!(DefIdDirectory::decode(&mut dep_graph_decoder));
let serialized_dep_graph = try!(SerializedDepGraph::decode(&mut dep_graph_decoder));
- debug!("decode_dep_graph: directory = {:#?}", directory);
- debug!("decode_dep_graph: serialized_dep_graph = {:#?}", serialized_dep_graph);
-
// Retrace the paths in the directory to find their current location (if any).
let retraced = directory.retrace(tcx);
- debug!("decode_dep_graph: retraced = {:#?}", retraced);
-
- // Compute the set of Hir nodes whose data has changed.
- let mut dirty_nodes =
- initial_dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);
-
- debug!("decode_dep_graph: initial dirty_nodes = {:#?}", dirty_nodes);
+ // Compute the set of Hir nodes whose data has changed or which
+ // have been removed. These are "raw" source nodes, which means
+ // that they still use the original `DefPathIndex` values from the
+ // encoding, rather than having been retraced to a `DefId`. The
+ // reason for this is that this way we can include nodes that have
+ // been removed (which no longer have a `DefId` in the current
+ // compilation).
+ let dirty_raw_source_nodes = dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);
+
+    // Create a list of (raw-source-node ->
+    // retraced-target-node) edges. In the process of retracing the
+    // target nodes, we may discover that some of their def-paths no longer
+    // exist, in which case there is no need to mark the corresponding nodes
+    // as dirty (they are just not present). So this list may be smaller than
+    // the original.
+ //
+ // Note though that in the common case the target nodes are
+ // `DepNode::WorkProduct` instances, and those don't have a
+ // def-id, so they will never be considered to not exist. Instead,
+ // we do a secondary hashing step (later, in trans) when we know
+ // the set of symbols that go into a work-product: if any symbols
+ // have been removed (or added) the hash will be different and
+ // we'll ignore the work-product then.
+ let retraced_edges: Vec<_> =
+ serialized_dep_graph.edges.iter()
+ .filter_map(|&(ref raw_source_node, ref raw_target_node)| {
+ retraced.map(raw_target_node)
+ .map(|target_node| (raw_source_node, target_node))
+ })
+ .collect();
+
+ // Compute which work-products have an input that has changed or
+ // been removed. Put the dirty ones into a set.
+ let mut dirty_target_nodes = FnvHashSet();
+ for &(raw_source_node, ref target_node) in &retraced_edges {
+ if dirty_raw_source_nodes.contains(raw_source_node) {
+ if !dirty_target_nodes.contains(target_node) {
+ dirty_target_nodes.insert(target_node.clone());
+
+ if tcx.sess.opts.debugging_opts.incremental_info {
+ // It'd be nice to pretty-print these paths better than just
+ // using the `Debug` impls, but wev.
+ println!("module {:?} is dirty because {:?} changed or was removed",
+ target_node,
+ raw_source_node.map_def(|&index| {
+ Some(directory.def_path_string(tcx, index))
+ }).unwrap());
+ }
+ }
+ }
+ }
- // Find all DepNodes reachable from that core set. This loop
- // iterates repeatedly over the list of edges whose source is not
- // known to be dirty (`clean_edges`). If it finds an edge whose
- // source is dirty, it removes it from that list and adds the
- // target to `dirty_nodes`. It stops when it reaches a fixed
- // point.
- let clean_edges = compute_clean_edges(&serialized_dep_graph.edges,
- &retraced,
- &mut dirty_nodes);
+ // For work-products that are still clean, add their deps into the
+ // graph. This is needed because later we will have to save this
+ // back out again!
+ let dep_graph = tcx.dep_graph.clone();
+ for (raw_source_node, target_node) in retraced_edges {
+ if dirty_target_nodes.contains(&target_node) {
+ continue;
+ }
- // Add synthetic `foo->foo` edges for each clean node `foo` that
- // we had before. This is sort of a hack to create clean nodes in
- // the graph, since the existence of a node is a signal that the
- // work it represents need not be repeated.
- let clean_nodes =
- serialized_dep_graph.nodes
- .iter()
- .filter_map(|node| retraced.map(node))
- .filter(|node| !dirty_nodes.contains(node))
- .map(|node| (node.clone(), node));
+ let source_node = retraced.map(raw_source_node).unwrap();
- // Add nodes and edges that are not dirty into our main graph.
- let dep_graph = tcx.dep_graph.clone();
- for (source, target) in clean_edges.into_iter().chain(clean_nodes) {
- debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source, target);
+ debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source_node, target_node);
- let _task = dep_graph.in_task(target);
- dep_graph.read(source);
+ let _task = dep_graph.in_task(target_node);
+ dep_graph.read(source_node);
}
// Add in work-products that are still clean, and delete those that are
// dirty.
let mut work_product_decoder = Decoder::new(work_products_data, 0);
let work_products = try!(<Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder));
- reconcile_work_products(tcx, work_products, &dirty_nodes);
+ reconcile_work_products(tcx, work_products, &dirty_target_nodes);
+
+ dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_source_nodes, &retraced);
Ok(())
}
-fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- hashes: &[SerializedHash],
- retraced: &RetracedDefIdDirectory)
- -> DirtyNodes {
+/// Computes which of the original set of def-ids are dirty. Stored in
+/// a bit vector where the index is the DefPathIndex.
+fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ hashes: &[SerializedHash],
+ retraced: &RetracedDefIdDirectory)
+ -> DirtyNodes {
let mut hcx = HashContext::new(tcx);
- let mut items_removed = false;
let mut dirty_nodes = FnvHashSet();
- for hash in hashes {
- match hash.node.map_def(|&i| retraced.def_id(i)) {
- Some(dep_node) => {
- let current_hash = hcx.hash(&dep_node).unwrap();
- if current_hash != hash.hash {
- debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
- dep_node, current_hash, hash.hash);
- dirty_nodes.insert(dep_node);
- }
- }
- None => {
- items_removed = true;
- }
- }
- }
-
- // If any of the items in the krate have changed, then we consider
- // the meta-node `Krate` to be dirty, since that means something
- // which (potentially) read the contents of every single item.
- if items_removed || !dirty_nodes.is_empty() {
- dirty_nodes.insert(DepNode::Krate);
- }
- dirty_nodes
-}
-
-fn compute_clean_edges(serialized_edges: &[(SerializedEdge)],
- retraced: &RetracedDefIdDirectory,
- dirty_nodes: &mut DirtyNodes)
- -> CleanEdges {
- // Build up an initial list of edges. Include an edge (source,
- // target) if neither node has been removed. If the source has
- // been removed, add target to the list of dirty nodes.
- let mut clean_edges = Vec::with_capacity(serialized_edges.len());
- for &(ref serialized_source, ref serialized_target) in serialized_edges {
- if let Some(target) = retraced.map(serialized_target) {
- if let Some(source) = retraced.map(serialized_source) {
- clean_edges.push((source, target))
- } else {
- // source removed, target must be dirty
- debug!("compute_clean_edges: {:?} dirty because {:?} no longer exists",
- target, serialized_source);
- dirty_nodes.insert(target);
+ for hash in hashes {
+ if let Some(dep_node) = retraced.map(&hash.dep_node) {
+ let (_, current_hash) = hcx.hash(&dep_node).unwrap();
+ if current_hash == hash.hash {
+ continue;
}
+ debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
+ dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
+ current_hash,
+ hash.hash);
} else {
- // target removed, ignore the edge
+ debug!("initial_dirty_nodes: {:?} is dirty as it was removed",
+ hash.dep_node);
}
- }
- debug!("compute_clean_edges: dirty_nodes={:#?}", dirty_nodes);
-
- // Propagate dirty marks by iterating repeatedly over
- // `clean_edges`. If we find an edge `(source, target)` where
- // `source` is dirty, add `target` to the list of dirty nodes and
- // remove it. Keep doing this until we find no more dirty nodes.
- let mut previous_size = 0;
- while dirty_nodes.len() > previous_size {
- debug!("compute_clean_edges: previous_size={}", previous_size);
- previous_size = dirty_nodes.len();
- let mut i = 0;
- while i < clean_edges.len() {
- if dirty_nodes.contains(&clean_edges[i].0) {
- let (source, target) = clean_edges.swap_remove(i);
- debug!("compute_clean_edges: dirty source {:?} -> {:?}",
- source, target);
- dirty_nodes.insert(target);
- } else if dirty_nodes.contains(&clean_edges[i].1) {
- let (source, target) = clean_edges.swap_remove(i);
- debug!("compute_clean_edges: dirty target {:?} -> {:?}",
- source, target);
- } else {
- i += 1;
- }
- }
+ dirty_nodes.insert(hash.dep_node.clone());
}
- clean_edges
+ dirty_nodes
}
/// Go through the list of work-products produced in the previous run.
/// otherwise no longer applicable.
fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
work_products: Vec<SerializedWorkProduct>,
- dirty_nodes: &DirtyNodes) {
+ dirty_target_nodes: &FnvHashSet<DepNode<DefId>>) {
debug!("reconcile_work_products({:?})", work_products);
for swp in work_products {
- let dep_node = DepNode::WorkProduct(swp.id.clone());
- if dirty_nodes.contains(&dep_node) {
+ if dirty_target_nodes.contains(&DepNode::WorkProduct(swp.id.clone())) {
debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
delete_dirty_work_product(tcx, swp);
} else {
mod dirty_clean;
mod hash;
mod load;
+mod preds;
mod save;
mod util;
mod work_product;
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::dep_graph::{DepGraphQuery, DepNode};
+use rustc::hir::def_id::DefId;
+use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::graph::{DepthFirstTraversal, INCOMING, NodeIndex};
+
+use super::hash::*;
+
+/// A data-structure that makes it easy to enumerate the hashable
+/// predecessors of any given dep-node.
+pub struct Predecessors<'query> {
+    // - Keys: dep-nodes that may have work-products, output meta-data
+    //   nodes.
+    // - Values: transitive predecessors of the key that are hashable
+    //   (e.g., HIR nodes, input meta-data nodes)
+    pub inputs: FnvHashMap<&'query DepNode<DefId>, Vec<&'query DepNode<DefId>>>,
+
+    // - Keys: some hashable node
+    // - Values: the hash thereof
+    pub hashes: FnvHashMap<&'query DepNode<DefId>, u64>,
+}
+
+impl<'q> Predecessors<'q> {
+    /// Builds the predecessor map from a dep-graph query: walks the
+    /// graph along INCOMING edges from each node of interest
+    /// (work-products, local metadata nodes, and -- under
+    /// `-Z query-dep-graph` -- extra test-only nodes), collecting the
+    /// hashable nodes reachable that way, then computes a hash for
+    /// each distinct input found.
+    pub fn new(query: &'q DepGraphQuery<DefId>, hcx: &mut HashContext) -> Self {
+        // Find nodes for which we want to know the full set of preds
+        // (a single DFS walker is reused, reset for each start node).
+        let mut dfs = DepthFirstTraversal::new(&query.graph, INCOMING);
+        let all_nodes = query.graph.all_nodes();
+        let tcx = hcx.tcx;
+
+        let inputs: FnvHashMap<_, _> = all_nodes.iter()
+            .enumerate()
+            .filter(|&(_, node)| match node.data {
+                DepNode::WorkProduct(_) => true,
+                DepNode::MetaData(ref def_id) => def_id.is_local(),
+
+                // if -Z query-dep-graph is passed, save more extended data
+                // to enable better unit testing
+                DepNode::TypeckItemBody(_) |
+                DepNode::TransCrateItem(_) => tcx.sess.opts.debugging_opts.query_dep_graph,
+
+                _ => false,
+            })
+            .map(|(node_index, node)| {
+                dfs.reset(NodeIndex(node_index));
+                let inputs: Vec<_> = dfs.by_ref()
+                    .map(|i| &all_nodes[i.node_id()].data)
+                    .filter(|d| HashContext::is_hashable(d))
+                    .collect();
+                (&node.data, inputs)
+            })
+            .collect();
+
+        // Hash each distinct input once; the same input may feed many
+        // targets, so `entry` avoids recomputing its hash.
+        let mut hashes = FnvHashMap();
+        for input in inputs.values().flat_map(|v| v.iter().cloned()) {
+            hashes.entry(input)
+                .or_insert_with(|| hcx.hash(input).unwrap().1);
+        }
+
+        Predecessors {
+            inputs: inputs,
+            hashes: hashes,
+        }
+    }
+}
use rbml::opaque::Encoder;
use rustc::dep_graph::DepNode;
+use rustc::hir::def_id::DefId;
use rustc::middle::cstore::LOCAL_CRATE;
use rustc::session::Session;
use rustc::ty::TyCtxt;
-use rustc_serialize::{Encodable as RustcEncodable};
-use std::hash::{Hasher, SipHasher};
+use rustc_data_structures::fnv::FnvHashMap;
+use rustc_serialize::Encodable as RustcEncodable;
+use std::hash::{Hash, Hasher, SipHasher};
use std::io::{self, Cursor, Write};
use std::fs::{self, File};
use std::path::PathBuf;
use super::data::*;
use super::directory::*;
use super::hash::*;
+use super::preds::*;
use super::util::*;
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
debug!("save_dep_graph()");
let _ignore = tcx.dep_graph.in_ignore();
let sess = tcx.sess;
+ if sess.opts.incremental.is_none() {
+ return;
+ }
let mut hcx = HashContext::new(tcx);
- save_in(sess, dep_graph_path(tcx), |e| encode_dep_graph(&mut hcx, e));
- save_in(sess, metadata_hash_path(tcx, LOCAL_CRATE), |e| encode_metadata_hashes(&mut hcx, e));
+ let mut builder = DefIdDirectoryBuilder::new(tcx);
+ let query = tcx.dep_graph.query();
+ let preds = Predecessors::new(&query, &mut hcx);
+ save_in(sess,
+ dep_graph_path(tcx),
+ |e| encode_dep_graph(&preds, &mut builder, e));
+ save_in(sess,
+ metadata_hash_path(tcx, LOCAL_CRATE),
+ |e| encode_metadata_hashes(tcx, &preds, &mut builder, e));
}
pub fn save_work_products(sess: &Session, local_crate_name: &str) {
save_in(sess, path, |e| encode_work_products(sess, e));
}
-fn save_in<F>(sess: &Session,
- opt_path_buf: Option<PathBuf>,
- encode: F)
+fn save_in<F>(sess: &Session, opt_path_buf: Option<PathBuf>, encode: F)
where F: FnOnce(&mut Encoder) -> io::Result<()>
{
let path_buf = match opt_path_buf {
Some(p) => p,
- None => return
+ None => return,
};
// FIXME(#32754) lock file?
// delete the old dep-graph, if any
if path_buf.exists() {
match fs::remove_file(&path_buf) {
- Ok(()) => { }
+ Ok(()) => {}
Err(err) => {
- sess.err(
- &format!("unable to delete old dep-graph at `{}`: {}",
- path_buf.display(), err));
+ sess.err(&format!("unable to delete old dep-graph at `{}`: {}",
+ path_buf.display(),
+ err));
return;
}
}
// generate the data in a memory buffer
let mut wr = Cursor::new(Vec::new());
match encode(&mut Encoder::new(&mut wr)) {
- Ok(()) => { }
+ Ok(()) => {}
Err(err) => {
- sess.err(
- &format!("could not encode dep-graph to `{}`: {}",
- path_buf.display(), err));
+ sess.err(&format!("could not encode dep-graph to `{}`: {}",
+ path_buf.display(),
+ err));
return;
}
}
// write the data out
let data = wr.into_inner();
- match
- File::create(&path_buf)
- .and_then(|mut file| file.write_all(&data))
- {
- Ok(_) => { }
+ match File::create(&path_buf).and_then(|mut file| file.write_all(&data)) {
+ Ok(_) => {}
Err(err) => {
- sess.err(
- &format!("failed to write dep-graph to `{}`: {}",
- path_buf.display(), err));
+ sess.err(&format!("failed to write dep-graph to `{}`: {}",
+ path_buf.display(),
+ err));
return;
}
}
}
-pub fn encode_dep_graph<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
- encoder: &mut Encoder)
- -> io::Result<()>
-{
- let tcx = hcx.tcx;
- let query = tcx.dep_graph.query();
-
- let mut builder = DefIdDirectoryBuilder::new(tcx);
-
- // Create hashes for inputs.
- let hashes =
- query.nodes()
- .into_iter()
- .filter_map(|dep_node| {
- hcx.hash(&dep_node)
- .map(|hash| {
- let node = builder.map(dep_node);
- SerializedHash { node: node, hash: hash }
- })
- })
- .collect();
+pub fn encode_dep_graph(preds: &Predecessors,
+ builder: &mut DefIdDirectoryBuilder,
+ encoder: &mut Encoder)
+ -> io::Result<()> {
+ // Create a flat list of (Input, WorkProduct) edges for
+ // serialization.
+ let mut edges = vec![];
+ for (&target, sources) in &preds.inputs {
+ match *target {
+ DepNode::MetaData(ref def_id) => {
+ // Metadata *targets* are always local metadata nodes. We handle
+ // those in `encode_metadata_hashes`, which comes later.
+ assert!(def_id.is_local());
+ continue;
+ }
+ _ => (),
+ }
+ let target = builder.map(target);
+ for &source in sources {
+ let source = builder.map(source);
+ edges.push((source, target.clone()));
+ }
+ }
// Create the serialized dep-graph.
let graph = SerializedDepGraph {
- nodes: query.nodes().into_iter()
- .map(|node| builder.map(node))
- .collect(),
- edges: query.edges().into_iter()
- .map(|(source_node, target_node)| {
- let source = builder.map(source_node);
- let target = builder.map(target_node);
- (source, target)
- })
- .collect(),
- hashes: hashes,
+ edges: edges,
+ hashes: preds.hashes
+ .iter()
+ .map(|(&dep_node, &hash)| {
+ SerializedHash {
+ dep_node: builder.map(dep_node),
+ hash: hash,
+ }
+ })
+ .collect(),
};
debug!("graph = {:#?}", graph);
// Encode the directory and then the graph data.
- let directory = builder.into_directory();
- try!(directory.encode(encoder));
+ try!(builder.directory().encode(encoder));
try!(graph.encode(encoder));
Ok(())
}
-pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
- encoder: &mut Encoder)
- -> io::Result<()>
-{
- let tcx = hcx.tcx;
- let query = tcx.dep_graph.query();
+pub fn encode_metadata_hashes(tcx: TyCtxt,
+ preds: &Predecessors,
+ builder: &mut DefIdDirectoryBuilder,
+ encoder: &mut Encoder)
+ -> io::Result<()> {
+ let mut def_id_hashes = FnvHashMap();
+ let mut def_id_hash = |def_id: DefId| -> u64 {
+ *def_id_hashes.entry(def_id)
+ .or_insert_with(|| {
+ let index = builder.add(def_id);
+ let path = builder.lookup_def_path(index);
+ path.deterministic_hash(tcx)
+ })
+ };
- let serialized_hashes = {
- // Identify the `MetaData(X)` nodes where `X` is local. These are
- // the metadata items we export. Downstream crates will want to
- // see a hash that tells them whether we might have changed the
- // metadata for a given item since they last compiled.
- let meta_data_def_ids =
- query.nodes()
- .into_iter()
- .filter_map(|dep_node| match *dep_node {
- DepNode::MetaData(def_id) if def_id.is_local() => Some(def_id),
- _ => None,
- });
+ // For each `MetaData(X)` node where `X` is local, accumulate a
+ // hash. These are the metadata items we export. Downstream
+ // crates will want to see a hash that tells them whether we might
+ // have changed the metadata for a given item since they last
+ // compiled.
+ //
+ // (I initially wrote this with an iterator, but it seemed harder to read.)
+ let mut serialized_hashes = SerializedMetadataHashes { hashes: vec![] };
+ for (&target, sources) in &preds.inputs {
+ let def_id = match *target {
+ DepNode::MetaData(def_id) => {
+ assert!(def_id.is_local());
+ def_id
+ }
+ _ => continue,
+ };
// To create the hash for each item `X`, we don't hash the raw
// bytes of the metadata (though in principle we
// from the dep-graph. This corresponds to all the inputs that
// were read to construct the metadata. To create the hash for
// the metadata, we hash (the hash of) all of those inputs.
- let hashes =
- meta_data_def_ids
- .map(|def_id| {
- assert!(def_id.is_local());
- let dep_node = DepNode::MetaData(def_id);
- let mut state = SipHasher::new();
- debug!("save: computing metadata hash for {:?}", dep_node);
- for node in query.transitive_predecessors(&dep_node) {
- if let Some(hash) = hcx.hash(&node) {
- debug!("save: predecessor {:?} has hash {}", node, hash);
- state.write_u64(hash.to_le());
- } else {
- debug!("save: predecessor {:?} cannot be hashed", node);
- }
- }
- let hash = state.finish();
- debug!("save: metadata hash for {:?} is {}", dep_node, hash);
- SerializedMetadataHash {
- def_index: def_id.index,
- hash: hash,
- }
- });
-
- // Collect these up into a vector.
- SerializedMetadataHashes {
- hashes: hashes.collect()
- }
- };
+ debug!("save: computing metadata hash for {:?}", def_id);
+
+ // Create a vector containing a pair of (source-id, hash).
+ // The source-id is stored as a `DepNode<u64>`, where the u64
+ // is the det. hash of the def-path. This is convenient
+ // because we can sort this to get a stable ordering across
+ // compilations, even if the def-ids themselves have changed.
+ let mut hashes: Vec<(DepNode<u64>, u64)> = sources.iter()
+ .map(|dep_node| {
+ let hash_dep_node = dep_node.map_def(|&def_id| Some(def_id_hash(def_id))).unwrap();
+ let hash = preds.hashes[dep_node];
+ (hash_dep_node, hash)
+ })
+ .collect();
+
+ hashes.sort();
+ let mut state = SipHasher::new();
+ hashes.hash(&mut state);
+ let hash = state.finish();
+
+ debug!("save: metadata hash for {:?} is {}", def_id, hash);
+ serialized_hashes.hashes.push(SerializedMetadataHash {
+ def_index: def_id.index,
+ hash: hash,
+ });
+ }
// Encode everything.
try!(serialized_hashes.encode(encoder));
Ok(())
}
-pub fn encode_work_products(sess: &Session,
- encoder: &mut Encoder)
- -> io::Result<()>
-{
- let work_products: Vec<_> =
- sess.dep_graph.work_products()
- .iter()
- .map(|(id, work_product)| {
- SerializedWorkProduct {
- id: id.clone(),
- work_product: work_product.clone(),
- }
- })
- .collect();
+pub fn encode_work_products(sess: &Session, encoder: &mut Encoder) -> io::Result<()> {
+ let work_products: Vec<_> = sess.dep_graph
+ .work_products()
+ .iter()
+ .map(|(id, work_product)| {
+ SerializedWorkProduct {
+ id: id.clone(),
+ work_product: work_product.clone(),
+ }
+ })
+ .collect();
work_products.encode(encoder)
}
-
cfg.flag(&flag);
}
+ if env::var_os("LLVM_RUSTLLVM").is_some() {
+ cfg.flag("-DLLVM_RUSTLLVM");
+ }
+
cfg.file("../rustllvm/PassWrapper.cpp")
.file("../rustllvm/RustWrapper.cpp")
.file("../rustllvm/ArchiveWrapper.cpp")
pub fn LLVMRustHasFeature(T: TargetMachineRef,
s: *const c_char) -> bool;
+ pub fn LLVMRustPrintTargetCPUs(T: TargetMachineRef);
+ pub fn LLVMRustPrintTargetFeatures(T: TargetMachineRef);
+
pub fn LLVMRustCreateTargetMachine(Triple: *const c_char,
CPU: *const c_char,
Features: *const c_char,
rbml_w: &mut Encoder,
ii: InlinedItemRef) {
let id = match ii {
- InlinedItemRef::Item(i) => i.id,
- InlinedItemRef::Foreign(i) => i.id,
+ InlinedItemRef::Item(_, i) => i.id,
+ InlinedItemRef::Foreign(_, i) => i.id,
InlinedItemRef::TraitItem(_, ti) => ti.id,
InlinedItemRef::ImplItem(_, ii) => ii.id,
};
decode_ast(ast_doc),
dcx);
let name = match *ii {
- InlinedItem::Item(ref i) => i.name,
- InlinedItem::Foreign(ref i) => i.name,
+ InlinedItem::Item(_, ref i) => i.name,
+ InlinedItem::Foreign(_, ref i) => i.name,
InlinedItem::TraitItem(_, ref ti) => ti.name,
InlinedItem::ImplItem(_, ref ii) => ii.name
};
region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, ii);
decode_side_tables(dcx, ast_doc);
copy_item_types(dcx, ii, orig_did);
- if let InlinedItem::Item(ref i) = *ii {
+ if let InlinedItem::Item(_, ref i) = *ii {
debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
::rustc::hir::print::item_to_string(&i));
}
let ii = match ii {
// HACK we're not dropping items.
- InlinedItemRef::Item(i) => {
- InlinedItem::Item(P(fold::noop_fold_item(i.clone(), &mut fld)))
+ InlinedItemRef::Item(d, i) => {
+ InlinedItem::Item(d, P(fold::noop_fold_item(i.clone(), &mut fld)))
}
InlinedItemRef::TraitItem(d, ti) => {
InlinedItem::TraitItem(d, P(fold::noop_fold_trait_item(ti.clone(), &mut fld)))
InlinedItemRef::ImplItem(d, ii) => {
InlinedItem::ImplItem(d, P(fold::noop_fold_impl_item(ii.clone(), &mut fld)))
}
- InlinedItemRef::Foreign(i) => {
- InlinedItem::Foreign(P(fold::noop_fold_foreign_item(i.clone(), &mut fld)))
+ InlinedItemRef::Foreign(d, i) => {
+ InlinedItem::Foreign(d, P(fold::noop_fold_foreign_item(i.clone(), &mut fld)))
}
};
}
// copy the entry for the item itself
let item_node_id = match ii {
- &InlinedItem::Item(ref i) => i.id,
+ &InlinedItem::Item(_, ref i) => i.id,
&InlinedItem::TraitItem(_, ref ti) => ti.id,
&InlinedItem::ImplItem(_, ref ii) => ii.id,
- &InlinedItem::Foreign(ref fi) => fi.id
+ &InlinedItem::Foreign(_, ref fi) => fi.id
};
copy_item_type(dcx, item_node_id, orig_did);
// copy the entries of inner items
- if let &InlinedItem::Item(ref item) = ii {
+ if let &InlinedItem::Item(_, ref item) = ii {
match item.node {
hir::ItemEnum(ref def, _) => {
let orig_def = dcx.tcx.lookup_adt_def(orig_did);
#[test]
fn test_simplification() {
+ use middle::cstore::LOCAL_CRATE;
+ use rustc::hir::def_id::CRATE_DEF_INDEX;
+
let cx = mk_ctxt();
let item = quote_item!(&cx,
fn new_int_alist<B>() -> alist<isize, B> {
let cx = mk_ctxt();
with_testing_context(|lcx| {
let hir_item = lcx.lower_item(&item);
- let item_in = InlinedItemRef::Item(&hir_item);
+ let def_id = DefId { krate: LOCAL_CRATE, index: CRATE_DEF_INDEX }; // dummy
+ let item_in = InlinedItemRef::Item(def_id, &hir_item);
let (item_out, _) = simplify_ast(item_in);
- let item_exp = InlinedItem::Item(P(lcx.lower_item("e_item!(&cx,
+ let item_exp = InlinedItem::Item(def_id, P(lcx.lower_item("e_item!(&cx,
fn new_int_alist<B>() -> alist<isize, B> {
return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap())));
match (item_out, item_exp) {
- (InlinedItem::Item(item_out), InlinedItem::Item(item_exp)) => {
+ (InlinedItem::Item(_, item_out), InlinedItem::Item(_, item_exp)) => {
assert!(pprust::item_to_string(&item_out) ==
pprust::item_to_string(&item_exp));
}
.borrow_mut()
.insert(def_id, None);
}
- decoder::FoundAst::Found(&InlinedItem::Item(ref item)) => {
+ decoder::FoundAst::Found(&InlinedItem::Item(d, ref item)) => {
+ assert_eq!(d, def_id);
let inlined_root_node_id = find_inlined_item_root(item.id);
cache_inlined_item(def_id, item.id, inlined_root_node_id);
}
- decoder::FoundAst::Found(&InlinedItem::Foreign(ref item)) => {
+ decoder::FoundAst::Found(&InlinedItem::Foreign(d, ref item)) => {
+ assert_eq!(d, def_id);
let inlined_root_node_id = find_inlined_item_root(item.id);
cache_inlined_item(def_id, item.id, inlined_root_node_id);
}
grandparent_def_id,
ast_doc,
parent_did);
- if let &InlinedItem::Item(ref i) = ii {
+ if let &InlinedItem::Item(_, ref i) = ii {
return FoundAst::FoundParent(parent_did, i);
}
}
let mut decoder = reader::Decoder::new(def_key_doc);
let simple_key = def_key::DefKey::decode(&mut decoder).unwrap();
let name = reader::maybe_get_doc(item_doc, tag_paths_data_name).map(|name| {
- token::intern(name.as_str_slice())
+ token::intern(name.as_str_slice()).as_str()
});
def_key::recover_def_key(simple_key, name)
}
use rustc::hir::def_id::DefIndex;
use rustc::hir::map as hir_map;
-use syntax::ast::Name;
+use syntax::parse::token::InternedString;
#[derive(RustcEncodable, RustcDecodable)]
pub struct DefKey {
}
}
-pub fn recover_def_key(key: DefKey, name: Option<Name>) -> hir_map::DefKey {
+pub fn recover_def_key(key: DefKey, name: Option<InternedString>) -> hir_map::DefKey {
let data = hir_map::DisambiguatedDefPathData {
data: recover_def_path_data(key.disambiguated_data.data, name),
disambiguator: key.disambiguated_data.disambiguator,
}
}
-fn recover_def_path_data(data: DefPathData, name: Option<Name>) -> hir_map::DefPathData {
+fn recover_def_path_data(data: DefPathData, name: Option<InternedString>) -> hir_map::DefPathData {
match data {
DefPathData::CrateRoot => hir_map::DefPathData::CrateRoot,
DefPathData::Misc => hir_map::DefPathData::Misc,
}
fn encode_mir(ecx: &EncodeContext, rbml_w: &mut Encoder, node_id: NodeId) {
- if let Some(mir) = ecx.mir_map.map.get(&node_id) {
+ let def_id = ecx.tcx.map.local_def_id(node_id);
+ if let Some(mir) = ecx.mir_map.map.get(&def_id) {
rbml_w.start_tag(tag_mir as usize);
rbml_w.emit_opaque(|opaque_encoder| {
tls::enter_encoding_context(ecx, opaque_encoder, |_, opaque_encoder| {
encode_bounds_and_type_for_item(rbml_w, ecx, index, item.id);
encode_name(rbml_w, item.name);
encode_attributes(rbml_w, &item.attrs);
- encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(item));
+ encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(def_id, item));
encode_mir(ecx, rbml_w, item.id);
encode_visibility(rbml_w, vis);
encode_stability(rbml_w, stab);
encode_attributes(rbml_w, &item.attrs);
let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs);
if needs_inline || constness == hir::Constness::Const {
- encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(item));
+ encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(def_id, item));
encode_mir(ecx, rbml_w, item.id);
}
encode_constness(rbml_w, constness);
for v in &enum_definition.variants {
encode_variant_id(rbml_w, ecx.tcx.map.local_def_id(v.node.data.id()));
}
- encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(item));
+ encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(def_id, item));
encode_mir(ecx, rbml_w, item.id);
// Encode inherent implementations for this enumeration.
needs to know*/
encode_struct_fields(rbml_w, variant);
- encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(item));
+ encode_inlined_item(ecx, rbml_w, InlinedItemRef::Item(def_id, item));
encode_mir(ecx, rbml_w, item.id);
// Encode inherent implementations for this structure.
encode_bounds_and_type_for_item(rbml_w, ecx, index, nitem.id);
encode_name(rbml_w, nitem.name);
if abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic {
- encode_inlined_item(ecx, rbml_w, InlinedItemRef::Foreign(nitem));
+ encode_inlined_item(ecx, rbml_w, InlinedItemRef::Foreign(def_id, nitem));
encode_mir(ecx, rbml_w, nitem.id);
}
encode_attributes(rbml_w, &nitem.attrs);
ecx.tcx.closure_kind(def_id).encode(rbml_w).unwrap();
rbml_w.end_tag();
- assert!(ecx.mir_map.map.contains_key(&expr.id));
+ assert!(ecx.mir_map.map.contains_key(&def_id));
encode_mir(ecx, rbml_w, expr.id);
rbml_w.end_tag();
// except according to those terms.
use dot;
+use rustc::hir::def_id::DefId;
use rustc::mir::repr::*;
+use rustc::mir::mir_map::MirMap;
use rustc::ty::{self, TyCtxt};
use std::fmt::Debug;
use std::io::{self, Write};
/// Write a graphviz DOT graph of a list of MIRs.
pub fn write_mir_graphviz<'a, 'b, 'tcx, W, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
- iter: I, w: &mut W)
+ iter: I,
+ mir_map: &MirMap<'tcx>,
+ w: &mut W)
-> io::Result<()>
-where W: Write, I: Iterator<Item=(&'a NodeId, &'a Mir<'a>)> {
- for (&nodeid, mir) in iter {
+ where W: Write, I: Iterator<Item=DefId>
+{
+ for def_id in iter {
+ let nodeid = tcx.map.as_local_node_id(def_id).unwrap();
+ let mir = &mir_map.map[&def_id];
+
writeln!(w, "digraph Mir_{} {{", nodeid)?;
// Global graph properties
use rustc::middle::const_val::ConstVal;
use rustc_const_eval as const_eval;
use rustc_data_structures::indexed_vec::Idx;
+use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::hir::intravisit::FnKind;
use rustc::hir::map::blocks::FnLikeNode;
MirSource::Promoted(..) => bug!()
};
- let attrs = infcx.tcx.map.attrs(src.item_id());
+ let src_node_id = src.item_id();
+
+ // We are going to be accessing various tables
+ // generated by TypeckItemBody; we also assume
+ // that the body passes type check. These tables
+ // are not individually tracked, so just register
+ // a read here.
+ let src_def_id = infcx.tcx.map.local_def_id(src_node_id);
+ infcx.tcx.dep_graph.read(DepNode::TypeckItemBody(src_def_id));
+
+ let attrs = infcx.tcx.map.attrs(src_node_id);
// Some functions always have overflow checks enabled,
// however, they may not get codegen'd, depending on
use build;
use rustc::dep_graph::DepNode;
+use rustc::hir::def_id::DefId;
use rustc::mir::repr::Mir;
use rustc::mir::transform::MirSource;
use rustc::mir::visit::MutVisitor;
use rustc::traits::ProjectionMode;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::Substs;
-use rustc::util::nodemap::NodeMap;
use rustc::hir;
use rustc::hir::intravisit::{self, FnKind, Visitor};
use syntax::ast;
use std::mem;
pub fn build_mir_for_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MirMap<'tcx> {
- let mut map = MirMap {
- map: NodeMap(),
- };
+ let mut map = MirMap::new(tcx.dep_graph.clone());
{
let mut dump = BuildMir {
tcx: tcx,
map: &mut map,
};
- tcx.visit_all_items_in_krate(DepNode::MirMapConstruction, &mut dump);
+ tcx.visit_all_items_in_krate(DepNode::Mir, &mut dump);
}
map
}
/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Cx<'b, 'gcx, 'tcx>).
struct CxBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
src: MirSource,
+ def_id: DefId,
infcx: InferCtxtBuilder<'a, 'gcx, 'tcx>,
map: &'a mut MirMap<'gcx>,
}
impl<'a, 'gcx, 'tcx> BuildMir<'a, 'gcx> {
fn cx<'b>(&'b mut self, src: MirSource) -> CxBuilder<'b, 'gcx, 'tcx> {
let param_env = ty::ParameterEnvironment::for_item(self.tcx, src.item_id());
+ let def_id = self.tcx.map.local_def_id(src.item_id());
CxBuilder {
src: src,
infcx: self.tcx.infer_ctxt(None, Some(param_env), ProjectionMode::AnyFinal),
+ def_id: def_id,
map: self.map
}
}
mir
});
- assert!(self.map.map.insert(src.item_id(), mir).is_none())
+ assert!(self.map.map.insert(self.def_id, mir).is_none())
}
}
use build::{Location, ScopeAuxiliaryVec, ScopeId};
use rustc::hir;
+use rustc::hir::def_id::DefId;
use rustc::mir::repr::*;
+use rustc::mir::mir_map::MirMap;
use rustc::mir::transform::MirSource;
use rustc::ty::{self, TyCtxt};
use rustc_data_structures::fnv::FnvHashMap;
use std::fmt::Display;
use std::fs;
use std::io::{self, Write};
-use syntax::ast::NodeId;
use std::path::{PathBuf, Path};
const INDENT: &'static str = " ";
/// Write out a human-readable textual representation for the given MIR.
pub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
iter: I,
+ mir_map: &MirMap<'tcx>,
w: &mut Write)
-> io::Result<()>
- where I: Iterator<Item=(&'a NodeId, &'a Mir<'tcx>)>, 'tcx: 'a
+ where I: Iterator<Item=DefId>, 'tcx: 'a
{
let mut first = true;
- for (&id, mir) in iter {
+ for def_id in iter {
+ let mir = &mir_map.map[&def_id];
+
if first {
first = false;
} else {
writeln!(w, "")?;
}
+ let id = tcx.map.as_local_node_id(def_id).unwrap();
let src = MirSource::from_node(tcx, id);
write_mir_fn(tcx, src, mir, w, None)?;
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
+use rustc::dep_graph::DepNode;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::hir::intravisit::FnKind;
if !allow {
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
- span_err!(self.tcx.sess, self.span, E0017,
- "references in {}s may only refer \
- to immutable values", self.mode);
+ struct_span_err!(self.tcx.sess, self.span, E0017,
+ "references in {}s may only refer \
+ to immutable values", self.mode)
+ .span_label(self.span, &format!("{}s require immutable values",
+ self.mode))
+ .emit();
}
}
} else {
let extern_mir;
let param_env_and_mir = if def_id.is_local() {
- let node_id = tcx.map.as_local_node_id(def_id).unwrap();
- mir_map.and_then(|map| map.map.get(&node_id)).map(|mir| {
+ mir_map.and_then(|map| map.map.get(&def_id)).map(|mir| {
+ let node_id = tcx.map.as_local_node_id(def_id).unwrap();
(ty::ParameterEnvironment::for_item(tcx, node_id), mir)
})
} else if let Some(mir) = tcx.sess.cstore.maybe_get_item_mir(tcx, def_id) {
// First, visit `const` items, potentially recursing, to get
// accurate MUTABLE_INTERIOR and NEEDS_DROP qualifications.
- for &id in map.map.keys() {
- let def_id = tcx.map.local_def_id(id);
- let _task = tcx.dep_graph.in_task(self.dep_node(def_id));
+ let keys = map.map.keys();
+ for &def_id in &keys {
+ let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+ let id = tcx.map.as_local_node_id(def_id).unwrap();
let src = MirSource::from_node(tcx, id);
if let MirSource::Const(_) = src {
qualify_const_item_cached(tcx, &mut qualif_map, Some(map), def_id);
// Then, handle everything else, without recursing,
// as the MIR map is not shared, since promotion
// in functions (including `const fn`) mutates it.
- for (&id, mir) in &mut map.map {
- let def_id = tcx.map.local_def_id(id);
- let _task = tcx.dep_graph.in_task(self.dep_node(def_id));
+ for &def_id in &keys {
+ let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+ let id = tcx.map.as_local_node_id(def_id).unwrap();
let src = MirSource::from_node(tcx, id);
let mode = match src {
MirSource::Fn(_) => {
};
let param_env = ty::ParameterEnvironment::for_item(tcx, id);
+ let mir = map.map.get_mut(&def_id).unwrap();
for hook in &mut *hooks {
hook.on_mir_pass(tcx, src, mir, self, false);
}
//! This pass type-checks the MIR to ensure it is not broken.
#![allow(unreachable_code)]
-use rustc::dep_graph::DepNode;
-use rustc::hir::def_id::DefId;
use rustc::infer::{self, InferCtxt, InferOk};
use rustc::traits::{self, ProjectionMode};
use rustc::ty::fold::TypeFoldable;
}
impl Pass for TypeckMir {
- fn dep_node(&self, def_id: DefId) -> DepNode<DefId> {
- DepNode::MirTypeck(def_id)
- }
}
E0130,
"patterns aren't allowed in foreign function \
declarations");
+ err.span_label(span, &format!("pattern not allowed in foreign function"));
if is_recent {
err.span_note(span,
"this is a recent error, see issue #35203 for more details");
//
// There will be an anonymous module created around `g` with the ID of the
// entry block for `f`.
- pub module_map: NodeMap<Module<'a>>,
+ module_map: NodeMap<Module<'a>>,
// Whether or not to print error messages. Can be set to true
// when getting additional info for error message suggestions,
use rustc::ty::item_path::{self, ItemPathBuffer, RootMode};
use rustc::hir::map::definitions::{DefPath, DefPathData};
-use std::fmt::Write;
use syntax::attr;
use syntax::parse::token::{self, InternedString};
use serialize::hex::ToHex;
pub fn def_id_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> String {
let def_path = tcx.def_path(def_id);
- def_path_to_string(tcx, &def_path)
-}
-
-fn def_path_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_path: &DefPath) -> String {
- let mut s = String::with_capacity(def_path.data.len() * 16);
-
- if def_path.krate == cstore::LOCAL_CRATE {
- s.push_str(&tcx.crate_name(def_path.krate));
- } else {
- s.push_str(&tcx.sess.cstore.original_crate_name(def_path.krate));
- }
- s.push_str("/");
- s.push_str(&tcx.crate_disambiguator(def_path.krate));
-
- for component in &def_path.data {
- write!(s,
- "::{}[{}]",
- component.data.as_interned_str(),
- component.disambiguator)
- .unwrap();
- }
-
- s
+ def_path.to_string(tcx)
}
fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
// the main symbol name is not necessarily unique; hash in the
// compiler's internal def-path, guaranteeing each symbol has a
// truly unique path
- hash_state.input_str(&def_path_to_string(tcx, def_path));
+ hash_state.input_str(&def_path.to_string(tcx));
// Include the main item-type. Note that, in this case, the
// assertions about `needs_subst` may not hold, but this item-type
use std::thread;
use libc::{c_uint, c_void};
+pub const RELOC_MODEL_ARGS : [(&'static str, llvm::RelocMode); 4] = [
+ ("pic", llvm::RelocMode::PIC),
+ ("static", llvm::RelocMode::Static),
+ ("default", llvm::RelocMode::Default),
+ ("dynamic-no-pic", llvm::RelocMode::DynamicNoPic),
+];
+
+pub const CODE_GEN_MODEL_ARGS : [(&'static str, llvm::CodeModel); 5] = [
+ ("default", llvm::CodeModel::Default),
+ ("small", llvm::CodeModel::Small),
+ ("kernel", llvm::CodeModel::Kernel),
+ ("medium", llvm::CodeModel::Medium),
+ ("large", llvm::CodeModel::Large),
+];
+
pub fn llvm_err(handler: &errors::Handler, msg: String) -> ! {
match llvm::last_error() {
Some(err) => panic!(handler.fatal(&format!("{}: {}", msg, err))),
None => &sess.target.target.options.code_model[..],
};
- let code_model = match code_model_arg {
- "default" => llvm::CodeModel::Default,
- "small" => llvm::CodeModel::Small,
- "kernel" => llvm::CodeModel::Kernel,
- "medium" => llvm::CodeModel::Medium,
- "large" => llvm::CodeModel::Large,
+ let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
+ |&&arg| arg.0 == code_model_arg) {
+ Some(x) => x.1,
_ => {
sess.err(&format!("{:?} is not a valid code model",
sess.opts
work_items.push(work);
}
+ if sess.opts.debugging_opts.incremental_info {
+ dump_incremental_data(&trans);
+ }
+
// Process the work items, optionally using worker threads.
// NOTE: This code is not really adapted to incremental compilation where
// the compiler decides the number of codegen units (and will
}
}
+fn dump_incremental_data(trans: &CrateTranslation) {
+ let mut reuse = 0;
+ for mtrans in trans.modules.iter() {
+ match mtrans.source {
+ ModuleSource::Preexisting(..) => reuse += 1,
+ ModuleSource::Translated(..) => (),
+ }
+ }
+ println!("incremental: re-using {} out of {} modules", reuse, trans.modules.len());
+}
+
struct WorkItem {
mtrans: ModuleTranslation,
config: ModuleConfig,
}), ..}) => ty,
_ => ctor_ty
}.ty_adt_def().unwrap();
- let variant_def_id = if ccx.tcx().map.is_inlined(inlined_vid) {
+ let variant_def_id = if ccx.tcx().map.is_inlined_node_id(inlined_vid) {
ccx.defid_for_inlined_node(inlined_vid).unwrap()
} else {
ccx.tcx().map.local_def_id(inlined_vid)
.get(TransItem::Static(id))
.expect("Local statics should always be in the SymbolMap");
// Make sure that this is never executed for something inlined.
- assert!(!ccx.tcx().map.is_inlined(id));
+ assert!(!ccx.tcx().map.is_inlined_node_id(id));
let defined_in_current_codegen_unit = ccx.codegen_unit()
.items()
check_overflow: bool,
check_drop_flag_for_sanity: bool,
mir_map: &'a MirMap<'tcx>,
- mir_cache: RefCell<DefIdMap<Rc<mir::Mir<'tcx>>>>,
+ mir_cache: RefCell<DepTrackingMap<MirCache<'tcx>>>,
use_dll_storage_attrs: bool,
}
}
+// Cache for mir loaded from metadata
+struct MirCache<'tcx> {
+ data: PhantomData<&'tcx ()>
+}
+
+impl<'tcx> DepTrackingMapConfig for MirCache<'tcx> {
+ type Key = DefId;
+ type Value = Rc<mir::Mir<'tcx>>;
+ fn to_dep_node(key: &DefId) -> DepNode<DefId> {
+ DepNode::Mir(*key)
+ }
+}
+
/// This list owns a number of LocalCrateContexts and binds them to their common
/// SharedCrateContext. This type just exists as a convenience, something to
/// pass around all LocalCrateContexts with and get an iterator over them.
None => &sess.target.target.options.relocation_model[..],
};
- match reloc_model_arg {
- "pic" => llvm::RelocMode::PIC,
- "static" => llvm::RelocMode::Static,
- "default" => llvm::RelocMode::Default,
- "dynamic-no-pic" => llvm::RelocMode::DynamicNoPic,
+ match ::back::write::RELOC_MODEL_ARGS.iter().find(
+ |&&arg| arg.0 == reloc_model_arg) {
+ Some(x) => x.1,
_ => {
sess.err(&format!("{:?} is not a valid relocation mode",
sess.opts
.cg
- .relocation_model));
+                                  .relocation_model));
sess.abort_if_errors();
bug!();
}
symbol_hasher: RefCell::new(symbol_hasher),
tcx: tcx,
mir_map: mir_map,
- mir_cache: RefCell::new(DefIdMap()),
+ mir_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
stats: Stats {
n_glues_created: Cell::new(0),
n_null_glues: Cell::new(0),
pub fn get_mir(&self, def_id: DefId) -> Option<CachedMir<'b, 'tcx>> {
if def_id.is_local() {
- let node_id = self.tcx.map.as_local_node_id(def_id).unwrap();
- self.mir_map.map.get(&node_id).map(CachedMir::Ref)
+ self.mir_map.map.get(&def_id).map(CachedMir::Ref)
} else {
if let Some(mir) = self.mir_cache.borrow().get(&def_id).cloned() {
return Some(CachedMir::Owned(mir));
// First, find out the 'real' def_id of the type. Items inlined from
// other crates have to be mapped back to their source.
let def_id = if let Some(node_id) = cx.tcx().map.as_local_node_id(def_id) {
- if cx.tcx().map.is_inlined(node_id) {
+ if cx.tcx().map.is_inlined_node_id(node_id) {
// The given def_id identifies the inlined copy of a
// type definition, let's take the source of the copy.
cx.defid_for_inlined_node(node_id).unwrap()
// crate should already contain debuginfo for it. More importantly, the
// global might not even exist in un-inlined form anywhere which would lead
// to a linker errors.
- if cx.tcx().map.is_inlined(node_id) {
+ if cx.tcx().map.is_inlined_node_id(node_id) {
return;
}
// that the incoming edges to a particular fn are from a
// particular set.
- self.register_reads(ccx);
-
match *self {
TransItem::Static(node_id) => {
let def_id = ccx.tcx().map.local_def_id(node_id);
ccx.codegen_unit().name());
}
- /// If necessary, creates a subtask for trans'ing a particular item and registers reads on
- /// `TypeckItemBody` and `Hir`.
- fn register_reads(&self, ccx: &CrateContext<'a, 'tcx>) {
- let tcx = ccx.tcx();
- let def_id = match *self {
- TransItem::Static(node_id) => {
- tcx.map.local_def_id(node_id)
- }
- TransItem::Fn(instance) => {
- if let Some(node) = tcx.map.as_local_node_id(instance.def) {
- if let hir_map::Node::NodeItem(_) = tcx.map.get(node) {
- // This already is a "real" item
- instance.def
- } else {
- // Get the enclosing item and register a read on it
- tcx.map.get_parent_did(node)
- }
- } else {
- // Translating an inlined item from another crate? Don't track anything.
- return;
- }
- }
- TransItem::DropGlue(_) => {
- // Nothing to track for drop glue
- return;
- }
- };
-
- tcx.dep_graph.with_task(DepNode::TransCrateItem(def_id), || {
- tcx.dep_graph.read(DepNode::Hir(def_id));
-
- // We are going to be accessing various tables
- // generated by TypeckItemBody; we also assume
- // that the body passes type check. These tables
- // are not individually tracked, so just register
- // a read here.
- tcx.dep_graph.read(DepNode::TypeckItemBody(def_id));
- });
- }
-
pub fn predefine(&self,
ccx: &CrateContext<'a, 'tcx>,
linkage: llvm::Linkage) {
self.convert_angle_bracketed_parameters(rscope, span, decl_generics, data)
}
hir::ParenthesizedParameters(..) => {
- span_err!(tcx.sess, span, E0214,
- "parenthesized parameters may only be used with a trait");
+ struct_span_err!(tcx.sess, span, E0214,
+ "parenthesized parameters may only be used with a trait")
+ .span_label(span, &format!("only traits may use parentheses"))
+ .emit();
+
let ty_param_defs = decl_generics.types.get_slice(TypeSpace);
(Substs::empty(),
ty_param_defs.iter().map(|_| tcx.types.err).collect(),
}
for (trait_def_id, name) in associated_types {
- span_err!(tcx.sess, span, E0191,
+ struct_span_err!(tcx.sess, span, E0191,
"the value of the associated type `{}` (from the trait `{}`) must be specified",
name,
- tcx.item_path_str(trait_def_id));
+ tcx.item_path_str(trait_def_id))
+ .span_label(span, &format!(
+ "missing associated type `{}` value", name))
+ .emit();
}
tcx.mk_trait(object.principal, object.bounds)
}
if bounds.len() > 1 {
- let mut err = struct_span_err!(self.tcx().sess, span, E0221,
- "ambiguous associated type `{}` in bounds of `{}`",
- assoc_name,
- ty_param_name);
+ let mut err = struct_span_err!(
+ self.tcx().sess, span, E0221,
+ "ambiguous associated type `{}` in bounds of `{}`",
+ assoc_name,
+ ty_param_name);
+ err.span_label(span, &format!("ambiguous associated type `{}`", assoc_name));
for bound in &bounds {
span_note!(&mut err, span,
return self.tcx().types.err;
}
_ => {
- span_err!(tcx.sess, span, E0248,
- "found value `{}` used as a type",
- tcx.item_path_str(def.def_id()));
+ struct_span_err!(tcx.sess, span, E0248,
+ "found value `{}` used as a type",
+ tcx.item_path_str(def.def_id()))
+ .span_label(span, &format!("value used as a type"))
+ .emit();
return self.tcx().types.err;
}
}
self.check_pat(&subpat, field_ty);
}
} else {
- span_err!(tcx.sess, pat.span, E0023,
- "this pattern has {} field{s}, but the corresponding {} has {} field{s}",
- subpats.len(), def.kind_name(), variant.fields.len(),
- s = if variant.fields.len() == 1 {""} else {"s"});
+ let subpats_ending = if subpats.len() == 1 {
+ ""
+ } else {
+ "s"
+ };
+ let fields_ending = if variant.fields.len() == 1 {
+ ""
+ } else {
+ "s"
+ };
+ struct_span_err!(tcx.sess, pat.span, E0023,
+ "this pattern has {} field{}, but the corresponding {} has {} field{}",
+ subpats.len(), subpats_ending, def.kind_name(),
+ variant.fields.len(), fields_ending)
+ .span_label(pat.span, &format!("expected {} field{}, found {}",
+ variant.fields.len(), fields_ending, subpats.len()))
+ .emit();
on_error();
}
}
field_map.get(&field.name)
.map(|f| self.field_ty(span, f, substs))
.unwrap_or_else(|| {
- span_err!(tcx.sess, span, E0026,
- "struct `{}` does not have a field named `{}`",
- tcx.item_path_str(variant.did),
- field.name);
+ struct_span_err!(tcx.sess, span, E0026,
+ "struct `{}` does not have a field named `{}`",
+ tcx.item_path_str(variant.did),
+ field.name)
+ .span_label(span,
+ &format!("struct `{}` does not have field `{}`",
+ tcx.item_path_str(variant.did),
+ field.name))
+ .emit();
+
tcx.types.err
})
}
use rustc::traits::{self, ProjectionMode};
use rustc::ty::error::ExpectedFound;
use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace};
+use rustc::hir::map::Node;
+use rustc::hir::{ImplItemKind, TraitItem_};
use syntax::ast;
use syntax_pos::Span;
// Compute skolemized form of impl and trait const tys.
let impl_ty = impl_c.ty.subst(tcx, impl_to_skol_substs);
let trait_ty = trait_c.ty.subst(tcx, &trait_to_skol_substs);
- let origin = TypeOrigin::Misc(impl_c_span);
+ let mut origin = TypeOrigin::Misc(impl_c_span);
let err = infcx.commit_if_ok(|_| {
// There is no "body" here, so just pass dummy id.
debug!("checking associated const for compatibility: impl ty {:?}, trait ty {:?}",
impl_ty,
trait_ty);
+
+ // Locate the Span containing just the type of the offending impl
+ if let Some(impl_trait_node) = tcx.map.get_if_local(impl_c.def_id) {
+ if let Node::NodeImplItem(impl_trait_item) = impl_trait_node {
+ if let ImplItemKind::Const(ref ty, _) = impl_trait_item.node {
+ origin = TypeOrigin::Misc(ty.span);
+ }
+ }
+ }
+
let mut diag = struct_span_err!(
tcx.sess, origin.span(), E0326,
"implemented const `{}` has an incompatible type for trait",
trait_c.name
);
+
+ // Add a label to the Span containing just the type of the item
+ if let Some(orig_trait_node) = tcx.map.get_if_local(trait_c.def_id) {
+ if let Node::NodeTraitItem(orig_trait_item) = orig_trait_node {
+ if let TraitItem_::ConstTraitItem(ref ty, _) = orig_trait_item.node {
+ diag.span_label(ty.span, &format!("original trait requirement"));
+ }
+ }
+ }
+
infcx.note_type_err(
&mut diag, origin,
Some(infer::ValuePairs::Types(ExpectedFound {
-> Result<(), MethodError<'tcx>>
{
let mut duplicates = HashSet::new();
- let opt_applicable_traits = self.ccx.trait_map.get(&expr_id);
+ let opt_applicable_traits = self.tcx.trait_map.get(&expr_id);
if let Some(applicable_traits) = opt_applicable_traits {
for trait_candidate in applicable_traits {
let trait_did = trait_candidate.def_id;
},
rcvr_ty);
- // If the item has the name of a field, give a help note
- if let (&ty::TyStruct(def, substs), Some(expr)) = (&rcvr_ty.sty, rcvr_expr) {
- if let Some(field) = def.struct_variant().find_field_named(item_name) {
- let expr_string = match tcx.sess.codemap().span_to_snippet(expr.span) {
- Ok(expr_string) => expr_string,
- _ => "s".into() // Default to a generic placeholder for the
- // expression when we can't generate a string
- // snippet
- };
-
- let field_ty = field.ty(tcx, substs);
-
- if self.is_fn_ty(&field_ty, span) {
- err.span_note(span,
- &format!("use `({0}.{1})(...)` if you meant to call \
- the function stored in the `{1}` field",
- expr_string, item_name));
- } else {
- err.span_note(span, &format!("did you mean to write `{0}.{1}`?",
- expr_string, item_name));
+ // If the method name is the name of a field with a function or closure type,
+ // give a helping note that it has to be called as (x.f)(...).
+ if let Some(expr) = rcvr_expr {
+ for (ty, _) in self.autoderef(span, rcvr_ty) {
+ if let ty::TyStruct(def, substs) = ty.sty {
+ if let Some(field) = def.struct_variant().find_field_named(item_name) {
+ let snippet = tcx.sess.codemap().span_to_snippet(expr.span);
+ let expr_string = match snippet {
+ Ok(expr_string) => expr_string,
+ _ => "s".into() // Default to a generic placeholder for the
+ // expression when we can't generate a
+ // string snippet
+ };
+
+ let field_ty = field.ty(tcx, substs);
+
+ if self.is_fn_ty(&field_ty, span) {
+ err.span_note(span, &format!(
+ "use `({0}.{1})(...)` if you meant to call the function \
+ stored in the `{1}` field",
+ expr_string, item_name));
+ } else {
+ err.span_note(span, &format!(
+ "did you mean to write `{0}.{1}`?",
+ expr_string, item_name));
+ }
+ break;
+ }
}
}
}
// Check for duplicate discriminant values
if let Some(i) = disr_vals.iter().position(|&x| x == current_disr_val) {
- let mut err = struct_span_err!(ccx.tcx.sess, v.span, E0081,
- "discriminant value `{}` already exists", disr_vals[i]);
let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
- err.span_label(ccx.tcx.map.span(variant_i_node_id),
- &format!("first use of `{}`", disr_vals[i]));
- err.span_label(v.span , &format!("enum already has `{}`", disr_vals[i]));
- err.emit();
+ let variant_i = ccx.tcx.map.expect_variant(variant_i_node_id);
+ let i_span = match variant_i.node.disr_expr {
+ Some(ref expr) => expr.span,
+ None => ccx.tcx.map.span(variant_i_node_id)
+ };
+ let span = match v.node.disr_expr {
+ Some(ref expr) => expr.span,
+ None => v.span
+ };
+ struct_span_err!(ccx.tcx.sess, span, E0081,
+ "discriminant value `{}` already exists", disr_vals[i])
+ .span_label(i_span, &format!("first use of `{}`", disr_vals[i]))
+ .span_label(span , &format!("enum already has `{}`", disr_vals[i]))
+ .emit();
}
disr_vals.push(current_disr_val);
}
if i < type_count {
substs.types.push(space, t);
} else if i == type_count {
- span_err!(self.tcx.sess, typ.span, E0087,
- "too many type parameters provided: \
- expected at most {} parameter{}, \
- found {} parameter{}",
- type_count,
- if type_count == 1 {""} else {"s"},
- data.types.len(),
- if data.types.len() == 1 {""} else {"s"});
+ struct_span_err!(self.tcx.sess, typ.span, E0087,
+ "too many type parameters provided: \
+ expected at most {} parameter{}, \
+ found {} parameter{}",
+ type_count,
+ if type_count == 1 {""} else {"s"},
+ data.types.len(),
+ if data.types.len() == 1 {""} else {"s"})
+ .span_label(typ.span , &format!("expected {} parameter{}",
+ type_count,
+ if type_count == 1 {""} else {"s"})).emit();
substs.types.truncate(space, 0);
break;
}
}
Err(CopyImplementationError::InfrigingVariant(name)) => {
- struct_span_err!(tcx.sess, span, E0205,
- "the trait `Copy` may not be \
- implemented for this type")
- .span_label(span, &format!("variant \
- `{}` does not implement `Copy`",
- name))
- .emit()
+ let item = tcx.map.expect_item(impl_node_id);
+ let span = if let ItemImpl(_, _, _, Some(ref tr), _, _) = item.node {
+ tr.path.span
+ } else {
+ span
+ };
+
+ struct_span_err!(tcx.sess, span, E0205,
+ "the trait `Copy` may not be implemented for this type")
+ .span_label(span, &format!("variant `{}` does not implement `Copy`",
+ name))
+ .emit()
}
Err(CopyImplementationError::NotAnAdt) => {
- span_err!(tcx.sess, span, E0206,
- "the trait `Copy` may not be implemented \
- for this type; type is not a structure or \
- enumeration")
+ let item = tcx.map.expect_item(impl_node_id);
+ let span = if let ItemImpl(_, _, _, _, ref ty, _) = item.node {
+ ty.span
+ } else {
+ span
+ };
+
+ struct_span_err!(tcx.sess, span, E0206,
+ "the trait `Copy` may not be implemented for this type")
+ .span_label(span, &format!("type is not a structure or enumeration"))
+ .emit();
}
Err(CopyImplementationError::HasDestructor) => {
span_err!(tcx.sess, span, E0184,
// NB. Since the `memoized` function enters a new task, and we
// are giving this task access to the item `item`, we must
// register a read.
+ assert!(!ccx.tcx.map.is_inlined_def_id(item_def_id));
ccx.tcx.dep_graph.read(DepNode::Hir(item_def_id));
compute_type_scheme_of_item(ccx, item)
})
// NB. Since the `memoized` function enters a new task, and we
// are giving this task access to the item `item`, we must
// register a read.
+ assert!(!ccx.tcx.map.is_inlined_def_id(item_def_id));
ccx.tcx.dep_graph.read(DepNode::Hir(item_def_id));
compute_type_scheme_of_foreign_item(ccx, item, abi)
})
pub struct CrateCtxt<'a, 'tcx: 'a> {
ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
- /// A mapping from method call sites to traits that have that method.
- pub trait_map: hir::TraitMap,
-
/// A vector of every trait accessible in the whole crate
/// (i.e. including those from subcrates). This is used only for
/// error reporting, and so is lazily initialised and generally
match it.node {
hir::ItemFn(_,_,_,_,ref ps,_)
if ps.is_parameterized() => {
- struct_span_err!(tcx.sess, start_span, E0132,
+ let sp = if let Some(sp) = ps.span() { sp } else { start_span };
+ struct_span_err!(tcx.sess, sp, E0132,
"start function is not allowed to have type parameters")
- .span_label(ps.span().unwrap(),
+ .span_label(sp,
&format!("start function cannot have type parameters"))
.emit();
return;
}
}
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_map: hir::TraitMap)
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> CompileResult {
let time_passes = tcx.sess.time_passes();
let ccx = CrateCtxt {
ast_ty_to_ty_cache: RefCell::new(NodeMap()),
- trait_map: trait_map,
all_traits: RefCell::new(None),
stack: RefCell::new(Vec::new()),
tcx: tcx
let codemap = Rc::new(codemap::CodeMap::new());
let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
- None,
true,
false,
Some(codemap.clone()));
let codemap = Rc::new(CodeMap::new());
let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
- None,
true,
false,
Some(codemap.clone()));
let data = Arc::new(Mutex::new(Vec::new()));
let codemap = Rc::new(CodeMap::new());
let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()),
- None,
- Some(codemap.clone()),
- errors::snippet::FormatMode::EnvironmentSelected);
+ Some(codemap.clone()));
let old = io::set_panic(box Sink(data.clone()));
let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout()));
/// A hash map implementation which uses linear probing with Robin
/// Hood bucket stealing.
///
-/// The hashes are all keyed by the thread-local random number generator
-/// on creation by default. This means that the ordering of the keys is
-/// randomized, but makes the tables more resistant to
-/// denial-of-service attacks (Hash DoS). No guarantees are made to the
-/// quality of the random data. The implementation uses the best available
-/// random data from your platform at the time of creation. This behavior
-/// can be overridden with one of the constructors.
+/// By default, HashMap uses a somewhat slow hashing algorithm which can provide resistance
+/// to DoS attacks. Rust makes a best attempt at acquiring random numbers without blocking
+/// on IO from your system. Because of this, HashMap is not guaranteed to provide
+/// DoS resistance, since the numbers generated might not be truly random. If you do
+/// require this behavior, you can create your own hashing function using
+/// [BuildHasherDefault](../hash/struct.BuildHasherDefault.html).
///
/// It is required that the keys implement the `Eq` and `Hash` traits, although
/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
use any::TypeId;
use boxed::Box;
+use cell;
use char;
use fmt::{self, Debug, Display};
use marker::{Send, Sync, Reflect};
}
}
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized + Reflect> Error for cell::BorrowError<'a, T> {
+ fn description(&self) -> &str {
+ "already mutably borrowed"
+ }
+}
+
+#[unstable(feature = "try_borrow", issue = "35070")]
+impl<'a, T: ?Sized + Reflect> Error for cell::BorrowMutError<'a, T> {
+ fn description(&self) -> &str {
+ "already borrowed"
+ }
+}
+
// copied from any.rs
impl Error + 'static {
/// Returns true if the boxed type is the same as `T`
///
/// extern { fn my_string() -> *const c_char; }
///
-/// fn main() {
-/// unsafe {
-/// let slice = CStr::from_ptr(my_string());
-/// println!("string length: {}", slice.to_bytes().len());
-/// }
+/// unsafe {
+/// let slice = CStr::from_ptr(my_string());
+/// println!("string length: {}", slice.to_bytes().len());
/// }
/// ```
///
/// unsafe { work_with(data.as_ptr()) }
/// }
///
-/// fn main() {
-/// let s = CString::new("data data data data").unwrap();
-/// work(&s);
-/// }
+/// let s = CString::new("data data data data").unwrap();
+/// work(&s);
/// ```
///
/// Converting a foreign C string into a Rust `String`
/// }
/// }
///
-/// fn main() {
-/// println!("string: {}", my_string_safe());
-/// }
+/// println!("string: {}", my_string_safe());
/// ```
#[derive(Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
///
/// extern { fn puts(s: *const c_char); }
///
- /// fn main() {
- /// let to_print = CString::new("Hello!").unwrap();
- /// unsafe {
- /// puts(to_print.as_ptr());
- /// }
+ /// let to_print = CString::new("Hello!").unwrap();
+ /// unsafe {
+ /// puts(to_print.as_ptr());
/// }
/// ```
///
#![feature(str_utf16)]
#![feature(test, rustc_private)]
#![feature(thread_local)]
+#![feature(try_borrow)]
#![feature(try_from)]
#![feature(unboxed_closures)]
#![feature(unicode)]
#![feature(unwind_attributes)]
#![feature(vec_push_all)]
#![feature(zero_one)]
+#![cfg_attr(test, feature(update_panic_count))]
// Issue# 30592: Systematically use alloc_system during stage0 since jemalloc
// might be unavailable or disabled
/// ```
#[stable(feature = "resume_unwind", since = "1.9.0")]
pub fn resume_unwind(payload: Box<Any + Send>) -> ! {
- panicking::rust_panic(payload)
+ panicking::update_count_then_panic(payload)
}
use io::prelude::*;
use any::Any;
-use cell::Cell;
use cell::RefCell;
use fmt;
use intrinsics;
}
}
-thread_local! { pub static PANIC_COUNT: Cell<usize> = Cell::new(0) }
-
// Binary interface to the panic runtime that the standard library depends on.
//
// The standard library is tagged with `#![needs_panic_runtime]` (introduced in
// for this panic. Otherwise only print it if logging is enabled.
#[cfg(any(not(cargobuild), feature = "backtrace"))]
let log_backtrace = {
- let panics = PANIC_COUNT.with(|c| c.get());
+ let panics = update_panic_count(0);
panics >= 2 || backtrace::log_enabled()
};
}
}
+
+#[cfg(not(test))]
+#[doc(hidden)]
+#[unstable(feature = "update_panic_count", issue = "0")]
+pub fn update_panic_count(amt: isize) -> usize {
+ use cell::Cell;
+ thread_local! { static PANIC_COUNT: Cell<usize> = Cell::new(0) }
+
+ PANIC_COUNT.with(|c| {
+ let next = (c.get() as isize + amt) as usize;
+ c.set(next);
+ return next
+ })
+}
+
+#[cfg(test)]
+pub use realstd::rt::update_panic_count;
+
/// Invoke a closure, capturing the cause of an unwinding panic if one occurs.
pub unsafe fn try<R, F: FnOnce() -> R>(f: F) -> Result<R, Box<Any + Send>> {
let mut slot = None;
let mut f = Some(f);
- let ret = PANIC_COUNT.with(|s| {
- let prev = s.get();
- s.set(0);
+ let ret;
+ {
let mut to_run = || {
slot = Some(f.take().unwrap()());
};
dataptr,
&mut any_data,
&mut any_vtable);
- s.set(prev);
-
if r == 0 {
- Ok(())
+ ret = Ok(());
} else {
- Err(mem::transmute(raw::TraitObject {
+ update_panic_count(-1);
+ ret = Err(mem::transmute(raw::TraitObject {
data: any_data as *mut _,
vtable: any_vtable as *mut _,
- }))
+ }));
}
- });
+ }
+ debug_assert!(update_panic_count(0) == 0);
return ret.map(|()| {
slot.take().unwrap()
});
/// Determines whether the current thread is unwinding because of panic.
pub fn panicking() -> bool {
- PANIC_COUNT.with(|c| c.get() != 0)
+ update_panic_count(0) != 0
}
/// Entry point of panic from the libcore crate.
file_line: &(&'static str, u32)) -> ! {
let (file, line) = *file_line;
- let panics = PANIC_COUNT.with(|c| {
- let prev = c.get();
- c.set(prev + 1);
- prev
- });
+ let panics = update_panic_count(1);
// If this is the third nested call (e.g. panics == 3, since
// `update_panic_count(1)` returns the already-incremented count), the
// panic hook probably triggered the last panic, otherwise the
// double-panic check would have aborted the process. In this case abort the
// process real quickly as we don't want to try calling it again as it'll
// probably just panic again.
- if panics > 1 {
+ if panics > 2 {
util::dumb_print(format_args!("thread panicked while processing \
panic. aborting.\n"));
unsafe { intrinsics::abort() }
HOOK_LOCK.read_unlock();
}
- if panics > 0 {
+ if panics > 1 {
// If a thread panics while it's already unwinding then we
// have limited options. Currently our preference is to
// just abort. In the future we may consider resuming
rust_panic(msg)
}
+/// Shim around rust_panic. Called by resume_unwind.
+pub fn update_count_then_panic(msg: Box<Any + Send>) -> ! {
+ update_panic_count(1);
+ rust_panic(msg)
+}
+
/// A private no-mangle function on which to slap yer breakpoints.
#[no_mangle]
#[allow(private_no_mangle_fns)] // yes we get it, but we like breakpoints
// Reexport some of our utilities which are expected by other crates.
-pub use panicking::{begin_panic, begin_panic_fmt};
+pub use panicking::{begin_panic, begin_panic_fmt, update_panic_count};
#[cfg(not(test))]
#[lang = "start"]
use io;
use iter;
use libc::{self, c_int, c_char, c_void};
+use marker::PhantomData;
use mem;
use memchr;
use path::{self, PathBuf};
pub struct Args {
iter: vec::IntoIter<OsString>,
- _dont_send_or_sync_me: *mut (),
+ _dont_send_or_sync_me: PhantomData<*mut ()>,
}
impl Iterator for Args {
};
Args {
iter: vec.into_iter(),
- _dont_send_or_sync_me: ptr::null_mut(),
+ _dont_send_or_sync_me: PhantomData,
}
}
}
}
- Args { iter: res.into_iter(), _dont_send_or_sync_me: ptr::null_mut() }
+ Args { iter: res.into_iter(), _dont_send_or_sync_me: PhantomData }
}
#[cfg(any(target_os = "linux",
let v: Vec<OsString> = bytes.into_iter().map(|v| {
OsStringExt::from_vec(v)
}).collect();
- Args { iter: v.into_iter(), _dont_send_or_sync_me: ptr::null_mut() }
+ Args { iter: v.into_iter(), _dont_send_or_sync_me: PhantomData }
}
pub struct Env {
iter: vec::IntoIter<(OsString, OsString)>,
- _dont_send_or_sync_me: *mut (),
+ _dont_send_or_sync_me: PhantomData<*mut ()>,
}
impl Iterator for Env {
}
let ret = Env {
iter: result.into_iter(),
- _dont_send_or_sync_me: ptr::null_mut(),
+ _dont_send_or_sync_me: PhantomData,
};
ENV_LOCK.unlock();
return ret
fn mk_sh(cm: Rc<CodeMap>) -> errors::Handler {
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
- None,
- Some(cm),
- errors::snippet::FormatMode::EnvironmentSelected);
+ Some(cm));
errors::Handler::with_emitter(true, false, Box::new(emitter))
}
pub fn new() -> ParseSess {
let cm = Rc::new(CodeMap::new());
let handler = Handler::with_tty_emitter(ColorConfig::Auto,
- None,
true,
false,
Some(cm.clone()));
#![feature(staged_api)]
#![feature(question_mark)]
#![feature(panic_unwind)]
+#![feature(mpsc_recv_timeout)]
extern crate getopts;
extern crate term;
use std::thread;
use std::time::{Instant, Duration};
+const TEST_WARN_TIMEOUT_S: u64 = 60;
+
// to be used by rustc to compile tests in libtest
pub mod test {
pub use {Bencher, TestName, TestResult, TestDesc, TestDescAndFn, TestOpts, TrFailed,
}
}
+ pub fn write_timeout(&mut self, desc: &TestDesc) -> io::Result<()> {
+ self.write_plain(&format!("test {} has been running for over {} seconds\n",
+ desc.name,
+ TEST_WARN_TIMEOUT_S))
+ }
+
pub fn write_log(&mut self, test: &TestDesc, result: &TestResult) -> io::Result<()> {
match self.log_out {
None => Ok(()),
match (*event).clone() {
TeFiltered(ref filtered_tests) => st.write_run_start(filtered_tests.len()),
TeWait(ref test, padding) => st.write_test_start(test, padding),
+ TeTimeout(ref test) => st.write_timeout(test),
TeResult(test, result, stdout) => {
st.write_log(&test, &result)?;
st.write_result(&result)?;
TeFiltered(Vec<TestDesc>),
TeWait(TestDesc, NamePadding),
TeResult(TestDesc, TestResult, Vec<u8>),
+ TeTimeout(TestDesc),
}
pub type MonitorMsg = (TestDesc, TestResult, Vec<u8>);
fn run_tests<F>(opts: &TestOpts, tests: Vec<TestDescAndFn>, mut callback: F) -> io::Result<()>
where F: FnMut(TestEvent) -> io::Result<()>
{
+ use std::collections::HashMap;
+ use std::sync::mpsc::RecvTimeoutError;
+
let mut filtered_tests = filter_tests(opts, tests);
if !opts.bench_benchmarks {
filtered_tests = convert_benchmarks_to_tests(filtered_tests);
let (tx, rx) = channel::<MonitorMsg>();
+ let mut running_tests: HashMap<TestDesc, Instant> = HashMap::new();
+
+ fn get_timed_out_tests(running_tests: &mut HashMap<TestDesc, Instant>) -> Vec<TestDesc> {
+ let now = Instant::now();
+ let timed_out = running_tests.iter()
+ .filter_map(|(desc, timeout)| if &now >= timeout { Some(desc.clone())} else { None })
+ .collect();
+ for test in &timed_out {
+ running_tests.remove(test);
+ }
+ timed_out
+ };
+
+ fn calc_timeout(running_tests: &HashMap<TestDesc, Instant>) -> Option<Duration> {
+ running_tests.values().min().map(|next_timeout| {
+ let now = Instant::now();
+ if *next_timeout >= now {
+ *next_timeout - now
+ } else {
+ Duration::new(0, 0)
+ }})
+ };
+
while pending > 0 || !remaining.is_empty() {
while pending < concurrency && !remaining.is_empty() {
let test = remaining.pop().unwrap();
// that hang forever.
callback(TeWait(test.desc.clone(), test.testfn.padding()))?;
}
+ let timeout = Instant::now() + Duration::from_secs(TEST_WARN_TIMEOUT_S);
+ running_tests.insert(test.desc.clone(), timeout);
run_test(opts, !opts.run_tests, test, tx.clone());
pending += 1;
}
- let (desc, result, stdout) = rx.recv().unwrap();
+ let mut res;
+ loop {
+ if let Some(timeout) = calc_timeout(&running_tests) {
+ res = rx.recv_timeout(timeout);
+ for test in get_timed_out_tests(&mut running_tests) {
+ callback(TeTimeout(test))?;
+ }
+ if res != Err(RecvTimeoutError::Timeout) {
+ break;
+ }
+ } else {
+ res = rx.recv().map_err(|_| RecvTimeoutError::Disconnected);
+ break;
+ }
+ }
+
+ let (desc, result, stdout) = res.unwrap();
+ running_tests.remove(&desc);
+
if concurrency != 1 {
callback(TeWait(desc.clone(), PadNone))?;
}
-Subproject commit d1cc48989b13780f21c408fef17dceb104a09c9d
+Subproject commit 786aad117be48547f4ca50fae84c4879fa992d4d
}
}
+#if LLVM_RUSTLLVM
+/// getLongestEntryLength - Return the length of the longest entry in the table.
+///
+static size_t getLongestEntryLength(ArrayRef<SubtargetFeatureKV> Table) {
+ size_t MaxLen = 0;
+ for (auto &I : Table)
+ MaxLen = std::max(MaxLen, std::strlen(I.Key));
+ return MaxLen;
+}
+
+extern "C" void
+LLVMRustPrintTargetCPUs(LLVMTargetMachineRef TM) {
+ const TargetMachine *Target = unwrap(TM);
+ const MCSubtargetInfo *MCInfo = Target->getMCSubtargetInfo();
+ const ArrayRef<SubtargetFeatureKV> CPUTable = MCInfo->getCPUTable();
+ unsigned MaxCPULen = getLongestEntryLength(CPUTable);
+
+ printf("Available CPUs for this target:\n");
+ for (auto &CPU : CPUTable)
+ printf(" %-*s - %s.\n", MaxCPULen, CPU.Key, CPU.Desc);
+ printf("\n");
+}
+
+extern "C" void
+LLVMRustPrintTargetFeatures(LLVMTargetMachineRef TM) {
+ const TargetMachine *Target = unwrap(TM);
+ const MCSubtargetInfo *MCInfo = Target->getMCSubtargetInfo();
+ const ArrayRef<SubtargetFeatureKV> FeatTable = MCInfo->getFeatureTable();
+ unsigned MaxFeatLen = getLongestEntryLength(FeatTable);
+
+ printf("Available features for this target:\n");
+ for (auto &Feature : FeatTable)
+ printf(" %-*s - %s.\n", MaxFeatLen, Feature.Key, Feature.Desc);
+ printf("\n");
+
+ printf("Use +feature to enable a feature, or -feature to disable it.\n"
+         "For example, rustc -C target-cpu=mycpu -C target-feature=+feature1,-feature2\n\n");
+}
+
+#else
+
+extern "C" void
+LLVMRustPrintTargetCPUs(LLVMTargetMachineRef) {
+ printf("Target CPU help is not supported by this LLVM version.\n\n");
+}
+
+extern "C" void
+LLVMRustPrintTargetFeatures(LLVMTargetMachineRef) {
+ printf("Target features help is not supported by this LLVM version.\n\n");
+}
+#endif
+
extern "C" LLVMTargetMachineRef
LLVMRustCreateTargetMachine(const char *triple,
const char *cpu,
# If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2016-07-25b
+2016-08-07
const C: i32 = 2;
const CR: &'static mut i32 = &mut C; //~ ERROR E0017
+ //~| NOTE constants require immutable values
//~| ERROR E0017
+ //~| NOTE constants require immutable values
static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
+ //~| NOTE statics require immutable values
//~| ERROR E0017
+ //~| NOTE statics require immutable values
//~| ERROR E0388
static CONST_REF: &'static mut i32 = &mut C; //~ ERROR E0017
+ //~| NOTE statics require immutable values
//~| ERROR E0017
-
+ //~| NOTE statics require immutable values
fn main() {}
Pear(u32),
}
+
fn main() {
let x = Fruit::Apple(String::new(), String::new());
match x {
Fruit::Apple(a) => {}, //~ ERROR E0023
+ //~| NOTE expected 2 fields, found 1
Fruit::Apple(a, b, c) => {}, //~ ERROR E0023
+ //~| NOTE expected 2 fields, found 3
+ Fruit::Pear(1, 2) => {}, //~ ERROR E0023
+ //~| NOTE expected 1 field, found 2
}
}
fn main() {
let thing = Thing { x: 0, y: 0 };
match thing {
- Thing { x, y, z } => {} //~ ERROR E0026
+ Thing { x, y, z } => {}
+ //~^ ERROR struct `Thing` does not have a field named `z` [E0026]
+ //~| NOTE struct `Thing` does not have field `z`
}
}
// except according to those terms.
enum Enum {
- P = 3,
- X = 3, //~ ERROR E0081
+ P = 3, //~ NOTE first use of `3isize`
+ X = 3,
+ //~^ ERROR discriminant value `3isize` already exists
+ //~| NOTE enum already has `3isize`
Y = 5
}
fn main() {
foo::<f64, bool>(); //~ ERROR E0087
+ //~^ NOTE expected
}
// except according to those terms.
extern {
- fn foo((a, b): (u32, u32)); //~ ERROR E0130
+ fn foo((a, b): (u32, u32));
+ //~^ ERROR E0130
+ //~| NOTE pattern not allowed in foreign function
}
fn main() {
fn main() {
let irr = Irrefutable(0);
if let Irrefutable(x) = irr { //~ ERROR E0162
+ //~| NOTE irrefutable pattern
println!("{}", x);
}
}
}
type Foo = Trait; //~ ERROR E0191
+ //~| NOTE missing associated type `Bar` value
fn main() {
}
}
impl Copy for Foo { }
-//~^ ERROR E0205
+//~^ ERROR the trait `Copy` may not be implemented for this type
//~| NOTE variant `Bar` does not implement `Copy`
#[derive(Copy)]
-//~^ ERROR E0205
+//~^ ERROR the trait `Copy` may not be implemented for this type
//~| NOTE variant `Bar` does not implement `Copy`
//~| NOTE in this expansion of #[derive(Copy)]
enum Foo2<'a> {
type Foo = i32;
-impl Copy for Foo { } //~ ERROR E0206
- //~^ ERROR E0117
+impl Copy for Foo { }
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
+//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
+//~| NOTE impl doesn't use types inside crate
+//~| NOTE the impl does not reference any types defined in this crate
#[derive(Copy, Clone)]
struct Bar;
-impl Copy for &'static Bar { } //~ ERROR E0206
+impl Copy for &'static Bar { }
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
fn main() {
}
// except according to those terms.
fn main() {
- let v: Vec(&str) = vec!["foo"]; //~ ERROR E0214
+ let v: Vec(&str) = vec!["foo"];
+ //~^ ERROR E0214
+ //~| NOTE only traits may use parentheses
}
}
fn do_something(x: Foo::Bar) { } //~ ERROR E0248
-
+ //~| NOTE value used as a type
fn main() {
}
#![feature(associated_consts)]
trait Foo {
- const BAR: u32;
+ const BAR: u32; //~ NOTE original trait requirement
}
struct SignedBar;
impl Foo for SignedBar {
const BAR: i32 = -1;
//~^ ERROR implemented const `BAR` has an incompatible type for trait [E0326]
- //~| expected u32, found i32
+ //~| NOTE expected u32, found i32
}
fn main() {}
fn dent<C:BoxCar>(c: C, color: C::Color) {
//~^ ERROR ambiguous associated type `Color` in bounds of `C`
+ //~| NOTE ambiguous associated type `Color`
//~| NOTE could derive from `Vehicle`
//~| NOTE could derive from `Box`
}
fn dent_object<COLOR>(c: BoxCar<Color=COLOR>) {
//~^ ERROR ambiguous associated type
//~| ERROR the value of the associated type `Color` (from the trait `Vehicle`) must be specified
+ //~| NOTE ambiguous associated type `Color`
//~| NOTE could derive from `Vehicle`
//~| NOTE could derive from `Box`
+ //~| NOTE missing associated type `Color` value
}
fn paint<C:BoxCar>(c: C, d: C::Color) {
//~^ ERROR ambiguous associated type `Color` in bounds of `C`
+ //~| NOTE ambiguous associated type `Color`
//~| NOTE could derive from `Vehicle`
//~| NOTE could derive from `Box`
}
impl Copy for MyType {}
impl Copy for &'static mut MyType {}
-//~^ ERROR E0206
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
impl Clone for MyType { fn clone(&self) -> Self { *self } }
impl Copy for (MyType, MyType) {}
-//~^ ERROR E0206
-//~| ERROR E0117
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
+//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
+//~| NOTE impl doesn't use types inside crate
+//~| NOTE the impl does not reference any types defined in this crate
impl Copy for &'static NotSync {}
-//~^ ERROR E0206
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
impl Copy for [MyType] {}
-//~^ ERROR E0206
-//~| ERROR E0117
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
+//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
+//~| NOTE impl doesn't use types inside crate
+//~| NOTE the impl does not reference any types defined in this crate
impl Copy for &'static [NotSync] {}
-//~^ ERROR E0206
-//~| ERROR E0117
+//~^ ERROR the trait `Copy` may not be implemented for this type
+//~| NOTE type is not a structure or enumeration
+//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
+//~| NOTE impl doesn't use types inside crate
+//~| NOTE the impl does not reference any types defined in this crate
fn main() {
}
enum Foo {
A = 1,
- //~^ NOTE first use
- //~| NOTE first use
- //~| NOTE first use
- B = 1, //~ ERROR discriminant value
- //~^ NOTE enum already
+ //~^ NOTE first use of `1isize`
+ //~| NOTE first use of `1isize`
+ //~| NOTE first use of `1isize`
+ B = 1,
+ //~^ ERROR discriminant value `1isize` already exists
+ //~| NOTE enum already has `1isize`
C = 0,
- D, //~ ERROR discriminant value
- //~^ NOTE enum already
+ D,
+ //~^ ERROR discriminant value `1isize` already exists
+ //~| NOTE enum already has `1isize`
- E = N, //~ ERROR discriminant value
- //~^ NOTE enum already
+ E = N,
+ //~^ ERROR discriminant value `1isize` already exists
+ //~| NOTE enum already has `1isize`
}
println!("{:?}",(vfnfer[0] as Fn)(3));
//~^ ERROR the precise format of `Fn`-family traits'
//~| ERROR E0243
- //~| NOTE expected 1 type arguments, found 0
//~| ERROR the value of the associated type `Output` (from the trait `std::ops::FnOnce`)
- //~| NOTE in this expansion of println!
- //~| NOTE in this expansion of println!
- //~| NOTE in this expansion of println!
- //~| NOTE in this expansion of println!
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::ops::Deref;
+
+struct Obj<F> where F: FnMut() -> u32 {
+ fn_ptr: fn() -> (),
+ closure: F,
+}
+
+struct C {
+ c_fn_ptr: fn() -> (),
+}
+
+struct D(C);
+
+impl Deref for D {
+ type Target = C;
+ fn deref(&self) -> &C {
+ &self.0
+ }
+}
+
+
+fn empty() {}
+
+fn main() {
+ let o = Obj { fn_ptr: empty, closure: || 42 };
+ let p = &o;
+ p.closure(); //~ ERROR no method named `closure` found
+ //~^ NOTE use `(p.closure)(...)` if you meant to call the function stored in the `closure` field
+ let q = &p;
+ q.fn_ptr(); //~ ERROR no method named `fn_ptr` found
+ //~^ NOTE use `(q.fn_ptr)(...)` if you meant to call the function stored in the `fn_ptr` field
+ let r = D(C { c_fn_ptr: empty });
+ let s = &r;
+ s.c_fn_ptr(); //~ ERROR no method named `c_fn_ptr` found
+ //~^ NOTE use `(s.c_fn_ptr)(...)` if you meant to call the function stored in the `c_fn_ptr`
+}
extern {
fn f1(mut arg: u8); //~ ERROR patterns aren't allowed in foreign function declarations
- //~^ NOTE this is a recent error
+ //~^ NOTE pattern not allowed in foreign function
+ //~| NOTE this is a recent error
fn f2(&arg: u8); //~ ERROR patterns aren't allowed in foreign function declarations
+ //~^ NOTE pattern not allowed in foreign function
fn f3(arg @ _: u8); //~ ERROR patterns aren't allowed in foreign function declarations
- //~^ NOTE this is a recent error
+ //~^ NOTE pattern not allowed in foreign function
+ //~| NOTE this is a recent error
fn g1(arg: u8); // OK
fn g2(_: u8); // OK
// fn g3(u8); // Not yet
// aux-build:a.rs
// revisions:rpass1 rpass2
+// compile-flags:-Z query-dep-graph
#![feature(rustc_attrs)]
// except according to those terms.
// revisions: rpass1 cfail2
+// compile-flags: -Z query-dep-graph
#![allow(warnings)]
#![feature(rustc_attrs)]
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test what happens when we save incremental compilation state that makes
+// use of foreign items. This used to ICE (#34991).
+
+// revisions: rpass1
+
+#![feature(libc)]
+
+extern crate libc;
+
+use std::ffi::CString;
+
+mod mlibc {
+ use libc::{c_char, c_long, c_longlong};
+
+ extern {
+ pub fn atol(x: *const c_char) -> c_long;
+ pub fn atoll(x: *const c_char) -> c_longlong;
+ }
+}
+
+fn atol(s: String) -> isize {
+ let c = CString::new(s).unwrap();
+ unsafe { mlibc::atol(c.as_ptr()) as isize }
+}
+
+fn atoll(s: String) -> i64 {
+ let c = CString::new(s).unwrap();
+ unsafe { mlibc::atoll(c.as_ptr()) as i64 }
+}
+
+pub fn main() {
+ assert_eq!(atol("1024".to_string()) * 10, atol("10240".to_string()));
+ assert_eq!((atoll("11111111111111111".to_string()) * 10),
+ atoll("111111111111111110".to_string()));
+}
// except according to those terms.
// revisions: rpass1 rpass2
+// compile-flags: -Z query-dep-graph
#![allow(warnings)]
#![feature(rustc_attrs)]
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that the hash for a method call is sensitive to the traits in
+// scope.
+
+// revisions: rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+fn test<T>() { }
+
+trait Trait1 {
+ fn method(&self) { }
+}
+
+impl Trait1 for () { }
+
+trait Trait2 {
+ fn method(&self) { }
+}
+
+impl Trait2 for () { }
+
+#[cfg(rpass1)]
+mod mod3 {
+ use Trait1;
+
+ fn bar() {
+ ().method();
+ }
+
+ fn baz() {
+ 22; // no method call, traits in scope don't matter
+ }
+}
+
+#[cfg(rpass2)]
+mod mod3 {
+ use Trait2;
+
+ #[rustc_dirty(label="Hir", cfg="rpass2")]
+ fn bar() {
+ ().method();
+ }
+
+ #[rustc_clean(label="Hir", cfg="rpass2")]
+ fn baz() {
+ 22; // no method call, traits in scope don't matter
+ }
+}
+
+fn main() { }
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that the hash of `foo` doesn't change just because we reordered
+// the nested items (or even added new ones).
+
+// revisions: rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+#[cfg(rpass1)]
+fn foo() {
+ fn bar() { }
+ fn baz() { }
+}
+
+#[cfg(rpass2)]
+#[rustc_clean(label="Hir", cfg="rpass2")]
+fn foo() {
+ #[rustc_clean(label="Hir", cfg="rpass2")]
+ fn baz() { } // order is different...
+
+ #[rustc_clean(label="Hir", cfg="rpass2")]
+ fn bar() { } // but that doesn't matter.
+
+ fn bap() { } // neither does adding a new item
+}
+
+fn main() { }
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that the hash for `mod3::bar` changes when we change the
+// `use` to something different.
+
+// revisions: rpass1 rpass2 rpass3
+
+#![feature(rustc_attrs)]
+
+fn test<T>() { }
+
+mod mod1 {
+ pub struct Foo(pub u32);
+}
+
+mod mod2 {
+ pub struct Foo(pub i64);
+}
+
+#[cfg(rpass1)]
+mod mod3 {
+ use test;
+ use mod1::Foo;
+
+ fn in_expr() {
+ Foo(0);
+ }
+
+ fn in_type() {
+ test::<Foo>();
+ }
+}
+
+#[cfg(rpass2)]
+mod mod3 {
+ use mod1::Foo; // <-- Nothing changed, but reordered!
+ use test;
+
+ #[rustc_clean(label="Hir", cfg="rpass2")]
+ fn in_expr() {
+ Foo(0);
+ }
+
+ #[rustc_clean(label="Hir", cfg="rpass2")]
+ fn in_type() {
+ test::<Foo>();
+ }
+}
+
+#[cfg(rpass3)]
+mod mod3 {
+ use test;
+ use mod2::Foo; // <-- This changed!
+
+ #[rustc_dirty(label="Hir", cfg="rpass3")]
+ fn in_expr() {
+ Foo(0);
+ }
+
+ #[rustc_dirty(label="Hir", cfg="rpass3")]
+ fn in_type() {
+ test::<Foo>();
+ }
+}
+
+fn main() { }
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #34991: an ICE occurred here because we inline
+// some of the vector routines and give them a local def-id `X`. This
+// got hashed after trans (`Hir(X)`). When we load back up, we get an
+// error because the `X` is remapped to the original def-id (in
+// libstd), and we can't hash a HIR node from std.
+
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+use std::vec::Vec;
+
+pub fn foo() -> Vec<i32> {
+ vec![1, 2, 3]
+}
+
+pub fn bar() {
+ foo();
+}
+
+pub fn main() {
+ bar();
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type="rlib"]
+
+pub type X = u32;
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:a.rs
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+#[cfg(rpass1)]
+extern crate a;
+
+#[cfg(rpass1)]
+pub fn use_X() -> u32 {
+ let x: a::X = 22;
+ x as u32
+}
+
+#[cfg(rpass2)]
+pub fn use_X() -> u32 {
+ 22
+}
+
+pub fn main() { }
// aux-build:a.rs
// revisions:rpass1 rpass2 rpass3
// no-prefer-dynamic
-
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
X { x: 11, y: 11 }
}
- #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
- #[rustc_clean(label="ItemSignature", cfg="rpass2")]
pub fn new() -> X {
make()
}
- #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
- #[rustc_clean(label="ItemSignature", cfg="rpass2")]
pub fn sum(x: &X) -> u32 {
x.x + x.y
}
mod y {
use x;
- #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
pub fn assert_sum() -> bool {
let x = x::new();
x::sum(&x) == 22
// except according to those terms.
// revisions: rpass1 rpass2
+// compile-flags: -Z query-dep-graph
#![allow(warnings)]
#![feature(rustc_attrs)]
mod y {
use x;
- #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
- #[rustc_clean(label="TransCrateItem", cfg="rpass2")]
+ // FIXME(#35078) -- when body of `x` changes, we treat it as
+ // though signature changed.
+ #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+ #[rustc_dirty(label="TransCrateItem", cfg="rpass2")]
pub fn y() {
x::x();
}
// in between revisions (hashing should be stable).
// revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
// in between revisions (hashing should be stable).
// revisions:rpass1 cfail2
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
// in between revisions (hashing should be stable).
// revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
// aux-build:a.rs
// revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
// in between revisions (hashing should be stable).
// revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
// in between revisions (hashing should be stable).
// revisions:rpass1 rpass2
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
// aux-build:a.rs
// revisions:rpass1 rpass2 rpass3
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
all:
$(RUSTC) foo.rs --crate-type staticlib
- $(RUSTC) bar.rs 2>&1 | grep "error: found staticlib"
+ $(RUSTC) bar.rs 2>&1 | grep "found staticlib"
mv $(TMPDIR)/$(call RLIB_GLOB,crateA) $(A3)
# Ensure crateC fails to compile since A1 is "missing" and A2/A3 hashes do not match
$(RUSTC) -L $(A2) -L $(A3) crateC.rs >$(LOG) 2>&1 || true
- grep "error: found possibly newer version of crate \`crateA\` which \`crateB\` depends on" $(LOG)
+ grep "found possibly newer version of crate \`crateA\` which \`crateB\` depends on" $(LOG)
grep "note: perhaps that crate needs to be recompiled?" $(LOG)
grep "note: crate \`crateA\` path #1:" $(LOG)
grep "note: crate \`crateA\` path #2:" $(LOG)
$(call REMOVE_RLIBS,crateA)
# Ensure crateC fails to compile since dependency crateA is missing
$(RUSTC) crateC.rs 2>&1 | \
- grep "error: can't find crate for \`crateA\` which \`crateB\` depends on"
+ grep "can't find crate for \`crateA\` which \`crateB\` depends on"
+++ /dev/null
--include ../tools.mk
-
-# This test attempts to run rustc itself from the compiled binary; but
-# that means that you need to set the LD_LIBRARY_PATH for rustc itself
-# while running multiple_files, and that won't work for stage1.
-
-# FIXME ignore windows
-ifndef IS_WINDOWS
-ifeq ($(RUST_BUILD_STAGE),1)
-DOTEST=
-else
-DOTEST=dotest
-endif
-endif
-
-all: $(DOTEST)
-
-dotest:
- # check that we don't ICE on unicode input, issue #11178
- $(RUSTC) multiple_files.rs
- $(call RUN,multiple_files) "$(RUSTC)" "$(TMPDIR)"
-
- # check that our multibyte-ident spans are (approximately) the
- # correct length. issue #8706
- $(RUSTC) span_length.rs
- $(call RUN,span_length) "$(RUSTC)" "$(TMPDIR)"
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(rand)]
-
-use std::fs::File;
-use std::io::prelude::*;
-use std::path::Path;
-use std::process::Command;
-use std::__rand::{thread_rng, Rng};
-use std::{char, env};
-
-// creates unicode_input_multiple_files_{main,chars}.rs, where the
-// former imports the latter. `_chars` just contains an identifier
-// made up of random characters, because will emit an error message
-// about the ident being in the wrong place, with a span (and creating
-// this span used to upset the compiler).
-
-fn random_char() -> char {
- let mut rng = thread_rng();
- // a subset of the XID_start Unicode table (ensuring that the
- // compiler doesn't fail with an "unrecognised token" error)
- let (lo, hi): (u32, u32) = match rng.gen_range(1u32, 4u32 + 1) {
- 1 => (0x41, 0x5a),
- 2 => (0xf8, 0x1ba),
- 3 => (0x1401, 0x166c),
- _ => (0x10400, 0x1044f)
- };
-
- char::from_u32(rng.gen_range(lo, hi + 1)).unwrap()
-}
-
-fn main() {
- let args: Vec<String> = env::args().collect();
- let rustc = &args[1];
- let tmpdir = Path::new(&args[2]);
-
- let main_file = tmpdir.join("unicode_input_multiple_files_main.rs");
- {
- let _ = File::create(&main_file).unwrap()
- .write_all(b"mod unicode_input_multiple_files_chars;").unwrap();
- }
-
- for _ in 0..100 {
- {
- let randoms = tmpdir.join("unicode_input_multiple_files_chars.rs");
- let mut w = File::create(&randoms).unwrap();
- for _ in 0..30 {
- write!(&mut w, "{}", random_char()).unwrap();
- }
- }
-
- // rustc is passed to us with --out-dir and -L etc., so we
- // can't exec it directly
- let result = Command::new("sh")
- .arg("-c")
- .arg(&format!("{} {}",
- rustc,
- main_file.display()))
- .output().unwrap();
- let err = String::from_utf8_lossy(&result.stderr);
-
- // positive test so that this test will be updated when the
- // compiler changes.
- assert!(err.contains("expected item, found"))
- }
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(rand)]
-
-use std::fs::File;
-use std::io::prelude::*;
-use std::iter::repeat;
-use std::path::Path;
-use std::process::Command;
-use std::__rand::{thread_rng, Rng};
-use std::{char, env};
-
-pub fn check_old_skool() -> bool {
- use std::env;
- env::var("RUST_NEW_ERROR_FORMAT").is_err()
-}
-
-// creates a file with `fn main() { <random ident> }` and checks the
-// compiler emits a span of the appropriate length (for the
-// "unresolved name" message); currently just using the number of code
-// points, but should be the number of graphemes (FIXME #7043)
-
-fn random_char() -> char {
- let mut rng = thread_rng();
- // a subset of the XID_start Unicode table (ensuring that the
- // compiler doesn't fail with an "unrecognised token" error)
- let (lo, hi): (u32, u32) = match rng.gen_range(1u32, 4u32 + 1) {
- 1 => (0x41, 0x5a),
- 2 => (0xf8, 0x1ba),
- 3 => (0x1401, 0x166c),
- _ => (0x10400, 0x1044f)
- };
-
- char::from_u32(rng.gen_range(lo, hi + 1)).unwrap()
-}
-
-fn main() {
- let args: Vec<String> = env::args().collect();
- let rustc = &args[1];
- let tmpdir = Path::new(&args[2]);
- let main_file = tmpdir.join("span_main.rs");
-
- for _ in 0..100 {
- let n = thread_rng().gen_range(3, 20);
-
- {
- let _ = write!(&mut File::create(&main_file).unwrap(),
- "#![feature(non_ascii_idents)] fn main() {{ {} }}",
- // random string of length n
- (0..n).map(|_| random_char()).collect::<String>());
- }
-
- // rustc is passed to us with --out-dir and -L etc., so we
- // can't exec it directly
- let result = Command::new("sh")
- .arg("-c")
- .arg(&format!("{} {}",
- rustc,
- main_file.to_str()
- .unwrap()))
- .output().unwrap();
-
- let err = String::from_utf8_lossy(&result.stderr);
-
- if check_old_skool() {
- // the span should end the line (e.g no extra ~'s)
- let expected_span = format!("^{}\n", repeat("~").take(n - 1)
- .collect::<String>());
- assert!(err.contains(&expected_span));
- } else {
- // the span should end the line (e.g no extra ~'s)
- let expected_span = format!("^{}\n", repeat("^").take(n - 1)
- .collect::<String>());
- assert!(err.contains(&expected_span));
- }
- }
-
- // Test multi-column characters and tabs
- {
- let _ = write!(&mut File::create(&main_file).unwrap(),
- r#"extern "路濫狼á́́" fn foo() {{}} extern "路濫狼á́" fn bar() {{}}"#);
- }
-
- let result = Command::new("sh")
- .arg("-c")
- .arg(format!("{} {}",
- rustc,
- main_file.display()))
- .output().unwrap();
-
- let err = String::from_utf8_lossy(&result.stderr);
-
- // Test both the length of the snake and the leading spaces up to it
-
- if check_old_skool() {
- // Extra characters. Every line is preceded by `filename:lineno <actual code>`
- let offset = main_file.to_str().unwrap().len() + 3;
-
- // First snake is 8 ~s long, with 7 preceding spaces (excluding file name/line offset)
- let expected_span = format!("\n{}^{}\n",
- repeat(" ").take(offset + 7).collect::<String>(),
- repeat("~").take(8).collect::<String>());
- assert!(err.contains(&expected_span));
- // Second snake is only 7 ~s long, with 36 preceding spaces,
- // because rustc counts chars() now rather than width(). This
- // is because width() functions are to be removed from
- // librustc_unicode
- let expected_span = format!("\n{}^{}\n",
- repeat(" ").take(offset + 36).collect::<String>(),
- repeat("~").take(7).collect::<String>());
- assert!(err.contains(&expected_span));
- } else {
- let expected_span = format!("\n |>{}{}\n",
- repeat(" ").take(8).collect::<String>(),
- repeat("^").take(9).collect::<String>());
- assert!(err.contains(&expected_span));
- let expected_span = format!("\n |>{}{}\n",
- repeat(" ").take(37).collect::<String>(),
- repeat("^").take(8).collect::<String>());
- assert!(err.contains(&expected_span));
- }
-}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let x: &'static str = "x";
+
+ {
+ let y = "y".to_string();
+ let ref mut x = &*x;
+ *x = &*y;
+ }
+
+ assert_eq!(x, "x");
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub fn main() {
+ let x = (0, 2);
+
+ match x {
+ (0, ref y) => {}
+ (y, 0) => {}
+ _ => (),
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test binary_search_by_key lifetime. Issue #34683
+
+#[derive(Debug)]
+struct Assignment {
+ topic: String,
+ partition: i32,
+}
+
+fn main() {
+ let xs = vec![
+ Assignment { topic: "abc".into(), partition: 1 },
+ Assignment { topic: "def".into(), partition: 2 },
+ Assignment { topic: "ghi".into(), partition: 3 },
+ ];
+
+ let key: &str = "def";
+ let r = xs.binary_search_by_key(&key, |e| &e.topic);
+ assert_eq!(Ok(1), r.map(|i| i));
+}
const TEST_REPOS: &'static [Test] = &[Test {
name: "cargo",
repo: "https://github.com/rust-lang/cargo",
- sha: "fd90fd642d404d8c66505ca8db742c664ea352f2",
+ sha: "2d85908217f99a30aa5f68e05a8980704bb71fad",
lock: None,
},
Test {
type Err = ();
fn from_str(s: &str) -> Result<Mode, ()> {
match s {
- "compile-fail" => Ok(CompileFail),
- "parse-fail" => Ok(ParseFail),
- "run-fail" => Ok(RunFail),
- "run-pass" => Ok(RunPass),
- "run-pass-valgrind" => Ok(RunPassValgrind),
- "pretty" => Ok(Pretty),
- "debuginfo-lldb" => Ok(DebugInfoLldb),
- "debuginfo-gdb" => Ok(DebugInfoGdb),
- "codegen" => Ok(Codegen),
- "rustdoc" => Ok(Rustdoc),
- "codegen-units" => Ok(CodegenUnits),
- "incremental" => Ok(Incremental),
- "run-make" => Ok(RunMake),
- "ui" => Ok(Ui),
- "mir-opt" => Ok(MirOpt),
- _ => Err(()),
+ "compile-fail" => Ok(CompileFail),
+ "parse-fail" => Ok(ParseFail),
+ "run-fail" => Ok(RunFail),
+ "run-pass" => Ok(RunPass),
+ "run-pass-valgrind" => Ok(RunPassValgrind),
+ "pretty" => Ok(Pretty),
+ "debuginfo-lldb" => Ok(DebugInfoLldb),
+ "debuginfo-gdb" => Ok(DebugInfoGdb),
+ "codegen" => Ok(Codegen),
+ "rustdoc" => Ok(Rustdoc),
+ "codegen-units" => Ok(CodegenUnits),
+ "incremental" => Ok(Incremental),
+ "run-make" => Ok(RunMake),
+ "ui" => Ok(Ui),
+ "mir-opt" => Ok(MirOpt),
+ _ => Err(()),
}
}
}
impl fmt::Display for Mode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(match *self {
- CompileFail => "compile-fail",
- ParseFail => "parse-fail",
- RunFail => "run-fail",
- RunPass => "run-pass",
- RunPassValgrind => "run-pass-valgrind",
- Pretty => "pretty",
- DebugInfoGdb => "debuginfo-gdb",
- DebugInfoLldb => "debuginfo-lldb",
- Codegen => "codegen",
- Rustdoc => "rustdoc",
- CodegenUnits => "codegen-units",
- Incremental => "incremental",
- RunMake => "run-make",
- Ui => "ui",
- MirOpt => "mir-opt",
- }, f)
+ CompileFail => "compile-fail",
+ ParseFail => "parse-fail",
+ RunFail => "run-fail",
+ RunPass => "run-pass",
+ RunPassValgrind => "run-pass-valgrind",
+ Pretty => "pretty",
+ DebugInfoGdb => "debuginfo-gdb",
+ DebugInfoLldb => "debuginfo-lldb",
+ Codegen => "codegen",
+ Rustdoc => "rustdoc",
+ CodegenUnits => "codegen-units",
+ Incremental => "incremental",
+ RunMake => "run-make",
+ Ui => "ui",
+ MirOpt => "mir-opt",
+ },
+ f)
}
}
}
#[derive(PartialEq, Debug)]
-enum WhichLine { ThisLine, FollowPrevious(usize), AdjustBackward(usize) }
+enum WhichLine {
+ ThisLine,
+ FollowPrevious(usize),
+ AdjustBackward(usize),
+}
/// Looks for either "//~| KIND MESSAGE" or "//~^^... KIND MESSAGE"
/// The former is a "follow" that inherits its target from the preceding line;
let tag = match cfg {
Some(rev) => format!("//[{}]~", rev),
- None => format!("//~")
+ None => format!("//~"),
};
rdr.lines()
- .enumerate()
- .filter_map(|(line_num, line)| {
- parse_expected(last_nonfollow_error,
- line_num + 1,
- &line.unwrap(),
- &tag)
- .map(|(which, error)| {
- match which {
- FollowPrevious(_) => {}
- _ => last_nonfollow_error = Some(error.line_num),
- }
- error
- })
- })
- .collect()
+ .enumerate()
+ .filter_map(|(line_num, line)| {
+ parse_expected(last_nonfollow_error, line_num + 1, &line.unwrap(), &tag)
+ .map(|(which, error)| {
+ match which {
+ FollowPrevious(_) => {}
+ _ => last_nonfollow_error = Some(error.line_num),
+ }
+ error
+ })
+ })
+ .collect()
}
fn parse_expected(last_nonfollow_error: Option<usize>,
line: &str,
tag: &str)
-> Option<(WhichLine, Error)> {
- let start = match line.find(tag) { Some(i) => i, None => return None };
+ let start = match line.find(tag) {
+ Some(i) => i,
+ None => return None,
+ };
let (follow, adjusts) = if line[start + tag.len()..].chars().next().unwrap() == '|' {
(true, 0)
} else {
};
let kind_start = start + tag.len() + adjusts + (follow as usize);
let (kind, msg);
- match
- line[kind_start..].split_whitespace()
- .next()
- .expect("Encountered unexpected empty comment")
- .parse::<ErrorKind>()
- {
+ match line[kind_start..]
+ .split_whitespace()
+ .next()
+ .expect("Encountered unexpected empty comment")
+ .parse::<ErrorKind>() {
Ok(k) => {
// If we find `//~ ERROR foo` or something like that:
kind = Some(k);
let letters = line[kind_start..].chars();
msg = letters.skip_while(|c| c.is_whitespace())
- .skip_while(|c| !c.is_whitespace())
- .collect::<String>();
+ .skip_while(|c| !c.is_whitespace())
+ .collect::<String>();
}
Err(_) => {
// Otherwise we found `//~ foo`:
kind = None;
let letters = line[kind_start..].chars();
msg = letters.skip_while(|c| c.is_whitespace())
- .collect::<String>();
+ .collect::<String>();
}
}
let msg = msg.trim().to_owned();
preceding //~^ line.");
(FollowPrevious(line_num), line_num)
} else {
- let which =
- if adjusts > 0 { AdjustBackward(adjusts) } else { ThisLine };
+ let which = if adjusts > 0 {
+ AdjustBackward(adjusts)
+ } else {
+ ThisLine
+ };
let line_num = line_num - adjusts;
(which, line_num)
};
debug!("line={} tag={:?} which={:?} kind={:?} msg={:?}",
- line_num, tag, which, kind, msg);
- Some((which, Error { line_num: line_num,
- kind: kind,
- msg: msg, }))
+ line_num,
+ tag,
+ which,
+ kind,
+ msg);
+ Some((which,
+ Error {
+ line_num: line_num,
+ kind: kind,
+ msg: msg,
+ }))
}
should_fail: false,
};
- iter_header(testfile, None, &mut |ln| {
+ iter_header(testfile,
+ None,
+ &mut |ln| {
props.ignore =
- props.ignore ||
- parse_name_directive(ln, "ignore-test") ||
+ props.ignore || parse_name_directive(ln, "ignore-test") ||
parse_name_directive(ln, &ignore_target(config)) ||
parse_name_directive(ln, &ignore_architecture(config)) ||
parse_name_directive(ln, &ignore_stage(config)) ||
parse_name_directive(ln, &ignore_env(config)) ||
- (config.mode == common::Pretty &&
- parse_name_directive(ln, "ignore-pretty")) ||
+ (config.mode == common::Pretty && parse_name_directive(ln, "ignore-pretty")) ||
(config.target != config.host &&
parse_name_directive(ln, "ignore-cross-compile")) ||
- ignore_gdb(config, ln) ||
- ignore_lldb(config, ln);
+ ignore_gdb(config, ln) || ignore_lldb(config, ln);
- props.should_fail =
- props.should_fail ||
- parse_name_directive(ln, "should-fail");
+ props.should_fail = props.should_fail || parse_name_directive(ln, "should-fail");
});
return props;
format!("ignore-{}", util::get_arch(&config.target))
}
fn ignore_stage(config: &Config) -> String {
- format!("ignore-{}",
- config.stage_id.split('-').next().unwrap())
+ format!("ignore-{}", config.stage_id.split('-').next().unwrap())
}
fn ignore_env(config: &Config) -> String {
- format!("ignore-{}", util::get_env(&config.target).unwrap_or("<unknown>"))
+ format!("ignore-{}",
+ util::get_env(&config.target).unwrap_or("<unknown>"))
}
fn ignore_gdb(config: &Config, line: &str) -> bool {
if config.mode != common::DebugInfoGdb {
if let Some(ref actual_version) = config.gdb_version {
if line.contains("min-gdb-version") {
let min_version = line.trim()
- .split(' ')
- .last()
- .expect("Malformed GDB version directive");
+ .split(' ')
+ .last()
+ .expect("Malformed GDB version directive");
// Ignore if actual version is smaller the minimum required
// version
- gdb_version_to_int(actual_version) <
- gdb_version_to_int(min_version)
+ gdb_version_to_int(actual_version) < gdb_version_to_int(min_version)
} else {
false
}
if let Some(ref actual_version) = config.lldb_version {
if line.contains("min-lldb-version") {
let min_version = line.trim()
- .split(' ')
- .last()
- .expect("Malformed lldb version directive");
+ .split(' ')
+ .last()
+ .expect("Malformed lldb version directive");
// Ignore if actual version is smaller the minimum required
// version
- lldb_version_to_int(actual_version) <
- lldb_version_to_int(min_version)
+ lldb_version_to_int(actual_version) < lldb_version_to_int(min_version)
} else {
false
}
#[derive(Clone, Debug)]
pub struct TestProps {
// Lines that should be expected, in order, on standard out
- pub error_patterns: Vec<String> ,
+ pub error_patterns: Vec<String>,
// Extra flags to pass to the compiler
pub compile_flags: Vec<String>,
// Extra flags to pass when the compiled code is run (such as --bench)
// Other crates that should be compiled (typically from the same
// directory as the test, but for backwards compatibility reasons
// we also check the auxiliary directory)
- pub aux_builds: Vec<String> ,
+ pub aux_builds: Vec<String>,
// Environment settings to use for compiling
- pub rustc_env: Vec<(String,String)> ,
+ pub rustc_env: Vec<(String, String)>,
// Environment settings to use during execution
- pub exec_env: Vec<(String,String)> ,
+ pub exec_env: Vec<(String, String)>,
// Lines to check if they appear in the expected debugger output
- pub check_lines: Vec<String> ,
+ pub check_lines: Vec<String>,
// Build documentation for all specified aux-builds as well
pub build_aux_docs: bool,
// Flag to force a crate to be built with the host architecture
/// tied to a particular revision `foo` (indicated by writing
/// `//[foo]`), then the property is ignored unless `cfg` is
/// `Some("foo")`.
- pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>) {
- iter_header(testfile, cfg, &mut |ln| {
+ pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>) {
+ iter_header(testfile,
+ cfg,
+ &mut |ln| {
if let Some(ep) = parse_error_pattern(ln) {
self.error_patterns.push(ep);
}
if let Some(flags) = parse_compile_flags(ln) {
- self.compile_flags.extend(
- flags
- .split_whitespace()
- .map(|s| s.to_owned()));
+ self.compile_flags.extend(flags.split_whitespace()
+ .map(|s| s.to_owned()));
}
if let Some(r) = parse_revisions(ln) {
self.pretty_compare_only = parse_pretty_compare_only(ln);
}
- if let Some(ab) = parse_aux_build(ln) {
+ if let Some(ab) = parse_aux_build(ln) {
self.aux_builds.push(ab);
}
self.rustc_env.push(ee);
}
- if let Some(cl) = parse_check_line(ln) {
+ if let Some(cl) = parse_check_line(ln) {
self.check_lines.push(cl);
}
for key in vec!["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
match env::var(key) {
- Ok(val) =>
+ Ok(val) => {
if self.exec_env.iter().find(|&&(ref x, _)| *x == key).is_none() {
self.exec_env.push((key.to_owned(), val))
- },
+ }
+ }
Err(..) => {}
}
}
}
}
-fn iter_header(testfile: &Path,
- cfg: Option<&str>,
- it: &mut FnMut(&str)) {
+fn iter_header(testfile: &Path, cfg: Option<&str>, it: &mut FnMut(&str)) {
if testfile.is_dir() {
- return
+ return;
}
let rdr = BufReader::new(File::open(testfile).unwrap());
for ln in rdr.lines() {
None => false,
};
if matches {
- it(&ln[close_brace+1..]);
+ it(&ln[close_brace + 1..]);
}
} else {
panic!("malformed condition directive: expected `//[foo]`, found `{}`",
fn parse_env(line: &str, name: &str) -> Option<(String, String)> {
parse_name_value_directive(line, name).map(|nv| {
// nv is either FOO or FOO=BAR
- let mut strs: Vec<String> = nv
- .splitn(2, '=')
- .map(str::to_owned)
- .collect();
+ let mut strs: Vec<String> = nv.splitn(2, '=')
+ .map(str::to_owned)
+ .collect();
match strs.len() {
- 1 => (strs.pop().unwrap(), "".to_owned()),
- 2 => {
- let end = strs.pop().unwrap();
- (strs.pop().unwrap(), end)
- }
- n => panic!("Expected 1 or 2 strings, not {}", n)
+ 1 => (strs.pop().unwrap(), "".to_owned()),
+ 2 => {
+ let end = strs.pop().unwrap();
+ (strs.pop().unwrap(), end)
+ }
+ n => panic!("Expected 1 or 2 strings, not {}", n),
}
})
}
line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
}
-pub fn parse_name_value_directive(line: &str, directive: &str)
- -> Option<String> {
+pub fn parse_name_value_directive(line: &str, directive: &str) -> Option<String> {
let keycolon = format!("{}:", directive);
if let Some(colon) = line.find(&keycolon) {
- let value = line[(colon + keycolon.len()) .. line.len()].to_owned();
+ let value = line[(colon + keycolon.len())..line.len()].to_owned();
debug!("{}: {}", directive, value);
Some(value)
} else {
}
pub fn gdb_version_to_int(version_string: &str) -> isize {
- let error_string = format!(
- "Encountered GDB version string with unexpected format: {}",
- version_string);
+ let error_string = format!("Encountered GDB version string with unexpected format: {}",
+ version_string);
let error_string = error_string;
let components: Vec<&str> = version_string.trim().split('.').collect();
}
pub fn lldb_version_to_int(version_string: &str) -> isize {
- let error_string = format!(
- "Encountered LLDB version string with unexpected format: {}",
- version_string);
+ let error_string = format!("Encountered LLDB version string with unexpected format: {}",
+ version_string);
let error_string = error_string;
let major: isize = version_string.parse().ok().expect(&error_string);
return major;
use rustc_serialize::json;
use std::str::FromStr;
use std::path::Path;
-use runtest::{ProcRes};
+use runtest::ProcRes;
// These structs are a subset of the ones found in
// `syntax::json`.
pub fn parse_output(file_name: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
output.lines()
- .flat_map(|line| parse_line(file_name, line, output, proc_res))
- .collect()
+ .flat_map(|line| parse_line(file_name, line, output, proc_res))
+ .collect()
}
fn parse_line(file_name: &str, line: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
expected_errors
}
Err(error) => {
- proc_res.fatal(Some(&format!(
- "failed to decode compiler output as json: `{}`\noutput: {}\nline: {}",
- error, line, output)));
+ proc_res.fatal(Some(&format!("failed to decode compiler output as json: \
+ `{}`\noutput: {}\nline: {}",
+ error,
+ line,
+ output)));
}
}
} else {
diagnostic: &Diagnostic,
default_spans: &[&DiagnosticSpan],
file_name: &str) {
- let spans_in_this_file: Vec<_> =
- diagnostic.spans.iter()
- .filter(|span| Path::new(&span.file_name) == Path::new(&file_name))
- .collect();
-
- let primary_spans: Vec<_> =
- spans_in_this_file.iter()
- .cloned()
- .filter(|span| span.is_primary)
- .collect();
+ let spans_in_this_file: Vec<_> = diagnostic.spans
+ .iter()
+ .filter(|span| Path::new(&span.file_name) == Path::new(&file_name))
+ .collect();
+
+ let primary_spans: Vec<_> = spans_in_this_file.iter()
+ .cloned()
+ .filter(|span| span.is_primary)
+ .collect();
let primary_spans = if primary_spans.is_empty() {
// subdiagnostics often don't have a span of their own;
// inherit the span from the parent in that case
for span in primary_spans {
let msg = with_code(span, first_line);
let kind = ErrorKind::from_str(&diagnostic.level).ok();
- expected_errors.push(
- Error {
- line_num: span.line_start,
- kind: kind,
- msg: msg,
- }
- );
+ expected_errors.push(Error {
+ line_num: span.line_start,
+ kind: kind,
+ msg: msg,
+ });
}
}
for next_line in message_lines {
for span in primary_spans {
- expected_errors.push(
- Error {
- line_num: span.line_start,
- kind: None,
- msg: with_code(span, next_line),
- }
- );
+ expected_errors.push(Error {
+ line_num: span.line_start,
+ kind: None,
+ msg: with_code(span, next_line),
+ });
}
}
let start_line = primary_spans.iter().map(|s| s.line_start).min().expect("\
every suggestion should have at least one span");
for (index, line) in rendered.lines().enumerate() {
- expected_errors.push(
- Error {
- line_num: start_line + index,
- kind: Some(ErrorKind::Suggestion),
- msg: line.to_string()
- }
- );
+ expected_errors.push(Error {
+ line_num: start_line + index,
+ kind: Some(ErrorKind::Suggestion),
+ msg: line.to_string(),
+ });
}
}
// Add notes for the backtrace
for span in primary_spans {
for frame in &span.expansion {
- push_backtrace(expected_errors,
- frame,
- file_name);
+ push_backtrace(expected_errors, frame, file_name);
}
}
// Add notes for any labels that appear in the message.
for span in spans_in_this_file.iter()
- .filter(|span| span.label.is_some())
- {
+ .filter(|span| span.label.is_some()) {
expected_errors.push(Error {
line_num: span.line_start,
kind: Some(ErrorKind::Note),
- msg: span.label.clone().unwrap()
+ msg: span.label.clone().unwrap(),
});
}
expansion: &DiagnosticSpanMacroExpansion,
file_name: &str) {
if Path::new(&expansion.span.file_name) == Path::new(&file_name) {
- expected_errors.push(
- Error {
- line_num: expansion.span.line_start,
- kind: Some(ErrorKind::Note),
- msg: format!("in this expansion of {}", expansion.macro_decl_name),
- }
- );
+ expected_errors.push(Error {
+ line_num: expansion.span.line_start,
+ kind: Some(ErrorKind::Note),
+ msg: format!("in this expansion of {}", expansion.macro_decl_name),
+ });
}
for previous_expansion in &expansion.span.expansion {
use std::ffi::OsString;
use std::io::prelude::*;
use std::path::PathBuf;
-use std::process::{ExitStatus, Command, Child, Output, Stdio};
+use std::process::{Child, Command, ExitStatus, Output, Stdio};
pub fn dylib_env_var() -> &'static str {
if cfg!(windows) {
// search path for the child.
let var = dylib_env_var();
let mut path = env::split_paths(&env::var_os(var).unwrap_or(OsString::new()))
- .collect::<Vec<_>>();
+ .collect::<Vec<_>>();
if let Some(p) = aux_path {
path.insert(0, PathBuf::from(p))
}
cmd.env(var, newpath);
}
-pub struct Result {pub status: ExitStatus, pub out: String, pub err: String}
+pub struct Result {
+ pub status: ExitStatus,
+ pub out: String,
+ pub err: String,
+}
pub fn run(lib_path: &str,
prog: &str,
aux_path: Option<&str>,
args: &[String],
- env: Vec<(String, String)> ,
- input: Option<String>) -> Option<Result> {
+ env: Vec<(String, String)>,
+ input: Option<String>)
+ -> Option<Result> {
let mut cmd = Command::new(prog);
cmd.args(args)
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped());
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped());
add_target_env(&mut cmd, lib_path, aux_path);
for (key, val) in env {
cmd.env(&key, &val);
if let Some(input) = input {
process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
}
- let Output { status, stdout, stderr } =
- process.wait_with_output().unwrap();
+ let Output { status, stdout, stderr } = process.wait_with_output().unwrap();
Some(Result {
status: status,
out: String::from_utf8(stdout).unwrap(),
- err: String::from_utf8(stderr).unwrap()
+ err: String::from_utf8(stderr).unwrap(),
})
- },
- Err(..) => None
+ }
+ Err(..) => None,
}
}
pub fn run_background(lib_path: &str,
- prog: &str,
- aux_path: Option<&str>,
- args: &[String],
- env: Vec<(String, String)> ,
- input: Option<String>) -> Option<Child> {
+ prog: &str,
+ aux_path: Option<&str>,
+ args: &[String],
+ env: Vec<(String, String)>,
+ input: Option<String>)
+ -> Option<Child> {
let mut cmd = Command::new(prog);
cmd.args(args)
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped());
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped());
add_target_env(&mut cmd, lib_path, aux_path);
for (key, val) in env {
cmd.env(&key, &val);
}
Some(process)
- },
- Err(..) => None
+ }
+ Err(..) => None,
}
}
let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC];
let mut maxfiles: libc::c_int = 0;
let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
- if libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size,
- null_mut(), 0) != 0 {
+ if libc::sysctl(&mut mib[0],
+ 2,
+ &mut maxfiles as *mut _ as *mut _,
+ &mut size,
+ null_mut(),
+ 0) != 0 {
let err = io::Error::last_os_error();
panic!("raise_fd_limit: error calling sysctl: {}", err);
}
// Fetch the current resource limits
- let mut rlim = libc::rlimit{rlim_cur: 0, rlim_max: 0};
+ let mut rlim = libc::rlimit {
+ rlim_cur: 0,
+ rlim_max: 0,
+ };
if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 {
let err = io::Error::last_os_error();
panic!("raise_fd_limit: error calling getrlimit: {}", err);
// patterns still match the raw compiler output.
if self.props.error_patterns.is_empty() {
args.extend(["--error-format",
- "json",
- "-Z",
- "unstable-options"]
+ "json"]
.iter()
.map(|s| s.to_string()));
}
pub fn diff_lines(actual: &str, expected: &str) -> Vec<String> {
// mega simplistic diff algorithm that just prints the things added/removed
- zip_all(actual.lines(), expected.lines()).enumerate().filter_map(|(i, (a,e))| {
- match (a, e) {
- (Some(a), Some(e)) => {
- if lines_match(e, a) {
- None
- } else {
- Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a))
+ zip_all(actual.lines(), expected.lines())
+ .enumerate()
+ .filter_map(|(i, (a, e))| {
+ match (a, e) {
+ (Some(a), Some(e)) => {
+ if lines_match(e, a) {
+ None
+ } else {
+ Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a))
+ }
}
- },
- (Some(a), None) => {
- Some(format!("{:3} -\n + |{}|\n", i, a))
- },
- (None, Some(e)) => {
- Some(format!("{:3} - |{}|\n +\n", i, e))
- },
- (None, None) => panic!("Cannot get here")
- }
- }).collect()
+ (Some(a), None) => Some(format!("{:3} -\n + |{}|\n", i, a)),
+ (None, Some(e)) => Some(format!("{:3} - |{}|\n +\n", i, e)),
+ (None, None) => panic!("Cannot get here"),
+ }
+ })
+ .collect()
}
fn lines_match(expected: &str, mut actual: &str) -> bool {
match actual.find(part) {
Some(j) => {
if i == 0 && j != 0 {
- return false
+ return false;
}
actual = &actual[j + part.len()..];
}
- None => {
- return false
- }
+ None => return false,
}
}
actual.is_empty() || expected.ends_with("[..]")
second: I2,
}
-impl<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>> Iterator for ZipAll<I1, I2> {
+impl<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>> Iterator for ZipAll<I1, I2> {
type Item = (Option<T>, Option<T>);
fn next(&mut self) -> Option<(Option<T>, Option<T>)> {
let first = self.first.next();
match (first, second) {
(None, None) => None,
- (a, b) => Some((a, b))
+ (a, b) => Some((a, b)),
}
}
}
-fn zip_all<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
+fn zip_all<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
ZipAll {
first: a,
second: b,
use common::Config;
/// Conversion table from triple OS name to Rust SYSNAME
-const OS_TABLE: &'static [(&'static str, &'static str)] = &[
- ("android", "android"),
- ("bitrig", "bitrig"),
- ("darwin", "macos"),
- ("dragonfly", "dragonfly"),
- ("freebsd", "freebsd"),
- ("ios", "ios"),
- ("linux", "linux"),
- ("mingw32", "windows"),
- ("netbsd", "netbsd"),
- ("openbsd", "openbsd"),
- ("win32", "windows"),
- ("windows", "windows"),
- ("solaris", "solaris"),
- ("emscripten", "emscripten"),
-];
+const OS_TABLE: &'static [(&'static str, &'static str)] = &[("android", "android"),
+ ("bitrig", "bitrig"),
+ ("darwin", "macos"),
+ ("dragonfly", "dragonfly"),
+ ("freebsd", "freebsd"),
+ ("ios", "ios"),
+ ("linux", "linux"),
+ ("mingw32", "windows"),
+ ("netbsd", "netbsd"),
+ ("openbsd", "openbsd"),
+ ("win32", "windows"),
+ ("windows", "windows"),
+ ("solaris", "solaris"),
+ ("emscripten", "emscripten")];
-const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[
- ("aarch64", "aarch64"),
- ("amd64", "x86_64"),
- ("arm", "arm"),
- ("arm64", "aarch64"),
- ("hexagon", "hexagon"),
- ("i386", "x86"),
- ("i686", "x86"),
- ("mips", "mips"),
- ("msp430", "msp430"),
- ("powerpc", "powerpc"),
- ("powerpc64", "powerpc64"),
- ("s390x", "systemz"),
- ("sparc", "sparc"),
- ("x86_64", "x86_64"),
- ("xcore", "xcore"),
- ("asmjs", "asmjs"),
-];
+const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[("aarch64", "aarch64"),
+ ("amd64", "x86_64"),
+ ("arm", "arm"),
+ ("arm64", "aarch64"),
+ ("hexagon", "hexagon"),
+ ("i386", "x86"),
+ ("i686", "x86"),
+ ("mips", "mips"),
+ ("msp430", "msp430"),
+ ("powerpc", "powerpc"),
+ ("powerpc64", "powerpc64"),
+ ("s390x", "systemz"),
+ ("sparc", "sparc"),
+ ("x86_64", "x86_64"),
+ ("xcore", "xcore"),
+ ("asmjs", "asmjs")];
pub fn get_os(triple: &str) -> &'static str {
for &(triple_os, os) in OS_TABLE {
if triple.contains(triple_os) {
- return os
+ return os;
}
}
panic!("Cannot determine OS from triple");
pub fn get_arch(triple: &str) -> &'static str {
for &(triple_arch, arch) in ARCH_TABLE {
if triple.contains(triple_arch) {
- return arch
+ return arch;
}
}
panic!("Cannot determine Architecture from triple");
// Windows just uses PATH as the library search path, so we have to
// maintain the current value while adding our own
match env::var(lib_path_env_var()) {
- Ok(curr) => {
- format!("{}{}{}", path, path_div(), curr)
- }
- Err(..) => path.to_owned()
+ Ok(curr) => format!("{}{}{}", path, path_div(), curr),
+ Err(..) => path.to_owned(),
}
}
-pub fn lib_path_env_var() -> &'static str { "PATH" }
-fn path_div() -> &'static str { ";" }
+pub fn lib_path_env_var() -> &'static str {
+ "PATH"
+}
+fn path_div() -> &'static str {
+ ";"
+}
pub fn logv(config: &Config, s: String) {
debug!("{}", s);
- if config.verbose { println!("{}", s); }
+ if config.verbose {
+ println!("{}", s);
+ }
}