#### `[rust]`:
- `debuginfo = true` - Build a compiler with debuginfo. Makes building rustc slower, but then you can use a debugger to debug `rustc`.
- `debuginfo-lines = true` - An alternative to `debuginfo = true` that doesn't let you use a debugger, but doesn't make building rustc slower and still gives you line numbers in backtraces.
+- `debuginfo-tools = true` - Build the extended tools with debuginfo.
- `debug-assertions = true` - Makes the log output of `debug!` work.
- `optimize = false` - Disable optimizations to speed up compilation of stage1 rust, but makes the stage1 compiler x100 slower.
# standard library.
#debuginfo-only-std = false
+# Enable debuginfo for the extended tools: cargo, rls, rustfmt
+# Adding debuginfo makes them several times larger.
+#debuginfo-tools = false
+
# Whether or not jemalloc is built and enabled
#use-jemalloc = true
[[package]]
name = "arena"
version = "0.0.0"
+dependencies = [
+ "rustc_data_structures 0.0.0",
+]
[[package]]
name = "arrayvec"
test::TheBook, test::UnstableBook,
test::Rustfmt, test::Miri, test::Clippy, test::RustdocJS, test::RustdocTheme,
// Run run-make last, since these won't pass without make on Windows
- test::RunMake),
+ test::RunMake, test::RustdocUi),
Kind::Bench => describe!(test::Crate, test::CrateLibrustc),
Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
doc::Standalone, doc::Std, doc::Test, doc::WhitelistedRustc, doc::Rustc,
cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build)));
}
- if mode != Mode::Tool {
- // Tools don't get debuginfo right now, e.g. cargo and rls don't
- // get compiled with debuginfo.
- // Adding debuginfo increases their sizes by a factor of 3-4.
+ if mode == Mode::Tool {
+ // Tools like cargo and rls don't get debuginfo by default right now, but this can be
+ // enabled in the config. Adding debuginfo makes them several times larger.
+ if self.config.rust_debuginfo_tools {
+ cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
+ cargo.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string());
+ }
+ } else {
cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
cargo.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string());
cargo.env("RUSTC_FORCE_UNSTABLE", "1");
pub rust_debuginfo: bool,
pub rust_debuginfo_lines: bool,
pub rust_debuginfo_only_std: bool,
+ pub rust_debuginfo_tools: bool,
pub rust_rpath: bool,
pub rustc_parallel_queries: bool,
pub rustc_default_linker: Option<String>,
debuginfo: Option<bool>,
debuginfo_lines: Option<bool>,
debuginfo_only_std: Option<bool>,
+ debuginfo_tools: Option<bool>,
experimental_parallel_queries: Option<bool>,
debug_jemalloc: Option<bool>,
use_jemalloc: Option<bool>,
let mut llvm_assertions = None;
let mut debuginfo_lines = None;
let mut debuginfo_only_std = None;
+ let mut debuginfo_tools = None;
let mut debug = None;
let mut debug_jemalloc = None;
let mut debuginfo = None;
debuginfo = rust.debuginfo;
debuginfo_lines = rust.debuginfo_lines;
debuginfo_only_std = rust.debuginfo_only_std;
+ debuginfo_tools = rust.debuginfo_tools;
optimize = rust.optimize;
ignore_git = rust.ignore_git;
debug_jemalloc = rust.debug_jemalloc;
};
config.rust_debuginfo_lines = debuginfo_lines.unwrap_or(default);
config.rust_debuginfo_only_std = debuginfo_only_std.unwrap_or(default);
+ config.rust_debuginfo_tools = debuginfo_tools.unwrap_or(false);
let default = debug == Some(true);
config.debug_jemalloc = debug_jemalloc.unwrap_or(default);
o("debuginfo", "rust.debuginfo", "build with debugger metadata")
o("debuginfo-lines", "rust.debuginfo-lines", "build with line number debugger metadata")
o("debuginfo-only-std", "rust.debuginfo-only-std", "build only libstd with debugging information")
+o("debuginfo-tools", "rust.debuginfo-tools", "build extended tools with debugging information")
o("debug-jemalloc", "rust.debug-jemalloc", "build jemalloc with --enable-debug --enable-fill")
v("save-toolstates", "rust.save-toolstates", "save build and test status of external tools into this file")
continue;
}
- cmd_finder.must_have(build.cc(*target));
- if let Some(ar) = build.ar(*target) {
- cmd_finder.must_have(ar);
+ if !build.config.dry_run {
+ cmd_finder.must_have(build.cc(*target));
+ if let Some(ar) = build.ar(*target) {
+ cmd_finder.must_have(ar);
+ }
}
}
for host in &build.hosts {
- cmd_finder.must_have(build.cxx(*host).unwrap());
+ if !build.config.dry_run {
+ cmd_finder.must_have(build.cxx(*host).unwrap());
+ }
// The msvc hosts don't use jemalloc, turn it off globally to
// avoid packaging the dummy liballoc_jemalloc on that platform.
}
}
+/// Test step for the `rustdoc-ui` suite (src/test/rustdoc-ui). It delegates to
+/// the generic `Compiletest` step in "ui" mode; the compiletest setup treats a
+/// suite ending in "rustdoc-ui" specially (it passes `--rustdoc-path`).
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct RustdocUi {
+ pub host: Interned<String>,
+ pub target: Interned<String>,
+ pub compiler: Compiler,
+}
+
+impl Step for RustdocUi {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/test/rustdoc-ui")
+ }
+
+ fn make_run(run: RunConfig) {
+ // Use the top-stage compiler for the invoked host.
+ let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+ run.builder.ensure(RustdocUi {
+ host: run.host,
+ target: run.target,
+ compiler,
+ });
+ }
+
+ fn run(self, builder: &Builder) {
+ // Reuse the compiletest harness; mode "ui" + suite "rustdoc-ui"
+ // selects the rustdoc-specific handling in the harness setup.
+ builder.ensure(Compiletest {
+ compiler: self.compiler,
+ target: self.target,
+ mode: "ui",
+ suite: "rustdoc-ui",
+ })
+ }
+}
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Tidy;
cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
cmd.arg("--rustc-path").arg(builder.rustc(compiler));
+ let is_rustdoc_ui = suite.ends_with("rustdoc-ui");
+
// Avoid depending on rustdoc when we don't need it.
- if mode == "rustdoc" || (mode == "run-make" && suite.ends_with("fulldeps")) {
+ if mode == "rustdoc" ||
+ (mode == "run-make" && suite.ends_with("fulldeps")) ||
+ (mode == "ui" && is_rustdoc_ui) {
cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host));
}
cmd.arg("--nodejs").arg(nodejs);
}
- let mut flags = vec!["-Crpath".to_string()];
- if build.config.rust_optimize_tests {
- flags.push("-O".to_string());
- }
- if build.config.rust_debuginfo_tests {
- flags.push("-g".to_string());
+ let mut flags = if is_rustdoc_ui {
+ Vec::new()
+ } else {
+ vec!["-Crpath".to_string()]
+ };
+ if !is_rustdoc_ui {
+ if build.config.rust_optimize_tests {
+ flags.push("-O".to_string());
+ }
+ if build.config.rust_debuginfo_tests {
+ flags.push("-g".to_string());
+ }
}
flags.push("-Zunstable-options".to_string());
flags.push(build.config.cmd.rustc_args().join(" "));
--set build.nodejs=/node-v9.2.0-linux-x64/bin/node \
--set rust.lld
+# Some run-make tests have assertions about code size, and enabling debug
+# assertions in libstd causes the binary to be much bigger than it would
+# otherwise normally be. We already test libstd with debug assertions in lots of
+# other contexts as well.
+ENV NO_DEBUG_ASSERTIONS=1
+
ENV SCRIPT python2.7 /checkout/x.py test --target $TARGETS \
src/test/run-make \
src/test/ui \
of a dependency. `--library-path` provides directories to search in, `--extern`
instead lets you specify exactly which dependency is located where.
+## `-C`/`--codegen`: pass codegen options to rustc
+
+Using this flag looks like this:
+
+```bash
+$ rustdoc src/lib.rs -C target_feature=+avx
+$ rustdoc src/lib.rs --codegen target_feature=+avx
+
+$ rustdoc --test src/lib.rs -C target_feature=+avx
+$ rustdoc --test src/lib.rs --codegen target_feature=+avx
+
+$ rustdoc --test README.md -C target_feature=+avx
+$ rustdoc --test README.md --codegen target_feature=+avx
+```
+
+When rustdoc generates documentation, looks for documentation tests, or executes documentation
+tests, it needs to compile some rust code, at least part-way. This flag allows you to tell rustdoc
+to provide some extra codegen options to rustc when it runs these compilations. Most of the time,
+these options won't affect a regular documentation run, but if something depends on target features
+to be enabled, or documentation tests need to use some additional options, this flag allows you to
+affect that.
+
+The arguments to this flag are the same as those for the `-C` flag on rustc. Run `rustc -C help` to
+get the full list.
+
## `--passes`: add more rustdoc passes
Using this flag looks like this:
we don't want the reader to see every line every time. Here's what we put in
our source code:
-```text
- First, we set `x` to five:
+``````markdown
+First, we set `x` to five:
- ```
- let x = 5;
- # let y = 6;
- # println!("{}", x + y);
- ```
+```
+let x = 5;
+# let y = 6;
+# println!("{}", x + y);
+```
- Next, we set `y` to six:
+Next, we set `y` to six:
- ```
- # let x = 5;
- let y = 6;
- # println!("{}", x + y);
- ```
+```
+# let x = 5;
+let y = 6;
+# println!("{}", x + y);
+```
- Finally, we print the sum of `x` and `y`:
+Finally, we print the sum of `x` and `y`:
- ```
- # let x = 5;
- # let y = 6;
- println!("{}", x + y);
- ```
```
+# let x = 5;
+# let y = 6;
+println!("{}", x + y);
+```
+``````
By repeating all parts of the example, you can ensure that your example still
compiles, while only showing the parts that are relevant to that part of your
#[stable(feature = "box_from_slice", since = "1.17.0")]
impl<'a> From<&'a str> for Box<str> {
+ #[inline]
fn from(s: &'a str) -> Box<str> {
unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
}
#[stable(feature = "boxed_str_conv", since = "1.19.0")]
impl From<Box<str>> for Box<[u8]> {
+ #[inline]
fn from(s: Box<str>) -> Self {
unsafe { Box::from_raw(Box::into_raw(s) as *mut [u8]) }
}
#[stable(feature = "btree_drop", since = "1.7.0")]
impl<K, V> Drop for IntoIter<K, V> {
fn drop(&mut self) {
- for _ in &mut *self {
- }
+ self.for_each(drop);
unsafe {
let leaf_node = ptr::read(&self.front).into_node();
if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
where F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
- for _ in self { }
+ self.for_each(drop);
}
}
unsafe {
let elem_size = mem::size_of::<T>();
- let alloc_size = cap.checked_mul(elem_size).expect("capacity overflow");
- alloc_guard(alloc_size).expect("capacity overflow");
+ let alloc_size = cap.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow());
+ alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());
// handles ZSTs and `cap = 0` alike
let ptr = if alloc_size == 0 {
// `from_size_align_unchecked`.
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
- alloc_guard(new_size).expect("capacity overflow");
+ alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(),
cur,
new_size);
// overflow and the alignment is sufficiently small.
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
- alloc_guard(new_size).expect("capacity overflow");
+ alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) {
Ok(_) => {
// We can't directly divide `size`.
pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
match self.try_reserve_exact(used_cap, needed_extra_cap) {
- Err(CapacityOverflow) => panic!("capacity overflow"),
+ Err(CapacityOverflow) => capacity_overflow(),
Err(AllocErr) => self.a.oom(),
Ok(()) => { /* yay */ }
}
/// The same as try_reserve, but errors are lowered to a call to oom().
pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
match self.try_reserve(used_cap, needed_extra_cap) {
- Err(CapacityOverflow) => panic!("capacity overflow"),
+ Err(CapacityOverflow) => capacity_overflow(),
Err(AllocErr) => self.a.oom(),
Ok(()) => { /* yay */ }
}
}
let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)
- .expect("capacity overflow");
+ .unwrap_or_else(|_| capacity_overflow());
// Here, `cap < used_cap + needed_extra_cap <= new_cap`
// (regardless of whether `self.cap - used_cap` wrapped).
let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
// FIXME: may crash and burn on over-reserve
- alloc_guard(new_layout.size()).expect("capacity overflow");
+ alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
match self.a.grow_in_place(
NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(),
) {
}
}
+// One central function responsible for reporting capacity overflows. This'll
+// ensure that the code generation related to these panics is minimal as there's
+// only one location which panics rather than a bunch throughout the module.
+// Diverges (`-> !`), which is what lets callers use it inside
+// `unwrap_or_else(|_| capacity_overflow())` closures of any result type.
+fn capacity_overflow() -> ! {
+ panic!("capacity overflow")
+}
+
#[cfg(test)]
mod tests {
use super::*;
/// assert_eq!(*boxed_bytes, *s.as_bytes());
/// ```
#[stable(feature = "str_box_extras", since = "1.20.0")]
+ #[inline]
pub fn into_boxed_bytes(self: Box<str>) -> Box<[u8]> {
self.into()
}
/// assert_eq!(boxed_str.into_string(), string);
/// ```
#[stable(feature = "box_str", since = "1.4.0")]
+ #[inline]
pub fn into_string(self: Box<str>) -> String {
let slice = Box::<[u8]>::from(self);
unsafe { String::from_utf8_unchecked(slice.into_vec()) }
/// assert_eq!("☺", &*smile);
/// ```
#[stable(feature = "str_box_extras", since = "1.20.0")]
+#[inline]
pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
Box::from_raw(Box::into_raw(v) as *mut str)
}
/// let b = s.into_boxed_str();
/// ```
#[stable(feature = "box_str", since = "1.4.0")]
+ #[inline]
pub fn into_boxed_str(self) -> Box<str> {
let slice = self.vec.into_boxed_slice();
unsafe { from_boxed_utf8_unchecked(slice) }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
- self.buf.shrink_to_fit(self.len);
+ if self.capacity() != self.len {
+ self.buf.shrink_to_fit(self.len);
+ }
}
/// Shrinks the capacity of the vector with a lower bound.
impl<'a, T> Drop for Drain<'a, T> {
fn drop(&mut self) {
// exhaust self first
- while let Some(_) = self.next() {}
+ self.for_each(drop);
if self.tail_len > 0 {
unsafe {
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<'a, I: Iterator> Drop for Splice<'a, I> {
fn drop(&mut self) {
- // exhaust drain first
- while let Some(_) = self.drain.next() {}
-
+ self.drain.by_ref().for_each(drop);
unsafe {
if self.drain.tail_len == 0 {
where F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
- for _ in self.by_ref() { }
-
+ self.for_each(drop);
unsafe {
self.vec.set_len(self.old_len - self.del);
}
#[stable(feature = "drain", since = "1.6.0")]
impl<'a, T: 'a> Drop for Drain<'a, T> {
fn drop(&mut self) {
- for _ in self.by_ref() {}
+ self.for_each(drop);
let source_deque = unsafe { self.deque.as_mut() };
name = "arena"
path = "lib.rs"
crate-type = ["dylib"]
+
+[dependencies]
+rustc_data_structures = { path = "../librustc_data_structures" }
\ No newline at end of file
#![allow(deprecated)]
extern crate alloc;
+extern crate rustc_data_structures;
+
+use rustc_data_structures::sync::MTLock;
use std::cell::{Cell, RefCell};
use std::cmp;
chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}
+unsafe impl Send for DroplessArena {}
+
impl DroplessArena {
pub fn new() -> DroplessArena {
DroplessArena {
}
}
+/// Thread-safe wrapper around `TypedArena`: every operation acquires an
+/// `MTLock`, so the arena can be shared between threads.
+pub struct SyncTypedArena<T> {
+ lock: MTLock<TypedArena<T>>,
+}
+
+impl<T> SyncTypedArena<T> {
+ /// Creates a new, empty arena.
+ #[inline(always)]
+ pub fn new() -> SyncTypedArena<T> {
+ SyncTypedArena {
+ lock: MTLock::new(TypedArena::new())
+ }
+ }
+
+ /// Allocates `object`, returning a mutable reference tied to `&self`.
+ #[inline(always)]
+ pub fn alloc(&self, object: T) -> &mut T {
+ // Extend the lifetime of the result since it's limited to the lock guard
+ // NOTE(review): sound only if the arena never moves values once
+ // allocated — presumably true of TypedArena; confirm.
+ unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
+ }
+
+ /// Copies `slice` into the arena, returning a mutable slice tied to `&self`.
+ #[inline(always)]
+ pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
+ where
+ T: Copy,
+ {
+ // Extend the lifetime of the result since it's limited to the lock guard
+ unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
+ }
+
+ /// Clears the arena. Takes `&mut self`, so `get_mut` skips locking.
+ #[inline(always)]
+ pub fn clear(&mut self) {
+ self.lock.get_mut().clear();
+ }
+}
+
+/// Thread-safe wrapper around `DroplessArena`: every operation acquires an
+/// `MTLock`, so the arena can be shared between threads.
+pub struct SyncDroplessArena {
+ lock: MTLock<DroplessArena>,
+}
+
+impl SyncDroplessArena {
+ /// Creates a new, empty arena.
+ #[inline(always)]
+ pub fn new() -> SyncDroplessArena {
+ SyncDroplessArena {
+ lock: MTLock::new(DroplessArena::new())
+ }
+ }
+
+ /// Returns whether `ptr` points into this arena's storage.
+ #[inline(always)]
+ pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
+ self.lock.lock().in_arena(ptr)
+ }
+
+ /// Allocates `object`, returning a mutable reference tied to `&self`.
+ #[inline(always)]
+ pub fn alloc<T>(&self, object: T) -> &mut T {
+ // Extend the lifetime of the result since it's limited to the lock guard
+ // NOTE(review): sound only if the arena never moves values once
+ // allocated — presumably true of DroplessArena; confirm.
+ unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
+ }
+
+ /// Copies `slice` into the arena, returning a mutable slice tied to `&self`.
+ #[inline(always)]
+ pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
+ where
+ T: Copy,
+ {
+ // Extend the lifetime of the result since it's limited to the lock guard
+ unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
+ }
+}
+
#[cfg(test)]
mod tests {
extern crate test;
// truncation. However other flags like `fill`, `width` and `align`
// must act as always.
if let Some((i, _)) = s.char_indices().skip(max).next() {
- &s[..i]
+ // LLVM here can't prove that `..i` won't panic `&s[..i]`, but
+ // we know that it can't panic. Use `get` + `unwrap_or` to avoid
+ // `unsafe` and otherwise don't emit any panic-related code
+ // here.
+ s.get(..i).unwrap_or(&s)
} else {
&s
}
/// an extra layer of indirection. `flat_map()` will remove this extra layer
/// on its own.
///
- /// You can think of [`flat_map(f)`][flat_map] as the semantic equivalent
+ /// You can think of `flat_map(f)` as the semantic equivalent
/// of [`map`]ping, and then [`flatten`]ing as in `map(f).flatten()`.
///
/// Another way of thinking about `flat_map()`: [`map`]'s closure returns
#![feature(asm)]
#![feature(associated_type_defaults)]
#![feature(attr_literals)]
-#![feature(cfg_target_feature)]
#![feature(cfg_target_has_atomic)]
#![feature(concat_idents)]
#![feature(const_fn)]
#![feature(specialization)]
#![feature(staged_api)]
#![feature(stmt_expr_attributes)]
-#![feature(target_feature)]
#![feature(unboxed_closures)]
#![feature(untagged_unions)]
#![feature(unwind_attributes)]
+#![cfg_attr(not(stage0), feature(mmx_target_feature))]
+#![cfg_attr(not(stage0), feature(tbm_target_feature))]
+#![cfg_attr(not(stage0), feature(sse4a_target_feature))]
+#![cfg_attr(not(stage0), feature(arm_target_feature))]
+#![cfg_attr(not(stage0), feature(powerpc_target_feature))]
+#![cfg_attr(not(stage0), feature(mips_target_feature))]
+#![cfg_attr(not(stage0), feature(aarch64_target_feature))]
+
+#![cfg_attr(stage0, feature(target_feature))]
+#![cfg_attr(stage0, feature(cfg_target_feature))]
+
#[prelude_import]
#[allow(unused)]
use prelude::v1::*;
// things like SIMD and such. Note that the actual source for all this lies in a
// different repository, rust-lang-nursery/stdsimd. That's why the setup here is
// a bit wonky.
+// No-op stubs for helper macros that the vendored stdsimd sources expect to
+// exist; in this build they all expand to nothing (the test_* macros are only
+// meaningful inside the stdsimd repository's own test setup).
+#[allow(unused_macros)]
+macro_rules! test_v16 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v32 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v64 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v128 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v256 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v512 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }
#[path = "../stdsimd/coresimd/mod.rs"]
#[allow(missing_docs, missing_debug_implementations, dead_code)]
#[unstable(feature = "stdsimd", issue = "48556")]
#[unstable(feature = "stdsimd", issue = "48556")]
#[cfg(not(stage0))]
pub use coresimd::simd;
-#[unstable(feature = "stdsimd", issue = "48556")]
+#[stable(feature = "simd_arch", since = "1.27.0")]
#[cfg(not(stage0))]
pub use coresimd::arch;
/// ```
/// #![feature(range_contains)]
///
- /// assert!(!(3..5).contains(2));
- /// assert!( (3..5).contains(3));
- /// assert!( (3..5).contains(4));
- /// assert!(!(3..5).contains(5));
+ /// use std::f32;
///
- /// assert!(!(3..3).contains(3));
- /// assert!(!(3..2).contains(3));
+ /// assert!(!(3..5).contains(&2));
+ /// assert!( (3..5).contains(&3));
+ /// assert!( (3..5).contains(&4));
+ /// assert!(!(3..5).contains(&5));
+ ///
+ /// assert!(!(3..3).contains(&3));
+ /// assert!(!(3..2).contains(&3));
+ ///
+ /// assert!( (0.0..1.0).contains(&0.5));
+ /// assert!(!(0.0..1.0).contains(&f32::NAN));
+ /// assert!(!(0.0..f32::NAN).contains(&0.5));
+ /// assert!(!(f32::NAN..1.0).contains(&0.5));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
- pub fn contains(&self, item: Idx) -> bool {
- (self.start <= item) && (item < self.end)
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
/// Returns `true` if the range contains no items.
}
}
-#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// ```
/// #![feature(range_contains)]
///
- /// assert!(!(3..).contains(2));
- /// assert!( (3..).contains(3));
- /// assert!( (3..).contains(1_000_000_000));
+ /// use std::f32;
+ ///
+ /// assert!(!(3..).contains(&2));
+ /// assert!( (3..).contains(&3));
+ /// assert!( (3..).contains(&1_000_000_000));
+ ///
+ /// assert!( (0.0..).contains(&0.5));
+ /// assert!(!(0.0..).contains(&f32::NAN));
+ /// assert!(!(f32::NAN..).contains(&0.5));
/// ```
- pub fn contains(&self, item: Idx) -> bool {
- (self.start <= item)
+ #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
}
}
}
-#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// ```
/// #![feature(range_contains)]
///
- /// assert!( (..5).contains(-1_000_000_000));
- /// assert!( (..5).contains(4));
- /// assert!(!(..5).contains(5));
+ /// use std::f32;
+ ///
+ /// assert!( (..5).contains(&-1_000_000_000));
+ /// assert!( (..5).contains(&4));
+ /// assert!(!(..5).contains(&5));
+ ///
+ /// assert!( (..1.0).contains(&0.5));
+ /// assert!(!(..1.0).contains(&f32::NAN));
+ /// assert!(!(..f32::NAN).contains(&0.5));
/// ```
- pub fn contains(&self, item: Idx) -> bool {
- (item < self.end)
+ #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
}
/// ```
/// #![feature(range_contains)]
///
- /// assert!(!(3..=5).contains(2));
- /// assert!( (3..=5).contains(3));
- /// assert!( (3..=5).contains(4));
- /// assert!( (3..=5).contains(5));
- /// assert!(!(3..=5).contains(6));
+ /// use std::f32;
+ ///
+ /// assert!(!(3..=5).contains(&2));
+ /// assert!( (3..=5).contains(&3));
+ /// assert!( (3..=5).contains(&4));
+ /// assert!( (3..=5).contains(&5));
+ /// assert!(!(3..=5).contains(&6));
///
- /// assert!( (3..=3).contains(3));
- /// assert!(!(3..=2).contains(3));
+ /// assert!( (3..=3).contains(&3));
+ /// assert!(!(3..=2).contains(&3));
+ ///
+ /// assert!( (0.0..=1.0).contains(&1.0));
+ /// assert!(!(0.0..=1.0).contains(&f32::NAN));
+ /// assert!(!(0.0..=f32::NAN).contains(&0.0));
+ /// assert!(!(f32::NAN..=1.0).contains(&1.0));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
- pub fn contains(&self, item: Idx) -> bool {
- self.start <= item && item <= self.end
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
/// Returns `true` if the range contains no items.
/// ```
/// #![feature(range_contains)]
///
- /// assert!( (..=5).contains(-1_000_000_000));
- /// assert!( (..=5).contains(5));
- /// assert!(!(..=5).contains(6));
+ /// use std::f32;
+ ///
+ /// assert!( (..=5).contains(&-1_000_000_000));
+ /// assert!( (..=5).contains(&5));
+ /// assert!(!(..=5).contains(&6));
+ ///
+ /// assert!( (..=1.0).contains(&1.0));
+ /// assert!(!(..=1.0).contains(&f32::NAN));
+ /// assert!(!(..=f32::NAN).contains(&0.5));
/// ```
- pub fn contains(&self, item: Idx) -> bool {
- (item <= self.end)
+ #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
}
/// # }
/// ```
fn end(&self) -> Bound<&T>;
+
+
+ /// Returns `true` if `item` is contained in the range.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(range_contains)]
+ ///
+ /// use std::f32;
+ ///
+ /// assert!( (3..5).contains(&4));
+ /// assert!(!(3..5).contains(&2));
+ ///
+ /// assert!( (0.0..1.0).contains(&0.5));
+ /// assert!(!(0.0..1.0).contains(&f32::NAN));
+ /// assert!(!(0.0..f32::NAN).contains(&0.5));
+ /// assert!(!(f32::NAN..1.0).contains(&0.5));
+ /// ```
+ #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+ fn contains<U>(&self, item: &U) -> bool
+ where
+ T: PartialOrd<U>,
+ U: ?Sized + PartialOrd<T>,
+ {
+ (match self.start() {
+ Included(ref start) => *start <= item,
+ Excluded(ref start) => *start < item,
+ Unbounded => true,
+ })
+ &&
+ (match self.end() {
+ Included(ref end) => item <= *end,
+ Excluded(ref end) => item < *end,
+ Unbounded => true,
+ })
+ }
}
use self::Bound::{Excluded, Included, Unbounded};
and related macros",
issue = "0")]
#[doc(hidden)]
- pub fn internal_constructor(payload: &'a (Any + Send),
- message: Option<&'a fmt::Arguments<'a>>,
+ #[inline]
+ pub fn internal_constructor(message: Option<&'a fmt::Arguments<'a>>,
location: Location<'a>)
-> Self {
- PanicInfo { payload, location, message }
+ PanicInfo { payload: &(), location, message }
+ }
+
+ #[doc(hidden)]
+ #[inline]
+ pub fn set_payload(&mut self, info: &'a (Any + Send)) {
+ self.payload = info;
}
/// Returns the payload associated with the panic.
write!(formatter, "{}:{}:{}", self.file, self.line, self.col)
}
}
+
+/// An internal trait used by libstd to pass data from libstd to `panic_unwind`
+/// and other panic runtimes. Not intended to be stabilized any time soon, do
+/// not use.
+#[unstable(feature = "std_internals", issue = "0")]
+#[doc(hidden)]
+pub unsafe trait BoxMeUp {
+ /// Boxes the panic payload and returns a raw pointer to it. The panic
+ /// runtime reclaims ownership via `Box::from_raw` (see
+ /// `__rust_start_panic`), so the pointer must come from a `Box`.
+ fn box_me_up(&mut self) -> *mut (Any + Send);
+ /// Borrows the payload without boxing it.
+ fn get(&mut self) -> &(Any + Send);
+}
/// `finger` is the current byte index of the forward search.
/// Imagine that it exists before the byte at its index, i.e.
- /// haystack[finger] is the first byte of the slice we must inspect during
+ /// `haystack[finger]` is the first byte of the slice we must inspect during
/// forward searching
finger: usize,
/// `finger_back` is the current byte index of the reverse search.
# Examples
```
-", $extra_feature, "#![feature(atomic_nand)]
-
+", $extra_feature, "
use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};
let foo = ", stringify!($atomic_type), "::new(0x13);
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"i8", "../../../std/primitive.i8.html",
"#![feature(integer_atomics)]\n\n",
atomic_min, atomic_max,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"u8", "../../../std/primitive.u8.html",
"#![feature(integer_atomics)]\n\n",
atomic_umin, atomic_umax,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"i16", "../../../std/primitive.i16.html",
"#![feature(integer_atomics)]\n\n",
atomic_min, atomic_max,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"u16", "../../../std/primitive.u16.html",
"#![feature(integer_atomics)]\n\n",
atomic_umin, atomic_umax,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"i32", "../../../std/primitive.i32.html",
"#![feature(integer_atomics)]\n\n",
atomic_min, atomic_max,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"u32", "../../../std/primitive.u32.html",
"#![feature(integer_atomics)]\n\n",
atomic_umin, atomic_umax,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"i64", "../../../std/primitive.i64.html",
"#![feature(integer_atomics)]\n\n",
atomic_min, atomic_max,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"u64", "../../../std/primitive.u64.html",
"#![feature(integer_atomics)]\n\n",
atomic_umin, atomic_umax,
stable(feature = "atomic_debug", since = "1.3.0"),
stable(feature = "atomic_access", since = "1.15.0"),
stable(feature = "atomic_from", since = "1.23.0"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ stable(feature = "atomic_nand", since = "1.27.0"),
"isize", "../../../std/primitive.isize.html",
"",
atomic_min, atomic_max,
stable(feature = "atomic_debug", since = "1.3.0"),
stable(feature = "atomic_access", since = "1.15.0"),
stable(feature = "atomic_from", since = "1.23.0"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ stable(feature = "atomic_nand", since = "1.27.0"),
"usize", "../../../std/primitive.usize.html",
"",
atomic_umin, atomic_umax,
#![feature(trusted_len)]
#![feature(try_trait)]
#![feature(exact_chunks)]
-#![feature(atomic_nand)]
+#![cfg_attr(stage0, feature(atomic_nand))]
#![feature(reverse_bits)]
#![feature(inclusive_range_fields)]
#![feature(iterator_find_map)]
// now hopefully.
#[no_mangle]
#[rustc_std_internal_symbol]
-pub unsafe extern fn __rust_start_panic(_data: usize, _vtable: usize) -> u32 {
+pub unsafe extern fn __rust_start_panic(_payload: usize) -> u32 {
abort();
#[cfg(any(unix, target_os = "cloudabi"))]
html_root_url = "https://doc.rust-lang.org/nightly/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")]
+#![feature(allocator_api)]
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(lang_items)]
#![feature(panic_unwind)]
#![feature(raw)]
#![feature(staged_api)]
+#![feature(std_internals)]
#![feature(unwind_attributes)]
#![cfg_attr(target_env = "msvc", feature(raw))]
#[cfg(not(any(target_env = "msvc", all(windows, target_arch = "x86_64", target_env = "gnu"))))]
extern crate unwind;
+use alloc::boxed::Box;
use core::intrinsics;
use core::mem;
use core::raw;
+use core::panic::BoxMeUp;
// Rust runtime's startup objects depend on these symbols, so make them public.
#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))]
// implementation.
#[no_mangle]
#[unwind(allowed)]
-pub unsafe extern "C" fn __rust_start_panic(data: usize, vtable: usize) -> u32 {
-    imp::panic(mem::transmute(raw::TraitObject {
-        data: data as *mut (),
-        vtable: vtable as *mut (),
-    }))
+pub unsafe extern "C" fn __rust_start_panic(payload: usize) -> u32 {
+    // `payload` is a pointer to a `&mut BoxMeUp` trait object reference; the
+    // double indirection keeps the cross-crate ABI a single `usize` (the old
+    // ABI above needed two words for the fat pointer).
+    // NOTE(review): assumes `box_me_up()` returns a raw pointer obtained from
+    // a leaked `Box`, so `Box::from_raw` reclaims ownership here — confirm
+    // against the `BoxMeUp` trait definition in libcore.
+    let payload = payload as *mut &mut BoxMeUp;
+    imp::panic(Box::from_raw((*payload).box_me_up()))
}
use hir::svh::Svh;
use util::nodemap::{DefIdMap, FxHashMap};
-use arena::TypedArena;
+use arena::SyncTypedArena;
use std::io;
use ty::TyCtxt;
pub struct Forest {
krate: Crate,
pub dep_graph: DepGraph,
- inlined_bodies: TypedArena<Body>
+ inlined_bodies: SyncTypedArena<Body>
}
impl Forest {
Forest {
krate,
dep_graph: dep_graph.clone(),
- inlined_bodies: TypedArena::new()
+ inlined_bodies: SyncTypedArena::new()
}
}
});
impl_stable_hash_for!(struct middle::cstore::ExternCrate {
- def_id,
+ src,
span,
- direct,
- path_len
+ path_len,
+ direct
+});
+
+impl_stable_hash_for!(enum middle::cstore::ExternCrateSource {
+ Extern(def_id),
+ Use,
+ Path,
});
impl_stable_hash_for!(struct middle::cstore::CrateSource {
enum AllocDiscriminant {
Alloc,
- ExternStatic,
+ Static,
Function,
}
impl_stable_hash_for!(enum self::AllocDiscriminant {
Alloc,
- ExternStatic,
+ Static,
Function
});
hasher: &mut StableHasher<W>,
) {
ty::tls::with_opt(|tcx| {
+ trace!("hashing {:?}", *self);
let tcx = tcx.expect("can't hash AllocIds during hir lowering");
- if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
+ if let Some(def_id) = tcx.interpret_interner.get_static(*self) {
+ AllocDiscriminant::Static.hash_stable(hcx, hasher);
+ trace!("hashing {:?} as static {:?}", *self, def_id);
+ def_id.hash_stable(hcx, hasher);
+ } else if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
AllocDiscriminant::Alloc.hash_stable(hcx, hasher);
if hcx.alloc_id_recursion_tracker.insert(*self) {
- tcx
- .interpret_interner
- .get_corresponding_static_def_id(*self)
- .hash_stable(hcx, hasher);
+ trace!("hashing {:?} as alloc {:#?}", *self, alloc);
alloc.hash_stable(hcx, hasher);
assert!(hcx.alloc_id_recursion_tracker.remove(self));
+ } else {
+ trace!("skipping hashing of {:?} due to recursion", *self);
}
} else if let Some(inst) = tcx.interpret_interner.get_fn(*self) {
+ trace!("hashing {:?} as fn {:#?}", *self, inst);
AllocDiscriminant::Function.hash_stable(hcx, hasher);
inst.hash_stable(hcx, hasher);
- } else if let Some(def_id) = tcx.interpret_interner
- .get_corresponding_static_def_id(*self) {
- AllocDiscriminant::ExternStatic.hash_stable(hcx, hasher);
- def_id.hash_stable(hcx, hasher);
} else {
bug!("no allocation for {}", self);
}
InvalidPointerMath |
ReadUndefBytes |
DeadLocal |
- ExecutionTimeLimitReached |
StackFrameLimitReached |
OutOfTls |
TlsOutOfBounds |
FromEnv(where_clause) => where_clause.hash_stable(hcx, hasher),
WellFormedTy(ty) => ty.hash_stable(hcx, hasher),
+ Normalize(projection) => projection.hash_stable(hcx, hasher),
FromEnvTy(ty) => ty.hash_stable(hcx, hasher),
RegionOutlives(predicate) => predicate.hash_stable(hcx, hasher),
TypeOutlives(predicate) => predicate.hash_stable(hcx, hasher),
use syntax_pos::{self, Span};
use syntax_pos::symbol::InternedString;
use util::nodemap::FxHashMap;
-use arena::DroplessArena;
+use arena::SyncDroplessArena;
use self::combine::CombineFields;
use self::higher_ranked::HrMatchResult;
/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>).
pub struct InferCtxtBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
global_tcx: TyCtxt<'a, 'gcx, 'gcx>,
- arena: DroplessArena,
+ arena: SyncDroplessArena,
fresh_tables: Option<RefCell<ty::TypeckTables<'tcx>>>,
}
pub fn infer_ctxt(self) -> InferCtxtBuilder<'a, 'gcx, 'tcx> {
InferCtxtBuilder {
global_tcx: self,
- arena: DroplessArena::new(),
+ arena: SyncDroplessArena::new(),
fresh_tables: None,
}
use self::TargetLint::*;
use std::slice;
+use rustc_data_structures::sync::{RwLock, ReadGuard};
use lint::{EarlyLintPassObject, LateLintPassObject};
use lint::{Level, Lint, LintId, LintPass, LintBuffer};
use lint::builtin::BuiltinLintDiagnostics;
use util::nodemap::FxHashMap;
use std::default::Default as StdDefault;
-use std::cell::{Ref, RefCell};
use syntax::ast;
use syntax::edition;
use syntax_pos::{MultiSpan, Span};
pub struct LintSession<'a, PassObject> {
/// Reference to the store of registered lints.
- lints: Ref<'a, LintStore>,
+ lints: ReadGuard<'a, LintStore>,
/// Trait objects for each lint pass.
passes: Option<Vec<PassObject>>,
/// Creates a new `LintSession`, by moving out the `LintStore`'s initial
/// lint levels and pass objects. These can be restored using the `restore`
/// method.
- fn new(store: &'a RefCell<LintStore>) -> LintSession<'a, PassObject> {
+ fn new(store: &'a RwLock<LintStore>) -> LintSession<'a, PassObject> {
let mut s = store.borrow_mut();
let passes = PassObject::take_passes(&mut *s);
drop(s);
}
/// Restores the levels back to the original lint store.
- fn restore(self, store: &RefCell<LintStore>) {
+ fn restore(self, store: &RwLock<LintStore>) {
drop(self.lints);
let mut s = store.borrow_mut();
PassObject::restore_passes(&mut *s, self.passes);
#[derive(Copy, Clone, Debug)]
pub struct ExternCrate {
- /// def_id of an `extern crate` in the current crate that caused
- /// this crate to be loaded; note that there could be multiple
- /// such ids
- pub def_id: DefId,
+ pub src: ExternCrateSource,
/// span of the extern crate that caused this to be loaded
pub span: Span,
+    /// Number of links to reach the extern crate;
+    /// used to select the extern crate with the shortest path
+ pub path_len: usize,
+
/// If true, then this crate is the crate named by the extern
/// crate referenced above. If false, then this crate is a dep
/// of the crate.
pub direct: bool,
+}
- /// Number of links to reach the extern crate `def_id`
- /// declaration; used to select the extern crate with the shortest
- /// path
- pub path_len: usize,
+#[derive(Copy, Clone, Debug)]
+pub enum ExternCrateSource {
+    /// Crate is loaded by `extern crate`.
+    Extern(
+        /// def_id of the item in the current crate that caused
+        /// this crate to be loaded; note that there could be multiple
+        /// such ids
+        DefId,
+    ),
+    /// Crate is loaded by `use`.
+    Use,
+    /// Crate is implicitly loaded by an absolute or an `extern::` path.
+    Path,
+}
pub struct EncodedMetadata {
}
pub trait CrateLoader {
- fn process_item(&mut self, item: &ast::Item, defs: &Definitions);
+ fn process_extern_crate(&mut self, item: &ast::Item, defs: &Definitions) -> CrateNum;
+
+ fn process_path_extern(
+ &mut self,
+ name: Symbol,
+ span: Span,
+ ) -> CrateNum;
+
+ fn process_use_extern(
+ &mut self,
+ name: Symbol,
+ span: Span,
+ id: ast::NodeId,
+ defs: &Definitions,
+ ) -> CrateNum;
+
fn postprocess(&mut self, krate: &ast::Crate);
- fn resolve_crate_from_path(&mut self, name: Symbol, span: Span) -> CrateNum;
}
// This method is used when generating the command line to pass through to
Intrinsic(String),
OverflowingMath,
InvalidChar(u128),
- ExecutionTimeLimitReached,
StackFrameLimitReached,
OutOfTls,
TlsOutOfBounds,
"mir not found",
InvalidChar(..) =>
"tried to interpret an invalid 32-bit value as a char",
- ExecutionTimeLimitReached =>
- "the expression was too complex to be evaluated or resulted in an infinite loop",
StackFrameLimitReached =>
"reached the configured maximum number of stack frames",
OutOfTls =>
impl ::rustc_serialize::UseSpecializedEncodable for AllocId {}
impl ::rustc_serialize::UseSpecializedDecodable for AllocId {}
-pub const ALLOC_DISCRIMINANT: usize = 0;
-pub const FN_DISCRIMINANT: usize = 1;
-pub const EXTERN_STATIC_DISCRIMINANT: usize = 2;
-pub const SHORTHAND_START: usize = 3;
+/// Tag written before an `AllocId`'s payload when encoding it, identifying
+/// which kind of data follows: a full `Allocation`, a function `Instance`,
+/// or a static's `DefId` (see `specialized_encode_alloc_id` /
+/// `specialized_decode_alloc_id` below).
+#[derive(RustcDecodable, RustcEncodable)]
+enum AllocKind {
+    Alloc,
+    Fn,
+    Static,
+}
pub fn specialized_encode_alloc_id<
'a, 'tcx,
encoder: &mut E,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
alloc_id: AllocId,
- shorthand: Option<usize>,
) -> Result<(), E::Error> {
- if let Some(shorthand) = shorthand {
- return shorthand.encode(encoder);
- }
if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
trace!("encoding {:?} with {:#?}", alloc_id, alloc);
- ALLOC_DISCRIMINANT.encode(encoder)?;
+ AllocKind::Alloc.encode(encoder)?;
alloc.encode(encoder)?;
- // encode whether this allocation is the root allocation of a static
- tcx.interpret_interner
- .get_corresponding_static_def_id(alloc_id)
- .encode(encoder)?;
} else if let Some(fn_instance) = tcx.interpret_interner.get_fn(alloc_id) {
trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
- FN_DISCRIMINANT.encode(encoder)?;
+ AllocKind::Fn.encode(encoder)?;
fn_instance.encode(encoder)?;
- } else if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(alloc_id) {
- // extern "C" statics don't have allocations, just encode its def_id
- EXTERN_STATIC_DISCRIMINANT.encode(encoder)?;
+ } else if let Some(did) = tcx.interpret_interner.get_static(alloc_id) {
+        // referring to a static doesn't need to know about its allocation, just about its `DefId`
+ AllocKind::Static.encode(encoder)?;
did.encode(encoder)?;
} else {
bug!("alloc id without corresponding allocation: {}", alloc_id);
pub fn specialized_decode_alloc_id<
'a, 'tcx,
D: Decoder,
- CACHE: FnOnce(&mut D, usize, AllocId),
- SHORT: FnOnce(&mut D, usize) -> Result<AllocId, D::Error>
+ CACHE: FnOnce(&mut D, AllocId),
>(
decoder: &mut D,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pos: usize,
cache: CACHE,
- short: SHORT,
) -> Result<AllocId, D::Error> {
- match usize::decode(decoder)? {
- ALLOC_DISCRIMINANT => {
+ match AllocKind::decode(decoder)? {
+ AllocKind::Alloc => {
let alloc_id = tcx.interpret_interner.reserve();
- trace!("creating alloc id {:?} at {}", alloc_id, pos);
+ trace!("creating alloc id {:?}", alloc_id);
// insert early to allow recursive allocs
- cache(decoder, pos, alloc_id);
+ cache(decoder, alloc_id);
let allocation = Allocation::decode(decoder)?;
trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
let allocation = tcx.intern_const_alloc(allocation);
tcx.interpret_interner.intern_at_reserved(alloc_id, allocation);
- if let Some(glob) = Option::<DefId>::decode(decoder)? {
- tcx.interpret_interner.cache(glob, alloc_id);
- }
-
Ok(alloc_id)
},
- FN_DISCRIMINANT => {
- trace!("creating fn alloc id at {}", pos);
+ AllocKind::Fn => {
+ trace!("creating fn alloc id");
let instance = ty::Instance::decode(decoder)?;
trace!("decoded fn alloc instance: {:?}", instance);
let id = tcx.interpret_interner.create_fn_alloc(instance);
trace!("created fn alloc id: {:?}", id);
- cache(decoder, pos, id);
+ cache(decoder, id);
Ok(id)
},
- EXTERN_STATIC_DISCRIMINANT => {
- trace!("creating extern static alloc id at {}", pos);
+ AllocKind::Static => {
+ trace!("creating extern static alloc id at");
let did = DefId::decode(decoder)?;
- let alloc_id = tcx.interpret_interner.reserve();
- tcx.interpret_interner.cache(did, alloc_id);
+ let alloc_id = tcx.interpret_interner.cache_static(did);
+ cache(decoder, alloc_id);
Ok(alloc_id)
},
- shorthand => {
- trace!("loading shorthand {}", shorthand);
- short(decoder, shorthand)
- },
}
}
self.0.values()
}
+    /// Returns the number of entries in the underlying map — one per
+    /// requested output type (cf. `should_trans` below, which inspects
+    /// the same keys).
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
// True if any of the output types require codegen or linking.
pub fn should_trans(&self) -> bool {
self.0.keys().any(|k| match *k {
use util::common::{duration_to_secs_str, ErrorReported};
use util::common::ProfileQueriesMsg;
-use rustc_data_structures::sync::{Lrc, Lock, LockCell, OneThread, Once};
+use rustc_data_structures::sync::{Lrc, Lock, LockCell, OneThread, Once, RwLock};
use syntax::ast::NodeId;
use errors::{self, DiagnosticBuilder, DiagnosticId};
// FIXME: lint_store and buffered_lints are not thread-safe,
// but are only used in a single thread
- pub lint_store: OneThread<RefCell<lint::LintStore>>,
- pub buffered_lints: OneThread<RefCell<Option<lint::LintBuffer>>>,
+ pub lint_store: RwLock<lint::LintStore>,
+ pub buffered_lints: Lock<Option<lint::LintBuffer>>,
/// Set of (DiagnosticId, Option<Span>, message) tuples tracking
/// (sub)diagnostics that have been set once, but should not be set again,
/// The maximum number of stackframes allowed in const eval
pub const_eval_stack_frame_limit: usize,
- /// The maximum number miri steps per constant
- pub const_eval_step_limit: usize,
/// The metadata::creader module may inject an allocator/panic_runtime
/// dependency if it didn't already find one, and this tracks what was
default_sysroot,
local_crate_source_file,
working_dir,
- lint_store: OneThread::new(RefCell::new(lint::LintStore::new())),
- buffered_lints: OneThread::new(RefCell::new(Some(lint::LintBuffer::new()))),
+ lint_store: RwLock::new(lint::LintStore::new()),
+ buffered_lints: Lock::new(Some(lint::LintBuffer::new())),
one_time_diagnostics: RefCell::new(FxHashSet()),
plugin_llvm_passes: OneThread::new(RefCell::new(Vec::new())),
plugin_attributes: OneThread::new(RefCell::new(Vec::new())),
recursion_limit: Once::new(),
type_length_limit: Once::new(),
const_eval_stack_frame_limit: 100,
- const_eval_step_limit: 1_000_000,
next_node_id: OneThread::new(Cell::new(NodeId::new(1))),
injected_allocator: Once::new(),
allocator_kind: Once::new(),
WellFormed(WhereClauseAtom<'tcx>),
FromEnv(WhereClauseAtom<'tcx>),
WellFormedTy(Ty<'tcx>),
+ Normalize(ty::ProjectionPredicate<'tcx>),
FromEnvTy(Ty<'tcx>),
RegionOutlives(ty::RegionOutlivesPredicate<'tcx>),
TypeOutlives(ty::TypeOutlivesPredicate<'tcx>),
FromEnv(Implemented(trait_ref)) => write!(fmt, "FromEnv({})", trait_ref),
FromEnv(ProjectionEq(projection)) => write!(fmt, "FromEnv({})", projection),
WellFormedTy(ty) => write!(fmt, "WellFormed({})", ty),
+ Normalize(projection) => write!(fmt, "Normalize({})", projection),
FromEnvTy(ty) => write!(fmt, "FromEnv({})", ty),
RegionOutlives(predicate) => write!(fmt, "RegionOutlives({})", predicate),
TypeOutlives(predicate) => write!(fmt, "TypeOutlives({})", predicate),
(traits::DomainGoal::WellFormed)(wc),
(traits::DomainGoal::FromEnv)(wc),
(traits::DomainGoal::WellFormedTy)(ty),
+ (traits::DomainGoal::Normalize)(projection),
(traits::DomainGoal::FromEnvTy)(ty),
(traits::DomainGoal::RegionOutlives)(predicate),
(traits::DomainGoal::TypeOutlives)(predicate),
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
StableHasher, StableHasherResult,
StableVec};
-use arena::{TypedArena, DroplessArena};
+use arena::{TypedArena, SyncDroplessArena};
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::sync::{Lrc, Lock};
use std::any::Any;
use std::borrow::Borrow;
-use std::cell::Cell;
use std::cmp::Ordering;
use std::collections::hash_map::{self, Entry};
use std::hash::{Hash, Hasher};
pub struct AllArenas<'tcx> {
pub global: GlobalArenas<'tcx>,
- pub interner: DroplessArena,
+ pub interner: SyncDroplessArena,
}
impl<'tcx> AllArenas<'tcx> {
pub fn new() -> Self {
AllArenas {
global: GlobalArenas::new(),
- interner: DroplessArena::new(),
+ interner: SyncDroplessArena::new(),
}
}
}
pub struct CtxtInterners<'tcx> {
/// The arena that types, regions, etc are allocated from
- arena: &'tcx DroplessArena,
+ arena: &'tcx SyncDroplessArena,
/// Specifically use a speedy hash algorithm for these hash sets,
/// they're accessed quite often.
}
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
- fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
+ fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
CtxtInterners {
arena,
type_: Default::default(),
return ty;
}
let global_interner = global_interners.map(|interners| {
- interners.type_.borrow_mut()
+ (interners.type_.borrow_mut(), &interners.arena)
});
- if let Some(ref interner) = global_interner {
- if let Some(&Interned(ty)) = interner.get(&st) {
+ if let Some((ref type_, _)) = global_interner {
+ if let Some(&Interned(ty)) = type_.get(&st) {
return ty;
}
}
// determine that all contents are in the global tcx.
// See comments on Lift for why we can't use that.
if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
- if let Some(interner) = global_interners {
+ if let Some((mut type_, arena)) = global_interner {
let ty_struct: TyS<'gcx> = unsafe {
mem::transmute(ty_struct)
};
- let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
- global_interner.unwrap().insert(Interned(ty));
+ let ty: Ty<'gcx> = arena.alloc(ty_struct);
+ type_.insert(Interned(ty));
return ty;
}
} else {
// Make sure we don't end up with inference
// types/regions in the global tcx.
- if global_interners.is_none() {
+ if global_interner.is_none() {
drop(interner);
bug!("Attempted to intern `{:?}` which contains \
inference types/regions in the global type context",
/// Data layout specification for the current target.
pub data_layout: TargetDataLayout,
- /// Used to prevent layout from recursing too deeply.
- pub layout_depth: Cell<usize>,
-
stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
pub interpret_interner: InterpretInterner<'tcx>,
/// Allows obtaining const allocs via a unique identifier
alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,
- /// Reverse map of `alloc_cache`
- global_cache: FxHashMap<interpret::AllocId, DefId>,
+ /// Allows obtaining static def ids via a unique id
+ statics: FxHashMap<interpret::AllocId, DefId>,
/// The AllocId to assign to the next new regular allocation.
/// Always incremented, never gets smaller.
next_id: interpret::AllocId,
- /// Allows checking whether a static already has an allocation
- ///
- /// This is only important for detecting statics referring to themselves
- // FIXME(oli-obk) move it to the EvalContext?
- alloc_cache: FxHashMap<DefId, interpret::AllocId>,
+ /// Inverse map of `statics`
+ /// Used so we don't allocate a new pointer every time we need one
+ static_cache: FxHashMap<DefId, interpret::AllocId>,
/// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
/// allocations for string and bytestring literals.
self.inner.borrow().alloc_by_id.get(&id).cloned()
}
- pub fn get_cached(
- &self,
- static_id: DefId,
- ) -> Option<interpret::AllocId> {
- self.inner.borrow().alloc_cache.get(&static_id).cloned()
- }
-
- pub fn cache(
+ pub fn cache_static(
&self,
static_id: DefId,
- alloc_id: interpret::AllocId,
- ) {
- let mut inner = self.inner.borrow_mut();
- inner.global_cache.insert(alloc_id, static_id);
- if let Some(old) = inner.alloc_cache.insert(static_id, alloc_id) {
- bug!("tried to cache {:?}, but was already existing as {:#?}", static_id, old);
+ ) -> interpret::AllocId {
+ if let Some(alloc_id) = self.inner.borrow().static_cache.get(&static_id).cloned() {
+ return alloc_id;
}
+ let alloc_id = self.reserve();
+ let mut inner = self.inner.borrow_mut();
+ inner.static_cache.insert(static_id, alloc_id);
+ inner.statics.insert(alloc_id, static_id);
+ alloc_id
}
- pub fn get_corresponding_static_def_id(
+ pub fn get_static(
&self,
ptr: interpret::AllocId,
) -> Option<DefId> {
- self.inner.borrow().global_cache.get(&ptr).cloned()
+ self.inner.borrow().statics.get(&ptr).cloned()
}
pub fn intern_at_reserved(
crate_name: Symbol::intern(crate_name),
data_layout,
layout_interner: Lock::new(FxHashSet()),
- layout_depth: Cell::new(0),
stability_interner: Lock::new(FxHashSet()),
interpret_interner: Default::default(),
tx_to_llvm_workers: Lock::new(tx),
/// Call the closure with a local `TyCtxt` using the given arena.
pub fn enter_local<F, R>(
&self,
- arena: &'tcx DroplessArena,
+ arena: &'tcx SyncDroplessArena,
f: F
) -> R
where
let new_icx = ty::tls::ImplicitCtxt {
tcx,
query: icx.query.clone(),
+ layout_depth: icx.layout_depth,
};
ty::tls::enter_context(&new_icx, |new_icx| {
f(new_icx.tcx)
/// The current query job, if any. This is updated by start_job in
/// ty::maps::plumbing when executing a query
pub query: Option<Lrc<maps::QueryJob<'gcx>>>,
+
+ /// Used to prevent layout from recursing too deeply.
+ pub layout_depth: usize,
}
// A thread local value which stores a pointer to the current ImplicitCtxt
let icx = ImplicitCtxt {
tcx,
query: None,
+ layout_depth: 0,
};
enter_context(&icx, |_| {
f(tcx)
use hir::map::DefPathData;
use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use ty::{self, Ty, TyCtxt};
+use middle::cstore::{ExternCrate, ExternCrateSource};
use syntax::ast;
use syntax::symbol::Symbol;
use syntax::symbol::InternedString;
// `extern crate` manually, we put the `extern
// crate` as the parent. So you wind up with
// something relative to the current crate.
- // 2. for an indirect crate, where there is no extern
- // crate, we just prepend the crate name.
+ // 2. for an extern inferred from a path or an indirect crate,
+ // where there is no explicit `extern crate`, we just prepend
+ // the crate name.
//
// Returns `None` for the local crate.
if cnum != LOCAL_CRATE {
let opt_extern_crate = self.extern_crate(cnum.as_def_id());
- let opt_extern_crate = opt_extern_crate.and_then(|extern_crate| {
- if extern_crate.direct {
- Some(extern_crate.def_id)
- } else {
- None
- }
- });
- if let Some(extern_crate_def_id) = opt_extern_crate {
- self.push_item_path(buffer, extern_crate_def_id);
+ if let Some(ExternCrate {
+ src: ExternCrateSource::Extern(def_id),
+ direct: true,
+ ..
+ }) = *opt_extern_crate
+ {
+ self.push_item_path(buffer, def_id);
} else {
buffer.push(&self.crate_name(cnum).as_str());
}
// followed by the path to the item within the crate and return.
if cur_def.index == CRATE_DEF_INDEX {
match *self.extern_crate(cur_def) {
- Some(ref extern_crate) if extern_crate.direct => {
- self.push_item_path(buffer, extern_crate.def_id);
- cur_path.iter().rev().map(|segment| buffer.push(&segment)).count();
+ Some(ExternCrate {
+ src: ExternCrateSource::Extern(def_id),
+ direct: true,
+ ..
+ }) => {
+ self.push_item_path(buffer, def_id);
+ cur_path.iter().rev().for_each(|segment| buffer.push(&segment));
return true;
}
None => {
buffer.push(&self.crate_name(cur_def.krate).as_str());
- cur_path.iter().rev().map(|segment| buffer.push(&segment)).count();
+ cur_path.iter().rev().for_each(|segment| buffer.push(&segment));
return true;
}
_ => {},
query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
-> Result<&'tcx LayoutDetails, LayoutError<'tcx>>
{
- let (param_env, ty) = query.into_parts();
+ ty::tls::with_related_context(tcx, move |icx| {
+ let rec_limit = *tcx.sess.recursion_limit.get();
+ let (param_env, ty) = query.into_parts();
- let rec_limit = *tcx.sess.recursion_limit.get();
- let depth = tcx.layout_depth.get();
- if depth > rec_limit {
- tcx.sess.fatal(
- &format!("overflow representing the type `{}`", ty));
- }
+ if icx.layout_depth > rec_limit {
+ tcx.sess.fatal(
+ &format!("overflow representing the type `{}`", ty));
+ }
- tcx.layout_depth.set(depth+1);
- let cx = LayoutCx { tcx, param_env };
- let layout = cx.layout_raw_uncached(ty);
- tcx.layout_depth.set(depth);
+ // Update the ImplicitCtxt to increase the layout_depth
+ let icx = ty::tls::ImplicitCtxt {
+ layout_depth: icx.layout_depth + 1,
+ ..icx.clone()
+ };
- layout
+ ty::tls::enter_context(&icx, |_| {
+ let cx = LayoutCx { tcx, param_env };
+ cx.layout_raw_uncached(ty)
+ })
+ })
}
pub fn provide(providers: &mut ty::maps::Providers) {
substitute_normalize_and_test_predicates_node((DefId, &'tcx Substs<'tcx>)) -> bool,
[] fn target_features_whitelist:
- target_features_whitelist_node(CrateNum) -> Lrc<FxHashSet<String>>,
+ target_features_whitelist_node(CrateNum) -> Lrc<FxHashMap<String, Option<String>>>,
// Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.
[] fn instance_def_size_estimate: instance_def_size_estimate_dep_node(ty::InstanceDef<'tcx>)
use ty::maps::job::QueryResult;
use ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
use ty::context::TyCtxt;
+use util::common::time;
const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
// `serialized_data`.
prev_diagnostics_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
- // A cache to ensure we don't read allocations twice
- interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
+ // Alloc indices to memory location map
+ prev_interpret_alloc_index: Vec<AbsoluteBytePos>,
- // A map from positions to size of the serialized allocation
- // so we can skip over already processed allocations
- interpret_alloc_size: RefCell<FxHashMap<usize, usize>>,
+ /// Deserialization: A cache to ensure we don't read allocations twice
+ interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
}
// This type is used only for (de-)serialization.
prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
query_result_index: EncodedQueryResultIndex,
diagnostics_index: EncodedQueryResultIndex,
+ // the location of all allocations
+ interpret_alloc_index: Vec<AbsoluteBytePos>,
}
type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
query_result_index: footer.query_result_index.into_iter().collect(),
prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
synthetic_expansion_infos: Lock::new(FxHashMap()),
+ prev_interpret_alloc_index: footer.interpret_alloc_index,
interpret_alloc_cache: RefCell::new(FxHashMap::default()),
- interpret_alloc_size: RefCell::new(FxHashMap::default()),
}
}
query_result_index: FxHashMap(),
prev_diagnostics_index: FxHashMap(),
synthetic_expansion_infos: Lock::new(FxHashMap()),
+ prev_interpret_alloc_index: Vec::new(),
interpret_alloc_cache: RefCell::new(FxHashMap::default()),
- interpret_alloc_size: RefCell::new(FxHashMap::default()),
}
}
type_shorthands: FxHashMap(),
predicate_shorthands: FxHashMap(),
expn_info_shorthands: FxHashMap(),
- interpret_alloc_shorthands: FxHashMap(),
+ interpret_allocs: FxHashMap(),
+ interpret_allocs_inverse: Vec::new(),
codemap: CachingCodemapView::new(tcx.sess.codemap()),
file_to_file_index,
};
// Encode query results
let mut query_result_index = EncodedQueryResultIndex::new();
- {
+ time(tcx.sess, "encode query results", || {
use ty::maps::queries::*;
let enc = &mut encoder;
let qri = &mut query_result_index;
}
}
}
- }
+
+ Ok(())
+ })?;
// Encode diagnostics
let diagnostics_index = {
diagnostics_index
};
+ let interpret_alloc_index = {
+ let mut interpret_alloc_index = Vec::new();
+ let mut n = 0;
+ loop {
+ let new_n = encoder.interpret_allocs_inverse.len();
+            // stop once a pass discovers no new ids; otherwise the
+            // loop below serializes the newly found ids, too
+            if n == new_n {
+                break;
+ }
+ for idx in n..new_n {
+ let id = encoder.interpret_allocs_inverse[idx];
+ let pos = AbsoluteBytePos::new(encoder.position());
+ interpret_alloc_index.push(pos);
+ interpret::specialized_encode_alloc_id(
+ &mut encoder,
+ tcx,
+ id,
+ )?;
+ }
+ n = new_n;
+ }
+ interpret_alloc_index
+ };
+
let sorted_cnums = sorted_cnums_including_local_crate(tcx);
let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| {
let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
prev_cnums,
query_result_index,
diagnostics_index,
+ interpret_alloc_index,
})?;
// Encode the position of the footer as the last 8 bytes of the
file_index_to_file: &self.file_index_to_file,
file_index_to_stable_id: &self.file_index_to_stable_id,
synthetic_expansion_infos: &self.synthetic_expansion_infos,
+ prev_interpret_alloc_index: &self.prev_interpret_alloc_index,
interpret_alloc_cache: &self.interpret_alloc_cache,
- interpret_alloc_size: &self.interpret_alloc_size,
};
match decode_tagged(&mut decoder, dep_node_index) {
file_index_to_file: &'x Lock<FxHashMap<FileMapIndex, Lrc<FileMap>>>,
file_index_to_stable_id: &'x FxHashMap<FileMapIndex, StableFilemapId>,
interpret_alloc_cache: &'x RefCell<FxHashMap<usize, interpret::AllocId>>,
- interpret_alloc_size: &'x RefCell<FxHashMap<usize, usize>>,
+ /// maps from index in the cache file to location in the cache file
+ prev_interpret_alloc_index: &'x [AbsoluteBytePos],
}
impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> {
impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx, 'x> {
fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
let tcx = self.tcx;
- let pos = TyDecoder::position(self);
- trace!("specialized_decode_alloc_id: {:?}", pos);
- if let Some(cached) = self.interpret_alloc_cache.borrow().get(&pos).cloned() {
- // if there's no end position we are currently deserializing a recursive
- // allocation
- if let Some(end) = self.interpret_alloc_size.borrow().get(&pos).cloned() {
- trace!("{} already cached as {:?}", pos, cached);
- // skip ahead
- self.opaque.set_position(end);
- return Ok(cached)
- }
+ let idx = usize::decode(self)?;
+ trace!("loading index {}", idx);
+
+ if let Some(cached) = self.interpret_alloc_cache.borrow().get(&idx).cloned() {
+ trace!("loading alloc id {:?} from alloc_cache", cached);
+ return Ok(cached);
}
- let id = interpret::specialized_decode_alloc_id(
- self,
- tcx,
- pos,
- |this, pos, alloc_id| {
- assert!(this.interpret_alloc_cache.borrow_mut().insert(pos, alloc_id).is_none());
- },
- |this, shorthand| {
- // need to load allocation
- this.with_position(shorthand, |this| interpret::AllocId::decode(this))
- }
- )?;
- assert!(self
- .interpret_alloc_size
- .borrow_mut()
- .insert(pos, TyDecoder::position(self))
- .is_none());
- Ok(id)
+ let pos = self.prev_interpret_alloc_index[idx].to_usize();
+ trace!("loading position {}", pos);
+ self.with_position(pos, |this| {
+ interpret::specialized_decode_alloc_id(
+ this,
+ tcx,
+ |this, alloc_id| {
+ trace!("caching idx {} for alloc id {} at position {}", idx, alloc_id, pos);
+ assert!(this
+ .interpret_alloc_cache
+ .borrow_mut()
+ .insert(idx, alloc_id)
+ .is_none());
+ },
+ )
+ })
}
}
impl<'a, 'tcx, 'x> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx, 'x> {
type_shorthands: FxHashMap<ty::Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
expn_info_shorthands: FxHashMap<Mark, AbsoluteBytePos>,
- interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs_inverse: Vec<interpret::AllocId>,
codemap: CachingCodemapView<'tcx>,
file_to_file_index: FxHashMap<*const FileMap, FileMapIndex>,
}
{
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
use std::collections::hash_map::Entry;
- let tcx = self.tcx;
- let pos = self.position();
- let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
- Entry::Occupied(entry) => Some(entry.get().clone()),
- Entry::Vacant(entry) => {
- // ensure that we don't place any AllocIds at the very beginning
- // of the metadata file, because that would end up making our indices
- // not special. It is essentially impossible for that to happen,
- // but let's make sure
- assert!(pos >= interpret::SHORTHAND_START);
- entry.insert(pos);
- None
+ let index = match self.interpret_allocs.entry(*alloc_id) {
+ Entry::Occupied(e) => *e.get(),
+ Entry::Vacant(e) => {
+ let idx = self.interpret_allocs_inverse.len();
+ self.interpret_allocs_inverse.push(*alloc_id);
+ e.insert(idx);
+ idx
},
};
- interpret::specialized_encode_alloc_id(
- self,
- tcx,
- *alloc_id,
- shorthand,
- )
+
+ index.encode(self)
}
}
E: 'enc + TyEncoder,
Q::Value: Encodable,
{
+ let desc = &format!("encode_query_results for {}",
+ unsafe { ::std::intrinsics::type_name::<Q>() });
+
+ time(tcx.sess, desc, || {
+
for (key, entry) in Q::get_cache_internal(tcx).map.iter() {
if Q::cache_on_disk(key.clone()) {
let entry = match *entry {
}
Ok(())
+ })
}
let icx = ty::tls::ImplicitCtxt {
tcx,
query: Some(job.clone()),
+ layout_depth: icx.layout_depth,
};
// Use the ImplicitCtxt while we execute the query
Intrinsic(ref s) => Intrinsic(s.clone()),
OverflowingMath => OverflowingMath,
InvalidChar(c) => InvalidChar(c),
- ExecutionTimeLimitReached => ExecutionTimeLimitReached,
StackFrameLimitReached => StackFrameLimitReached,
OutOfTls => OutOfTls,
TlsOutOfBounds => TlsOutOfBounds,
}
}
- /// Returns the type of ty[i]
+ /// Returns the type of `ty[i]`.
pub fn builtin_index(&self) -> Option<Ty<'tcx>> {
match self.sty {
TyArray(ty, _) | TySlice(ty) => Some(ty),
impl<A: Array> Drop for Iter<A> {
fn drop(&mut self) {
- for _ in self {}
+ self.for_each(drop);
}
}
impl<'a, A: Array> Drop for Drain<'a, A> {
fn drop(&mut self) {
// exhaust self first
- while let Some(_) = self.next() {}
+ self.for_each(drop);
if self.tail_len > 0 {
unsafe {
write_out_deps(sess, &outputs, &output_paths);
if sess.opts.output_types.contains_key(&OutputType::DepInfo) &&
- sess.opts.output_types.keys().count() == 1 {
+ sess.opts.output_types.len() == 1 {
return Ok(())
}
let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver);
let err_count = ecx.parse_sess.span_diagnostic.err_count();
- let krate = ecx.monotonic_expander().expand_crate(krate);
+ let krate = time(sess, "expand crate", || {
+ ecx.monotonic_expander().expand_crate(krate)
+ });
- ecx.check_unused_macros();
+ time(sess, "check unused macros", || {
+ ecx.check_unused_macros();
+ });
let mut missing_fragment_specifiers: Vec<_> =
ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
(result, Some(sess))
}
+#[cfg(unix)]
+pub fn set_sigpipe_handler() {
+ unsafe {
+ // Set the SIGPIPE signal handler, so that an EPIPE
+ // will cause rustc to terminate, as expected.
+ assert!(libc::signal(libc::SIGPIPE, libc::SIG_DFL) != libc::SIG_ERR);
+ }
+}
+
+#[cfg(windows)]
+pub fn set_sigpipe_handler() {}
+
// Extract output directory and file from matches.
fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<PathBuf>) {
let odir = matches.opt_str("out-dir").map(|o| PathBuf::from(&o));
} else {
0
};
- (b_start..b_end + extra).contains(a_start) ||
- (a_start..a_end + extra).contains(b_start)
+ (b_start..b_end + extra).contains(&a_start) ||
+ (a_start..a_end + extra).contains(&b_start)
}
fn overlaps(a1: &Annotation, a2: &Annotation, padding: usize) -> bool {
num_overlap(a1.start_col, a1.end_col + padding, a2.start_col, a2.end_col, false)
time(sess, "persist dep-graph", || {
save_in(sess,
dep_graph_path(sess),
- |e| encode_dep_graph(tcx, e));
+ |e| {
+ time(sess, "encode dep-graph", || {
+ encode_dep_graph(tcx, e)
+ })
+ });
});
}
tcx.sess.opts.dep_tracking_hash().encode(encoder)?;
// Encode the graph data.
- let serialized_graph = tcx.dep_graph.serialize();
+ let serialized_graph = time(tcx.sess, "getting serialized graph", || {
+ tcx.dep_graph.serialize()
+ });
if tcx.sess.opts.debugging_opts.incremental_info {
#[derive(Clone)]
println!("[incremental]");
}
- serialized_graph.encode(encoder)?;
+ time(tcx.sess, "encoding serialized graph", || {
+ serialized_graph.encode(encoder)
+ })?;
Ok(())
}
fn encode_query_cache(tcx: TyCtxt,
encoder: &mut Encoder)
-> io::Result<()> {
- tcx.serialize_query_result_cache(encoder)
+ time(tcx.sess, "serialize query result cache", || {
+ tcx.serialize_query_result_cache(encoder)
+ })
}
use rustc_back::target::TargetTriple;
use rustc::session::search_paths::PathKind;
use rustc::middle;
-use rustc::middle::cstore::{validate_crate_name, ExternCrate};
+use rustc::middle::cstore::{validate_crate_name, ExternCrate, ExternCrateSource};
use rustc::util::common::record_time;
use rustc::util::nodemap::FxHashSet;
use rustc::hir::map::Definitions;
// - something over nothing (tuple.0);
// - direct extern crate to indirect (tuple.1);
// - shorter paths to longer (tuple.2).
- let new_rank = (true, extern_crate.direct, !extern_crate.path_len);
+ let new_rank = (
+ true,
+ extern_crate.direct,
+ cmp::Reverse(extern_crate.path_len),
+ );
let old_rank = match *old_extern_crate {
- None => (false, false, !0),
- Some(ref c) => (true, c.direct, !c.path_len),
+ None => (false, false, cmp::Reverse(usize::max_value())),
+ Some(ref c) => (
+ true,
+ c.direct,
+ cmp::Reverse(c.path_len),
+ ),
};
-
if old_rank >= new_rank {
return; // no change needed
}
}
}
- fn process_item(&mut self, item: &ast::Item, definitions: &Definitions) {
+ fn process_extern_crate(&mut self, item: &ast::Item, definitions: &Definitions) -> CrateNum {
match item.node {
ast::ItemKind::ExternCrate(orig_name) => {
debug!("resolving extern crate stmt. ident: {} orig_name: {:?}",
let def_id = definitions.opt_local_def_id(item.id).unwrap();
let path_len = definitions.def_path(def_id.index).data.len();
-
- let extern_crate = ExternCrate { def_id, span: item.span, direct: true, path_len };
- self.update_extern_crate(cnum, extern_crate, &mut FxHashSet());
+ self.update_extern_crate(
+ cnum,
+ ExternCrate {
+ src: ExternCrateSource::Extern(def_id),
+ span: item.span,
+ path_len,
+ direct: true,
+ },
+ &mut FxHashSet(),
+ );
self.cstore.add_extern_mod_stmt_cnum(item.id, cnum);
+ cnum
}
- _ => {}
+ _ => bug!(),
}
}
- fn resolve_crate_from_path(&mut self, name: Symbol, span: Span) -> CrateNum {
- self.resolve_crate(&None, name, name, None, None, span, PathKind::Crate,
- DepKind::Explicit).0
+ fn process_path_extern(
+ &mut self,
+ name: Symbol,
+ span: Span,
+ ) -> CrateNum {
+ let cnum = self.resolve_crate(
+ &None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit
+ ).0;
+
+ self.update_extern_crate(
+ cnum,
+ ExternCrate {
+ src: ExternCrateSource::Path,
+ span,
+ // to have the least priority in `update_extern_crate`
+ path_len: usize::max_value(),
+ direct: true,
+ },
+ &mut FxHashSet(),
+ );
+
+ cnum
+ }
+
+ fn process_use_extern(
+ &mut self,
+ name: Symbol,
+ span: Span,
+ id: ast::NodeId,
+ definitions: &Definitions,
+ ) -> CrateNum {
+ let cnum = self.resolve_crate(
+ &None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit
+ ).0;
+
+ let def_id = definitions.opt_local_def_id(id).unwrap();
+ let path_len = definitions.def_path(def_id.index).data.len();
+
+ self.update_extern_crate(
+ cnum,
+ ExternCrate {
+ src: ExternCrateSource::Use,
+ span,
+ path_len,
+ direct: true,
+ },
+ &mut FxHashSet(),
+ );
+
+ cnum
}
}
// interpreter allocation cache
interpret_alloc_cache: FxHashMap<usize, interpret::AllocId>,
- // a cache for sizes of interpreter allocations
- // needed to skip already deserialized allocations
- interpret_alloc_size: FxHashMap<usize, usize>,
+
+ // Read from the LazySeq CrateRoot::interpret_alloc_index on demand
+ interpret_alloc_index: Option<Vec<u32>>,
}
/// Abstract over the various ways one can create metadata decoders.
last_filemap_index: 0,
lazy_state: LazyState::NoNode,
interpret_alloc_cache: FxHashMap::default(),
- interpret_alloc_size: FxHashMap::default(),
+ interpret_alloc_index: None,
}
}
}
self.lazy_state = LazyState::Previous(position + min_size);
Ok(position)
}
+
+ fn interpret_alloc(&mut self, idx: usize) -> usize {
+ if let Some(index) = self.interpret_alloc_index.as_mut() {
+ return index[idx] as usize;
+ }
+ let cdata = self.cdata();
+ let index: Vec<u32> = cdata.root.interpret_alloc_index.decode(cdata).collect();
+ let pos = index[idx];
+ self.interpret_alloc_index = Some(index);
+ pos as usize
+ }
}
impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> {
impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
- let tcx = self.tcx.expect("need tcx for AllocId decoding");
- let pos = self.position();
- if let Some(cached) = self.interpret_alloc_cache.get(&pos).cloned() {
- // if there's no end position we are currently deserializing a recursive
- // allocation
- if let Some(end) = self.interpret_alloc_size.get(&pos).cloned() {
- trace!("{} already cached as {:?}", pos, cached);
- // skip ahead
- self.opaque.set_position(end);
- return Ok(cached)
- }
+ let tcx = self.tcx.unwrap();
+ let idx = usize::decode(self)?;
+
+ if let Some(cached) = self.interpret_alloc_cache.get(&idx).cloned() {
+ return Ok(cached);
}
- let id = interpret::specialized_decode_alloc_id(
- self,
- tcx,
- pos,
- |this, pos, alloc_id| { this.interpret_alloc_cache.insert(pos, alloc_id); },
- |this, shorthand| {
- // need to load allocation
- this.with_position(shorthand, |this| interpret::AllocId::decode(this))
- }
- )?;
- let end_pos = self.position();
- assert!(self
- .interpret_alloc_size
- .insert(pos, end_pos)
- .is_none());
- Ok(id)
+ let pos = self.interpret_alloc(idx);
+ self.with_position(pos, |this| {
+ interpret::specialized_decode_alloc_id(
+ this,
+ tcx,
+ |this, alloc_id| {
+ assert!(this.interpret_alloc_cache.insert(idx, alloc_id).is_none());
+ },
+ )
+ })
}
}
lazy_state: LazyState,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
- interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+
+ interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs_inverse: Vec<interpret::AllocId>,
// This is used to speed up Span encoding.
filemap_cache: Lrc<FileMap>,
impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
use std::collections::hash_map::Entry;
- let tcx = self.tcx;
- let pos = self.position();
- let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
- Entry::Occupied(entry) => Some(entry.get().clone()),
- Entry::Vacant(entry) => {
- // ensure that we don't place any AllocIds at the very beginning
- // of the metadata file, because that would end up making our indices
- // not special. This is essentially impossible, but let's make sure
- assert!(pos >= interpret::SHORTHAND_START);
- entry.insert(pos);
- None
+ let index = match self.interpret_allocs.entry(*alloc_id) {
+ Entry::Occupied(e) => *e.get(),
+ Entry::Vacant(e) => {
+ let idx = self.interpret_allocs_inverse.len();
+ self.interpret_allocs_inverse.push(*alloc_id);
+ e.insert(idx);
+ idx
},
};
- interpret::specialized_encode_alloc_id(
- self,
- tcx,
- *alloc_id,
- shorthand,
- )
+
+ index.encode(self)
}
}
start - min_end
}
LazyState::Previous(last_min_end) => {
- assert!(last_min_end <= position);
+ assert!(
+ last_min_end <= position,
+ "make sure that the calls to `lazy*` \
+ are in the same order as the metadata fields",
+ );
position - last_min_end
}
};
IsolatedEncoder::encode_wasm_custom_sections,
&wasm_custom_sections);
- // Encode and index the items.
+ let tcx = self.tcx;
+
+ // Encode the items.
i = self.position();
let items = self.encode_info_for_items();
let item_bytes = self.position() - i;
+ // Encode the allocation index
+ let interpret_alloc_index = {
+ let mut interpret_alloc_index = Vec::new();
+ let mut n = 0;
+ trace!("beginning to encode alloc ids");
+ loop {
+ let new_n = self.interpret_allocs_inverse.len();
+ // stop once no new alloc ids were discovered in the previous pass
+ if n == new_n {
+ // every known id has been serialized; leave the loop
+ break;
+ }
+ trace!("encoding {} further alloc ids", new_n - n);
+ for idx in n..new_n {
+ let id = self.interpret_allocs_inverse[idx];
+ let pos = self.position() as u32;
+ interpret_alloc_index.push(pos);
+ interpret::specialized_encode_alloc_id(
+ self,
+ tcx,
+ id,
+ ).unwrap();
+ }
+ n = new_n;
+ }
+ self.lazy_seq(interpret_alloc_index)
+ };
+
+ // Index the items
i = self.position();
let index = items.write_index(&mut self.opaque.cursor);
let index_bytes = self.position() - i;
- let tcx = self.tcx;
let link_meta = self.link_meta;
let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeProcMacro);
let has_default_lib_allocator =
attr::contains_name(tcx.hir.krate_attrs(), "default_lib_allocator");
let has_global_allocator = *tcx.sess.has_global_allocator.get();
+
let root = self.lazy(&CrateRoot {
name: tcx.crate_name(LOCAL_CRATE),
extra_filename: tcx.sess.opts.cg.extra_filename.clone(),
impls,
exported_symbols,
wasm_custom_sections,
+ interpret_alloc_index,
index,
});
}
}
+ fn metadata_output_only(&self) -> bool {
+ // MIR optimisation can be skipped when we're just interested in the metadata.
+ !self.tcx.sess.opts.output_types.should_trans()
+ }
+
fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
debug!("IsolatedEncoder::encode_info_for_impl_item({:?})", def_id);
let tcx = self.tcx;
} else if let hir::ImplItemKind::Method(ref sig, body) = ast_item.node {
let generics = self.tcx.generics_of(def_id);
let types = generics.parent_types as usize + generics.types.len();
- let needs_inline = types > 0 || tcx.trans_fn_attrs(def_id).requests_inline();
+ let needs_inline = (types > 0 || tcx.trans_fn_attrs(def_id).requests_inline()) &&
+ !self.metadata_output_only();
let is_const_fn = sig.constness == hir::Constness::Const;
let ast = if is_const_fn { Some(body) } else { None };
let always_encode_mir = self.tcx.sess.opts.debugging_opts.always_encode_mir;
hir::ItemConst(..) => self.encode_optimized_mir(def_id),
hir::ItemFn(_, _, constness, _, ref generics, _) => {
let has_tps = generics.ty_params().next().is_some();
- let needs_inline = has_tps || tcx.trans_fn_attrs(def_id).requests_inline();
+ let needs_inline = (has_tps || tcx.trans_fn_attrs(def_id).requests_inline()) &&
+ !self.metadata_output_only();
let always_encode_mir = self.tcx.sess.opts.debugging_opts.always_encode_mir;
if needs_inline || constness == hir::Constness::Const || always_encode_mir {
self.encode_optimized_mir(def_id)
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
filemap_cache: tcx.sess.codemap().files()[0].clone(),
- interpret_alloc_shorthands: Default::default(),
+ interpret_allocs: Default::default(),
+ interpret_allocs_inverse: Default::default(),
};
// Encode the rustc version string in a predictable location.
pub impls: LazySeq<TraitImpls>,
pub exported_symbols: EncodedExportedSymbols,
pub wasm_custom_sections: LazySeq<DefIndex>,
+ pub interpret_alloc_index: LazySeq<u32>,
pub index: LazySeq<index::Index>,
}
} else {
self.get_default_err_msg(place)
};
+ let sp = self.mir.source_info(locations[0]).span;
+ let mut to_suggest_span = String::new();
+ if let Ok(src) =
+ self.tcx.sess.codemap().span_to_snippet(sp) {
+ to_suggest_span = src[1..].to_string();
+ };
err_info = Some((
- self.mir.source_info(locations[0]).span,
+ sp,
"consider changing this to be a \
- mutable reference: `&mut`", item_msg,
+ mutable reference",
+ to_suggest_span,
+ item_msg,
self.get_primary_err_msg(base)));
}
},
_ => {},
}
- if let Some((err_help_span, err_help_stmt, item_msg, sec_span)) = err_info {
+ if let Some((err_help_span,
+ err_help_stmt,
+ to_suggest_span,
+ item_msg,
+ sec_span)) = err_info {
let mut err = self.tcx.cannot_assign(span, &item_msg, Origin::Mir);
- err.span_suggestion(err_help_span, err_help_stmt, format!(""));
+ err.span_suggestion(err_help_span,
+ err_help_stmt,
+ format!("&mut {}", to_suggest_span));
if place != place_err {
err.span_label(span, sec_span);
}
use borrow_check::nll::region_infer::Cause;
use borrow_check::nll::region_infer::ClosureRegionRequirementsExt;
use borrow_check::nll::universal_regions::UniversalRegions;
+use dataflow::move_paths::MoveData;
use dataflow::FlowAtLocation;
use dataflow::MaybeInitializedPlaces;
-use dataflow::move_paths::MoveData;
use rustc::hir::def_id::DefId;
-use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
use rustc::infer::region_constraints::{GenericKind, RegionConstraintData};
-use rustc::traits::{self, Normalized, TraitEngine};
+use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
+use rustc::mir::tcx::PlaceTy;
+use rustc::mir::visit::{PlaceContext, Visitor};
+use rustc::mir::*;
use rustc::traits::query::NoSolution;
+use rustc::traits::{self, Normalized, TraitEngine};
use rustc::ty::error::TypeError;
use rustc::ty::fold::TypeFoldable;
use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, TypeVariants};
-use rustc::mir::*;
-use rustc::mir::tcx::PlaceTy;
-use rustc::mir::visit::{PlaceContext, Visitor};
use std::fmt;
use syntax::ast;
use syntax_pos::{Span, DUMMY_SP};
})
}
-mod liveness;
mod input_output;
+mod liveness;
/// Type checks the given `mir` in the context of the inference
/// context `infcx`. Returns any region constraints that have yet to
tcx.predicates_of(def_id).instantiate(tcx, substs);
let predicates =
type_checker.normalize(&instantiated_predicates.predicates, location);
- type_checker.prove_predicates(&predicates, location);
+ type_checker.prove_predicates(predicates.iter().cloned(), location);
}
value.ty
}
StatementKind::UserAssertTy(ref c_ty, ref local) => {
let local_ty = mir.local_decls()[*local].ty;
- let (ty, _) = self.infcx.instantiate_canonical_with_fresh_inference_vars(
- stmt.source_info.span, c_ty);
- debug!("check_stmt: user_assert_ty ty={:?} local_ty={:?}", ty, local_ty);
+ let (ty, _) = self.infcx
+ .instantiate_canonical_with_fresh_inference_vars(stmt.source_info.span, c_ty);
+ debug!(
+ "check_stmt: user_assert_ty ty={:?} local_ty={:?}",
+ ty, local_ty
+ );
if let Err(terr) = self.eq_types(ty, local_ty, location.at_self()) {
span_mirbug!(
self,
let sig = self.normalize(&sig, term_location);
self.check_call_dest(mir, term, &sig, destination, term_location);
+ self.prove_predicates(
+ sig.inputs().iter().map(|ty| ty::Predicate::WellFormed(ty)),
+ term_location,
+ );
+
// The ordinary liveness rules will ensure that all
// regions in the type of the callee are live here. We
// then further constrain the late-bound regions that
let predicates = self.normalize(&instantiated_predicates.predicates, location);
debug!("prove_aggregate_predicates: predicates={:?}", predicates);
- self.prove_predicates(&predicates, location);
+ self.prove_predicates(predicates.iter().cloned(), location);
}
fn prove_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>, location: Location) {
self.prove_predicates(
- &[
- ty::Predicate::Trait(trait_ref.to_poly_trait_ref().to_poly_trait_predicate()),
- ],
+ [ty::Predicate::Trait(
+ trait_ref.to_poly_trait_ref().to_poly_trait_predicate(),
+ )].iter()
+ .cloned(),
location,
);
}
- fn prove_predicates(&mut self, predicates: &[ty::Predicate<'tcx>], location: Location) {
+ fn prove_predicates(
+ &mut self,
+ predicates: impl IntoIterator<Item = ty::Predicate<'tcx>>,
+ location: Location,
+ ) {
+ let mut predicates_iter = predicates.into_iter();
+
debug!(
"prove_predicates(predicates={:?}, location={:?})",
- predicates, location
+ predicates_iter.by_ref().collect::<Vec<_>>(),
+ location
);
self.fully_perform_op(location.at_self(), |this| {
let cause = this.misc(this.last_span);
- let obligations = predicates
- .iter()
- .map(|&p| traits::Obligation::new(cause.clone(), this.param_env, p))
+ let obligations = predicates_iter
+ .map(|p| traits::Obligation::new(cause.clone(), this.param_env, p))
.collect();
Ok(InferOk {
value: (),
/// True if `r` is a member of this set of universal regions.
pub fn is_universal_region(&self, r: RegionVid) -> bool {
- (FIRST_GLOBAL_INDEX..self.num_universals).contains(r.index())
+ (FIRST_GLOBAL_INDEX..self.num_universals).contains(&r.index())
}
/// Classifies `r` as a universal region, returning `None` if this
/// is not a member of this set of universal regions.
pub fn region_classification(&self, r: RegionVid) -> Option<RegionClassification> {
let index = r.index();
- if (FIRST_GLOBAL_INDEX..self.first_extern_index).contains(index) {
+ if (FIRST_GLOBAL_INDEX..self.first_extern_index).contains(&index) {
Some(RegionClassification::Global)
- } else if (self.first_extern_index..self.first_local_index).contains(index) {
+ } else if (self.first_extern_index..self.first_local_index).contains(&index) {
Some(RegionClassification::External)
- } else if (self.first_local_index..self.num_universals).contains(index) {
+ } else if (self.first_local_index..self.num_universals).contains(&index) {
Some(RegionClassification::Local)
} else {
None
use rustc::ty::{self, TyCtxt, Ty, Instance};
use rustc::ty::layout::{self, LayoutOf};
use rustc::ty::subst::Subst;
-use rustc::util::nodemap::FxHashSet;
use syntax::ast::Mutability;
use syntax::codemap::Span;
}
span = mir.span;
let layout = ecx.layout_of(mir.return_ty().subst(tcx, cid.instance.substs))?;
- let alloc = tcx.interpret_interner.get_cached(cid.instance.def_id());
- let is_static = tcx.is_static(cid.instance.def_id()).is_some();
- let alloc = match alloc {
- Some(alloc) => {
- assert!(cid.promoted.is_none());
- assert!(param_env.caller_bounds.is_empty());
- alloc
- },
- None => {
- assert!(!layout.is_unsized());
- let ptr = ecx.memory.allocate(
- layout.size.bytes(),
- layout.align,
- None,
- )?;
- if is_static {
- tcx.interpret_interner.cache(cid.instance.def_id(), ptr.alloc_id);
- }
- let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
- let mutability = tcx.is_static(cid.instance.def_id());
- let mutability = if mutability == Some(hir::Mutability::MutMutable) || internally_mutable {
- Mutability::Mutable
- } else {
- Mutability::Immutable
- };
- let cleanup = StackPopCleanup::MarkStatic(mutability);
- let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
- let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
- trace!("const_eval: pushing stack frame for global: {}{}", name, prom);
- assert!(mir.arg_count == 0);
- ecx.push_stack_frame(
- cid.instance,
- mir.span,
- mir,
- Place::from_ptr(ptr, layout.align),
- cleanup,
- )?;
-
- while ecx.step()? {}
- ptr.alloc_id
- }
+ assert!(!layout.is_unsized());
+ let ptr = ecx.memory.allocate(
+ layout.size.bytes(),
+ layout.align,
+ None,
+ )?;
+ let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
+ let mutability = tcx.is_static(cid.instance.def_id());
+ let mutability = if mutability == Some(hir::Mutability::MutMutable) || internally_mutable {
+ Mutability::Mutable
+ } else {
+ Mutability::Immutable
};
- let ptr = MemoryPointer::new(alloc, 0).into();
+ let cleanup = StackPopCleanup::MarkStatic(mutability);
+ let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
+ let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
+ trace!("const_eval: pushing stack frame for global: {}{}", name, prom);
+ assert!(mir.arg_count == 0);
+ ecx.push_stack_frame(
+ cid.instance,
+ mir.span,
+ mir,
+ Place::from_ptr(ptr, layout.align),
+ cleanup,
+ )?;
+
+ while ecx.step()? {}
+ let ptr = ptr.into();
// always try to read the value and report errors
let value = match ecx.try_read_value(ptr, layout.align, layout.ty)? {
// if it's a constant (so it needs no address, directly compute its value)
- Some(val) if !is_static => val,
+ Some(val) if tcx.is_static(cid.instance.def_id()).is_none() => val,
// point at the allocation
_ => Value::ByRef(ptr, layout.align),
};
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
cid: GlobalId<'tcx>,
) -> EvalResult<'tcx, AllocId> {
- let alloc = ecx
- .tcx
- .interpret_interner
- .get_cached(cid.instance.def_id());
- // Don't evaluate when already cached to prevent cycles
- if let Some(alloc) = alloc {
- return Ok(alloc)
- }
- // ensure the static is computed
- ecx.const_eval(cid)?;
Ok(ecx
.tcx
.interpret_interner
- .get_cached(cid.instance.def_id())
- .expect("uncached static"))
+ .cache_static(cid.instance.def_id()))
}
fn box_alloc<'a>(
let def_id = cid.instance.def.def_id();
if tcx.is_foreign_item(def_id) {
- let id = tcx.interpret_interner.get_cached(def_id);
- let id = match id {
- // FIXME: due to caches this shouldn't happen, add some assertions
- Some(id) => id,
- None => {
- let id = tcx.interpret_interner.reserve();
- tcx.interpret_interner.cache(def_id, id);
- id
- },
- };
+ let id = tcx.interpret_interner.cache_static(def_id);
let ty = tcx.type_of(def_id);
let layout = tcx.layout_of(key.param_env.and(ty)).unwrap();
let ptr = MemoryPointer::new(id, 0);
};
let (res, ecx) = eval_body_and_ecx(tcx, cid, None, key.param_env);
- res.map(|(miri_value, ptr, miri_ty)| {
- if tcx.is_static(def_id).is_some() {
- if let Ok(ptr) = ptr.primval.to_ptr() {
- let mut seen = FxHashSet::default();
- create_depgraph_edges(tcx, ptr.alloc_id, &mut seen);
- }
- }
+ res.map(|(miri_value, _, miri_ty)| {
tcx.mk_const(ty::Const {
val: ConstVal::Value(miri_value),
ty: miri_ty,
}
})
}
-
-// This function creates dep graph edges from statics to all referred to statics.
-// This is necessary, because the `const_eval` query cannot directly call itself
-// for other statics, because we cannot prevent recursion in queries.
-//
-// see test/incremental/static_refering_to_other_static2/issue.rs for an example
-// where not creating those edges would cause static A, which refers to static B
-// to point to the old allocation of static B, even though B has changed.
-//
-// In the future we will want to remove this funcion in favour of a system that
-// makes sure that statics don't need to have edges to other statics as long as
-// they are only referring by reference and not inspecting the other static's body.
-fn create_depgraph_edges<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- alloc_id: AllocId,
- seen: &mut FxHashSet<AllocId>,
-) {
- trace!("create_depgraph_edges: {:?}, {:?}", alloc_id, seen);
- if seen.insert(alloc_id) {
- trace!("seen: {:?}, {:?}", alloc_id, seen);
- if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
- trace!("get_alloc: {:?}, {:?}, {:?}", alloc_id, seen, alloc);
- for (_, &reloc) in &alloc.relocations {
- if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(reloc) {
- trace!("get_corresponding: {:?}, {:?}, {:?}, {:?}, {:?}", alloc_id, seen, alloc, did, reloc);
- let _ = tcx.maybe_optimized_mir(did);
- }
- create_depgraph_edges(tcx, reloc, seen);
- }
- }
- }
-}
/// The maximum number of terminators that may be evaluated.
/// This prevents infinite loops and huge computations from freezing up const eval.
/// Remove once halting problem is solved.
- pub(crate) steps_remaining: usize,
+ pub(crate) terminators_remaining: usize,
}
/// A stack frame.
memory: Memory::new(tcx, memory_data),
stack: Vec::new(),
stack_limit: tcx.sess.const_eval_stack_frame_limit,
- steps_remaining: tcx.sess.const_eval_step_limit,
+ terminators_remaining: 1_000_000,
}
}
}
Aggregate(ref kind, ref operands) => {
- self.inc_step_counter_and_check_limit(operands.len())?;
+ self.inc_step_counter_and_check_limit(operands.len());
let (dest, active_field_index) = match **kind {
mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
}
pub fn read_global_as_value(&self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
- if gid.promoted.is_none() {
- let cached = self
+ if self.tcx.is_static(gid.instance.def_id()).is_some() {
+ let alloc_id = self
.tcx
.interpret_interner
- .get_cached(gid.instance.def_id());
- if let Some(alloc_id) = cached {
- let layout = self.layout_of(ty)?;
- let ptr = MemoryPointer::new(alloc_id, 0);
- return Ok(Value::ByRef(ptr.into(), layout.align))
- }
+ .cache_static(gid.instance.def_id());
+ let layout = self.layout_of(ty)?;
+ let ptr = MemoryPointer::new(alloc_id, 0);
+ return Ok(Value::ByRef(ptr.into(), layout.align))
}
let cv = self.const_eval(gid)?;
self.const_to_value(&cv.val, ty)
use super::{EvalContext, Machine};
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
- pub fn inc_step_counter_and_check_limit(&mut self, n: usize) -> EvalResult<'tcx> {
- self.steps_remaining = self.steps_remaining.saturating_sub(n);
- if self.steps_remaining > 0 {
- Ok(())
- } else {
- err!(ExecutionTimeLimitReached)
+ pub fn inc_step_counter_and_check_limit(&mut self, n: usize) {
+ self.terminators_remaining = self.terminators_remaining.saturating_sub(n);
+ if self.terminators_remaining == 0 {
+ // FIXME(#49980): make this warning a lint
+ self.tcx.sess.span_warn(self.frame().span, "Constant evaluating a complex constant, this might take some time");
+ self.terminators_remaining = 1_000_000;
}
}
return Ok(true);
}
- self.inc_step_counter_and_check_limit(1)?;
+ self.inc_step_counter_and_check_limit(1);
let terminator = basic_block.terminator();
assert_eq!(old_frames, self.cur_frame());
alloc_id: AllocId,
output: &mut Vec<MonoItem<'tcx>>,
) {
- if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(alloc_id) {
+ if let Some(did) = tcx.interpret_interner.get_static(alloc_id) {
let instance = Instance::mono(tcx, did);
if should_monomorphize_locally(tcx, &instance) {
trace!("collecting static {:?}", did);
}
ItemKind::ExternCrate(orig_name) => {
- self.crate_loader.process_item(item, &self.definitions);
-
- // n.b. we don't need to look at the path option here, because cstore already did
- let crate_id = self.cstore.extern_mod_stmt_cnum_untracked(item.id).unwrap();
+ let crate_id = self.crate_loader.process_extern_crate(item, &self.definitions);
let module =
self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
self.populate_module_if_necessary(module);
self.current_module = module;
}
- ItemKind::ForeignMod(..) => self.crate_loader.process_item(item, &self.definitions),
+ // Handled in `rustc_metadata::{native_libs,link_args}`
+ ItemKind::ForeignMod(..) => {}
// These items live in the value namespace.
ItemKind::Static(_, m, _) => {
prev_name == keywords::CrateRoot.name() &&
self.session.features_untracked().extern_absolute_paths {
// `::extern_crate::a::b`
- let crate_id = self.crate_loader.resolve_crate_from_path(name, ident.span);
+ let crate_id = self.crate_loader.process_path_extern(name, ident.span);
let crate_root =
self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
self.populate_module_if_necessary(crate_root);
}
} else if is_extern && !token::is_path_segment_keyword(source) {
let crate_id =
- self.crate_loader.resolve_crate_from_path(source.name, directive.span);
+ self.resolver.crate_loader.process_use_extern(
+ source.name,
+ directive.span,
+ directive.id,
+ &self.resolver.definitions,
+ );
let crate_root =
self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
self.populate_module_if_necessary(crate_root);
use rustc::hir::def::Def as HirDef;
use rustc::hir::map::{Node, NodeItem};
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
+use rustc::middle::cstore::ExternCrate;
use rustc::session::config::CrateType::CrateTypeExecutable;
use rustc::ty::{self, TyCtxt};
use rustc_typeck::hir_ty_to_ty;
for &n in self.tcx.crates().iter() {
let span = match *self.tcx.extern_crate(n.as_def_id()) {
- Some(ref c) => c.span,
+ Some(ExternCrate { span, .. }) => span,
None => {
debug!("Skipping crate {}, no data", n);
continue;
FromEnv(..) |
WellFormedTy(..) |
FromEnvTy(..) |
+ Normalize(..) |
RegionOutlives(..) |
TypeOutlives(..) => self,
}
-> Lrc<&'tcx Slice<Clause<'tcx>>>
{
let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
- let item = tcx.hir.expect_item(node_id);
- match item.node {
- hir::ItemTrait(..) => program_clauses_for_trait(tcx, def_id),
- hir::ItemImpl(..) => program_clauses_for_impl(tcx, def_id),
+ let node = tcx.hir.find(node_id).unwrap();
+ match node {
+ hir::map::Node::NodeItem(item) => match item.node {
+ hir::ItemTrait(..) => program_clauses_for_trait(tcx, def_id),
+ hir::ItemImpl(..) => program_clauses_for_impl(tcx, def_id),
+ _ => Lrc::new(tcx.mk_clauses(iter::empty::<Clause>())),
+ }
+ hir::map::Node::NodeImplItem(item) => {
+ if let hir::ImplItemKind::Type(..) = item.node {
+ program_clauses_for_associated_type_value(tcx, def_id)
+ } else {
+ Lrc::new(tcx.mk_clauses(iter::empty::<Clause>()))
+ }
+ },
// FIXME: other constructions e.g. traits, associated types...
_ => Lrc::new(tcx.mk_clauses(iter::empty::<Clause>())),
Lrc::new(tcx.mk_clauses(iter::once(Clause::ForAll(ty::Binder::dummy(clause)))))
}
+pub fn program_clauses_for_associated_type_value<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ item_id: DefId,
+) -> Lrc<&'tcx Slice<Clause<'tcx>>> {
+ // Rule Normalize-From-Impl (see rustc guide)
+ //
+ // ```impl<P0..Pn> Trait<A1..An> for A0
+ // {
+ // type AssocType<Pn+1..Pm> where WC = T;
+ // }```
+ //
+ // ```
+ // forall<P0..Pm> {
+ // forall<Pn+1..Pm> {
+ // Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T) :-
+ // Implemented(A0: Trait<A1..An>) && WC
+ // }
+ // }
+ // ```
+
+ let item = tcx.associated_item(item_id);
+ debug_assert_eq!(item.kind, ty::AssociatedKind::Type);
+ let impl_id = if let ty::AssociatedItemContainer::ImplContainer(impl_id) = item.container {
+ impl_id
+ } else {
+ bug!()
+ };
+ // `A0 as Trait<A1..An>`
+ let trait_ref = tcx.impl_trait_ref(impl_id).unwrap();
+ // `T`
+ let ty = tcx.type_of(item_id);
+ // `Implemented(A0: Trait<A1..An>)`
+ let trait_implemented = ty::Binder::dummy(ty::TraitPredicate { trait_ref }.lower());
+ // `WC`
+ let item_where_clauses = tcx.predicates_of(item_id).predicates.lower();
+ // `Implemented(A0: Trait<A1..An>) && WC`
+ let mut where_clauses = vec![trait_implemented];
+ where_clauses.extend(item_where_clauses);
+ // `<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm>`
+ let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.name);
+ // `Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T)`
+ let normalize_goal = DomainGoal::Normalize(ty::ProjectionPredicate { projection_ty, ty });
+ // `Normalize(... -> T) :- ...`
+ let clause = ProgramClause {
+ goal: normalize_goal,
+ hypotheses: tcx.mk_goals(
+ where_clauses.into_iter().map(|wc| Goal::from_poly_domain_goal(wc, tcx))
+ ),
+ };
+ Lrc::new(tcx.mk_clauses(iter::once(Clause::ForAll(ty::Binder::dummy(clause)))))
+}
+
pub fn dump_program_clauses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
if !tcx.features().rustc_attrs {
return;
// rustdoc needs to be able to document functions that use all the features, so
// whitelist them all
Lrc::new(llvm_util::all_known_features()
- .map(|c| c.to_string())
+ .map(|(a, b)| (a.to_string(), b.map(|s| s.to_string())))
.collect())
} else {
Lrc::new(llvm_util::target_feature_whitelist(tcx.sess)
.iter()
- .map(|c| c.to_string())
+ .map(|&(a, b)| (a.to_string(), b.map(|s| s.to_string())))
.collect())
}
};
#![feature(slice_sort_by_cached_key)]
#![feature(optin_builtin_traits)]
#![feature(inclusive_range_fields)]
-#![feature(underscore_lifetimes)]
use rustc::dep_graph::WorkProduct;
use syntax_pos::symbol::Symbol;
use rustc::session::config::PrintRequest;
use libc::c_int;
use std::ffi::CString;
+use syntax::feature_gate::UnstableFeatures;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Once;
// to LLVM or the feature detection code will walk past the end of the feature
// array, leading to crashes.
-const ARM_WHITELIST: &'static [&'static str] = &["neon", "v7", "vfp2", "vfp3", "vfp4"];
-
-const AARCH64_WHITELIST: &'static [&'static str] = &["fp", "neon", "sve", "crc", "crypto",
- "ras", "lse", "rdm", "fp16", "rcpc",
- "dotprod", "v8.1a", "v8.2a", "v8.3a"];
-
-const X86_WHITELIST: &'static [&'static str] = &["aes", "avx", "avx2", "avx512bw",
- "avx512cd", "avx512dq", "avx512er",
- "avx512f", "avx512ifma", "avx512pf",
- "avx512vbmi", "avx512vl", "avx512vpopcntdq",
- "bmi1", "bmi2", "fma", "fxsr",
- "lzcnt", "mmx", "pclmulqdq",
- "popcnt", "rdrand", "rdseed",
- "sha",
- "sse", "sse2", "sse3", "sse4.1",
- "sse4.2", "sse4a", "ssse3",
- "tbm", "xsave", "xsavec",
- "xsaveopt", "xsaves"];
-
-const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx", "hvx-double"];
-
-const POWERPC_WHITELIST: &'static [&'static str] = &["altivec",
- "power8-altivec", "power9-altivec",
- "power8-vector", "power9-vector",
- "vsx"];
-
-const MIPS_WHITELIST: &'static [&'static str] = &["fp64", "msa"];
+// Each whitelist entry is `(LLVM feature name, Option<Rust feature gate>)`.
+// `None` means no feature gate is required to use the feature; `Some(gate)`
+// names the feature gate that must be enabled (checked when the feature is
+// requested via `#[target_feature]`, and used to hide gated features on
+// non-nightly builds).
+const ARM_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("neon", Some("arm_target_feature")),
+ ("v7", Some("arm_target_feature")),
+ ("vfp2", Some("arm_target_feature")),
+ ("vfp3", Some("arm_target_feature")),
+ ("vfp4", Some("arm_target_feature")),
+];
+
+const AARCH64_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("fp", Some("aarch64_target_feature")),
+ ("neon", Some("aarch64_target_feature")),
+ ("sve", Some("aarch64_target_feature")),
+ ("crc", Some("aarch64_target_feature")),
+ ("crypto", Some("aarch64_target_feature")),
+ ("ras", Some("aarch64_target_feature")),
+ ("lse", Some("aarch64_target_feature")),
+ ("rdm", Some("aarch64_target_feature")),
+ ("fp16", Some("aarch64_target_feature")),
+ ("rcpc", Some("aarch64_target_feature")),
+ ("dotprod", Some("aarch64_target_feature")),
+ ("v8.1a", Some("aarch64_target_feature")),
+ ("v8.2a", Some("aarch64_target_feature")),
+ ("v8.3a", Some("aarch64_target_feature")),
+];
+
+const X86_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("aes", None),
+ ("avx", None),
+ ("avx2", None),
+ ("avx512bw", Some("avx512_target_feature")),
+ ("avx512cd", Some("avx512_target_feature")),
+ ("avx512dq", Some("avx512_target_feature")),
+ ("avx512er", Some("avx512_target_feature")),
+ ("avx512f", Some("avx512_target_feature")),
+ ("avx512ifma", Some("avx512_target_feature")),
+ ("avx512pf", Some("avx512_target_feature")),
+ ("avx512vbmi", Some("avx512_target_feature")),
+ ("avx512vl", Some("avx512_target_feature")),
+ ("avx512vpopcntdq", Some("avx512_target_feature")),
+ ("bmi1", None),
+ ("bmi2", None),
+ ("fma", None),
+ ("fxsr", None),
+ ("lzcnt", None),
+ ("mmx", Some("mmx_target_feature")),
+ ("pclmulqdq", None),
+ ("popcnt", None),
+ ("rdrand", None),
+ ("rdseed", None),
+ ("sha", None),
+ ("sse", None),
+ ("sse2", None),
+ ("sse3", None),
+ ("sse4.1", None),
+ ("sse4.2", None),
+ ("sse4a", Some("sse4a_target_feature")),
+ ("ssse3", None),
+ ("tbm", Some("tbm_target_feature")),
+ ("xsave", None),
+ ("xsavec", None),
+ ("xsaveopt", None),
+ ("xsaves", None),
+];
+
+const HEXAGON_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("hvx", Some("hexagon_target_feature")),
+ ("hvx-double", Some("hexagon_target_feature")),
+];
+
+const POWERPC_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("altivec", Some("powerpc_target_feature")),
+ ("power8-altivec", Some("powerpc_target_feature")),
+ ("power9-altivec", Some("powerpc_target_feature")),
+ ("power8-vector", Some("powerpc_target_feature")),
+ ("power9-vector", Some("powerpc_target_feature")),
+ ("vsx", Some("powerpc_target_feature")),
+];
+
+const MIPS_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("fp64", Some("mips_target_feature")),
+ ("msa", Some("mips_target_feature")),
+];
/// When rustdoc is running, provide a list of all known features so that all their respective
/// primitives may be documented.
///
/// IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this
/// iterator!
-pub fn all_known_features() -> impl Iterator<Item=&'static str> {
+pub fn all_known_features() -> impl Iterator<Item=(&'static str, Option<&'static str>)> {
ARM_WHITELIST.iter().cloned()
.chain(AARCH64_WHITELIST.iter().cloned())
.chain(X86_WHITELIST.iter().cloned())
let target_machine = create_target_machine(sess, true);
target_feature_whitelist(sess)
.iter()
+ .filter_map(|&(feature, gate)| {
+ if UnstableFeatures::from_environment().is_nightly_build() || gate.is_none() {
+ Some(feature)
+ } else {
+ None
+ }
+ })
.filter(|feature| {
let llvm_feature = to_llvm_feature(sess, feature);
let cstr = CString::new(llvm_feature).unwrap();
.map(|feature| Symbol::intern(feature)).collect()
}
-pub fn target_feature_whitelist(sess: &Session) -> &'static [&'static str] {
+pub fn target_feature_whitelist(sess: &Session)
+ -> &'static [(&'static str, Option<&'static str>)]
+{
match &*sess.target.target.arch {
"arm" => ARM_WHITELIST,
"aarch64" => AARCH64_WHITELIST,
let static_ = cx
.tcx
.interpret_interner
- .get_corresponding_static_def_id(ptr.alloc_id);
+ .get_static(ptr.alloc_id);
let base_addr = if let Some(def_id) = static_ {
assert!(cx.tcx.is_static(def_id).is_some());
consts::get_static(cx, def_id)
promoted: None
};
let param_env = ty::ParamEnv::reveal_all();
- cx.tcx.const_eval(param_env.and(cid))?;
+ let static_ = cx.tcx.const_eval(param_env.and(cid))?;
- let alloc_id = cx
- .tcx
- .interpret_interner
- .get_cached(def_id)
- .expect("global not cached");
+ let ptr = match static_.val {
+ ConstVal::Value(MiriValue::ByRef(ptr, _)) => ptr,
+ _ => bug!("static const eval returned {:#?}", static_),
+ };
let alloc = cx
.tcx
.interpret_interner
- .get_alloc(alloc_id)
+ .get_alloc(ptr.primval.to_ptr().expect("static has integer pointer").alloc_id)
.expect("miri allocation never successfully created");
Ok(global_initializer(cx, alloc))
}
use rustc::middle::cstore::MetadataLoader;
use rustc::dep_graph::DepGraph;
use rustc_back::target::Target;
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxHashMap;
use rustc_mir::monomorphize::collector;
use link::{build_link_meta, out_filename};
::symbol_names::provide(providers);
providers.target_features_whitelist = |_tcx, _cnum| {
- Lrc::new(FxHashSet()) // Just a dummy
+ Lrc::new(FxHashMap()) // Just a dummy
};
}
fn provide_extern(&self, _providers: &mut Providers) {}
&format!("{}, producing the closest possible value",
msg),
cast_suggestion);
- err.warn("casting here will cause undefined behavior if the value is \
- finite but larger or smaller than the largest or smallest \
- finite value representable by `f32` (this is a bug and will be \
- fixed)");
}
true
}
use rustc::ty::util::IntTypeExt;
use rustc::ty::util::Discr;
use rustc::util::captures::Captures;
-use rustc::util::nodemap::{FxHashSet, FxHashMap};
+use rustc::util::nodemap::FxHashMap;
use syntax::{abi, ast};
use syntax::ast::MetaItemKind;
use syntax::attr::{InlineAttr, list_contains_name, mark_used};
use syntax::codemap::Spanned;
use syntax::symbol::{Symbol, keywords};
+use syntax::feature_gate;
use syntax_pos::{Span, DUMMY_SP};
use rustc::hir::{self, map as hir_map, TransFnAttrs, TransFnAttrFlags, Unsafety};
fn from_target_feature(
tcx: TyCtxt,
attr: &ast::Attribute,
- whitelist: &FxHashSet<String>,
+ whitelist: &FxHashMap<String, Option<String>>,
target_features: &mut Vec<Symbol>,
) {
let list = match attr.meta_item_list() {
return
}
};
-
+ let rust_features = tcx.features();
for item in list {
+ // Only `enable = ...` is accepted in the meta item list
if !item.check_name("enable") {
let msg = "#[target_feature(..)] only accepts sub-keys of `enable` \
currently";
tcx.sess.span_err(item.span, &msg);
continue
}
+
+ // Must be of the form `enable = "..."` ( a string)
let value = match item.value_str() {
- Some(list) => list,
+ Some(value) => value,
None => {
let msg = "#[target_feature] attribute must be of the form \
#[target_feature(enable = \"..\")]";
continue
}
};
- let value = value.as_str();
- for feature in value.split(',') {
- if whitelist.contains(feature) {
- target_features.push(Symbol::intern(feature));
- continue
- }
-
- let msg = format!("the feature named `{}` is not valid for \
- this target", feature);
- let mut err = tcx.sess.struct_span_err(item.span, &msg);
- if feature.starts_with("+") {
- let valid = whitelist.contains(&feature[1..]);
- if valid {
- err.help("consider removing the leading `+` in the feature name");
+ // We allow comma separation to enable multiple features
+ for feature in value.as_str().split(',') {
+
+ // Only allow whitelisted features per platform
+ let feature_gate = match whitelist.get(feature) {
+ Some(g) => g,
+ None => {
+ let msg = format!("the feature named `{}` is not valid for \
+ this target", feature);
+ let mut err = tcx.sess.struct_span_err(item.span, &msg);
+
+ if feature.starts_with("+") {
+ let valid = whitelist.contains_key(&feature[1..]);
+ if valid {
+ err.help("consider removing the leading `+` in the feature name");
+ }
+ }
+ err.emit();
+ continue
}
+ };
+
+ // Only allow features whose feature gates have been enabled
+ let allowed = match feature_gate.as_ref().map(|s| &**s) {
+ Some("arm_target_feature") => rust_features.arm_target_feature,
+ Some("aarch64_target_feature") => rust_features.aarch64_target_feature,
+ Some("hexagon_target_feature") => rust_features.hexagon_target_feature,
+ Some("powerpc_target_feature") => rust_features.powerpc_target_feature,
+ Some("mips_target_feature") => rust_features.mips_target_feature,
+ Some("avx512_target_feature") => rust_features.avx512_target_feature,
+ Some("mmx_target_feature") => rust_features.mmx_target_feature,
+ Some("sse4a_target_feature") => rust_features.sse4a_target_feature,
+ Some("tbm_target_feature") => rust_features.tbm_target_feature,
+ Some(name) => bug!("unknown target feature gate {}", name),
+ None => true,
+ };
+ if !allowed {
+ feature_gate::emit_feature_err(
+ &tcx.sess.parse_sess,
+ feature_gate.as_ref().unwrap(),
+ item.span,
+ feature_gate::GateIssue::Language,
+ &format!("the target feature `{}` is currently unstable",
+ feature),
+ );
+ continue
}
- err.emit();
+ target_features.push(Symbol::intern(feature));
}
}
}
.emit();
}
} else if attr.check_name("target_feature") {
- // handle deprecated #[target_feature = "..."]
- if let Some(val) = attr.value_str() {
- for feat in val.as_str().split(",").map(|f| f.trim()) {
- if !feat.is_empty() && !feat.contains('\0') {
- trans_fn_attrs.target_features.push(Symbol::intern(feat));
- }
- }
- let msg = "#[target_feature = \"..\"] is deprecated and will \
- eventually be removed, use \
- #[target_feature(enable = \"..\")] instead";
- tcx.sess.span_warn(attr.span, &msg);
- continue
- }
-
if tcx.fn_sig(id).unsafety() == Unsafety::Normal {
let msg = "#[target_feature(..)] can only be applied to \
`unsafe` function";
#![feature(slice_patterns)]
#![feature(slice_sort_by_cached_key)]
#![feature(dyn_trait)]
-#![feature(underscore_lifetimes)]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
Type,
}
+/// Warns that a doc-link path failed to resolve (the link is then skipped
+/// by the callers rather than treated as an error).
+fn resolution_failure(cx: &DocContext, path_str: &str) {
+ cx.sess().warn(&format!("[{}] cannot be resolved, ignoring it...", path_str));
+}
+
impl Clean<Attributes> for [ast::Attribute] {
fn clean(&self, cx: &DocContext) -> Attributes {
let mut attrs = Attributes::from_ast(cx.sess().diagnostic(), self);
if let Ok(def) = resolve(cx, path_str, true) {
def
} else {
+ resolution_failure(cx, path_str);
// this could just be a normal link or a broken link
// we could potentially check if something is
// "intra-doc-link-like" and warn in that case
if let Ok(def) = resolve(cx, path_str, false) {
def
} else {
+ resolution_failure(cx, path_str);
// this could just be a normal link
continue;
}
} else if let Ok(value_def) = resolve(cx, path_str, true) {
value_def
} else {
+ resolution_failure(cx, path_str);
// this could just be a normal link
continue;
}
if let Some(def) = macro_resolve(cx, path_str) {
(def, None)
} else {
+ resolution_failure(cx, path_str);
continue
}
}
use rustc::ty::{self, TyCtxt, AllArenas};
use rustc::hir::map as hir_map;
use rustc::lint;
+use rustc::session::config::ErrorOutputType;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_resolve as resolve;
use rustc_metadata::creader::CrateLoader;
use syntax::codemap;
use syntax::edition::Edition;
use syntax::feature_gate::UnstableFeatures;
+use syntax::json::JsonEmitter;
use errors;
-use errors::emitter::ColorConfig;
+use errors::emitter::{Emitter, EmitterWriter};
use std::cell::{RefCell, Cell};
use std::mem;
use clean::Clean;
use html::render::RenderInfo;
-pub use rustc::session::config::Input;
+pub use rustc::session::config::{Input, CodegenOptions};
pub use rustc::session::search_paths::SearchPaths;
pub type ExternalPaths = FxHashMap<DefId, (Vec<String>, clean::TypeKind)>;
}
}
-
pub fn run_core(search_paths: SearchPaths,
cfgs: Vec<String>,
externs: config::Externs,
allow_warnings: bool,
crate_name: Option<String>,
force_unstable_if_unmarked: bool,
- edition: Edition) -> (clean::Crate, RenderInfo)
+ edition: Edition,
+ cg: CodegenOptions,
+ error_format: ErrorOutputType) -> (clean::Crate, RenderInfo)
{
// Parse, resolve, and typecheck the given crate.
let warning_lint = lint::builtin::WARNINGS.name_lower();
let host_triple = TargetTriple::from_triple(config::host_triple());
+ // Note: the session's error output (emitter and format) is configured
+ // below from `error_format`, so keep the two in sync.
let sessopts = config::Options {
maybe_sysroot,
search_paths,
crate_types: vec![config::CrateTypeRlib],
lint_opts: if !allow_warnings { vec![(warning_lint, lint::Allow)] } else { vec![] },
lint_cap: Some(lint::Allow),
+ cg,
externs,
target_triple: triple.unwrap_or(host_triple),
// Ensure that rustdoc works even if rustc is feature-staged
edition,
..config::basic_debugging_options()
},
+ error_format,
..config::basic_options().clone()
};
let codemap = Lrc::new(codemap::CodeMap::new(sessopts.file_path_mapping()));
- let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
- true,
- false,
- Some(codemap.clone()));
+ let emitter: Box<dyn Emitter> = match error_format {
+ ErrorOutputType::HumanReadable(color_config) => Box::new(
+ EmitterWriter::stderr(
+ color_config,
+ Some(codemap.clone()),
+ false,
+ sessopts.debugging_opts.teach,
+ ).ui_testing(sessopts.debugging_opts.ui_testing)
+ ),
+ ErrorOutputType::Json(pretty) => Box::new(
+ JsonEmitter::stderr(
+ None,
+ codemap.clone(),
+ pretty,
+ sessopts.debugging_opts.approximate_suggestions,
+ ).ui_testing(sessopts.debugging_opts.ui_testing)
+ ),
+ ErrorOutputType::Short(color_config) => Box::new(
+ EmitterWriter::stderr(color_config, Some(codemap.clone()), true, false)
+ ),
+ };
+
+ let diagnostic_handler = errors::Handler::with_emitter_and_flags(
+ emitter,
+ errors::HandlerFlags {
+ can_emit_warnings: true,
+ treat_err_as_bug: false,
+ external_macro_backtrace: false,
+ ..Default::default()
+ },
+ );
let mut sess = session::build_session_(
sessopts, cpath, diagnostic_handler, codemap,
/// This describes the layout of each page, and is not modified after
/// creation of the context (contains info like the favicon and added html).
pub layout: layout::Layout,
- /// This flag indicates whether [src] links should be generated or not. If
+ /// This flag indicates whether `[src]` links should be generated or not. If
/// the source files are present in the html rendering, then this will be
/// `true`.
pub include_sources: bool,
#![feature(test)]
#![feature(vec_remove_item)]
#![feature(entry_and_modify)]
+#![feature(dyn_trait)]
extern crate arena;
extern crate getopts;
extern crate serialize as rustc_serialize; // used by deriving
+use errors::ColorConfig;
+
use std::collections::{BTreeMap, BTreeSet};
use std::default::Default;
use std::env;
use syntax::edition::Edition;
use externalfiles::ExternalHtml;
use rustc::session::search_paths::SearchPaths;
-use rustc::session::config::{ErrorOutputType, RustcOptGroup, nightly_options, Externs};
+use rustc::session::config::{ErrorOutputType, RustcOptGroup, Externs, CodegenOptions};
+use rustc::session::config::{nightly_options, build_codegen_options};
use rustc_back::target::TargetTriple;
#[macro_use]
pub fn main() {
const STACK_SIZE: usize = 32_000_000; // 32MB
+ rustc_driver::set_sigpipe_handler();
env_logger::init();
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
syntax::with_globals(move || {
stable("plugin-path", |o| {
o.optmulti("", "plugin-path", "directory to load plugins from", "DIR")
}),
+ stable("C", |o| {
+ o.optmulti("C", "codegen", "pass a codegen option to rustc", "OPT[=VALUE]")
+ }),
stable("passes", |o| {
o.optmulti("", "passes",
"list of passes to also run, you might want \
"edition to use when compiling rust code (default: 2015)",
"EDITION")
}),
+ unstable("color", |o| {
+ o.optopt("",
+ "color",
+ "Configure coloring of output:
+ auto = colorize, if output goes to a tty (default);
+ always = always colorize output;
+ never = never colorize output",
+ "auto|always|never")
+ }),
+ unstable("error-format", |o| {
+ o.optopt("",
+ "error-format",
+ "How errors and other messages are produced",
+ "human|json|short")
+ }),
]
}
}
let input = &matches.free[0];
+ let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) {
+ Some("auto") => ColorConfig::Auto,
+ Some("always") => ColorConfig::Always,
+ Some("never") => ColorConfig::Never,
+ None => ColorConfig::Auto,
+ Some(arg) => {
+ print_error(&format!("argument for --color must be `auto`, `always` or `never` \
+ (instead was `{}`)", arg));
+ return 1;
+ }
+ };
+ let error_format = match matches.opt_str("error-format").as_ref().map(|s| &s[..]) {
+ Some("human") => ErrorOutputType::HumanReadable(color),
+ Some("json") => ErrorOutputType::Json(false),
+ Some("pretty-json") => ErrorOutputType::Json(true),
+ Some("short") => ErrorOutputType::Short(color),
+ None => ErrorOutputType::HumanReadable(color),
+ Some(arg) => {
+ print_error(&format!("argument for --error-format must be `human`, `json` or \
+ `short` (instead was `{}`)", arg));
+ return 1;
+ }
+ };
+
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
- libs.add_path(s, ErrorOutputType::default());
+ libs.add_path(s, error_format);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
}
};
+ let cg = build_codegen_options(&matches, ErrorOutputType::default());
+
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, cfgs, libs, externs, test_args, maybe_sysroot,
- display_warnings, linker, edition)
+ display_warnings, linker, edition, cg)
}
(true, false) => {
return test::run(Path::new(input), cfgs, libs, externs, test_args, crate_name,
- maybe_sysroot, display_warnings, linker, edition)
+ maybe_sysroot, display_warnings, linker, edition, cg)
}
(false, true) => return markdown::render(Path::new(input),
output.unwrap_or(PathBuf::from("doc")),
}
let output_format = matches.opt_str("w");
- let res = acquire_input(PathBuf::from(input), externs, edition, &matches, move |out| {
+
+ let res = acquire_input(PathBuf::from(input), externs, edition, cg, &matches, error_format,
+ move |out| {
let Output { krate, passes, renderinfo } = out;
info!("going to format");
match output_format.as_ref().map(|s| &**s) {
fn acquire_input<R, F>(input: PathBuf,
externs: Externs,
edition: Edition,
+ cg: CodegenOptions,
matches: &getopts::Matches,
+ error_format: ErrorOutputType,
f: F)
-> Result<R, String>
where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
match matches.opt_str("r").as_ref().map(|s| &**s) {
- Some("rust") => Ok(rust_input(input, externs, edition, matches, f)),
+ Some("rust") => Ok(rust_input(input, externs, edition, cg, matches, error_format, f)),
Some(s) => Err(format!("unknown input format: {}", s)),
- None => Ok(rust_input(input, externs, edition, matches, f))
+ None => Ok(rust_input(input, externs, edition, cg, matches, error_format, f))
}
}
fn rust_input<R, F>(cratefile: PathBuf,
externs: Externs,
edition: Edition,
+ cg: CodegenOptions,
matches: &getopts::Matches,
+ error_format: ErrorOutputType,
f: F) -> R
where R: 'static + Send,
F: 'static + Send + FnOnce(Output) -> R
let (mut krate, renderinfo) =
core::run_core(paths, cfgs, externs, Input::File(cratefile), triple, maybe_sysroot,
display_warnings, crate_name.clone(),
- force_unstable_if_unmarked, edition);
+ force_unstable_if_unmarked, edition, cg, error_format);
info!("finished with rustc");
use getopts;
use testing;
use rustc::session::search_paths::SearchPaths;
-use rustc::session::config::Externs;
+use rustc::session::config::{Externs, CodegenOptions};
use syntax::codemap::DUMMY_SP;
use syntax::edition::Edition;
/// Run any tests/code examples in the markdown file `input`.
pub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
mut test_args: Vec<String>, maybe_sysroot: Option<PathBuf>,
- display_warnings: bool, linker: Option<PathBuf>, edition: Edition) -> isize {
+ display_warnings: bool, linker: Option<PathBuf>, edition: Edition,
+ cg: CodegenOptions) -> isize {
let input_str = match load_string(input) {
Ok(s) => s,
Err(LoadStringError::ReadFail) => return 1,
let mut opts = TestOptions::default();
opts.no_crate_inject = true;
opts.display_warnings = display_warnings;
- let mut collector = Collector::new(input.to_owned(), cfgs, libs, externs,
+ let mut collector = Collector::new(input.to_owned(), cfgs, libs, cg, externs,
true, opts, maybe_sysroot, None,
Some(PathBuf::from(input)),
linker, edition);
use rustc::hir;
use rustc::hir::intravisit;
use rustc::session::{self, CompileIncomplete, config};
-use rustc::session::config::{OutputType, OutputTypes, Externs};
+use rustc::session::config::{OutputType, OutputTypes, Externs, CodegenOptions};
use rustc::session::search_paths::{SearchPaths, PathKind};
use rustc_metadata::dynamic_lib::DynamicLibrary;
use tempdir::TempDir;
maybe_sysroot: Option<PathBuf>,
display_warnings: bool,
linker: Option<PathBuf>,
- edition: Edition)
+ edition: Edition,
+ cg: CodegenOptions)
-> isize {
let input = config::Input::File(input_path.to_owned());
|| Some(env::current_exe().unwrap().parent().unwrap().parent().unwrap().to_path_buf())),
search_paths: libs.clone(),
crate_types: vec![config::CrateTypeDylib],
+ cg: cg.clone(),
externs: externs.clone(),
unstable_features: UnstableFeatures::from_environment(),
lint_cap: Some(::rustc::lint::Level::Allow),
let mut collector = Collector::new(crate_name,
cfgs,
libs,
+ cg,
externs,
false,
opts,
fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
cfgs: Vec<String>, libs: SearchPaths,
- externs: Externs,
+ cg: CodegenOptions, externs: Externs,
should_panic: bool, no_run: bool, as_test_harness: bool,
compile_fail: bool, mut error_codes: Vec<String>, opts: &TestOptions,
maybe_sysroot: Option<PathBuf>, linker: Option<PathBuf>, edition: Edition) {
cg: config::CodegenOptions {
prefer_dynamic: true,
linker,
- .. config::basic_codegen_options()
+ ..cg
},
test: as_test_harness,
unstable_features: UnstableFeatures::from_environment(),
cfgs: Vec<String>,
libs: SearchPaths,
+ cg: CodegenOptions,
externs: Externs,
use_headers: bool,
cratename: String,
}
impl Collector {
- pub fn new(cratename: String, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
- use_headers: bool, opts: TestOptions, maybe_sysroot: Option<PathBuf>,
- codemap: Option<Lrc<CodeMap>>, filename: Option<PathBuf>,
- linker: Option<PathBuf>, edition: Edition) -> Collector {
+ pub fn new(cratename: String, cfgs: Vec<String>, libs: SearchPaths, cg: CodegenOptions,
+ externs: Externs, use_headers: bool, opts: TestOptions,
+ maybe_sysroot: Option<PathBuf>, codemap: Option<Lrc<CodeMap>>,
+ filename: Option<PathBuf>, linker: Option<PathBuf>, edition: Edition) -> Collector {
Collector {
tests: Vec::new(),
names: Vec::new(),
cfgs,
libs,
+ cg,
externs,
use_headers,
cratename,
let name = self.generate_name(line, &filename);
let cfgs = self.cfgs.clone();
let libs = self.libs.clone();
+ let cg = self.cg.clone();
let externs = self.externs.clone();
let cratename = self.cratename.to_string();
let opts = self.opts.clone();
line,
cfgs,
libs,
+ cg,
externs,
should_panic,
no_run,
impl<'a, K: 'a, V: 'a> Drop for Drain<'a, K, V> {
fn drop(&mut self) {
- for _ in self {}
+ self.for_each(drop);
}
}
/// # Conversions
///
/// See the [module's toplevel documentation about conversions][conversions] for a discussion on
-/// the traits which `OsString` implements for conversions from/to native representations.
+/// the traits which `OsString` implements for [conversions] from/to native representations.
///
/// [`OsStr`]: struct.OsStr.html
/// [`&OsStr`]: struct.OsStr.html
/// [`new`]: #method.new
/// [`push`]: #method.push
/// [`as_os_str`]: #method.as_os_str
+/// [conversions]: index.html#conversions
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OsString {
/// references; the latter are owned strings.
///
/// See the [module's toplevel documentation about conversions][conversions] for a discussion on
-/// the traits which `OsStr` implements for conversions from/to native representations.
+/// the traits which `OsStr` implements for [conversions] from/to native representations.
///
/// [`OsString`]: struct.OsString.html
/// [`&str`]: ../primitive.str.html
//!
//! Once you are familiar with the contents of the standard library you may
//! begin to find the verbosity of the prose distracting. At this stage in your
-//! development you may want to press the **[-]** button near the top of the
+//! development you may want to press the `[-]` button near the top of the
//! page to collapse it into a more skimmable view.
//!
-//! While you are looking at that **[-]** button also notice the **[src]**
+//! While you are looking at that `[-]` button also notice the `[src]`
//! button. Rust's API documentation comes with the source code and you are
//! encouraged to read it. The standard library source is generally high
//! quality and a peek behind the curtains is often enlightening.
#![feature(rand)]
#![feature(raw)]
#![feature(rustc_attrs)]
+#![feature(std_internals)]
#![feature(stdsimd)]
#![feature(shrink_to)]
#![feature(slice_bytes)]
#[unstable(feature = "stdsimd", issue = "48556")]
#[cfg(all(not(stage0), not(test)))]
pub use stdsimd::simd;
-#[unstable(feature = "stdsimd", issue = "48556")]
+#[stable(feature = "simd_arch", since = "1.27.0")]
#[cfg(all(not(stage0), not(test)))]
pub use stdsimd::arch;
}
}
-/// A macro for defining #[cfg] if-else statements.
+/// A macro for defining `#[cfg]` if-else statements.
///
/// This is similar to the `if/elif` C preprocessor macro by allowing definition
/// of a cascade of `#[cfg]` cases, emitting the implementation which matches
/// first.
///
-/// This allows you to conveniently provide a long list #[cfg]'d blocks of code
+/// This allows you to conveniently provide a long list of `#[cfg]`'d blocks of code
/// without having to rewrite each clause multiple times.
macro_rules! cfg_if {
($(
//! * Executing a panic up to doing the actual implementation
//! * Shims around "try"
+use core::panic::BoxMeUp;
+
use io::prelude::*;
use any::Any;
use mem;
use ptr;
use raw;
-use sys::stdio::Stderr;
+use sys::stdio::{Stderr, stderr_prints_nothing};
use sys_common::rwlock::RWLock;
use sys_common::thread_info;
use sys_common::util;
data_ptr: *mut usize,
vtable_ptr: *mut usize) -> u32;
#[unwind(allowed)]
- fn __rust_start_panic(data: usize, vtable: usize) -> u32;
+ fn __rust_start_panic(payload: usize) -> u32;
}
#[derive(Copy, Clone)]
};
let location = info.location().unwrap(); // The current implementation always returns Some
- let file = location.file();
- let line = location.line();
- let col = location.column();
let msg = match info.payload().downcast_ref::<&'static str>() {
Some(s) => *s,
let name = thread.as_ref().and_then(|t| t.name()).unwrap_or("<unnamed>");
let write = |err: &mut ::io::Write| {
- let _ = writeln!(err, "thread '{}' panicked at '{}', {}:{}:{}",
- name, msg, file, line, col);
+ let _ = writeln!(err, "thread '{}' panicked at '{}', {}",
+ name, msg, location);
#[cfg(feature = "backtrace")]
{
let prev = LOCAL_STDERR.with(|s| s.borrow_mut().take());
match (prev, err.as_mut()) {
- (Some(mut stderr), _) => {
- write(&mut *stderr);
- let mut s = Some(stderr);
- LOCAL_STDERR.with(|slot| {
- *slot.borrow_mut() = s.take();
- });
- }
- (None, Some(ref mut err)) => { write(err) }
- _ => {}
+ (Some(mut stderr), _) => {
+ write(&mut *stderr);
+ let mut s = Some(stderr);
+ LOCAL_STDERR.with(|slot| {
+ *slot.borrow_mut() = s.take();
+ });
+ }
+ (None, Some(ref mut err)) => { write(err) }
+ _ => {}
}
}
// panic + OOM properly anyway (see comment in begin_panic
// below).
- let mut s = String::new();
- let _ = s.write_fmt(*msg);
- rust_panic_with_hook(Box::new(s), Some(msg), file_line_col)
+ rust_panic_with_hook(&mut PanicPayload::new(msg), Some(msg), file_line_col);
+
+ struct PanicPayload<'a> {
+ inner: &'a fmt::Arguments<'a>,
+ string: Option<String>,
+ }
+
+ impl<'a> PanicPayload<'a> {
+ fn new(inner: &'a fmt::Arguments<'a>) -> PanicPayload<'a> {
+ PanicPayload { inner, string: None }
+ }
+
+ fn fill(&mut self) -> &mut String {
+ let inner = self.inner;
+ self.string.get_or_insert_with(|| {
+ let mut s = String::new();
+ drop(s.write_fmt(*inner));
+ s
+ })
+ }
+ }
+
+ unsafe impl<'a> BoxMeUp for PanicPayload<'a> {
+ fn box_me_up(&mut self) -> *mut (Any + Send) {
+ let contents = mem::replace(self.fill(), String::new());
+ Box::into_raw(Box::new(contents))
+ }
+
+ fn get(&mut self) -> &(Any + Send) {
+ self.fill()
+ }
+ }
}
/// This is the entry point of panicking for panic!() and assert!().
// be performed in the parent of this thread instead of the thread that's
// panicking.
- rust_panic_with_hook(Box::new(msg), None, file_line_col)
+ rust_panic_with_hook(&mut PanicPayload::new(msg), None, file_line_col);
+
+ struct PanicPayload<A> {
+ inner: Option<A>,
+ }
+
+ impl<A: Send + 'static> PanicPayload<A> {
+ fn new(inner: A) -> PanicPayload<A> {
+ PanicPayload { inner: Some(inner) }
+ }
+ }
+
+ unsafe impl<A: Send + 'static> BoxMeUp for PanicPayload<A> {
+ fn box_me_up(&mut self) -> *mut (Any + Send) {
+ let data = match self.inner.take() {
+ Some(a) => Box::new(a) as Box<Any + Send>,
+ None => Box::new(()),
+ };
+ Box::into_raw(data)
+ }
+
+ fn get(&mut self) -> &(Any + Send) {
+ match self.inner {
+ Some(ref a) => a,
+ None => &(),
+ }
+ }
+ }
}
-/// Executes the primary logic for a panic, including checking for recursive
-/// panics and panic hooks.
+/// Central point for dispatching panics.
///
-/// This is the entry point or panics from libcore, formatted panics, and
-/// `Box<Any>` panics. Here we'll verify that we're not panicking recursively,
-/// run panic hooks, and then delegate to the actual implementation of panics.
-#[inline(never)]
-#[cold]
-fn rust_panic_with_hook(payload: Box<Any + Send>,
+/// Executes the primary logic for a panic, including checking for recursive
+/// panics, panic hooks, and finally dispatching to the panic runtime to either
+/// abort or unwind.
+fn rust_panic_with_hook(payload: &mut BoxMeUp,
message: Option<&fmt::Arguments>,
file_line_col: &(&'static str, u32, u32)) -> ! {
let (file, line, col) = *file_line_col;
}
unsafe {
- let info = PanicInfo::internal_constructor(
- &*payload,
+ let mut info = PanicInfo::internal_constructor(
message,
Location::internal_constructor(file, line, col),
);
HOOK_LOCK.read();
match HOOK {
- Hook::Default => default_hook(&info),
- Hook::Custom(ptr) => (*ptr)(&info),
+ // Some platforms know that printing to stderr won't ever actually
+ // print anything, and if that's the case we can skip the default
+ // hook.
+ Hook::Default if stderr_prints_nothing() => {}
+ Hook::Default => {
+ info.set_payload(payload.get());
+ default_hook(&info);
+ }
+ Hook::Custom(ptr) => {
+ info.set_payload(payload.get());
+ (*ptr)(&info);
+ }
}
HOOK_LOCK.read_unlock();
}
/// Shim around rust_panic. Called by resume_unwind.
pub fn update_count_then_panic(msg: Box<Any + Send>) -> ! {
update_panic_count(1);
- rust_panic(msg)
+
+ struct RewrapBox(Box<Any + Send>);
+
+ unsafe impl BoxMeUp for RewrapBox {
+ fn box_me_up(&mut self) -> *mut (Any + Send) {
+ Box::into_raw(mem::replace(&mut self.0, Box::new(())))
+ }
+
+ fn get(&mut self) -> &(Any + Send) {
+ &*self.0
+ }
+ }
+
+ rust_panic(&mut RewrapBox(msg))
}
/// A private no-mangle function on which to slap yer breakpoints.
#[no_mangle]
#[allow(private_no_mangle_fns)] // yes we get it, but we like breakpoints
-pub fn rust_panic(msg: Box<Any + Send>) -> ! {
+pub fn rust_panic(mut msg: &mut BoxMeUp) -> ! {
let code = unsafe {
- let obj = mem::transmute::<_, raw::TraitObject>(msg);
- __rust_start_panic(obj.data as usize, obj.vtable as usize)
+ let obj = &mut msg as *mut &mut BoxMeUp;
+ __rust_start_panic(obj as usize)
};
rtabort!("failed to initiate panic, error {}", code)
}
}
pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+ false
+}
}
pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+ false
+}
reset_sigpipe();
}
- #[cfg(not(any(target_os = "emscripten", target_os="fuchsia")))]
+ #[cfg(not(any(target_os = "emscripten", target_os = "fuchsia")))]
unsafe fn reset_sigpipe() {
assert!(signal(libc::SIGPIPE, libc::SIG_IGN) != libc::SIG_ERR);
}
- #[cfg(any(target_os = "emscripten", target_os="fuchsia"))]
+ #[cfg(any(target_os = "emscripten", target_os = "fuchsia"))]
unsafe fn reset_sigpipe() {}
}
}
pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+ false
+}
if *mode >= 0 {
*mode += 1;
} else {
- panic!("rwlock locked for writing");
+ rtabort!("rwlock locked for writing");
}
}
if *mode == 0 {
*mode = -1;
} else {
- panic!("rwlock locked for reading")
+ rtabort!("rwlock locked for reading")
}
}
pub fn is_ebadf(_err: &io::Error) -> bool {
true
}
+
+pub fn stderr_prints_nothing() -> bool {
+ !cfg!(feature = "wasm_syscall")
+}
0 => {}
n => return n as *mut _,
}
- let mut re = Box::new(ReentrantMutex::uninitialized());
+ let mut re = box ReentrantMutex::uninitialized();
re.init();
let re = Box::into_raw(re);
match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {
// idea is that on windows we use a slightly smaller buffer that's
// been seen to be acceptable.
pub const STDIN_BUF_SIZE: usize = 8 * 1024;
+
+pub fn stderr_prints_nothing() -> bool {
+ false
+}
/// Controls how the backtrace should be formatted.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum PrintFormat {
- /// Show all the frames with absolute path for files.
- Full = 2,
/// Show only relevant data from the backtrace.
- Short = 3,
+ Short = 2,
+ /// Show all the frames with absolute path for files.
+ Full = 3,
}
// For now logging is turned off by default, and this function checks to see
pub fn log_enabled() -> Option<PrintFormat> {
static ENABLED: atomic::AtomicIsize = atomic::AtomicIsize::new(0);
match ENABLED.load(Ordering::SeqCst) {
- 0 => {},
+ 0 => {}
1 => return None,
- 2 => return Some(PrintFormat::Full),
- 3 => return Some(PrintFormat::Short),
- _ => unreachable!(),
+ 2 => return Some(PrintFormat::Short),
+ _ => return Some(PrintFormat::Full),
}
let val = match env::var_os("RUST_BACKTRACE") {
use sync::Once;
use sys;
+macro_rules! rtabort {
+ ($($t:tt)*) => (::sys_common::util::abort(format_args!($($t)*)))
+}
+
+macro_rules! rtassert {
+ ($e:expr) => (if !$e {
+ rtabort!(concat!("assertion failed: ", stringify!($e)));
+ })
+}
+
pub mod at_exit_imp;
#[cfg(feature = "backtrace")]
pub mod backtrace;
if at_exit_imp::push(Box::new(f)) {Ok(())} else {Err(())}
}
-macro_rules! rtabort {
- ($($t:tt)*) => (::sys_common::util::abort(format_args!($($t)*)))
-}
-
/// One-time runtime cleanup.
pub fn cleanup() {
static CLEANUP: Once = Once::new();
self.key.store(key, Ordering::SeqCst);
}
INIT_LOCK.unlock();
- assert!(key != 0);
+ rtassert!(key != 0);
return key
}
imp::destroy(key1);
key2
};
- assert!(key != 0);
+ rtassert!(key != 0);
match self.key.compare_and_swap(0, key as usize, Ordering::SeqCst) {
// The CAS succeeded, so we've created the actual key
0 => key as usize,
use fmt;
use io::prelude::*;
-use sys::stdio::Stderr;
+use sys::stdio::{Stderr, stderr_prints_nothing};
use thread;
pub fn dumb_print(args: fmt::Arguments) {
+ if stderr_prints_nothing() {
+ return
+ }
let _ = Stderr::new().map(|mut stderr| stderr.write_fmt(args));
}
&'static $crate::cell::UnsafeCell<
$crate::option::Option<$t>>>
{
+ #[cfg(target_arch = "wasm32")]
+ static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
+ $crate::thread::__StaticLocalKeyInner::new();
+
#[thread_local]
- #[cfg(target_thread_local)]
+ #[cfg(all(target_thread_local, not(target_arch = "wasm32")))]
static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
$crate::thread::__FastLocalKeyInner::new();
- #[cfg(not(target_thread_local))]
+ #[cfg(all(not(target_thread_local), not(target_arch = "wasm32")))]
static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
$crate::thread::__OsLocalKeyInner::new();
}
}
+/// On some platforms like wasm32 there are no threads, so no need to generate
+/// thread locals and we can instead just use plain statics!
+#[doc(hidden)]
+#[cfg(target_arch = "wasm32")]
+pub mod statik {
+ use cell::UnsafeCell;
+ use fmt;
+
+ pub struct Key<T> {
+ inner: UnsafeCell<Option<T>>,
+ }
+
+ unsafe impl<T> ::marker::Sync for Key<T> { }
+
+ impl<T> fmt::Debug for Key<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.pad("Key { .. }")
+ }
+ }
+
+ impl<T> Key<T> {
+ pub const fn new() -> Key<T> {
+ Key {
+ inner: UnsafeCell::new(None),
+ }
+ }
+
+ pub unsafe fn get(&self) -> Option<&'static UnsafeCell<Option<T>>> {
+ Some(&*(&self.inner as *const _))
+ }
+ }
+}
+
#[doc(hidden)]
#[cfg(target_thread_local)]
pub mod fast {
// where fast TLS was not available; end-user code is compiled with fast TLS
// where available, but both are needed.
+#[unstable(feature = "libstd_thread_internals", issue = "0")]
+#[cfg(target_arch = "wasm32")]
+#[doc(hidden)] pub use self::local::statik::Key as __StaticLocalKeyInner;
#[unstable(feature = "libstd_thread_internals", issue = "0")]
#[cfg(target_thread_local)]
#[doc(hidden)] pub use self::local::fast::Key as __FastLocalKeyInner;
{
// We basically look at two token trees here, denoted as #1 and #2 below
let span = match parse_kleene_op(input, span) {
- // #1 is a `+` or `*` KleeneOp
- //
- // `?` is ambiguous: it could be a separator or a Kleene::ZeroOrOne, so we need to look
- // ahead one more token to be sure.
- Ok(Ok(op)) if op != KleeneOp::ZeroOrOne => return (None, op),
-
- // #1 is `?` token, but it could be a Kleene::ZeroOrOne without a separator or it could
- // be a `?` separator followed by any Kleene operator. We need to look ahead 1 token to
- // find out which.
- Ok(Ok(op)) => {
- assert_eq!(op, KleeneOp::ZeroOrOne);
-
- // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
- let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
- kleene_op(tok2).is_some()
- } else {
- false
- };
-
- if is_1_sep {
- // #1 is a separator and #2 should be a KleepeOp::*
- // (N.B. We need to advance the input iterator.)
- match parse_kleene_op(input, span) {
- // #2 is a KleeneOp (this is the only valid option) :)
- Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
- if !features.macro_at_most_once_rep
- && !attr::contains_name(attrs, "allow_internal_unstable")
- {
- let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
- emit_feature_err(
- sess,
- "macro_at_most_once_rep",
- span,
- GateIssue::Language,
- explain,
- );
- }
- return (Some(token::Question), op);
- }
- Ok(Ok(op)) => return (Some(token::Question), op),
-
- // #2 is a random token (this is an error) :(
- Ok(Err((_, span))) => span,
-
- // #2 is not even a token at all :(
- Err(span) => span,
- }
- } else {
- if !features.macro_at_most_once_rep
- && !attr::contains_name(attrs, "allow_internal_unstable")
- {
- let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
- emit_feature_err(
- sess,
- "macro_at_most_once_rep",
- span,
- GateIssue::Language,
- explain,
- );
- }
-
- // #2 is a random tree and #1 is KleeneOp::ZeroOrOne
- return (None, op);
+ // #1 is the `?` KleeneOp (feature-gated)
+ Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
+ if !features.macro_at_most_once_rep
+ && !attr::contains_name(attrs, "allow_internal_unstable")
+ {
+ let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
+ emit_feature_err(
+ sess,
+ "macro_at_most_once_rep",
+ span,
+ GateIssue::Language,
+ explain,
+ );
}
+ return (None, op);
}
+ // #1 is a `+` or `*` KleeneOp
+ Ok(Ok(op)) => return (None, op),
+
// #1 is a separator followed by #2, a KleeneOp
Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
// #2 is a KleeneOp :D
GateIssue::Language,
explain,
);
+ } else {
+ sess.span_diagnostic
+ .span_err(span, "`?` macro repetition does not allow a separator");
}
- return (Some(tok), op);
+ return (None, op);
}
Ok(Ok(op)) => return (Some(tok), op),
Err(span) => span,
};
- if !features.macro_at_most_once_rep
- && !attr::contains_name(attrs, "allow_internal_unstable")
- {
+ if !features.macro_at_most_once_rep && !attr::contains_name(attrs, "allow_internal_unstable") {
sess.span_diagnostic
.span_err(span, "expected one of: `*`, `+`, or `?`");
} else {
// allow `repr(simd)`, and importing the various simd intrinsics
(active, repr_simd, "1.4.0", Some(27731), None),
- // Allows cfg(target_feature = "...").
- (active, cfg_target_feature, "1.4.0", Some(29717), None),
-
// allow `extern "platform-intrinsic" { ... }`
(active, platform_intrinsics, "1.4.0", Some(27731), None),
(active, use_extern_macros, "1.15.0", Some(35896), None),
- // Allows #[target_feature(...)]
- (active, target_feature, "1.15.0", None, None),
-
// `extern "ptx-*" fn()`
(active, abi_ptx, "1.15.0", None, None),
// Future-proofing enums/structs with #[non_exhaustive] attribute (RFC 2008)
(active, non_exhaustive, "1.22.0", Some(44109), None),
- // allow `'_` placeholder lifetimes
- (active, underscore_lifetimes, "1.22.0", Some(44524), None),
-
- // Default match binding modes (RFC 2005)
- (active, match_default_bindings, "1.22.0", Some(42640), None),
-
// Trait object syntax with `dyn` prefix
(active, dyn_trait, "1.22.0", Some(44662), Some(Edition::Edition2018)),
// Allows macro invocations in `extern {}` blocks
(active, macros_in_extern, "1.27.0", Some(49476), None),
+
+ // unstable #[target_feature] directives
+ (active, arm_target_feature, "1.27.0", None, None),
+ (active, aarch64_target_feature, "1.27.0", None, None),
+ (active, hexagon_target_feature, "1.27.0", None, None),
+ (active, powerpc_target_feature, "1.27.0", None, None),
+ (active, mips_target_feature, "1.27.0", None, None),
+ (active, avx512_target_feature, "1.27.0", None, None),
+ (active, mmx_target_feature, "1.27.0", None, None),
+ (active, sse4a_target_feature, "1.27.0", None, None),
+ (active, tbm_target_feature, "1.27.0", None, None),
);
declare_features! (
(accepted, underscore_lifetimes, "1.26.0", Some(44524), None),
// Allows attributes on lifetime/type formal parameters in generics (RFC 1327)
(accepted, generic_param_attrs, "1.26.0", Some(48848), None),
+ // Allows cfg(target_feature = "...").
+ (accepted, cfg_target_feature, "1.27.0", Some(29717), None),
+ // Allows #[target_feature(...)]
+ (accepted, target_feature, "1.27.0", None, None),
);
// If you change this, please modify src/doc/unstable-book as well. You must
"the `#[naked]` attribute \
is an experimental feature",
cfg_fn!(naked_functions))),
- ("target_feature", Whitelisted, Gated(
- Stability::Unstable, "target_feature",
- "the `#[target_feature]` attribute is an experimental feature",
- cfg_fn!(target_feature))),
+ ("target_feature", Normal, Ungated),
("export_name", Whitelisted, Ungated),
("inline", Whitelisted, Ungated),
("link", Whitelisted, Ungated),
// cfg(...)'s that are feature gated
const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
// (name in cfg, feature, function to check if the feature is enabled)
- ("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
("target_thread_local", "cfg_target_thread_local", cfg_fn!(cfg_target_thread_local)),
("target_has_atomic", "cfg_target_has_atomic", cfg_fn!(cfg_target_has_atomic)),
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Machinery for hygienic macros, inspired by the MTWT[1] paper.
+//! Machinery for hygienic macros, inspired by the `MTWT[1]` paper.
//!
-//! [1] Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. 2012.
+//! `[1]` Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. 2012.
//! *Macros that work together: Compile-time bindings, partial expansion,
//! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216.
//! DOI=10.1017/S0956796812000093 <http://dx.doi.org/10.1017/S0956796812000093>
pub fn filter_tests(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
let mut filtered = tests;
-
// Remove tests that don't match the test filter
filtered = match opts.filter {
None => filtered,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/// A macro for defining #[cfg] if-else statements.
+/// A macro for defining `#[cfg]` if-else statements.
///
/// This is similar to the `if/elif` C preprocessor macro by allowing definition
/// of a cascade of `#[cfg]` cases, emitting the implementation which matches
/// first.
///
-/// This allows you to conveniently provide a long list #[cfg]'d blocks of code
+/// This allows you to conveniently provide a long list of `#[cfg]`'d blocks of code
/// without having to rewrite each clause multiple times.
macro_rules! cfg_if {
($(
extern crate rustc_driver;
-fn main() { rustc_driver::main() }
+fn main() {
+ rustc_driver::set_sigpipe_handler();
+ rustc_driver::main()
+}
-Subproject commit bcb720e55861c38db47f2ebdf26b7198338cb39d
+Subproject commit effdcd0132d17b6c4badc67b4b6d3fdf749a2d22
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions:rpass1 rpass2
+
+#![cfg_attr(rpass2, warn(dead_code))]
+
+pub static mut BAA: *const i8 = unsafe { &BOO as *const _ as *const i8 };
+
+pub static mut BOO: *const i8 = unsafe { &BAA as *const _ as *const i8 };
+
+fn main() {}
--- /dev/null
+-include ../tools.mk
+
+all: extern_absolute_paths.rs extern_in_paths.rs krate2
+ $(RUSTC) extern_absolute_paths.rs -Zsave-analysis
+ cat $(TMPDIR)/save-analysis/extern_absolute_paths.json | "$(PYTHON)" validate_json.py
+ $(RUSTC) extern_in_paths.rs -Zsave-analysis
+ cat $(TMPDIR)/save-analysis/extern_in_paths.json | "$(PYTHON)" validate_json.py
+
+krate2: krate2.rs
+ $(RUSTC) $<
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(extern_absolute_paths)]
+
+use krate2::hello;
+
+fn main() {
+ hello();
+ ::krate2::hello();
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(extern_in_paths)]
+
+use extern::krate2;
+
+fn main() {
+ extern::krate2::hello();
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "krate2"]
+#![crate_type = "lib"]
+
+pub fn hello() {
+}
--- /dev/null
+#!/usr/bin/env python
+
+# Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+import sys
+import json
+
+crates = json.loads(sys.stdin.readline().strip())["prelude"]["external_crates"]
+assert any(map(lambda c: c["id"]["name"] == "krate2", crates))
--- /dev/null
+-include ../../run-make-fulldeps/tools.mk
+
+ifeq ($(TARGET),wasm32-unknown-unknown)
+all:
+ $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg a
+ wc -c < $(TMPDIR)/foo.wasm
+ [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "1024" ]
+ $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg b
+ wc -c < $(TMPDIR)/foo.wasm
+ [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
+ $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg c
+ wc -c < $(TMPDIR)/foo.wasm
+ [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
+else
+all:
+endif
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "cdylib"]
+
+#[no_mangle]
+#[cfg(a)]
+pub fn foo() {
+ panic!("test");
+}
+
+#[no_mangle]
+#[cfg(b)]
+pub fn foo() {
+ panic!("{}", 1);
+}
+
+#[no_mangle]
+#[cfg(c)]
+pub fn foo() {
+ panic!("{}", "a");
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//`#[cfg]` on struct field permits empty unusable struct
+// `#[cfg]` on struct field permits empty unusable struct
struct S {
#[cfg(untrue)]
} }
}
-macro_rules! baz {
- ($($a:ident),? ; $num:expr) => { { // comma separator is meaningless for `?`
- let mut x = 0;
-
- $(
- x += $a;
- )?
-
- assert_eq!(x, $num);
- } }
-}
-
macro_rules! barplus {
($($a:ident)?+ ; $num:expr) => { {
let mut x = 0;
$(
x += $a;
- )+
+ )?
assert_eq!(x, $num);
} }
$(
x += $a;
- )*
+ )?
assert_eq!(x, $num);
} }
// accept 0 or 1 repetitions
foo!( ; 0);
foo!(a ; 1);
- baz!( ; 0);
- baz!(a ; 1);
// Make sure using ? as a separator works as before
- barplus!(a ; 1);
- barplus!(a?a ; 2);
- barplus!(a?a?a ; 3);
- barstar!( ; 0);
- barstar!(a ; 1);
- barstar!(a?a ; 2);
- barstar!(a?a?a ; 3);
+ barplus!(+ ; 0);
+ barplus!(a + ; 1);
+ barstar!(* ; 0);
+ barstar!(a * ; 1);
}
// ignore-emscripten
#![feature(repr_simd, target_feature, cfg_target_feature)]
+#![feature(avx512_target_feature)]
use std::process::{Command, ExitStatus};
use std::env;
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+//! Test with [Foo::baz], [Bar::foo], [Uniooon::X]
+
+pub struct Foo {
+ pub bar: usize,
+}
--- /dev/null
+warning: [Foo::baz] cannot be resolved, ignoring it...
+
+warning: [Bar::foo] cannot be resolved, ignoring it...
+
+warning: [Uniooon::X] cannot be resolved, ignoring it...
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// only-x86_64
+// compile-flags:--test -C target-feature=+avx
+// should-fail
+
+/// (written on a spider's web) Some Struct
+///
+/// ```
+/// panic!("oh no");
+/// ```
+#[doc(cfg(target_feature = "avx"))]
+pub struct SomeStruct;
#[rustc_dump_program_clauses] //~ ERROR Implemented(T: Foo) :-
impl<T: 'static> Foo for T where T: Iterator<Item = i32> { }
+trait Bar {
+ type Assoc;
+}
+
+impl<T> Bar for T where T: Iterator<Item = i32> {
+ #[rustc_dump_program_clauses] //~ ERROR Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :-
+ type Assoc = Vec<T>;
+}
+
fn main() {
println!("hello");
}
LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(T: Foo) :-
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to previous error
+error: Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :- Implemented(T: Bar).
+ --> $DIR/lower_impl.rs:23:5
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[cfg(target_feature = "x")] //~ ERROR `cfg(target_feature)` is experimental
-#[cfg_attr(target_feature = "x", x)] //~ ERROR `cfg(target_feature)` is experimental
-struct Foo(u64, u64);
-
-#[cfg(not(any(all(target_feature = "x"))))] //~ ERROR `cfg(target_feature)` is experimental
-fn foo() {}
-
-fn main() {
- cfg!(target_feature = "x");
- //~^ ERROR `cfg(target_feature)` is experimental and subject to change
-}
+++ /dev/null
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
- --> $DIR/feature-gate-cfg-target-feature.rs:12:12
- |
-LL | #[cfg_attr(target_feature = "x", x)] //~ ERROR `cfg(target_feature)` is experimental
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
- --> $DIR/feature-gate-cfg-target-feature.rs:11:7
- |
-LL | #[cfg(target_feature = "x")] //~ ERROR `cfg(target_feature)` is experimental
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
- --> $DIR/feature-gate-cfg-target-feature.rs:15:19
- |
-LL | #[cfg(not(any(all(target_feature = "x"))))] //~ ERROR `cfg(target_feature)` is experimental
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
- --> $DIR/feature-gate-cfg-target-feature.rs:19:10
- |
-LL | cfg!(target_feature = "x");
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error: aborting due to 4 previous errors
-
-For more information about this error, try `rustc --explain E0658`.
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[target_feature = "+sse2"]
-//~^ the `#[target_feature]` attribute is an experimental feature
-fn foo() {}
+++ /dev/null
-error[E0658]: the `#[target_feature]` attribute is an experimental feature
- --> $DIR/feature-gate-target_feature.rs:11:1
- |
-LL | #[target_feature = "+sse2"]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(target_feature)] to the crate attributes to enable
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0658`.
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(nll)]
+
+fn flatten<'a, 'b, T>(x: &'a &'b T) -> &'a T {
+ x
+}
+
+fn main() {
+ let mut x = "original";
+ let y = &x;
+ let z = &y;
+ let w = flatten(z);
+ x = "modified";
+ //~^ ERROR cannot assign to `x` because it is borrowed [E0506]
+ println!("{}", w); // prints "modified"
+}
--- /dev/null
+error[E0506]: cannot assign to `x` because it is borrowed
+ --> $DIR/issue-48803.rs:22:5
+ |
+LL | let y = &x;
+ | -- borrow of `x` occurs here
+...
+LL | x = "modified";
+ | ^^^^^^^^^^^^^^ assignment to borrowed `x` occurs here
+LL | //~^ ERROR cannot assign to `x` because it is borrowed [E0506]
+LL | println!("{}", w); // prints "modified"
+ | - borrow later used here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0506`.
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// The logic for parsing Kleene operators in macros has a special case to disambiguate `?`.
-// Specifically, `$(pat)?` is the ZeroOrOne operator whereas `$(pat)?+` or `$(pat)?*` are the
-// ZeroOrMore and OneOrMore operators using `?` as a separator. These tests are intended to
-// exercise that logic in the macro parser.
-//
-// Moreover, we also throw in some tests for using a separator with `?`, which is meaningless but
-// included for consistency with `+` and `*`.
-//
-// This test focuses on error cases.
+// Tests the behavior of various Kleene operators in macros with respect to `?` terminals. In
+// particular, `?` in the position of a separator and of a Kleene operator is tested.
#![feature(macro_at_most_once_rep)]
+// should match `` and `a`
macro_rules! foo {
($(a)?) => {}
}
macro_rules! baz {
- ($(a),?) => {} // comma separator is meaningless for `?`
+ ($(a),?) => {} //~ ERROR `?` macro repetition does not allow a separator
}
+// should match `+` and `a+`
macro_rules! barplus {
($(a)?+) => {}
}
+// should match `*` and `a*`
macro_rules! barstar {
($(a)?*) => {}
}
foo!(a?a?a); //~ ERROR no rules expected the token `?`
foo!(a?a); //~ ERROR no rules expected the token `?`
foo!(a?); //~ ERROR no rules expected the token `?`
- baz!(a?a?a); //~ ERROR no rules expected the token `?`
- baz!(a?a); //~ ERROR no rules expected the token `?`
- baz!(a?); //~ ERROR no rules expected the token `?`
- baz!(a,); //~ ERROR unexpected end of macro invocation
- baz!(a?a?a,); //~ ERROR no rules expected the token `?`
- baz!(a?a,); //~ ERROR no rules expected the token `?`
- baz!(a?,); //~ ERROR no rules expected the token `?`
barplus!(); //~ ERROR unexpected end of macro invocation
- barplus!(a?); //~ ERROR unexpected end of macro invocation
- barstar!(a?); //~ ERROR unexpected end of macro invocation
+ barstar!(); //~ ERROR unexpected end of macro invocation
+ barplus!(a?); //~ ERROR no rules expected the token `?`
+ barplus!(a); //~ ERROR unexpected end of macro invocation
+ barstar!(a?); //~ ERROR no rules expected the token `?`
+ barstar!(a); //~ ERROR unexpected end of macro invocation
+ barplus!(+); // ok
+ barstar!(*); // ok
+ barplus!(a+); // ok
+ barstar!(a*); // ok
}
+error: `?` macro repetition does not allow a separator
+ --> $DIR/macro-at-most-once-rep-ambig.rs:22:10
+ |
+LL | ($(a),?) => {} //~ ERROR `?` macro repetition does not allow a separator
+ | ^
+
error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:40:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:36:11
|
LL | foo!(a?a?a); //~ ERROR no rules expected the token `?`
| ^
error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:41:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:37:11
|
LL | foo!(a?a); //~ ERROR no rules expected the token `?`
| ^
error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:42:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:38:11
|
LL | foo!(a?); //~ ERROR no rules expected the token `?`
| ^
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:43:11
- |
-LL | baz!(a?a?a); //~ ERROR no rules expected the token `?`
- | ^
-
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:44:11
- |
-LL | baz!(a?a); //~ ERROR no rules expected the token `?`
- | ^
-
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:45:11
- |
-LL | baz!(a?); //~ ERROR no rules expected the token `?`
- | ^
-
error: unexpected end of macro invocation
- --> $DIR/macro-at-most-once-rep-ambig.rs:46:11
- |
-LL | baz!(a,); //~ ERROR unexpected end of macro invocation
- | ^
-
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:47:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:39:5
|
-LL | baz!(a?a?a,); //~ ERROR no rules expected the token `?`
- | ^
+LL | barplus!(); //~ ERROR unexpected end of macro invocation
+ | ^^^^^^^^^^^
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:48:11
+error: unexpected end of macro invocation
+ --> $DIR/macro-at-most-once-rep-ambig.rs:40:5
|
-LL | baz!(a?a,); //~ ERROR no rules expected the token `?`
- | ^
+LL | barstar!(); //~ ERROR unexpected end of macro invocation
+ | ^^^^^^^^^^^
error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:49:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:41:15
|
-LL | baz!(a?,); //~ ERROR no rules expected the token `?`
- | ^
+LL | barplus!(a?); //~ ERROR no rules expected the token `?`
+ | ^
error: unexpected end of macro invocation
- --> $DIR/macro-at-most-once-rep-ambig.rs:50:5
+ --> $DIR/macro-at-most-once-rep-ambig.rs:42:14
|
-LL | barplus!(); //~ ERROR unexpected end of macro invocation
- | ^^^^^^^^^^^
+LL | barplus!(a); //~ ERROR unexpected end of macro invocation
+ | ^
-error: unexpected end of macro invocation
- --> $DIR/macro-at-most-once-rep-ambig.rs:51:15
+error: no rules expected the token `?`
+ --> $DIR/macro-at-most-once-rep-ambig.rs:43:15
|
-LL | barplus!(a?); //~ ERROR unexpected end of macro invocation
+LL | barstar!(a?); //~ ERROR no rules expected the token `?`
| ^
error: unexpected end of macro invocation
- --> $DIR/macro-at-most-once-rep-ambig.rs:52:15
+ --> $DIR/macro-at-most-once-rep-ambig.rs:44:14
|
-LL | barstar!(a?); //~ ERROR unexpected end of macro invocation
- | ^
+LL | barstar!(a); //~ ERROR unexpected end of macro invocation
+ | ^
-error: aborting due to 13 previous errors
+error: aborting due to 10 previous errors
--> $DIR/issue-47388.rs:18:5
|
LL | let fancy_ref = &(&mut fancy);
- | ------------- help: consider changing this to be a mutable reference: `&mut`
+ | ------------- help: consider changing this to be a mutable reference: `&mut (&mut fancy)`
LL | fancy_ref.num = 6; //~ ERROR E0594
| ^^^^^^^^^^^^^^^^^ `fancy_ref` is a `&` reference, so the data it refers to cannot be written
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-arm
+// ignore-aarch64
+// ignore-wasm
+// ignore-emscripten
+// gate-test-sse4a_target_feature
+// gate-test-powerpc_target_feature
+// gate-test-avx512_target_feature
+// gate-test-tbm_target_feature
+// gate-test-arm_target_feature
+// gate-test-aarch64_target_feature
+// gate-test-hexagon_target_feature
+// gate-test-mips_target_feature
+// gate-test-mmx_target_feature
+// min-llvm-version 6.0
+
+#[target_feature(enable = "avx512bw")]
+//~^ ERROR: currently unstable
+unsafe fn foo() {
+}
+
+fn main() {}
--- /dev/null
+error[E0658]: the target feature `avx512bw` is currently unstable
+ --> $DIR/target-feature-gate.rs:26:18
+ |
+LL | #[target_feature(enable = "avx512bw")]
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: add #![feature(avx512_target_feature)] to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
#![feature(target_feature)]
#[target_feature = "+sse2"]
-//~^ WARN: deprecated
+//~^ ERROR: must be of the form
#[target_feature(enable = "foo")]
//~^ ERROR: not valid for this target
#[target_feature(bar)]
-warning: #[target_feature = ".."] is deprecated and will eventually be removed, use #[target_feature(enable = "..")] instead
+error: #[target_feature] attribute must be of the form #[target_feature(..)]
--> $DIR/target-feature-wrong.rs:21:1
|
LL | #[target_feature = "+sse2"]
LL | #[inline(always)]
| ^^^^^^^^^^^^^^^^^
-error: aborting due to 6 previous errors
+error: aborting due to 7 previous errors
),
};
+ let src_base = opt_path(matches, "src-base");
+ let run_ignored = matches.opt_present("ignored");
Config {
compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
valgrind_path: matches.opt_str("valgrind-path"),
force_valgrind: matches.opt_present("force-valgrind"),
llvm_filecheck: matches.opt_str("llvm-filecheck").map(|s| PathBuf::from(&s)),
- src_base: opt_path(matches, "src-base"),
+ src_base,
build_base: opt_path(matches, "build-base"),
stage_id: matches.opt_str("stage-id").unwrap(),
mode: matches
.unwrap()
.parse()
.expect("invalid mode"),
- run_ignored: matches.opt_present("ignored"),
+ run_ignored,
filter: matches.free.first().cloned(),
filter_exact: matches.opt_present("exact"),
logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)),
// want to actually assert warnings about all this code. Instead
// let's just ignore unused code warnings by defaults and tests
// can turn it back on if needed.
- rustc.args(&["-A", "unused"]);
+ if !self.config.src_base.ends_with("rustdoc-ui") {
+ rustc.args(&["-A", "unused"]);
+ }
}
_ => {}
}
}
fn make_compile_args(&self, input_file: &Path, output_file: TargetLocation) -> Command {
- let mut rustc = Command::new(&self.config.rustc_path);
+ let is_rustdoc = self.config.src_base.ends_with("rustdoc-ui");
+ let mut rustc = if !is_rustdoc {
+ Command::new(&self.config.rustc_path)
+ } else {
+ Command::new(&self.config.rustdoc_path.clone().expect("no rustdoc built yet"))
+ };
rustc.arg(input_file).arg("-L").arg(&self.config.build_base);
// Optionally prevent default --target if specified in test compile-flags.
rustc.args(&["--cfg", revision]);
}
- if let Some(ref incremental_dir) = self.props.incremental_dir {
- rustc.args(&[
- "-C",
- &format!("incremental={}", incremental_dir.display()),
- ]);
- rustc.args(&["-Z", "incremental-verify-ich"]);
- rustc.args(&["-Z", "incremental-queries"]);
- }
+ if !is_rustdoc {
+ if let Some(ref incremental_dir) = self.props.incremental_dir {
+ rustc.args(&[
+ "-C",
+ &format!("incremental={}", incremental_dir.display()),
+ ]);
+ rustc.args(&["-Z", "incremental-verify-ich"]);
+ rustc.args(&["-Z", "incremental-queries"]);
+ }
- if self.config.mode == CodegenUnits {
- rustc.args(&["-Z", "human_readable_cgu_names"]);
+ if self.config.mode == CodegenUnits {
+ rustc.args(&["-Z", "human_readable_cgu_names"]);
+ }
}
match self.config.mode {
}
}
-
- if self.config.target == "wasm32-unknown-unknown" {
- // rustc.arg("-g"); // get any backtrace at all on errors
- } else if !self.props.no_prefer_dynamic {
- rustc.args(&["-C", "prefer-dynamic"]);
+ if !is_rustdoc {
+ if self.config.target == "wasm32-unknown-unknown" {
+ // rustc.arg("-g"); // get any backtrace at all on errors
+ } else if !self.props.no_prefer_dynamic {
+ rustc.args(&["-C", "prefer-dynamic"]);
+ }
}
match output_file {
} else {
rustc.args(self.split_maybe_args(&self.config.target_rustcflags));
}
- if let Some(ref linker) = self.config.linker {
- rustc.arg(format!("-Clinker={}", linker));
+ if !is_rustdoc {
+ if let Some(ref linker) = self.config.linker {
+ rustc.arg(format!("-Clinker={}", linker));
+ }
}
rustc.args(&self.props.compile_flags);
.compile_flags
.iter()
.any(|s| s.contains("--error-format"));
-
let proc_res = self.compile_test();
self.check_if_test_should_compile(&proc_res);