#### `[rust]`:
- `debuginfo = true` - Build a compiler with debuginfo. Makes building rustc slower, but then you can use a debugger to debug `rustc`.
- `debuginfo-lines = true` - An alternative to `debuginfo = true` that doesn't let you use a debugger, but doesn't make building rustc slower and still gives you line numbers in backtraces.
+- `debuginfo-tools = true` - Build the extended tools with debuginfo.
- `debug-assertions = true` - Makes the log output of `debug!` work.
- `optimize = false` - Disable optimizations to speed up compiling the stage1 compiler, but this makes the resulting stage1 compiler about 100x slower.
# standard library.
#debuginfo-only-std = false
+# Enable debuginfo for the extended tools: cargo, rls, rustfmt
+# Adding debuginfo makes them several times larger.
+#debuginfo-tools = false
+
# Whether or not jemalloc is built and enabled
#use-jemalloc = true
[[package]]
name = "arena"
version = "0.0.0"
+dependencies = [
+ "rustc_data_structures 0.0.0",
+]
[[package]]
name = "arrayvec"
[[package]]
name = "rustc-ap-rustc_cratesio_shim"
-version = "98.0.0"
+version = "103.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "rustc-ap-rustc_data_structures"
-version = "98.0.0"
+version = "103.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-rustc_errors"
-version = "98.0.0"
+version = "103.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"atty 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_data_structures 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax_pos 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax_pos 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "rustc-ap-serialize"
-version = "98.0.0"
+version = "103.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rustc-ap-syntax"
-version = "98.0.0"
+version = "103.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_cratesio_shim 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_data_structures 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_errors 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax_pos 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_cratesio_shim 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_errors 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax_pos 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-syntax_pos"
-version = "98.0.0"
+version = "103.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rustc-ap-rustc_data_structures 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
"checksum rls-rustc 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "885f66b92757420572cbb02e033d4a9558c7413ca9b7ac206f28fd58ffdb44ea"
"checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a"
"checksum rls-vfs 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "be231e1e559c315bc60ced5ad2cc2d7a9c208ed7d4e2c126500149836fda19bb"
-"checksum rustc-ap-rustc_cratesio_shim 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2b576584b70d2b0c5f8a82c98a3eb39ef95eaf9187b90ad8858a149a55e94e85"
-"checksum rustc-ap-rustc_data_structures 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "be7c3367229e1497a65c754188842cc02f5e50e93cced2168f621c170cd08ee5"
-"checksum rustc-ap-rustc_errors 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db6440cf26fe79acf54d9d0991835a2eabec4b7039da153889a16f50bda5a7ef"
-"checksum rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3854db2139a75e4d1898289c08dcd8487bec318975877c6268551afccab8844b"
-"checksum rustc-ap-syntax 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8f1852c80f5195a3da20023205bd1202254bf0282b9ffbaaa029a6beed31db3d"
-"checksum rustc-ap-syntax_pos 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc60c04eccec0304b3684584b696669b2cfdfbeacee615bb5a9f431aafa64ab9"
+"checksum rustc-ap-rustc_cratesio_shim 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "463834ac5ea777cb56c073586675fac37292f8425aafb3757efca7e6a76545aa"
+"checksum rustc-ap-rustc_data_structures 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d256eeab1b8639c2a1fd341e54f3613f8150bc262e4ec9361a29bbcb162906d"
+"checksum rustc-ap-rustc_errors 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cf6dd73033bb512845a6df347174c65ad430c92ecd35527e24d8bb186f5664ee"
+"checksum rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "356d338dbe538c7d6428296872d5d68da8f091e34eb89bca3b3f245ed0785e5e"
+"checksum rustc-ap-syntax 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0f02edede4ba70963a7dac2308876f03f76f9edd48a035e5abc8fa37c57a77c8"
+"checksum rustc-ap-syntax_pos 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad8e50d4c38121fa8ded3ffbf94926ec74c95f24316c3b80de84fbfb42c005cf"
"checksum rustc-demangle 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11fb43a206a04116ffd7cfcf9bcb941f8eb6cc7ff667272246b0a1c74259a3cb"
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
"checksum same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cfb6eded0b06a0b512c8ddbcf04089138c9b4362c2f696f3c3d76039d68f3637"
eprintln!("{:?} not skipped for {:?} -- not in {:?}", pathset,
self.name, builder.config.exclude);
}
- let build = builder.build;
- let hosts = &build.hosts;
+ let hosts = &builder.hosts;
// Determine the targets participating in this rule.
let targets = if self.only_hosts {
- if !build.config.run_host_only {
+ if !builder.config.run_host_only {
return; // don't run anything
} else {
- &build.hosts
+ &builder.hosts
}
} else {
- &build.targets
+ &builder.targets
};
for host in hosts {
test::TheBook, test::UnstableBook,
test::Rustfmt, test::Miri, test::Clippy, test::RustdocJS, test::RustdocTheme,
// Run run-make last, since these won't pass without make on Windows
- test::RunMake),
+ test::RunMake, test::RustdocUi),
Kind::Bench => describe!(test::Crate, test::CrateLibrustc),
Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
doc::Standalone, doc::Std, doc::Test, doc::WhitelistedRustc, doc::Rustc,
pub fn sysroot_codegen_backends(&self, compiler: Compiler) -> PathBuf {
self.sysroot_libdir(compiler, compiler.host)
- .with_file_name(self.build.config.rust_codegen_backends_dir.clone())
+ .with_file_name(self.config.rust_codegen_backends_dir.clone())
}
/// Returns the compiler's libdir where it stores the dynamic libraries that
/// Windows.
pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf {
if compiler.is_snapshot(self) {
- self.build.rustc_snapshot_libdir()
+ self.rustc_snapshot_libdir()
} else {
self.sysroot(compiler).join(libdir(&compiler.host))
}
let compiler = self.compiler(self.top_stage, host);
cmd.env("RUSTC_STAGE", compiler.stage.to_string())
.env("RUSTC_SYSROOT", self.sysroot(compiler))
- .env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.build.build))
- .env("CFG_RELEASE_CHANNEL", &self.build.config.channel)
+ .env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.config.build))
+ .env("CFG_RELEASE_CHANNEL", &self.config.channel)
.env("RUSTDOC_REAL", self.rustdoc(host))
- .env("RUSTDOC_CRATE_VERSION", self.build.rust_version())
+ .env("RUSTDOC_CRATE_VERSION", self.rust_version())
.env("RUSTC_BOOTSTRAP", "1");
- if let Some(linker) = self.build.linker(host) {
+ if let Some(linker) = self.linker(host) {
cmd.env("RUSTC_TARGET_LINKER", linker);
}
cmd
.env("TEST_MIRI", self.config.test_miri.to_string())
.env("RUSTC_ERROR_METADATA_DST", self.extended_error_dir());
- if let Some(host_linker) = self.build.linker(compiler.host) {
+ if let Some(host_linker) = self.linker(compiler.host) {
cargo.env("RUSTC_HOST_LINKER", host_linker);
}
- if let Some(target_linker) = self.build.linker(target) {
+ if let Some(target_linker) = self.linker(target) {
cargo.env("RUSTC_TARGET_LINKER", target_linker);
}
if let Some(ref error_format) = self.config.rustc_error_format {
cargo.env("RUSTC_ERROR_FORMAT", error_format);
}
if cmd != "build" && cmd != "check" {
- cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build)));
+ cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.config.build)));
}
- if mode != Mode::Tool {
- // Tools don't get debuginfo right now, e.g. cargo and rls don't
- // get compiled with debuginfo.
- // Adding debuginfo increases their sizes by a factor of 3-4.
+ if mode == Mode::Tool {
+ // Tools like cargo and rls don't get debuginfo by default right now, but this can be
+ // enabled in the config. Adding debuginfo makes them several times larger.
+ if self.config.rust_debuginfo_tools {
+ cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
+ cargo.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string());
+ }
+ } else {
cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
cargo.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string());
cargo.env("RUSTC_FORCE_UNSTABLE", "1");
//
// If LLVM support is disabled we need to use the snapshot compiler to compile
// build scripts, as the new compiler doesn't support executables.
- if mode == Mode::Libstd || !self.build.config.llvm_enabled {
+ if mode == Mode::Libstd || !self.config.llvm_enabled {
cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc)
.env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
} else {
}
// For `cargo doc` invocations, make rustdoc print the Rust version into the docs
- cargo.env("RUSTDOC_CRATE_VERSION", self.build.rust_version());
+ cargo.env("RUSTDOC_CRATE_VERSION", self.rust_version());
// Environment variables *required* throughout the build
//
cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
// Set this for all builds to make sure doc builds also get it.
- cargo.env("CFG_RELEASE_CHANNEL", &self.build.config.channel);
+ cargo.env("CFG_RELEASE_CHANNEL", &self.config.channel);
// This one's a bit tricky. As of the time of this writing the compiler
// links to the `winapi` crate on crates.io. This crate provides raw
panic!(out);
}
if let Some(out) = self.cache.get(&step) {
- self.build.verbose(&format!("{}c {:?}", " ".repeat(stack.len()), step));
+ self.verbose(&format!("{}c {:?}", " ".repeat(stack.len()), step));
{
let mut graph = self.graph.borrow_mut();
return out;
}
- self.build.verbose(&format!("{}> {:?}", " ".repeat(stack.len()), step));
+ self.verbose(&format!("{}> {:?}", " ".repeat(stack.len()), step));
stack.push(Box::new(step.clone()));
}
self.parent.set(prev_parent);
- if self.build.config.print_step_timings && dur > Duration::from_millis(100) {
+ if self.config.print_step_timings && dur > Duration::from_millis(100) {
println!("[TIMING] {:?} -- {}.{:03}",
step,
dur.as_secs(),
let cur_step = stack.pop().expect("step stack empty");
assert_eq!(cur_step.downcast_ref(), Some(&step));
}
- self.build.verbose(&format!("{}< {:?}", " ".repeat(self.stack.borrow().len()), step));
+ self.verbose(&format!("{}< {:?}", " ".repeat(self.stack.borrow().len()), step));
self.cache.put(step, out.clone());
out
}
use compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, add_to_sysroot};
use builder::{RunConfig, Builder, ShouldRun, Step};
-use {Build, Compiler, Mode};
+use {Compiler, Mode};
use cache::Interned;
use std::path::PathBuf;
}
fn run(self, builder: &Builder) {
- let build = builder.build;
let target = self.target;
- let compiler = builder.compiler(0, build.build);
+ let compiler = builder.compiler(0, builder.config.build);
- let out_dir = build.stage_out(compiler, Mode::Libstd);
- build.clear_if_dirty(&out_dir, &builder.rustc(compiler));
+ let out_dir = builder.stage_out(compiler, Mode::Libstd);
+ builder.clear_if_dirty(&out_dir, &builder.rustc(compiler));
let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "check");
std_cargo(builder, &compiler, target, &mut cargo);
- let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
+ let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage));
println!("Checking std artifacts ({} -> {})", &compiler.host, target);
- run_cargo(build,
+ run_cargo(builder,
&mut cargo,
- &libstd_stamp(build, compiler, target),
+ &libstd_stamp(builder, compiler, target),
true);
let libdir = builder.sysroot_libdir(compiler, target);
- add_to_sysroot(&build, &libdir, &libstd_stamp(build, compiler, target));
+ add_to_sysroot(&builder, &libdir, &libstd_stamp(builder, compiler, target));
}
}
/// the `compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = builder.compiler(0, build.build);
+ let compiler = builder.compiler(0, builder.config.build);
let target = self.target;
let stage_out = builder.stage_out(compiler, Mode::Librustc);
- build.clear_if_dirty(&stage_out, &libstd_stamp(build, compiler, target));
- build.clear_if_dirty(&stage_out, &libtest_stamp(build, compiler, target));
+ builder.clear_if_dirty(&stage_out, &libstd_stamp(builder, compiler, target));
+ builder.clear_if_dirty(&stage_out, &libtest_stamp(builder, compiler, target));
let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "check");
- rustc_cargo(build, &mut cargo);
+ rustc_cargo(builder, &mut cargo);
- let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
+ let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage));
println!("Checking compiler artifacts ({} -> {})", &compiler.host, target);
- run_cargo(build,
+ run_cargo(builder,
&mut cargo,
- &librustc_stamp(build, compiler, target),
+ &librustc_stamp(builder, compiler, target),
true);
let libdir = builder.sysroot_libdir(compiler, target);
- add_to_sysroot(&build, &libdir, &librustc_stamp(build, compiler, target));
+ add_to_sysroot(&builder, &libdir, &librustc_stamp(builder, compiler, target));
}
}
}
fn run(self, builder: &Builder) {
- let build = builder.build;
let target = self.target;
- let compiler = builder.compiler(0, build.build);
+ let compiler = builder.compiler(0, builder.config.build);
- let out_dir = build.stage_out(compiler, Mode::Libtest);
- build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
+ let out_dir = builder.stage_out(compiler, Mode::Libtest);
+ builder.clear_if_dirty(&out_dir, &libstd_stamp(builder, compiler, target));
let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "check");
- test_cargo(build, &compiler, target, &mut cargo);
+ test_cargo(builder, &compiler, target, &mut cargo);
- let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
+ let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage));
println!("Checking test artifacts ({} -> {})", &compiler.host, target);
- run_cargo(build,
+ run_cargo(builder,
&mut cargo,
- &libtest_stamp(build, compiler, target),
+ &libtest_stamp(builder, compiler, target),
true);
let libdir = builder.sysroot_libdir(compiler, target);
- add_to_sysroot(&build, &libdir, &libtest_stamp(build, compiler, target));
+ add_to_sysroot(builder, &libdir, &libtest_stamp(builder, compiler, target));
}
}
/// Cargo's output path for the standard library in a given stage, compiled
/// by a particular compiler for the specified target.
-pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
- build.cargo_out(compiler, Mode::Libstd, target).join(".libstd-check.stamp")
+pub fn libstd_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ builder.cargo_out(compiler, Mode::Libstd, target).join(".libstd-check.stamp")
}
/// Cargo's output path for libtest in a given stage, compiled by a particular
/// compiler for the specified target.
-pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
- build.cargo_out(compiler, Mode::Libtest, target).join(".libtest-check.stamp")
+pub fn libtest_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ builder.cargo_out(compiler, Mode::Libtest, target).join(".libtest-check.stamp")
}
/// Cargo's output path for librustc in a given stage, compiled by a particular
/// compiler for the specified target.
-pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
- build.cargo_out(compiler, Mode::Librustc, target).join(".librustc-check.stamp")
+pub fn librustc_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ builder.cargo_out(compiler, Mode::Librustc, target).join(".librustc-check.stamp")
}
use serde_json;
use util::{exe, libdir, is_dylib, CiEnv};
-use {Build, Compiler, Mode};
+use {Compiler, Mode};
use native;
use tool;
/// using the `compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
fn run(self, builder: &Builder) {
- let build = builder.build;
let target = self.target;
let compiler = self.compiler;
builder.ensure(StartupObjects { compiler, target });
- if build.force_use_stage1(compiler, target) {
- let from = builder.compiler(1, build.build);
+ if builder.force_use_stage1(compiler, target) {
+ let from = builder.compiler(1, builder.config.build);
builder.ensure(Std {
compiler: from,
target,
// still contain the musl startup objects.
if target.contains("musl") {
let libdir = builder.sysroot_libdir(compiler, target);
- copy_musl_third_party_objects(build, target, &libdir);
+ copy_musl_third_party_objects(builder, target, &libdir);
}
builder.ensure(StdLink {
if target.contains("musl") {
let libdir = builder.sysroot_libdir(compiler, target);
- copy_musl_third_party_objects(build, target, &libdir);
+ copy_musl_third_party_objects(builder, target, &libdir);
}
- let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
- build.clear_if_dirty(&out_dir, &builder.rustc(compiler));
+ let out_dir = builder.cargo_out(compiler, Mode::Libstd, target);
+ builder.clear_if_dirty(&out_dir, &builder.rustc(compiler));
let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "build");
std_cargo(builder, &compiler, target, &mut cargo);
- let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
- build.info(&format!("Building stage{} std artifacts ({} -> {})", compiler.stage,
+ let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage));
+ builder.info(&format!("Building stage{} std artifacts ({} -> {})", compiler.stage,
&compiler.host, target));
- run_cargo(build,
+ run_cargo(builder,
&mut cargo,
- &libstd_stamp(build, compiler, target),
+ &libstd_stamp(builder, compiler, target),
false);
builder.ensure(StdLink {
- compiler: builder.compiler(compiler.stage, build.build),
+ compiler: builder.compiler(compiler.stage, builder.config.build),
target_compiler: compiler,
target,
});
/// with a glibc-targeting toolchain, given we have the appropriate startup
/// files. As those shipped with glibc won't work, copy the ones provided by
/// musl so we have them on linux-gnu hosts.
-fn copy_musl_third_party_objects(build: &Build,
+fn copy_musl_third_party_objects(builder: &Builder,
target: Interned<String>,
into: &Path) {
for &obj in &["crt1.o", "crti.o", "crtn.o"] {
- build.copy(&build.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj));
+ builder.copy(&builder.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj));
}
}
/// Configure cargo to compile the standard library, adding appropriate env vars
/// and such.
-pub fn std_cargo(build: &Builder,
+pub fn std_cargo(builder: &Builder,
compiler: &Compiler,
target: Interned<String>,
cargo: &mut Command) {
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
}
- if build.no_std(target) == Some(true) {
+ if builder.no_std(target) == Some(true) {
// for no-std targets we only compile a few no_std crates
cargo.arg("--features").arg("c mem")
.args(&["-p", "alloc"])
.args(&["-p", "compiler_builtins"])
.args(&["-p", "std_unicode"])
.arg("--manifest-path")
- .arg(build.src.join("src/rustc/compiler_builtins_shim/Cargo.toml"));
+ .arg(builder.src.join("src/rustc/compiler_builtins_shim/Cargo.toml"));
} else {
- let mut features = build.std_features();
+ let mut features = builder.std_features();
// When doing a local rebuild we tell cargo that we're stage1 rather than
// stage0. This works fine if the local rust and being-built rust have the
// same view of what the default allocator is, but fails otherwise. Since
// we don't have a way to express an allocator preference yet, work
// around the issue in the case of a local rebuild with jemalloc disabled.
- if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
+ if compiler.stage == 0 && builder.local_rebuild && !builder.config.use_jemalloc {
features.push_str(" force_alloc_system");
}
- if compiler.stage != 0 && build.config.sanitizers {
+ if compiler.stage != 0 && builder.config.sanitizers {
// This variable is used by the sanitizer runtime crates, e.g.
// rustc_lsan, to build the sanitizer runtime from C code
// When this variable is missing, those crates won't compile the C code,
// missing
// We also only build the runtimes when --enable-sanitizers (or its
// config.toml equivalent) is used
- let llvm_config = build.ensure(native::Llvm {
- target: build.config.build,
+ let llvm_config = builder.ensure(native::Llvm {
+ target: builder.config.build,
emscripten: false,
});
cargo.env("LLVM_CONFIG", llvm_config);
cargo.arg("--features").arg(features)
.arg("--manifest-path")
- .arg(build.src.join("src/libstd/Cargo.toml"));
+ .arg(builder.src.join("src/libstd/Cargo.toml"));
- if let Some(target) = build.config.target_config.get(&target) {
+ if let Some(target) = builder.config.target_config.get(&target) {
if let Some(ref jemalloc) = target.jemalloc {
cargo.env("JEMALLOC_OVERRIDE", jemalloc);
}
}
if target.contains("musl") {
- if let Some(p) = build.musl_root(target) {
+ if let Some(p) = builder.musl_root(target) {
cargo.env("MUSL_ROOT", p);
}
}
/// libraries for `target`, and this method will find them in the relevant
/// output directory.
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target_compiler = self.target_compiler;
let target = self.target;
- build.info(&format!("Copying stage{} std from stage{} ({} -> {} / {})",
+ builder.info(&format!("Copying stage{} std from stage{} ({} -> {} / {})",
target_compiler.stage,
compiler.stage,
&compiler.host,
target_compiler.host,
target));
let libdir = builder.sysroot_libdir(target_compiler, target);
- add_to_sysroot(&build, &libdir, &libstd_stamp(build, compiler, target));
+ add_to_sysroot(builder, &libdir, &libstd_stamp(builder, compiler, target));
- if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
+ if builder.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
// The sanitizers are only built in stage1 or above, so the dylibs will
// be missing in stage0 and causes panic. See the `std()` function above
// for reason why the sanitizers are not built in stage0.
- copy_apple_sanitizer_dylibs(&build, &build.native_dir(target), "osx", &libdir);
+ copy_apple_sanitizer_dylibs(builder, &builder.native_dir(target), "osx", &libdir);
}
builder.ensure(tool::CleanTools {
}
}
-fn copy_apple_sanitizer_dylibs(build: &Build, native_dir: &Path, platform: &str, into: &Path) {
+fn copy_apple_sanitizer_dylibs(builder: &Builder, native_dir: &Path, platform: &str, into: &Path) {
for &sanitizer in &["asan", "tsan"] {
let filename = format!("libclang_rt.{}_{}_dynamic.dylib", sanitizer, platform);
let mut src_path = native_dir.join(sanitizer);
src_path.push("lib");
src_path.push("darwin");
src_path.push(&filename);
- build.copy(&src_path, &into.join(filename));
+ builder.copy(&src_path, &into.join(filename));
}
}
/// files, so we just use the nightly snapshot compiler to always build them (as
/// no other compilers are guaranteed to be available).
fn run(self, builder: &Builder) {
- let build = builder.build;
let for_compiler = self.compiler;
let target = self.target;
if !target.contains("pc-windows-gnu") {
return
}
- let src_dir = &build.src.join("src/rtstartup");
- let dst_dir = &build.native_dir(target).join("rtstartup");
+ let src_dir = &builder.src.join("src/rtstartup");
+ let dst_dir = &builder.native_dir(target).join("rtstartup");
let sysroot_dir = &builder.sysroot_libdir(for_compiler, target);
t!(fs::create_dir_all(dst_dir));
let src_file = &src_dir.join(file.to_string() + ".rs");
let dst_file = &dst_dir.join(file.to_string() + ".o");
if !up_to_date(src_file, dst_file) {
- let mut cmd = Command::new(&build.initial_rustc);
- build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
+ let mut cmd = Command::new(&builder.initial_rustc);
+ builder.run(cmd.env("RUSTC_BOOTSTRAP", "1")
.arg("--cfg").arg("stage0")
.arg("--target").arg(target)
.arg("--emit=obj")
.arg(src_file));
}
- build.copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
+ builder.copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
}
for obj in ["crt2.o", "dllcrt2.o"].iter() {
- let src = compiler_file(build,
- build.cc(target),
+ let src = compiler_file(builder,
+ builder.cc(target),
target,
obj);
- build.copy(&src, &sysroot_dir.join(obj));
+ builder.copy(&src, &sysroot_dir.join(obj));
}
}
}
/// the build using the `compiler` targeting the `target` architecture. The
/// artifacts created will also be linked into the sysroot directory.
fn run(self, builder: &Builder) {
- let build = builder.build;
let target = self.target;
let compiler = self.compiler;
builder.ensure(Std { compiler, target });
- if build.force_use_stage1(compiler, target) {
+ if builder.force_use_stage1(compiler, target) {
builder.ensure(Test {
- compiler: builder.compiler(1, build.build),
+ compiler: builder.compiler(1, builder.config.build),
target,
});
- build.info(&format!("Uplifting stage1 test ({} -> {})", &build.build, target));
+ builder.info(
+ &format!("Uplifting stage1 test ({} -> {})", builder.config.build, target));
builder.ensure(TestLink {
- compiler: builder.compiler(1, build.build),
+ compiler: builder.compiler(1, builder.config.build),
target_compiler: compiler,
target,
});
return;
}
- let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
- build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
+ let out_dir = builder.cargo_out(compiler, Mode::Libtest, target);
+ builder.clear_if_dirty(&out_dir, &libstd_stamp(builder, compiler, target));
let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "build");
- test_cargo(build, &compiler, target, &mut cargo);
+ test_cargo(builder, &compiler, target, &mut cargo);
- let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
- build.info(&format!("Building stage{} test artifacts ({} -> {})", compiler.stage,
+ let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage));
+ builder.info(&format!("Building stage{} test artifacts ({} -> {})", compiler.stage,
&compiler.host, target));
- run_cargo(build,
+ run_cargo(builder,
&mut cargo,
- &libtest_stamp(build, compiler, target),
+ &libtest_stamp(builder, compiler, target),
false);
builder.ensure(TestLink {
- compiler: builder.compiler(compiler.stage, build.build),
+ compiler: builder.compiler(compiler.stage, builder.config.build),
target_compiler: compiler,
target,
});
}
/// Same as `std_cargo`, but for libtest
-pub fn test_cargo(build: &Build,
+pub fn test_cargo(builder: &Builder,
_compiler: &Compiler,
_target: Interned<String>,
cargo: &mut Command) {
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
}
cargo.arg("--manifest-path")
- .arg(build.src.join("src/libtest/Cargo.toml"));
+ .arg(builder.src.join("src/libtest/Cargo.toml"));
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
/// Same as `std_link`, only for libtest
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target_compiler = self.target_compiler;
let target = self.target;
- build.info(&format!("Copying stage{} test from stage{} ({} -> {} / {})",
+ builder.info(&format!("Copying stage{} test from stage{} ({} -> {} / {})",
target_compiler.stage,
compiler.stage,
&compiler.host,
target_compiler.host,
target));
- add_to_sysroot(&build, &builder.sysroot_libdir(target_compiler, target),
- &libtest_stamp(build, compiler, target));
+ add_to_sysroot(builder, &builder.sysroot_libdir(target_compiler, target),
+ &libtest_stamp(builder, compiler, target));
builder.ensure(tool::CleanTools {
compiler: target_compiler,
target,
/// the `compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
builder.ensure(Test { compiler, target });
- if build.force_use_stage1(compiler, target) {
+ if builder.force_use_stage1(compiler, target) {
builder.ensure(Rustc {
- compiler: builder.compiler(1, build.build),
+ compiler: builder.compiler(1, builder.config.build),
target,
});
- build.info(&format!("Uplifting stage1 rustc ({} -> {})", &build.build, target));
+ builder.info(&format!("Uplifting stage1 rustc ({} -> {})",
+ builder.config.build, target));
builder.ensure(RustcLink {
- compiler: builder.compiler(1, build.build),
+ compiler: builder.compiler(1, builder.config.build),
target_compiler: compiler,
target,
});
// Ensure that build scripts have a std to link against.
builder.ensure(Std {
- compiler: builder.compiler(self.compiler.stage, build.build),
- target: build.build,
+ compiler: builder.compiler(self.compiler.stage, builder.config.build),
+ target: builder.config.build,
});
let cargo_out = builder.cargo_out(compiler, Mode::Librustc, target);
- build.clear_if_dirty(&cargo_out, &libstd_stamp(build, compiler, target));
- build.clear_if_dirty(&cargo_out, &libtest_stamp(build, compiler, target));
+ builder.clear_if_dirty(&cargo_out, &libstd_stamp(builder, compiler, target));
+ builder.clear_if_dirty(&cargo_out, &libtest_stamp(builder, compiler, target));
let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "build");
- rustc_cargo(build, &mut cargo);
+ rustc_cargo(builder, &mut cargo);
- let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
- build.info(&format!("Building stage{} compiler artifacts ({} -> {})",
+ let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage));
+ builder.info(&format!("Building stage{} compiler artifacts ({} -> {})",
compiler.stage, &compiler.host, target));
- run_cargo(build,
+ run_cargo(builder,
&mut cargo,
- &librustc_stamp(build, compiler, target),
+ &librustc_stamp(builder, compiler, target),
false);
builder.ensure(RustcLink {
- compiler: builder.compiler(compiler.stage, build.build),
+ compiler: builder.compiler(compiler.stage, builder.config.build),
target_compiler: compiler,
target,
});
}
}
-pub fn rustc_cargo(build: &Build, cargo: &mut Command) {
- cargo.arg("--features").arg(build.rustc_features())
+pub fn rustc_cargo(builder: &Builder, cargo: &mut Command) {
+ cargo.arg("--features").arg(builder.rustc_features())
.arg("--manifest-path")
- .arg(build.src.join("src/rustc/Cargo.toml"));
- rustc_cargo_env(build, cargo);
+ .arg(builder.src.join("src/rustc/Cargo.toml"));
+ rustc_cargo_env(builder, cargo);
}
-fn rustc_cargo_env(build: &Build, cargo: &mut Command) {
+fn rustc_cargo_env(builder: &Builder, cargo: &mut Command) {
// Set some configuration variables picked up by build scripts and
// the compiler alike
- cargo.env("CFG_RELEASE", build.rust_release())
- .env("CFG_RELEASE_CHANNEL", &build.config.channel)
- .env("CFG_VERSION", build.rust_version())
- .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default())
- .env("CFG_CODEGEN_BACKENDS_DIR", &build.config.rust_codegen_backends_dir);
+ cargo.env("CFG_RELEASE", builder.rust_release())
+ .env("CFG_RELEASE_CHANNEL", &builder.config.channel)
+ .env("CFG_VERSION", builder.rust_version())
+ .env("CFG_PREFIX", builder.config.prefix.clone().unwrap_or_default())
+ .env("CFG_CODEGEN_BACKENDS_DIR", &builder.config.rust_codegen_backends_dir);
- let libdir_relative = build.config.libdir_relative().unwrap_or(Path::new("lib"));
+ let libdir_relative = builder.config.libdir_relative().unwrap_or(Path::new("lib"));
cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
// If we're not building a compiler with debugging information then remove
// these two env vars which would be set otherwise.
- if build.config.rust_debuginfo_only_std {
+ if builder.config.rust_debuginfo_only_std {
cargo.env_remove("RUSTC_DEBUGINFO");
cargo.env_remove("RUSTC_DEBUGINFO_LINES");
}
- if let Some(ref ver_date) = build.rust_info.commit_date() {
+ if let Some(ref ver_date) = builder.rust_info.commit_date() {
cargo.env("CFG_VER_DATE", ver_date);
}
- if let Some(ref ver_hash) = build.rust_info.sha() {
+ if let Some(ref ver_hash) = builder.rust_info.sha() {
cargo.env("CFG_VER_HASH", ver_hash);
}
- if !build.unstable_features() {
+ if !builder.unstable_features() {
cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
}
- if let Some(ref s) = build.config.rustc_default_linker {
+ if let Some(ref s) = builder.config.rustc_default_linker {
cargo.env("CFG_DEFAULT_LINKER", s);
}
- if build.config.rustc_parallel_queries {
+ if builder.config.rustc_parallel_queries {
cargo.env("RUSTC_PARALLEL_QUERIES", "1");
}
}
/// Same as `std_link`, only for librustc
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target_compiler = self.target_compiler;
let target = self.target;
- build.info(&format!("Copying stage{} rustc from stage{} ({} -> {} / {})",
+ builder.info(&format!("Copying stage{} rustc from stage{} ({} -> {} / {})",
target_compiler.stage,
compiler.stage,
&compiler.host,
target_compiler.host,
target));
- add_to_sysroot(&build, &builder.sysroot_libdir(target_compiler, target),
- &librustc_stamp(build, compiler, target));
+ add_to_sysroot(builder, &builder.sysroot_libdir(target_compiler, target),
+ &librustc_stamp(builder, compiler, target));
builder.ensure(tool::CleanTools {
compiler: target_compiler,
target,
}
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
builder.ensure(Rustc { compiler, target });
- if build.force_use_stage1(compiler, target) {
+ if builder.force_use_stage1(compiler, target) {
builder.ensure(CodegenBackend {
- compiler: builder.compiler(1, build.build),
+ compiler: builder.compiler(1, builder.config.build),
target,
backend: self.backend,
});
}
let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "build");
- let mut features = build.rustc_features().to_string();
+ let mut features = builder.rustc_features().to_string();
cargo.arg("--manifest-path")
- .arg(build.src.join("src/librustc_trans/Cargo.toml"));
- rustc_cargo_env(build, &mut cargo);
+ .arg(builder.src.join("src/librustc_trans/Cargo.toml"));
+ rustc_cargo_env(builder, &mut cargo);
match &*self.backend {
"llvm" | "emscripten" => {
features.push_str(" emscripten");
}
- build.info(&format!("Building stage{} codegen artifacts ({} -> {}, {})",
+ builder.info(&format!("Building stage{} codegen artifacts ({} -> {}, {})",
compiler.stage, &compiler.host, target, self.backend));
// Pass down configuration from the LLVM build into the build of
// librustc_llvm and librustc_trans.
- if build.is_rust_llvm(target) {
+ if builder.is_rust_llvm(target) {
cargo.env("LLVM_RUSTLLVM", "1");
}
cargo.env("LLVM_CONFIG", &llvm_config);
if self.backend != "emscripten" {
- let target_config = build.config.target_config.get(&target);
+ let target_config = builder.config.target_config.get(&target);
if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
cargo.env("CFG_LLVM_ROOT", s);
}
}
// Building with a static libstdc++ is only supported on linux right now,
// not for MSVC or macOS
- if build.config.llvm_static_stdcpp &&
+ if builder.config.llvm_static_stdcpp &&
!target.contains("freebsd") &&
!target.contains("windows") &&
!target.contains("apple") {
- let file = compiler_file(build,
- build.cxx(target).unwrap(),
+ let file = compiler_file(builder,
+ builder.cxx(target).unwrap(),
target,
"libstdc++.a");
cargo.env("LLVM_STATIC_STDCPP", file);
}
- if build.config.llvm_link_shared {
+ if builder.config.llvm_link_shared {
cargo.env("LLVM_LINK_SHARED", "1");
}
}
_ => panic!("unknown backend: {}", self.backend),
}
- let tmp_stamp = build.cargo_out(compiler, Mode::Librustc, target)
+ let tmp_stamp = builder.cargo_out(compiler, Mode::Librustc, target)
.join(".tmp.stamp");
- let _folder = build.fold_output(|| format!("stage{}-rustc_trans", compiler.stage));
- let files = run_cargo(build,
+ let _folder = builder.fold_output(|| format!("stage{}-rustc_trans", compiler.stage));
+ let files = run_cargo(builder,
cargo.arg("--features").arg(features),
&tmp_stamp,
false);
codegen_backend.display(),
f.display());
}
- let stamp = codegen_backend_stamp(build, compiler, target, self.backend);
+ let stamp = codegen_backend_stamp(builder, compiler, target, self.backend);
let codegen_backend = codegen_backend.to_str().unwrap();
t!(t!(File::create(&stamp)).write_all(codegen_backend.as_bytes()));
}
fn copy_codegen_backends_to_sysroot(builder: &Builder,
compiler: Compiler,
target_compiler: Compiler) {
- let build = builder.build;
let target = target_compiler.host;
// Note that this step is different than all the other `*Link` steps in
}
for backend in builder.config.rust_codegen_backends.iter() {
- let stamp = codegen_backend_stamp(build, compiler, target, *backend);
+ let stamp = codegen_backend_stamp(builder, compiler, target, *backend);
let mut dylib = String::new();
t!(t!(File::open(&stamp)).read_to_string(&mut dylib));
let file = Path::new(&dylib);
backend,
&filename[dot..])
};
- build.copy(&file, &dst.join(target_filename));
+ builder.copy(&file, &dst.join(target_filename));
}
}
/// Cargo's output path for the standard library in a given stage, compiled
/// by a particular compiler for the specified target.
-pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
- build.cargo_out(compiler, Mode::Libstd, target).join(".libstd.stamp")
+pub fn libstd_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ builder.cargo_out(compiler, Mode::Libstd, target).join(".libstd.stamp")
}
/// Cargo's output path for libtest in a given stage, compiled by a particular
/// compiler for the specified target.
-pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
- build.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp")
+pub fn libtest_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ builder.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp")
}
/// Cargo's output path for librustc in a given stage, compiled by a particular
/// compiler for the specified target.
-pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
- build.cargo_out(compiler, Mode::Librustc, target).join(".librustc.stamp")
+pub fn librustc_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ builder.cargo_out(compiler, Mode::Librustc, target).join(".librustc.stamp")
}
-fn codegen_backend_stamp(build: &Build,
+fn codegen_backend_stamp(builder: &Builder,
compiler: Compiler,
target: Interned<String>,
backend: Interned<String>) -> PathBuf {
- build.cargo_out(compiler, Mode::Librustc, target)
+ builder.cargo_out(compiler, Mode::Librustc, target)
.join(format!(".librustc_trans-{}.stamp", backend))
}
-fn compiler_file(build: &Build,
+fn compiler_file(builder: &Builder,
compiler: &Path,
target: Interned<String>,
file: &str) -> PathBuf {
let mut cmd = Command::new(compiler);
- cmd.args(build.cflags(target));
+ cmd.args(builder.cflags(target));
cmd.arg(format!("-print-file-name={}", file));
let out = output(&mut cmd);
PathBuf::from(out.trim())
/// thinks it is by default, but it's the same as the default for stages
/// 1-3.
fn run(self, builder: &Builder) -> Interned<PathBuf> {
- let build = builder.build;
let compiler = self.compiler;
let sysroot = if compiler.stage == 0 {
- build.out.join(&compiler.host).join("stage0-sysroot")
+ builder.out.join(&compiler.host).join("stage0-sysroot")
} else {
- build.out.join(&compiler.host).join(format!("stage{}", compiler.stage))
+ builder.out.join(&compiler.host).join(format!("stage{}", compiler.stage))
};
let _ = fs::remove_dir_all(&sysroot);
t!(fs::create_dir_all(&sysroot));
/// Prepare a new compiler from the artifacts in `stage`
///
/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
- /// must have been previously produced by the `stage - 1` build.build
+ /// must have been previously produced by the `stage - 1` builder.config.build
/// compiler.
fn run(self, builder: &Builder) -> Compiler {
- let build = builder.build;
let target_compiler = self.target_compiler;
if target_compiler.stage == 0 {
- assert_eq!(build.build, target_compiler.host,
+ assert_eq!(builder.config.build, target_compiler.host,
"Cannot obtain compiler for non-native build triple at stage 0");
// The stage 0 compiler for the build triple is always pre-built.
return target_compiler;
// FIXME: It may be faster if we build just a stage 1 compiler and then
// use that to bootstrap this compiler forward.
let build_compiler =
- builder.compiler(target_compiler.stage - 1, build.build);
+ builder.compiler(target_compiler.stage - 1, builder.config.build);
// Build the libraries for this compiler to link to (i.e., the libraries
// it uses at runtime). NOTE: Crates the target compiler compiles don't
// link to these. (FIXME: Is that correct? It seems to be correct most
// of the time but I think we do link to these for stage2/bin compilers
// when not performing a full bootstrap).
- if builder.build.config.keep_stage.map_or(false, |s| target_compiler.stage <= s) {
+ if builder.config.keep_stage.map_or(false, |s| target_compiler.stage <= s) {
builder.verbose("skipping compilation of compiler due to --keep-stage");
let compiler = build_compiler;
for stage in 0..min(target_compiler.stage, builder.config.keep_stage.unwrap()) {
compiler: build_compiler,
target: target_compiler.host,
});
- for &backend in build.config.rust_codegen_backends.iter() {
+ for &backend in builder.config.rust_codegen_backends.iter() {
builder.ensure(CodegenBackend {
compiler: build_compiler,
target: target_compiler.host,
}
}
- let lld_install = if build.config.lld_enabled {
+ let lld_install = if builder.config.lld_enabled {
Some(builder.ensure(native::Lld {
target: target_compiler.host,
}))
let stage = target_compiler.stage;
let host = target_compiler.host;
- build.info(&format!("Assembling stage{} compiler ({})", stage, host));
+ builder.info(&format!("Assembling stage{} compiler ({})", stage, host));
// Link in all dylibs to the libdir
let sysroot = builder.sysroot(target_compiler);
}
// Link the compiler binary itself into place
- let out_dir = build.cargo_out(build_compiler, Mode::Librustc, host);
+ let out_dir = builder.cargo_out(build_compiler, Mode::Librustc, host);
let rustc = out_dir.join(exe("rustc", &*host));
let bindir = sysroot.join("bin");
t!(fs::create_dir_all(&bindir));
///
/// For a particular stage this will link the file listed in `stamp` into the
/// `sysroot_dst` provided.
-pub fn add_to_sysroot(build: &Build, sysroot_dst: &Path, stamp: &Path) {
+pub fn add_to_sysroot(builder: &Builder, sysroot_dst: &Path, stamp: &Path) {
t!(fs::create_dir_all(&sysroot_dst));
- for path in build.read_stamp_file(stamp) {
- build.copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
+ for path in builder.read_stamp_file(stamp) {
+ builder.copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
}
}
}
}
-pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: bool)
+pub fn run_cargo(builder: &Builder, cargo: &mut Command, stamp: &Path, is_check: bool)
-> Vec<PathBuf>
{
- if build.config.dry_run {
+ if builder.config.dry_run {
return Vec::new();
}
// files we need to probe for later.
let mut deps = Vec::new();
let mut toplevel = Vec::new();
- let ok = stream_cargo(build, cargo, &mut |msg| {
+ let ok = stream_cargo(builder, cargo, &mut |msg| {
let filenames = match msg {
CargoMessage::CompilerArtifact { filenames, .. } => filenames,
_ => return,
let max = max.unwrap();
let max_path = max_path.unwrap();
if stamp_contents == new_contents && max <= stamp_mtime {
- build.verbose(&format!("not updating {:?}; contents equal and {:?} <= {:?}",
+ builder.verbose(&format!("not updating {:?}; contents equal and {:?} <= {:?}",
stamp, max, stamp_mtime));
return deps
}
if max > stamp_mtime {
- build.verbose(&format!("updating {:?} as {:?} changed", stamp, max_path));
+ builder.verbose(&format!("updating {:?} as {:?} changed", stamp, max_path));
} else {
- build.verbose(&format!("updating {:?} as deps changed", stamp));
+ builder.verbose(&format!("updating {:?} as deps changed", stamp));
}
t!(t!(File::create(stamp)).write_all(&new_contents));
deps
}
pub fn stream_cargo(
- build: &Build,
+ builder: &Builder,
cargo: &mut Command,
cb: &mut FnMut(CargoMessage),
) -> bool {
- if build.config.dry_run {
+ if builder.config.dry_run {
return true;
}
// Instruct Cargo to give us json messages on stdout, critically leaving
cargo.arg("--message-format").arg("json")
.stdout(Stdio::piped());
- if stderr_isatty() && build.ci_env == CiEnv::None &&
+ if stderr_isatty() && builder.ci_env == CiEnv::None &&
// if the terminal is reported as dumb, then we don't want to enable color for rustc
env::var_os("TERM").map(|t| t != *"dumb").unwrap_or(true) {
// since we pass message-format=json to cargo, we need to tell the rustc
cargo.env("RUSTC_COLOR", "1");
}
- build.verbose(&format!("running: {:?}", cargo));
+ builder.verbose(&format!("running: {:?}", cargo));
let mut child = match cargo.spawn() {
Ok(child) => child,
Err(e) => panic!("failed to execute command: {:?}\nerror: {}", cargo, e),
pub rust_debuginfo: bool,
pub rust_debuginfo_lines: bool,
pub rust_debuginfo_only_std: bool,
+ pub rust_debuginfo_tools: bool,
pub rust_rpath: bool,
pub rustc_parallel_queries: bool,
pub rustc_default_linker: Option<String>,
debuginfo: Option<bool>,
debuginfo_lines: Option<bool>,
debuginfo_only_std: Option<bool>,
+ debuginfo_tools: Option<bool>,
experimental_parallel_queries: Option<bool>,
debug_jemalloc: Option<bool>,
use_jemalloc: Option<bool>,
let mut llvm_assertions = None;
let mut debuginfo_lines = None;
let mut debuginfo_only_std = None;
+ let mut debuginfo_tools = None;
let mut debug = None;
let mut debug_jemalloc = None;
let mut debuginfo = None;
debuginfo = rust.debuginfo;
debuginfo_lines = rust.debuginfo_lines;
debuginfo_only_std = rust.debuginfo_only_std;
+ debuginfo_tools = rust.debuginfo_tools;
optimize = rust.optimize;
ignore_git = rust.ignore_git;
debug_jemalloc = rust.debug_jemalloc;
};
config.rust_debuginfo_lines = debuginfo_lines.unwrap_or(default);
config.rust_debuginfo_only_std = debuginfo_only_std.unwrap_or(default);
+ config.rust_debuginfo_tools = debuginfo_tools.unwrap_or(false);
let default = debug == Some(true);
config.debug_jemalloc = debug_jemalloc.unwrap_or(default);
o("debuginfo", "rust.debuginfo", "build with debugger metadata")
o("debuginfo-lines", "rust.debuginfo-lines", "build with line number debugger metadata")
o("debuginfo-only-std", "rust.debuginfo-only-std", "build only libstd with debugging information")
+o("debuginfo-tools", "rust.debuginfo-tools", "build extended tools with debugging information")
o("debug-jemalloc", "rust.debug-jemalloc", "build jemalloc with --enable-debug --enable-fill")
v("save-toolstates", "rust.save-toolstates", "save build and test status of external tools into this file")
use build_helper::output;
-use {Build, Compiler, Mode};
+use {Compiler, Mode};
use channel;
use util::{libdir, is_dylib, exe};
use builder::{Builder, RunConfig, ShouldRun, Step};
use cache::{INTERNER, Interned};
use time;
-pub fn pkgname(build: &Build, component: &str) -> String {
+pub fn pkgname(builder: &Builder, component: &str) -> String {
if component == "cargo" {
- format!("{}-{}", component, build.cargo_package_vers())
+ format!("{}-{}", component, builder.cargo_package_vers())
} else if component == "rls" {
- format!("{}-{}", component, build.rls_package_vers())
+ format!("{}-{}", component, builder.rls_package_vers())
} else if component == "rustfmt" {
- format!("{}-{}", component, build.rustfmt_package_vers())
+ format!("{}-{}", component, builder.rustfmt_package_vers())
} else {
assert!(component.starts_with("rust"));
- format!("{}-{}", component, build.rust_package_vers())
+ format!("{}-{}", component, builder.rust_package_vers())
}
}
-fn distdir(build: &Build) -> PathBuf {
- build.out.join("dist")
+fn distdir(builder: &Builder) -> PathBuf {
+ builder.out.join("dist")
}
-pub fn tmpdir(build: &Build) -> PathBuf {
- build.out.join("tmp/dist")
+pub fn tmpdir(builder: &Builder) -> PathBuf {
+ builder.out.join("tmp/dist")
}
fn rust_installer(builder: &Builder) -> Command {
/// Builds the `rust-docs` installer component.
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
let host = self.host;
- let name = pkgname(build, "rust-docs");
+ let name = pkgname(builder, "rust-docs");
- build.info(&format!("Dist docs ({})", host));
- if !build.config.docs {
- build.info(&format!("\tskipping - docs disabled"));
- return distdir(build).join(format!("{}-{}.tar.gz", name, host));
+ builder.info(&format!("Dist docs ({})", host));
+ if !builder.config.docs {
+ builder.info(&format!("\tskipping - docs disabled"));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
}
builder.default_doc(None);
- let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
let _ = fs::remove_dir_all(&image);
let dst = image.join("share/doc/rust/html");
t!(fs::create_dir_all(&dst));
- let src = build.doc_out(host);
- build.cp_r(&src, &dst);
+ let src = builder.doc_out(host);
+ builder.cp_r(&src, &dst);
let mut cmd = rust_installer(builder);
cmd.arg("generate")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-documentation-is-installed.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rust-docs")
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--bulk-dirs=share/doc/rust/html");
- build.run(&mut cmd);
- build.remove_dir(&image);
+ builder.run(&mut cmd);
+ builder.remove_dir(&image);
- distdir(build).join(format!("{}-{}.tar.gz", name, host))
+ distdir(builder).join(format!("{}-{}.tar.gz", name, host))
}
}
/// Builds the `rustc-docs` installer component.
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
let host = self.host;
- let name = pkgname(build, "rustc-docs");
+ let name = pkgname(builder, "rustc-docs");
- build.info(&format!("Dist compiler docs ({})", host));
- if !build.config.compiler_docs {
- build.info(&format!("\tskipping - compiler docs disabled"));
- return distdir(build).join(format!("{}-{}.tar.gz", name, host));
+ builder.info(&format!("Dist compiler docs ({})", host));
+ if !builder.config.compiler_docs {
+ builder.info(&format!("\tskipping - compiler docs disabled"));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
}
builder.default_doc(None);
- let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
let _ = fs::remove_dir_all(&image);
let dst = image.join("share/doc/rust/html");
t!(fs::create_dir_all(&dst));
- let src = build.compiler_doc_out(host);
- build.cp_r(&src, &dst);
+ let src = builder.compiler_doc_out(host);
+ builder.cp_r(&src, &dst);
let mut cmd = rust_installer(builder);
cmd.arg("generate")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rustc-documentation-is-installed.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rustc-docs")
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--bulk-dirs=share/doc/rust/html");
- build.run(&mut cmd);
- build.remove_dir(&image);
+ builder.run(&mut cmd);
+ builder.remove_dir(&image);
- distdir(build).join(format!("{}-{}.tar.gz", name, host))
+ distdir(builder).join(format!("{}-{}.tar.gz", name, host))
}
}
}
fn make_win_dist(
- rust_root: &Path, plat_root: &Path, target_triple: Interned<String>, build: &Build
+ rust_root: &Path, plat_root: &Path, target_triple: Interned<String>, builder: &Builder
) {
//Ask gcc where it keeps its stuff
- let mut cmd = Command::new(build.cc(target_triple));
+ let mut cmd = Command::new(builder.cc(target_triple));
cmd.arg("-print-search-dirs");
let gcc_out = output(&mut cmd);
let dist_bin_dir = rust_root.join("bin/");
fs::create_dir_all(&dist_bin_dir).expect("creating dist_bin_dir failed");
for src in rustc_dlls {
- build.copy_to_folder(&src, &dist_bin_dir);
+ builder.copy_to_folder(&src, &dist_bin_dir);
}
//Copy platform tools to platform-specific bin directory
let target_bin_dir = plat_root.join("lib").join("rustlib").join(target_triple).join("bin");
fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed");
for src in target_tools {
- build.copy_to_folder(&src, &target_bin_dir);
+ builder.copy_to_folder(&src, &target_bin_dir);
}
//Copy platform libs to platform-specific lib directory
let target_lib_dir = plat_root.join("lib").join("rustlib").join(target_triple).join("lib");
fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed");
for src in target_libs {
- build.copy_to_folder(&src, &target_lib_dir);
+ builder.copy_to_folder(&src, &target_lib_dir);
}
}
/// This contains all the bits and pieces to run the MinGW Windows targets
/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
fn run(self, builder: &Builder) -> Option<PathBuf> {
- let build = builder.build;
let host = self.host;
if !host.contains("pc-windows-gnu") {
return None;
}
- build.info(&format!("Dist mingw ({})", host));
- let name = pkgname(build, "rust-mingw");
- let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+ builder.info(&format!("Dist mingw ({})", host));
+ let name = pkgname(builder, "rust-mingw");
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
let _ = fs::remove_dir_all(&image);
t!(fs::create_dir_all(&image));
// thrown away (this contains the runtime DLLs included in the rustc package
// above) and the second argument is where to place all the MinGW components
// (which is what we want).
- make_win_dist(&tmpdir(build), &image, host, &build);
+ make_win_dist(&tmpdir(builder), &image, host, builder);
let mut cmd = rust_installer(builder);
cmd.arg("generate")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-MinGW-is-installed.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rust-mingw")
.arg("--legacy-manifest-dirs=rustlib,cargo");
- build.run(&mut cmd);
+ builder.run(&mut cmd);
t!(fs::remove_dir_all(&image));
- Some(distdir(build).join(format!("{}-{}.tar.gz", name, host)))
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, host)))
}
}
/// Creates the `rustc` installer component.
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
let compiler = self.compiler;
let host = self.compiler.host;
- build.info(&format!("Dist rustc stage{} ({})", compiler.stage, compiler.host));
- let name = pkgname(build, "rustc");
- let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+ builder.info(&format!("Dist rustc stage{} ({})", compiler.stage, compiler.host));
+ let name = pkgname(builder, "rustc");
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
let _ = fs::remove_dir_all(&image);
- let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
+ let overlay = tmpdir(builder).join(format!("{}-{}-overlay", name, host));
let _ = fs::remove_dir_all(&overlay);
// Prepare the rustc "image", what will actually end up getting installed
// Prepare the overlay which is part of the tarball but won't actually be
// installed
let cp = |file: &str| {
- build.install(&build.src.join(file), &overlay, 0o644);
+ builder.install(&builder.src.join(file), &overlay, 0o644);
};
cp("COPYRIGHT");
cp("LICENSE-APACHE");
cp("LICENSE-MIT");
cp("README.md");
// tiny morsel of metadata is used by rust-packaging
- let version = build.rust_version();
- build.create(&overlay.join("version"), &version);
- if let Some(sha) = build.rust_sha() {
- build.create(&overlay.join("git-commit-hash"), &sha);
+ let version = builder.rust_version();
+ builder.create(&overlay.join("version"), &version);
+ if let Some(sha) = builder.rust_sha() {
+ builder.create(&overlay.join("git-commit-hash"), &sha);
}
// On MinGW we've got a few runtime DLL dependencies that we need to
// install will *also* include the rust-mingw package, which also needs
// licenses, so to be safe we just include it here in all MinGW packages.
if host.contains("pc-windows-gnu") {
- make_win_dist(&image, &tmpdir(build), host, build);
+ make_win_dist(&image, &tmpdir(builder), host, builder);
let dst = image.join("share/doc");
t!(fs::create_dir_all(&dst));
- build.cp_r(&build.src.join("src/etc/third-party"), &dst);
+ builder.cp_r(&builder.src.join("src/etc/third-party"), &dst);
}
// Finally, wrap everything up in a nice tarball!
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rustc")
.arg("--legacy-manifest-dirs=rustlib,cargo");
- build.run(&mut cmd);
- build.remove_dir(&image);
- build.remove_dir(&overlay);
+ builder.run(&mut cmd);
+ builder.remove_dir(&image);
+ builder.remove_dir(&overlay);
- return distdir(build).join(format!("{}-{}.tar.gz", name, host));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
fn prepare_image(builder: &Builder, compiler: Compiler, image: &Path) {
let host = compiler.host;
- let build = builder.build;
let src = builder.sysroot(compiler);
let libdir = libdir(&host);
// Copy rustc/rustdoc binaries
t!(fs::create_dir_all(image.join("bin")));
- build.cp_r(&src.join("bin"), &image.join("bin"));
+ builder.cp_r(&src.join("bin"), &image.join("bin"));
- build.install(&builder.rustdoc(compiler.host), &image.join("bin"), 0o755);
+ builder.install(&builder.rustdoc(compiler.host), &image.join("bin"), 0o755);
// Copy runtime DLLs needed by the compiler
if libdir != "bin" {
- for entry in build.read_dir(&src.join(libdir)) {
+ for entry in builder.read_dir(&src.join(libdir)) {
let name = entry.file_name();
if let Some(s) = name.to_str() {
if is_dylib(s) {
- build.install(&entry.path(), &image.join(libdir), 0o644);
+ builder.install(&entry.path(), &image.join(libdir), 0o644);
}
}
}
let backends_rel = backends_src.strip_prefix(&src).unwrap();
let backends_dst = image.join(&backends_rel);
t!(fs::create_dir_all(&backends_dst));
- build.cp_r(&backends_src, &backends_dst);
+ builder.cp_r(&backends_src, &backends_dst);
// Copy over lld if it's there
if builder.config.lld_enabled {
.join("bin")
.join(&exe);
t!(fs::create_dir_all(&dst.parent().unwrap()));
- build.copy(&src, &dst);
+ builder.copy(&src, &dst);
}
// Man pages
t!(fs::create_dir_all(image.join("share/man/man1")));
- let man_src = build.src.join("src/doc/man");
+ let man_src = builder.src.join("src/doc/man");
let man_dst = image.join("share/man/man1");
let month_year = t!(time::strftime("%B %Y", &time::now()));
// don't use our `bootstrap::util::{copy, cp_r}`, because those try
// to hardlink, and we don't want to edit the source templates
- for file_entry in build.read_dir(&man_src) {
+ for file_entry in builder.read_dir(&man_src) {
let page_src = file_entry.path();
let page_dst = man_dst.join(file_entry.file_name());
t!(fs::copy(&page_src, &page_dst));
// template in month/year and version number
- build.replace_in_file(&page_dst,
+ builder.replace_in_file(&page_dst,
&[("<INSERT DATE HERE>", &month_year),
("<INSERT VERSION HERE>", channel::CFG_RELEASE_NUM)]);
}
// Misc license info
let cp = |file: &str| {
- build.install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+ builder.install(&builder.src.join(file), &image.join("share/doc/rust"), 0o644);
};
cp("COPYRIGHT");
cp("LICENSE-APACHE");
/// Copies debugger scripts for `target` into the `sysroot` specified.
fn run(self, builder: &Builder) {
- let build = builder.build;
let host = self.host;
let sysroot = self.sysroot;
let dst = sysroot.join("lib/rustlib/etc");
t!(fs::create_dir_all(&dst));
let cp_debugger_script = |file: &str| {
- build.install(&build.src.join("src/etc/").join(file), &dst, 0o644);
+ builder.install(&builder.src.join("src/etc/").join(file), &dst, 0o644);
};
if host.contains("windows-msvc") {
// windbg debugger scripts
- build.install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
+ builder.install(&builder.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
0o755);
cp_debugger_script("natvis/intrinsic.natvis");
cp_debugger_script("debugger_pretty_printers_common.py");
// gdb debugger scripts
- build.install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
+ builder.install(&builder.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
0o755);
cp_debugger_script("gdb_load_rust_pretty_printers.py");
cp_debugger_script("gdb_rust_pretty_printing.py");
// lldb debugger scripts
- build.install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+ builder.install(&builder.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
0o755);
cp_debugger_script("lldb_rust_formatters.py");
fn make_run(run: RunConfig) {
run.builder.ensure(Std {
- compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
});
}
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
- let name = pkgname(build, "rust-std");
- build.info(&format!("Dist std stage{} ({} -> {})", compiler.stage, &compiler.host, target));
+ let name = pkgname(builder, "rust-std");
+ builder.info(&format!("Dist std stage{} ({} -> {})",
+ compiler.stage, &compiler.host, target));
// The only true set of target libraries came from the build triple, so
// let's reduce redundant work by only producing archives from that host.
- if compiler.host != build.build {
- build.info(&format!("\tskipping, not a build host"));
- return distdir(build).join(format!("{}-{}.tar.gz", name, target));
+ if compiler.host != builder.config.build {
+ builder.info(&format!("\tskipping, not a build host"));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, target));
}
// We want to package up as many target libraries as possible
// for the `rust-std` package, so if this is a host target we
// depend on librustc and otherwise we just depend on libtest.
- if build.hosts.iter().any(|t| t == target) {
+ if builder.hosts.iter().any(|t| t == target) {
builder.ensure(compile::Rustc { compiler, target });
} else {
- if build.no_std(target) == Some(true) {
+ if builder.no_std(target) == Some(true) {
// the `test` doesn't compile for no-std targets
builder.ensure(compile::Std { compiler, target });
} else {
}
}
- let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, target));
let _ = fs::remove_dir_all(&image);
let dst = image.join("lib/rustlib").join(target);
t!(fs::create_dir_all(&dst));
let mut src = builder.sysroot_libdir(compiler, target).to_path_buf();
src.pop(); // Remove the trailing /lib folder from the sysroot_libdir
- build.cp_filtered(&src, &dst, &|path| {
+ builder.cp_filtered(&src, &dst, &|path| {
let name = path.file_name().and_then(|s| s.to_str());
- name != Some(build.config.rust_codegen_backends_dir.as_str()) &&
+ name != Some(builder.config.rust_codegen_backends_dir.as_str()) &&
name != Some("bin")
});
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=std-is-standing-at-the-ready.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-std-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
- build.run(&mut cmd);
- build.remove_dir(&image);
- distdir(build).join(format!("{}-{}.tar.gz", name, target))
+ builder.run(&mut cmd);
+ builder.remove_dir(&image);
+ distdir(builder).join(format!("{}-{}.tar.gz", name, target))
}
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("analysis").default_condition(builder.build.config.extended)
+ run.path("analysis").default_condition(builder.config.extended)
}
fn make_run(run: RunConfig) {
run.builder.ensure(Analysis {
- compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
});
}
/// Creates a tarball of save-analysis metadata, if available.
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
- assert!(build.config.extended);
- build.info(&format!("Dist analysis"));
- let name = pkgname(build, "rust-analysis");
+ assert!(builder.config.extended);
+ builder.info(&format!("Dist analysis"));
+ let name = pkgname(builder, "rust-analysis");
- if &compiler.host != build.build {
- build.info(&format!("\tskipping, not a build host"));
- return distdir(build).join(format!("{}-{}.tar.gz", name, target));
+ if &compiler.host != builder.config.build {
+ builder.info(&format!("\tskipping, not a build host"));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, target));
}
builder.ensure(Std { compiler, target });
// Package save-analysis from stage1 if not doing a full bootstrap, as the
// stage2 artifacts is simply copied from stage1 in that case.
- let compiler = if build.force_use_stage1(compiler, target) {
+ let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler.clone()
};
- let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, target));
- let src = build.stage_out(compiler, Mode::Libstd)
- .join(target).join(build.cargo_dir()).join("deps");
+ let src = builder.stage_out(compiler, Mode::Libstd)
+ .join(target).join(builder.cargo_dir()).join("deps");
let image_src = src.join("save-analysis");
let dst = image.join("lib/rustlib").join(target).join("analysis");
t!(fs::create_dir_all(&dst));
- build.info(&format!("image_src: {:?}, dst: {:?}", image_src, dst));
- build.cp_r(&image_src, &dst);
+ builder.info(&format!("image_src: {:?}, dst: {:?}", image_src, dst));
+ builder.cp_r(&image_src, &dst);
let mut cmd = rust_installer(builder);
cmd.arg("generate")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=save-analysis-saved.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-analysis-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
- build.run(&mut cmd);
- build.remove_dir(&image);
- distdir(build).join(format!("{}-{}.tar.gz", name, target))
+ builder.run(&mut cmd);
+ builder.remove_dir(&image);
+ distdir(builder).join(format!("{}-{}.tar.gz", name, target))
}
}
-fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) {
+fn copy_src_dirs(builder: &Builder, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) {
fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool {
let spath = match path.to_str() {
Some(path) => path,
for item in src_dirs {
let dst = &dst_dir.join(item);
t!(fs::create_dir_all(dst));
- build.cp_filtered(&build.src.join(item), dst, &|path| filter_fn(exclude_dirs, item, path));
+ builder.cp_filtered(
+ &builder.src.join(item), dst, &|path| filter_fn(exclude_dirs, item, path));
}
}
/// Creates the `rust-src` installer component
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
- build.info(&format!("Dist src"));
+ builder.info(&format!("Dist src"));
- let name = pkgname(build, "rust-src");
- let image = tmpdir(build).join(format!("{}-image", name));
+ let name = pkgname(builder, "rust-src");
+ let image = tmpdir(builder).join(format!("{}-image", name));
let _ = fs::remove_dir_all(&image);
let dst = image.join("lib/rustlib/src");
"src/jemalloc/test/unit",
];
- copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
+ copy_src_dirs(builder, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
for file in src_files.iter() {
- build.copy(&build.src.join(file), &dst_src.join(file));
+ builder.copy(&builder.src.join(file), &dst_src.join(file));
}
// Create source tarball in rust-installer format
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Awesome-Source.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg(format!("--package-name={}", name))
.arg("--component-name=rust-src")
.arg("--legacy-manifest-dirs=rustlib,cargo");
- build.run(&mut cmd);
+ builder.run(&mut cmd);
- build.remove_dir(&image);
- distdir(build).join(&format!("{}.tar.gz", name))
+ builder.remove_dir(&image);
+ distdir(builder).join(&format!("{}.tar.gz", name))
}
}
/// Creates the plain source tarball
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
- build.info(&format!("Create plain source tarball"));
+ builder.info(&format!("Create plain source tarball"));
// Make sure that the root folder of tarball has the correct name
- let plain_name = format!("{}-src", pkgname(build, "rustc"));
- let plain_dst_src = tmpdir(build).join(&plain_name);
+ let plain_name = format!("{}-src", pkgname(builder, "rustc"));
+ let plain_dst_src = tmpdir(builder).join(&plain_name);
let _ = fs::remove_dir_all(&plain_dst_src);
t!(fs::create_dir_all(&plain_dst_src));
"src",
];
- copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src);
+ copy_src_dirs(builder, &src_dirs[..], &[], &plain_dst_src);
// Copy the files normally
for item in &src_files {
- build.copy(&build.src.join(item), &plain_dst_src.join(item));
+ builder.copy(&builder.src.join(item), &plain_dst_src.join(item));
}
// Create the version file
- build.create(&plain_dst_src.join("version"), &build.rust_version());
- if let Some(sha) = build.rust_sha() {
- build.create(&plain_dst_src.join("git-commit-hash"), &sha);
+ builder.create(&plain_dst_src.join("version"), &builder.rust_version());
+ if let Some(sha) = builder.rust_sha() {
+ builder.create(&plain_dst_src.join("git-commit-hash"), &sha);
}
// If we're building from git sources, we need to vendor a complete distribution.
- if build.rust_info.is_git() {
+ if builder.rust_info.is_git() {
// Get cargo-vendor installed, if it isn't already.
let mut has_cargo_vendor = false;
- let mut cmd = Command::new(&build.initial_cargo);
+ let mut cmd = Command::new(&builder.initial_cargo);
for line in output(cmd.arg("install").arg("--list")).lines() {
has_cargo_vendor |= line.starts_with("cargo-vendor ");
}
if !has_cargo_vendor {
- let mut cmd = Command::new(&build.initial_cargo);
+ let mut cmd = Command::new(&builder.initial_cargo);
cmd.arg("install")
.arg("--force")
.arg("--debug")
.arg("--vers").arg(CARGO_VENDOR_VERSION)
.arg("cargo-vendor")
- .env("RUSTC", &build.initial_rustc);
- if let Some(dir) = build.openssl_install_dir(build.config.build) {
+ .env("RUSTC", &builder.initial_rustc);
+ if let Some(dir) = builder.openssl_install_dir(builder.config.build) {
builder.ensure(native::Openssl {
- target: build.config.build,
+ target: builder.config.build,
});
cmd.env("OPENSSL_DIR", dir);
}
- build.run(&mut cmd);
+ builder.run(&mut cmd);
}
// Vendor all Cargo dependencies
- let mut cmd = Command::new(&build.initial_cargo);
+ let mut cmd = Command::new(&builder.initial_cargo);
cmd.arg("vendor")
.current_dir(&plain_dst_src.join("src"));
- build.run(&mut cmd);
+ builder.run(&mut cmd);
}
// Create plain source tarball
- let plain_name = format!("rustc-{}-src", build.rust_package_vers());
- let mut tarball = distdir(build).join(&format!("{}.tar.gz", plain_name));
+ let plain_name = format!("rustc-{}-src", builder.rust_package_vers());
+ let mut tarball = distdir(builder).join(&format!("{}.tar.gz", plain_name));
tarball.set_extension(""); // strip .gz
tarball.set_extension(""); // strip .tar
if let Some(dir) = tarball.parent() {
- build.create_dir(&dir);
+ builder.create_dir(&dir);
}
- build.info(&format!("running installer"));
+ builder.info(&format!("running installer"));
let mut cmd = rust_installer(builder);
cmd.arg("tarball")
.arg("--input").arg(&plain_name)
.arg("--output").arg(&tarball)
.arg("--work-dir=.")
- .current_dir(tmpdir(build));
- build.run(&mut cmd);
- distdir(build).join(&format!("{}.tar.gz", plain_name))
+ .current_dir(tmpdir(builder));
+ builder.run(&mut cmd);
+ distdir(builder).join(&format!("{}.tar.gz", plain_name))
}
}
}
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
let stage = self.stage;
let target = self.target;
- build.info(&format!("Dist cargo stage{} ({})", stage, target));
- let src = build.src.join("src/tools/cargo");
+ builder.info(&format!("Dist cargo stage{} ({})", stage, target));
+ let src = builder.src.join("src/tools/cargo");
let etc = src.join("src/etc");
- let release_num = build.release_num("cargo");
- let name = pkgname(build, "cargo");
- let version = builder.cargo_info.version(build, &release_num);
+ let release_num = builder.release_num("cargo");
+ let name = pkgname(builder, "cargo");
+ let version = builder.cargo_info.version(builder, &release_num);
- let tmp = tmpdir(build);
+ let tmp = tmpdir(builder);
let image = tmp.join("cargo-image");
drop(fs::remove_dir_all(&image));
- build.create_dir(&image);
+ builder.create_dir(&image);
// Prepare the image directory
- build.create_dir(&image.join("share/zsh/site-functions"));
- build.create_dir(&image.join("etc/bash_completion.d"));
+ builder.create_dir(&image.join("share/zsh/site-functions"));
+ builder.create_dir(&image.join("etc/bash_completion.d"));
let cargo = builder.ensure(tool::Cargo {
- compiler: builder.compiler(stage, build.build),
+ compiler: builder.compiler(stage, builder.config.build),
target
});
- build.install(&cargo, &image.join("bin"), 0o755);
+ builder.install(&cargo, &image.join("bin"), 0o755);
for man in t!(etc.join("man").read_dir()) {
let man = t!(man);
- build.install(&man.path(), &image.join("share/man/man1"), 0o644);
+ builder.install(&man.path(), &image.join("share/man/man1"), 0o644);
}
- build.install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
- build.copy(&etc.join("cargo.bashcomp.sh"),
+ builder.install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
+ builder.copy(&etc.join("cargo.bashcomp.sh"),
&image.join("etc/bash_completion.d/cargo"));
let doc = image.join("share/doc/cargo");
- build.install(&src.join("README.md"), &doc, 0o644);
- build.install(&src.join("LICENSE-MIT"), &doc, 0o644);
- build.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
- build.install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
+ builder.install(&src.join("README.md"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-MIT"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
// Prepare the overlay
let overlay = tmp.join("cargo-overlay");
drop(fs::remove_dir_all(&overlay));
- build.create_dir(&overlay);
- build.install(&src.join("README.md"), &overlay, 0o644);
- build.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
- build.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
- build.install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
- build.create(&overlay.join("version"), &version);
+ builder.create_dir(&overlay);
+ builder.install(&src.join("README.md"), &overlay, 0o644);
+ builder.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+ builder.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+ builder.install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
+ builder.create(&overlay.join("version"), &version);
// Generate the installer tarball
let mut cmd = rust_installer(builder);
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--component-name=cargo")
.arg("--legacy-manifest-dirs=rustlib,cargo");
- build.run(&mut cmd);
- distdir(build).join(format!("{}-{}.tar.gz", name, target))
+ builder.run(&mut cmd);
+ distdir(builder).join(format!("{}-{}.tar.gz", name, target))
}
}
}
fn run(self, builder: &Builder) -> Option<PathBuf> {
- let build = builder.build;
let stage = self.stage;
let target = self.target;
- assert!(build.config.extended);
+ assert!(builder.config.extended);
- build.info(&format!("Dist RLS stage{} ({})", stage, target));
- let src = build.src.join("src/tools/rls");
- let release_num = build.release_num("rls");
- let name = pkgname(build, "rls");
- let version = build.rls_info.version(build, &release_num);
+ builder.info(&format!("Dist RLS stage{} ({})", stage, target));
+ let src = builder.src.join("src/tools/rls");
+ let release_num = builder.release_num("rls");
+ let name = pkgname(builder, "rls");
+ let version = builder.rls_info.version(builder, &release_num);
- let tmp = tmpdir(build);
+ let tmp = tmpdir(builder);
let image = tmp.join("rls-image");
drop(fs::remove_dir_all(&image));
t!(fs::create_dir_all(&image));
// We expect RLS to build, because we've exited this step above if tool
// state for RLS isn't testing.
let rls = builder.ensure(tool::Rls {
- compiler: builder.compiler(stage, build.build),
+ compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
}).or_else(|| { println!("Unable to build RLS, skipping dist"); None })?;
- build.install(&rls, &image.join("bin"), 0o755);
+ builder.install(&rls, &image.join("bin"), 0o755);
let doc = image.join("share/doc/rls");
- build.install(&src.join("README.md"), &doc, 0o644);
- build.install(&src.join("LICENSE-MIT"), &doc, 0o644);
- build.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+ builder.install(&src.join("README.md"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-MIT"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
// Prepare the overlay
let overlay = tmp.join("rls-overlay");
drop(fs::remove_dir_all(&overlay));
t!(fs::create_dir_all(&overlay));
- build.install(&src.join("README.md"), &overlay, 0o644);
- build.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
- build.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
- build.create(&overlay.join("version"), &version);
+ builder.install(&src.join("README.md"), &overlay, 0o644);
+ builder.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+ builder.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+ builder.create(&overlay.join("version"), &version);
// Generate the installer tarball
let mut cmd = rust_installer(builder);
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=RLS-ready-to-serve.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=rls-preview");
- build.run(&mut cmd);
- Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
+ builder.run(&mut cmd);
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
}
}
}
fn run(self, builder: &Builder) -> Option<PathBuf> {
- let build = builder.build;
let stage = self.stage;
let target = self.target;
- build.info(&format!("Dist Rustfmt stage{} ({})", stage, target));
- let src = build.src.join("src/tools/rustfmt");
- let release_num = build.release_num("rustfmt");
- let name = pkgname(build, "rustfmt");
- let version = build.rustfmt_info.version(build, &release_num);
+ builder.info(&format!("Dist Rustfmt stage{} ({})", stage, target));
+ let src = builder.src.join("src/tools/rustfmt");
+ let release_num = builder.release_num("rustfmt");
+ let name = pkgname(builder, "rustfmt");
+ let version = builder.rustfmt_info.version(builder, &release_num);
- let tmp = tmpdir(build);
+ let tmp = tmpdir(builder);
let image = tmp.join("rustfmt-image");
drop(fs::remove_dir_all(&image));
- build.create_dir(&image);
+ builder.create_dir(&image);
// Prepare the image directory
let rustfmt = builder.ensure(tool::Rustfmt {
- compiler: builder.compiler(stage, build.build),
+ compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
}).or_else(|| { println!("Unable to build Rustfmt, skipping dist"); None })?;
let cargofmt = builder.ensure(tool::Cargofmt {
- compiler: builder.compiler(stage, build.build),
+ compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
}).or_else(|| { println!("Unable to build Cargofmt, skipping dist"); None })?;
- build.install(&rustfmt, &image.join("bin"), 0o755);
- build.install(&cargofmt, &image.join("bin"), 0o755);
+ builder.install(&rustfmt, &image.join("bin"), 0o755);
+ builder.install(&cargofmt, &image.join("bin"), 0o755);
let doc = image.join("share/doc/rustfmt");
- build.install(&src.join("README.md"), &doc, 0o644);
- build.install(&src.join("LICENSE-MIT"), &doc, 0o644);
- build.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+ builder.install(&src.join("README.md"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-MIT"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
// Prepare the overlay
let overlay = tmp.join("rustfmt-overlay");
drop(fs::remove_dir_all(&overlay));
- build.create_dir(&overlay);
- build.install(&src.join("README.md"), &overlay, 0o644);
- build.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
- build.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
- build.create(&overlay.join("version"), &version);
+ builder.create_dir(&overlay);
+ builder.install(&src.join("README.md"), &overlay, 0o644);
+ builder.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+ builder.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+ builder.create(&overlay.join("version"), &version);
// Generate the installer tarball
let mut cmd = rust_installer(builder);
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=rustfmt-ready-to-fmt.")
.arg("--image-dir").arg(&image)
- .arg("--work-dir").arg(&tmpdir(build))
- .arg("--output-dir").arg(&distdir(build))
+ .arg("--work-dir").arg(&tmpdir(builder))
+ .arg("--output-dir").arg(&distdir(builder))
.arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=rustfmt-preview");
- build.run(&mut cmd);
- Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
+ builder.run(&mut cmd);
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
}
}
fn make_run(run: RunConfig) {
run.builder.ensure(Extended {
stage: run.builder.top_stage,
- host: run.builder.build.build,
+ host: run.builder.config.build,
target: run.target,
});
}
/// Creates a combined installer for the specified target in the provided stage.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let target = self.target;
- build.info(&format!("Dist extended stage{} ({})", stage, target));
+ builder.info(&format!("Dist extended stage{} ({})", stage, target));
let rustc_installer = builder.ensure(Rustc {
compiler: builder.compiler(stage, target),
target,
});
- let tmp = tmpdir(build);
+ let tmp = tmpdir(builder);
let overlay = tmp.join("extended-overlay");
- let etc = build.src.join("src/etc/installer");
+ let etc = builder.src.join("src/etc/installer");
let work = tmp.join("work");
let _ = fs::remove_dir_all(&overlay);
- build.install(&build.src.join("COPYRIGHT"), &overlay, 0o644);
- build.install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644);
- build.install(&build.src.join("LICENSE-MIT"), &overlay, 0o644);
- let version = build.rust_version();
- build.create(&overlay.join("version"), &version);
- if let Some(sha) = build.rust_sha() {
- build.create(&overlay.join("git-commit-hash"), &sha);
+ builder.install(&builder.src.join("COPYRIGHT"), &overlay, 0o644);
+ builder.install(&builder.src.join("LICENSE-APACHE"), &overlay, 0o644);
+ builder.install(&builder.src.join("LICENSE-MIT"), &overlay, 0o644);
+ let version = builder.rust_version();
+ builder.create(&overlay.join("version"), &version);
+ if let Some(sha) = builder.rust_sha() {
+ builder.create(&overlay.join("git-commit-hash"), &sha);
}
- build.install(&etc.join("README.md"), &overlay, 0o644);
+ builder.install(&etc.join("README.md"), &overlay, 0o644);
// When rust-std package split from rustc, we needed to ensure that during
// upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
tarballs.extend(rustfmt_installer.clone());
tarballs.push(analysis_installer);
tarballs.push(std_installer);
- if build.config.docs {
+ if builder.config.docs {
tarballs.push(docs_installer);
}
if target.contains("pc-windows-gnu") {
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
.arg("--work-dir").arg(&work)
- .arg("--output-dir").arg(&distdir(build))
- .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
+ .arg("--output-dir").arg(&distdir(builder))
+ .arg(format!("--package-name={}-{}", pkgname(builder, "rust"), target))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--input-tarballs").arg(input_tarballs)
.arg("--non-installed-overlay").arg(&overlay);
- build.run(&mut cmd);
+ builder.run(&mut cmd);
let mut license = String::new();
- license += &build.read(&build.src.join("COPYRIGHT"));
- license += &build.read(&build.src.join("LICENSE-APACHE"));
- license += &build.read(&build.src.join("LICENSE-MIT"));
+ license += &builder.read(&builder.src.join("COPYRIGHT"));
+ license += &builder.read(&builder.src.join("LICENSE-APACHE"));
+ license += &builder.read(&builder.src.join("LICENSE-MIT"));
license.push_str("\n");
license.push_str("\n");
.arg("--scripts").arg(pkg.join(component))
.arg("--nopayload")
.arg(pkg.join(component).with_extension("pkg"));
- build.run(&mut cmd);
+ builder.run(&mut cmd);
};
let prepare = |name: &str| {
- build.create_dir(&pkg.join(name));
- build.cp_r(&work.join(&format!("{}-{}", pkgname(build, name), target)),
+ builder.create_dir(&pkg.join(name));
+ builder.cp_r(&work.join(&format!("{}-{}", pkgname(builder, name), target)),
&pkg.join(name));
- build.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
+ builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
pkgbuild(name);
};
prepare("rustc");
}
// create an 'uninstall' package
- build.install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
+ builder.install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
pkgbuild("uninstall");
- build.create_dir(&pkg.join("res"));
- build.create(&pkg.join("res/LICENSE.txt"), &license);
- build.install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
+ builder.create_dir(&pkg.join("res"));
+ builder.create(&pkg.join("res/LICENSE.txt"), &license);
+ builder.install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
let mut cmd = Command::new("productbuild");
cmd.arg("--distribution").arg(xform(&etc.join("pkg/Distribution.xml")))
.arg("--resources").arg(pkg.join("res"))
- .arg(distdir(build).join(format!("{}-{}.pkg",
- pkgname(build, "rust"),
+ .arg(distdir(builder).join(format!("{}-{}.pkg",
+ pkgname(builder, "rust"),
target)))
.arg("--package-path").arg(&pkg);
- build.run(&mut cmd);
+ builder.run(&mut cmd);
}
if target.contains("windows") {
let _ = fs::remove_dir_all(&exe);
let prepare = |name: &str| {
- build.create_dir(&exe.join(name));
+ builder.create_dir(&exe.join(name));
let dir = if name == "rust-std" || name == "rust-analysis" {
format!("{}-{}", name, target)
} else if name == "rls" {
} else {
name.to_string()
};
- build.cp_r(&work.join(&format!("{}-{}", pkgname(build, name), target))
+ builder.cp_r(&work.join(&format!("{}-{}", pkgname(builder, name), target))
.join(dir),
&exe.join(name));
- build.remove(&exe.join(name).join("manifest.in"));
+ builder.remove(&exe.join(name).join("manifest.in"));
};
prepare("rustc");
prepare("cargo");
prepare("rust-mingw");
}
- build.install(&xform(&etc.join("exe/rust.iss")), &exe, 0o644);
- build.install(&etc.join("exe/modpath.iss"), &exe, 0o644);
- build.install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
- build.install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
- build.create(&exe.join("LICENSE.txt"), &license);
+ builder.install(&xform(&etc.join("exe/rust.iss")), &exe, 0o644);
+ builder.install(&etc.join("exe/modpath.iss"), &exe, 0o644);
+ builder.install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
+ builder.install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
+ builder.create(&exe.join("LICENSE.txt"), &license);
// Generate exe installer
let mut cmd = Command::new("iscc");
if target.contains("windows-gnu") {
cmd.arg("/dMINGW");
}
- add_env(build, &mut cmd, target);
- build.run(&mut cmd);
- build.install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)),
- &distdir(build),
+ add_env(builder, &mut cmd, target);
+ builder.run(&mut cmd);
+ builder.install(&exe.join(format!("{}-{}.exe", pkgname(builder, "rust"), target)),
+ &distdir(builder),
0o755);
// Generate msi installer
let light = wix.join("bin/light.exe");
let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
- build.run(Command::new(&heat)
+ builder.run(Command::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rustc")
.arg("-dr").arg("Rustc")
.arg("-var").arg("var.RustcDir")
.arg("-out").arg(exe.join("RustcGroup.wxs")));
- build.run(Command::new(&heat)
+ builder.run(Command::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-docs")
.arg("-var").arg("var.DocsDir")
.arg("-out").arg(exe.join("DocsGroup.wxs"))
.arg("-t").arg(etc.join("msi/squash-components.xsl")));
- build.run(Command::new(&heat)
+ builder.run(Command::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("cargo")
.arg("-var").arg("var.CargoDir")
.arg("-out").arg(exe.join("CargoGroup.wxs"))
.arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
- build.run(Command::new(&heat)
+ builder.run(Command::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-std")
.arg("-var").arg("var.StdDir")
.arg("-out").arg(exe.join("StdGroup.wxs")));
if rls_installer.is_some() {
- build.run(Command::new(&heat)
+ builder.run(Command::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rls")
.arg("-out").arg(exe.join("RlsGroup.wxs"))
.arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
}
- build.run(Command::new(&heat)
+ builder.run(Command::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-analysis")
.arg("-out").arg(exe.join("AnalysisGroup.wxs"))
.arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
if target.contains("windows-gnu") {
- build.run(Command::new(&heat)
+ builder.run(Command::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-mingw")
.arg("-arch").arg(&arch)
.arg("-out").arg(&output)
.arg(&input);
- add_env(build, &mut cmd, target);
+ add_env(builder, &mut cmd, target);
if rls_installer.is_some() {
cmd.arg("-dRlsDir=rls");
if target.contains("windows-gnu") {
cmd.arg("-dGccDir=rust-mingw");
}
- build.run(&mut cmd);
+ builder.run(&mut cmd);
};
candle(&xform(&etc.join("msi/rust.wxs")));
candle(&etc.join("msi/ui.wxs"));
candle("GccGroup.wxs".as_ref());
}
- build.create(&exe.join("LICENSE.rtf"), &rtf);
- build.install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
- build.install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
+ builder.create(&exe.join("LICENSE.rtf"), &rtf);
+ builder.install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
+ builder.install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
- let filename = format!("{}-{}.msi", pkgname(build, "rust"), target);
+ let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target);
let mut cmd = Command::new(&light);
cmd.arg("-nologo")
.arg("-ext").arg("WixUIExtension")
// ICE57 wrongly complains about the shortcuts
cmd.arg("-sice:ICE57");
- build.run(&mut cmd);
+ builder.run(&mut cmd);
- if !build.config.dry_run {
- t!(fs::rename(exe.join(&filename), distdir(build).join(&filename)));
+ if !builder.config.dry_run {
+ t!(fs::rename(exe.join(&filename), distdir(builder).join(&filename)));
}
}
}
}
-fn add_env(build: &Build, cmd: &mut Command, target: Interned<String>) {
+fn add_env(builder: &Builder, cmd: &mut Command, target: Interned<String>) {
let mut parts = channel::CFG_RELEASE_NUM.split('.');
- cmd.env("CFG_RELEASE_INFO", build.rust_version())
+ cmd.env("CFG_RELEASE_INFO", builder.rust_version())
.env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM)
- .env("CFG_RELEASE", build.rust_release())
+ .env("CFG_RELEASE", builder.rust_release())
.env("CFG_VER_MAJOR", parts.next().unwrap())
.env("CFG_VER_MINOR", parts.next().unwrap())
.env("CFG_VER_PATCH", parts.next().unwrap())
.env("CFG_VER_BUILD", "0") // just needed to build
- .env("CFG_PACKAGE_VERS", build.rust_package_vers())
- .env("CFG_PACKAGE_NAME", pkgname(build, "rust"))
+ .env("CFG_PACKAGE_VERS", builder.rust_package_vers())
+ .env("CFG_PACKAGE_NAME", pkgname(builder, "rust"))
.env("CFG_BUILD", target)
- .env("CFG_CHANNEL", &build.config.channel);
+ .env("CFG_CHANNEL", &builder.config.channel);
if target.contains("windows-gnu") {
cmd.env("CFG_MINGW", "1")
}
fn run(self, builder: &Builder) {
- let build = builder.build;
let mut cmd = builder.tool_cmd(Tool::BuildManifest);
- if build.config.dry_run {
+ if builder.config.dry_run {
return;
}
- let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
+ let sign = builder.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
});
- let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
+ let addr = builder.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
});
- let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
+ let file = builder.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
});
let mut pass = String::new();
let today = output(Command::new("date").arg("+%Y-%m-%d"));
cmd.arg(sign);
- cmd.arg(distdir(build));
+ cmd.arg(distdir(builder));
cmd.arg(today.trim());
- cmd.arg(build.rust_package_vers());
- cmd.arg(build.package_vers(&build.release_num("cargo")));
- cmd.arg(build.package_vers(&build.release_num("rls")));
- cmd.arg(build.package_vers(&build.release_num("rustfmt")));
+ cmd.arg(builder.rust_package_vers());
+ cmd.arg(builder.package_vers(&builder.release_num("cargo")));
+ cmd.arg(builder.package_vers(&builder.release_num("rls")));
+ cmd.arg(builder.package_vers(&builder.release_num("rustfmt")));
cmd.arg(addr);
- build.create_dir(&distdir(build));
+ builder.create_dir(&distdir(builder));
let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Documentation generation for rustbuild.
+//! Documentation generation for rustbuild.
//!
//! This module implements generation for all bits and pieces of documentation
//! for the Rust project. This notably includes suites like the rust book, the
use std::io;
use std::path::{PathBuf, Path};
-use {Build, Mode};
+use Mode;
use build_helper::up_to_date;
use util::symlink_dir;
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path($path).default_condition(builder.build.config.docs)
+ run.path($path).default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
/// This will not actually generate any documentation if the documentation has
/// already been generated.
fn run(self, builder: &Builder) {
- let src = builder.build.src.join("src/doc");
+ let src = builder.src.join("src/doc");
builder.ensure(RustbookSrc {
target: self.target,
name: self.name,
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("src/doc/unstable-book").default_condition(builder.build.config.docs)
+ run.path("src/doc/unstable-book").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
builder.ensure(RustbookSrc {
target: self.target,
name: INTERNER.intern_str("unstable-book"),
- src: builder.build.md_doc_out(self.target),
+ src: builder.md_doc_out(self.target),
})
}
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("src/tools/cargo/src/doc/book").default_condition(builder.build.config.docs)
+ run.path("src/tools/cargo/src/doc/book").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
}
fn run(self, builder: &Builder) {
- let build = builder.build;
-
let target = self.target;
let name = self.name;
- let src = build.src.join("src/tools/cargo/src/doc");
+ let src = builder.src.join("src/tools/cargo/src/doc");
- let out = build.doc_out(target);
+ let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
let out = out.join(name);
- build.info(&format!("Cargo Book ({}) - {}", target, name));
+ builder.info(&format!("Cargo Book ({}) - {}", target, name));
let _ = fs::remove_dir_all(&out);
- build.run(builder.tool_cmd(Tool::Rustbook)
+ builder.run(builder.tool_cmd(Tool::Rustbook)
.arg("build")
.arg(&src)
.arg("-d")
/// This will not actually generate any documentation if the documentation has
/// already been generated.
fn run(self, builder: &Builder) {
- let build = builder.build;
let target = self.target;
let name = self.name;
let src = self.src;
- let out = build.doc_out(target);
+ let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
let out = out.join(name);
if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
return
}
- build.info(&format!("Rustbook ({}) - {}", target, name));
+ builder.info(&format!("Rustbook ({}) - {}", target, name));
let _ = fs::remove_dir_all(&out);
- build.run(rustbook_cmd
+ builder.run(rustbook_cmd
.arg("build")
.arg(&src)
.arg("-d")
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("src/doc/book").default_condition(builder.build.config.docs)
+ run.path("src/doc/book").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
run.builder.ensure(TheBook {
- compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
name: "book",
});
/// * Index page
/// * Redirect pages
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
let name = self.name;
// build the index page
let index = format!("{}/index.md", name);
- build.info(&format!("Documenting book index ({})", target));
+ builder.info(&format!("Documenting book index ({})", target));
invoke_rustdoc(builder, compiler, target, &index);
// build the redirect pages
- build.info(&format!("Documenting book redirect pages ({})", target));
- for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
+ builder.info(&format!("Documenting book redirect pages ({})", target));
+ for file in t!(fs::read_dir(builder.src.join("src/doc/book/redirects"))) {
let file = t!(file);
let path = file.path();
let path = path.to_str().unwrap();
}
fn invoke_rustdoc(builder: &Builder, compiler: Compiler, target: Interned<String>, markdown: &str) {
- let build = builder.build;
- let out = build.doc_out(target);
+ let out = builder.doc_out(target);
- let path = build.src.join("src/doc").join(markdown);
+ let path = builder.src.join("src/doc").join(markdown);
- let favicon = build.src.join("src/doc/favicon.inc");
- let footer = build.src.join("src/doc/footer.inc");
+ let favicon = builder.src.join("src/doc/favicon.inc");
+ let footer = builder.src.join("src/doc/footer.inc");
let version_info = out.join("version_info.html");
let mut cmd = builder.rustdoc_cmd(compiler.host);
.arg("--markdown-css")
.arg("../rust.css");
- build.run(&mut cmd);
+ builder.run(&mut cmd);
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("src/doc").default_condition(builder.build.config.docs)
+ run.path("src/doc").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
run.builder.ensure(Standalone {
- compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
});
}
///
/// In the end, this is just a glorified wrapper around rustdoc!
fn run(self, builder: &Builder) {
- let build = builder.build;
let target = self.target;
let compiler = self.compiler;
- build.info(&format!("Documenting standalone ({})", target));
- let out = build.doc_out(target);
+ builder.info(&format!("Documenting standalone ({})", target));
+ let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
- let favicon = build.src.join("src/doc/favicon.inc");
- let footer = build.src.join("src/doc/footer.inc");
- let full_toc = build.src.join("src/doc/full-toc.inc");
- t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
+ let favicon = builder.src.join("src/doc/favicon.inc");
+ let footer = builder.src.join("src/doc/footer.inc");
+ let full_toc = builder.src.join("src/doc/full-toc.inc");
+ t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css")));
- let version_input = build.src.join("src/doc/version_info.html.template");
+ let version_input = builder.src.join("src/doc/version_info.html.template");
let version_info = out.join("version_info.html");
- if !build.config.dry_run && !up_to_date(&version_input, &version_info) {
+ if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
let mut info = String::new();
t!(t!(File::open(&version_input)).read_to_string(&mut info));
- let info = info.replace("VERSION", &build.rust_release())
- .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
- .replace("STAMP", build.rust_info.sha().unwrap_or(""));
+ let info = info.replace("VERSION", &builder.rust_release())
+ .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
+ .replace("STAMP", builder.rust_info.sha().unwrap_or(""));
t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
}
- for file in t!(fs::read_dir(build.src.join("src/doc"))) {
+ for file in t!(fs::read_dir(builder.src.join("src/doc"))) {
let file = t!(file);
let path = file.path();
let filename = path.file_name().unwrap().to_str().unwrap();
up_to_date(&favicon, &html) &&
up_to_date(&full_toc, &html) &&
up_to_date(&version_info, &html) &&
- (build.config.dry_run || up_to_date(&rustdoc, &html)) {
+ (builder.config.dry_run || up_to_date(&rustdoc, &html)) {
continue
}
} else {
cmd.arg("--markdown-css").arg("rust.css");
}
- build.run(&mut cmd);
+ builder.run(&mut cmd);
}
}
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.all_krates("std").default_condition(builder.build.config.docs)
+ run.all_krates("std").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
/// This will generate all documentation for the standard library and its
/// dependencies. This is largely just a wrapper around `cargo doc`.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let target = self.target;
- build.info(&format!("Documenting stage{} std ({})", stage, target));
- let out = build.doc_out(target);
+ builder.info(&format!("Documenting stage{} std ({})", stage, target));
+ let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
- let compiler = builder.compiler(stage, build.build);
+ let compiler = builder.compiler(stage, builder.config.build);
let rustdoc = builder.rustdoc(compiler.host);
- let compiler = if build.force_use_stage1(compiler, target) {
+ let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
};
builder.ensure(compile::Std { compiler, target });
- let out_dir = build.stage_out(compiler, Mode::Libstd)
+ let out_dir = builder.stage_out(compiler, Mode::Libstd)
.join(target).join("doc");
// Here what we're doing is creating a *symlink* (directory junction on
//
// This way rustdoc generates output directly into the output, and rustdoc
// will also directly handle merging.
- let my_out = build.crate_doc_out(target);
- build.clear_if_dirty(&my_out, &rustdoc);
- t!(symlink_dir_force(&build.config, &my_out, &out_dir));
+ let my_out = builder.crate_doc_out(target);
+ builder.clear_if_dirty(&my_out, &rustdoc);
+ t!(symlink_dir_force(&builder.config, &my_out, &out_dir));
let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "doc");
compile::std_cargo(builder, &compiler, target, &mut cargo);
t!(fs::create_dir_all(out_dir.join(krate)));
}
- build.run(&mut cargo);
- build.cp_r(&my_out, &out);
+ builder.run(&mut cargo);
+ builder.cp_r(&my_out, &out);
}
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.krate("test").default_condition(builder.build.config.docs)
+ run.krate("test").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
/// This will generate all documentation for libtest and its dependencies. This
/// is largely just a wrapper around `cargo doc`.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let target = self.target;
- build.info(&format!("Documenting stage{} test ({})", stage, target));
- let out = build.doc_out(target);
+ builder.info(&format!("Documenting stage{} test ({})", stage, target));
+ let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
- let compiler = builder.compiler(stage, build.build);
+ let compiler = builder.compiler(stage, builder.config.build);
let rustdoc = builder.rustdoc(compiler.host);
- let compiler = if build.force_use_stage1(compiler, target) {
+ let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
builder.ensure(Std { stage, target });
builder.ensure(compile::Test { compiler, target });
- let out_dir = build.stage_out(compiler, Mode::Libtest)
+ let out_dir = builder.stage_out(compiler, Mode::Libtest)
.join(target).join("doc");
// See docs in std above for why we symlink
- let my_out = build.crate_doc_out(target);
- build.clear_if_dirty(&my_out, &rustdoc);
+ let my_out = builder.crate_doc_out(target);
+ builder.clear_if_dirty(&my_out, &rustdoc);
t!(symlink_dir_force(&builder.config, &my_out, &out_dir));
let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "doc");
- compile::test_cargo(build, &compiler, target, &mut cargo);
+ compile::test_cargo(builder, &compiler, target, &mut cargo);
cargo.arg("--no-deps").arg("-p").arg("test");
- build.run(&mut cargo);
- build.cp_r(&my_out, &out);
+ builder.run(&mut cargo);
+ builder.cp_r(&my_out, &out);
}
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.krate("rustc-main").default_condition(builder.build.config.docs)
+ run.krate("rustc-main").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
/// here as we want to be able to keep it separate from the standard
/// documentation. This is largely just a wrapper around `cargo doc`.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let target = self.target;
- build.info(&format!("Documenting stage{} whitelisted compiler ({})", stage, target));
- let out = build.doc_out(target);
+ builder.info(&format!("Documenting stage{} whitelisted compiler ({})", stage, target));
+ let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
- let compiler = builder.compiler(stage, build.build);
+ let compiler = builder.compiler(stage, builder.config.build);
let rustdoc = builder.rustdoc(compiler.host);
- let compiler = if build.force_use_stage1(compiler, target) {
+ let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
builder.ensure(Std { stage, target });
builder.ensure(compile::Rustc { compiler, target });
- let out_dir = build.stage_out(compiler, Mode::Librustc)
+ let out_dir = builder.stage_out(compiler, Mode::Librustc)
.join(target).join("doc");
// See docs in std above for why we symlink
- let my_out = build.crate_doc_out(target);
- build.clear_if_dirty(&my_out, &rustdoc);
+ let my_out = builder.crate_doc_out(target);
+ builder.clear_if_dirty(&my_out, &rustdoc);
t!(symlink_dir_force(&builder.config, &my_out, &out_dir));
let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "doc");
- compile::rustc_cargo(build, &mut cargo);
+ compile::rustc_cargo(builder, &mut cargo);
// We don't want to build docs for internal compiler dependencies in this
// step (there is another step for that). Therefore, we whitelist the crates
cargo.arg("-p").arg(krate);
}
- build.run(&mut cargo);
- build.cp_r(&my_out, &out);
+ builder.run(&mut cargo);
+ builder.cp_r(&my_out, &out);
}
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.krate("rustc-main").default_condition(builder.build.config.docs)
+ run.krate("rustc-main").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
/// we do not merge it with the other documentation from std, test and
/// proc_macros. This is largely just a wrapper around `cargo doc`.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let target = self.target;
- build.info(&format!("Documenting stage{} compiler ({})", stage, target));
- let out = build.compiler_doc_out(target);
+ builder.info(&format!("Documenting stage{} compiler ({})", stage, target));
+ let out = builder.compiler_doc_out(target);
t!(fs::create_dir_all(&out));
- let compiler = builder.compiler(stage, build.build);
+ let compiler = builder.compiler(stage, builder.config.build);
let rustdoc = builder.rustdoc(compiler.host);
- let compiler = if build.force_use_stage1(compiler, target) {
+ let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
};
- if !build.config.compiler_docs {
- build.info(&format!("\tskipping - compiler docs disabled"));
+ if !builder.config.compiler_docs {
+ builder.info(&format!("\tskipping - compiler docs disabled"));
return;
}
builder.ensure(Std { stage, target });
builder.ensure(compile::Rustc { compiler, target });
- let out_dir = build.stage_out(compiler, Mode::Librustc)
+ let out_dir = builder.stage_out(compiler, Mode::Librustc)
.join(target).join("doc");
// We do not symlink to the same shared folder that already contains std library
// documentation from previous steps as we do not want to include that.
- build.clear_if_dirty(&out, &rustdoc);
+ builder.clear_if_dirty(&out, &rustdoc);
t!(symlink_dir_force(&builder.config, &out, &out_dir));
let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "doc");
cargo.env("RUSTDOCFLAGS", "--document-private-items");
- compile::rustc_cargo(build, &mut cargo);
+ compile::rustc_cargo(builder, &mut cargo);
// Only include compiler crates, no dependencies of those, such as `libc`.
cargo.arg("--no-deps");
let mut compiler_crates = HashSet::new();
for root_crate in &["rustc", "rustc_driver"] {
let interned_root_crate = INTERNER.intern_str(root_crate);
- find_compiler_crates(&build, &interned_root_crate, &mut compiler_crates);
+ find_compiler_crates(builder, &interned_root_crate, &mut compiler_crates);
}
for krate in &compiler_crates {
cargo.arg("-p").arg(krate);
}
- build.run(&mut cargo);
+ builder.run(&mut cargo);
}
}
fn find_compiler_crates(
- build: &Build,
+ builder: &Builder,
name: &Interned<String>,
crates: &mut HashSet<Interned<String>>
) {
crates.insert(*name);
// Look for dependencies.
- for dep in build.crates.get(name).unwrap().deps.iter() {
- if build.crates.get(dep).unwrap().is_local(build) {
- find_compiler_crates(build, dep, crates);
+ for dep in builder.crates.get(name).unwrap().deps.iter() {
+ if builder.crates.get(dep).unwrap().is_local(builder) {
+ find_compiler_crates(builder, dep, crates);
}
}
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("src/tools/error_index_generator").default_condition(builder.build.config.docs)
+ run.path("src/tools/error_index_generator").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
/// Generates the HTML rendered error-index by running the
/// `error_index_generator` tool.
fn run(self, builder: &Builder) {
- let build = builder.build;
let target = self.target;
- build.info(&format!("Documenting error index ({})", target));
- let out = build.doc_out(target);
+ builder.info(&format!("Documenting error index ({})", target));
+ let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
let mut index = builder.tool_cmd(Tool::ErrorIndex);
index.arg("html");
index.arg(out.join("error-index.html"));
// FIXME: shouldn't have to pass this env var
- index.env("CFG_BUILD", &build.build)
- .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir());
+ index.env("CFG_BUILD", &builder.config.build)
+ .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir());
- build.run(&mut index);
+ builder.run(&mut index);
}
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("src/tools/unstable-book-gen").default_condition(builder.build.config.docs)
+ run.path("src/tools/unstable-book-gen").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
}
fn run(self, builder: &Builder) {
- let build = builder.build;
let target = self.target;
builder.ensure(compile::Std {
- compiler: builder.compiler(builder.top_stage, build.build),
+ compiler: builder.compiler(builder.top_stage, builder.config.build),
target,
});
- build.info(&format!("Generating unstable book md files ({})", target));
- let out = build.md_doc_out(target).join("unstable-book");
- build.create_dir(&out);
- build.remove_dir(&out);
+ builder.info(&format!("Generating unstable book md files ({})", target));
+ let out = builder.md_doc_out(target).join("unstable-book");
+ builder.create_dir(&out);
+ builder.remove_dir(&out);
let mut cmd = builder.tool_cmd(Tool::UnstableBookGen);
- cmd.arg(build.src.join("src"));
+ cmd.arg(builder.src.join("src"));
cmd.arg(out);
- build.run(&mut cmd);
+ builder.run(&mut cmd);
}
}
stage: u32,
host: Option<Interned<String>>
) {
- let build = builder.build;
- build.info(&format!("Install {} stage{} ({:?})", package, stage, host));
+ builder.info(&format!("Install {} stage{} ({:?})", package, stage, host));
let prefix_default = PathBuf::from("/usr/local");
let sysconfdir_default = PathBuf::from("/etc");
let bindir_default = PathBuf::from("bin");
let libdir_default = PathBuf::from("lib");
let mandir_default = datadir_default.join("man");
- let prefix = build.config.prefix.as_ref().map_or(prefix_default, |p| {
+ let prefix = builder.config.prefix.as_ref().map_or(prefix_default, |p| {
fs::canonicalize(p).expect(&format!("could not canonicalize {}", p.display()))
});
- let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
- let datadir = build.config.datadir.as_ref().unwrap_or(&datadir_default);
- let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
- let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
- let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
- let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
+ let sysconfdir = builder.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
+ let datadir = builder.config.datadir.as_ref().unwrap_or(&datadir_default);
+ let docdir = builder.config.docdir.as_ref().unwrap_or(&docdir_default);
+ let bindir = builder.config.bindir.as_ref().unwrap_or(&bindir_default);
+ let libdir = builder.config.libdir.as_ref().unwrap_or(&libdir_default);
+ let mandir = builder.config.mandir.as_ref().unwrap_or(&mandir_default);
let sysconfdir = prefix.join(sysconfdir);
let datadir = prefix.join(datadir);
let libdir = add_destdir(&libdir, &destdir);
let mandir = add_destdir(&mandir, &destdir);
- let empty_dir = build.out.join("tmp/empty_dir");
+ let empty_dir = builder.out.join("tmp/empty_dir");
t!(fs::create_dir_all(&empty_dir));
let package_name = if let Some(host) = host {
- format!("{}-{}", pkgname(build, name), host)
+ format!("{}-{}", pkgname(builder, name), host)
} else {
- pkgname(build, name)
+ pkgname(builder, name)
};
let mut cmd = Command::new("sh");
cmd.current_dir(&empty_dir)
- .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
+ .arg(sanitize_sh(&tmpdir(builder).join(&package_name).join("install.sh")))
.arg(format!("--prefix={}", sanitize_sh(&prefix)))
.arg(format!("--sysconfdir={}", sanitize_sh(&sysconfdir)))
.arg(format!("--datadir={}", sanitize_sh(&datadir)))
.arg(format!("--libdir={}", sanitize_sh(&libdir)))
.arg(format!("--mandir={}", sanitize_sh(&mandir)))
.arg("--disable-ldconfig");
- build.run(&mut cmd);
+ builder.run(&mut cmd);
t!(fs::remove_dir_all(&empty_dir));
}
run.builder.ensure($name {
stage: run.builder.top_stage,
target: run.target,
- host: run.builder.build.build,
+ host: run.builder.config.build,
});
}
install_docs(builder, self.stage, self.target);
};
Std, "src/libstd", true, only_hosts: true, {
- for target in &builder.build.targets {
+ for target in &builder.targets {
builder.ensure(dist::Std {
compiler: builder.compiler(self.stage, self.host),
target: *target
use cmake;
use cc;
-use Build;
use util::{self, exe};
use build_helper::up_to_date;
use builder::{Builder, RunConfig, ShouldRun, Step};
/// Compile LLVM for `target`.
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
let target = self.target;
let emscripten = self.emscripten;
// If we're using a custom LLVM bail out here, but we can only use a
// custom LLVM for the build triple.
if !self.emscripten {
- if let Some(config) = build.config.target_config.get(&target) {
+ if let Some(config) = builder.config.target_config.get(&target) {
if let Some(ref s) = config.llvm_config {
- check_llvm_version(build, s);
+ check_llvm_version(builder, s);
return s.to_path_buf()
}
}
}
- let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
+ let rebuild_trigger = builder.src.join("src/rustllvm/llvm-rebuild-trigger");
let mut rebuild_trigger_contents = String::new();
t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
let (out_dir, llvm_config_ret_dir) = if emscripten {
- let dir = build.emscripten_llvm_out(target);
+ let dir = builder.emscripten_llvm_out(target);
let config_dir = dir.join("bin");
(dir, config_dir)
} else {
- let mut dir = build.llvm_out(build.config.build);
- if !build.config.build.contains("msvc") || build.config.ninja {
+ let mut dir = builder.llvm_out(builder.config.build);
+ if !builder.config.build.contains("msvc") || builder.config.ninja {
dir.push("build");
}
- (build.llvm_out(target), dir.join("bin"))
+ (builder.llvm_out(target), dir.join("bin"))
};
let done_stamp = out_dir.join("llvm-finished-building");
let build_llvm_config = llvm_config_ret_dir
- .join(exe("llvm-config", &*build.config.build));
+ .join(exe("llvm-config", &*builder.config.build));
if done_stamp.exists() {
let mut done_contents = String::new();
t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
}
}
- let _folder = build.fold_output(|| "llvm");
+ let _folder = builder.fold_output(|| "llvm");
let descriptor = if emscripten { "Emscripten " } else { "" };
- build.info(&format!("Building {}LLVM for {}", descriptor, target));
- let _time = util::timeit(&build);
+ builder.info(&format!("Building {}LLVM for {}", descriptor, target));
+ let _time = util::timeit(&builder);
t!(fs::create_dir_all(&out_dir));
// http://llvm.org/docs/CMake.html
let root = if self.emscripten { "src/llvm-emscripten" } else { "src/llvm" };
- let mut cfg = cmake::Config::new(build.src.join(root));
+ let mut cfg = cmake::Config::new(builder.src.join(root));
- let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
+ let profile = match (builder.config.llvm_optimize, builder.config.llvm_release_debuginfo) {
(false, _) => "Debug",
(true, false) => "Release",
(true, true) => "RelWithDebInfo",
let llvm_targets = if self.emscripten {
"JSBackend"
} else {
- match build.config.llvm_targets {
+ match builder.config.llvm_targets {
Some(ref s) => s,
None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;MSP430;Sparc;NVPTX;Hexagon",
}
let llvm_exp_targets = if self.emscripten {
""
} else {
- &build.config.llvm_experimental_targets[..]
+ &builder.config.llvm_experimental_targets[..]
};
- let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
+ let assertions = if builder.config.llvm_assertions {"ON"} else {"OFF"};
cfg.out_dir(&out_dir)
.profile(profile)
.define("WITH_POLLY", "OFF")
.define("LLVM_ENABLE_TERMINFO", "OFF")
.define("LLVM_ENABLE_LIBEDIT", "OFF")
- .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
+ .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string())
.define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
.define("LLVM_DEFAULT_TARGET_TRIPLE", target);
cfg.define("LLVM_BUILD_32_BITS", "ON");
}
- if let Some(num_linkers) = build.config.llvm_link_jobs {
+ if let Some(num_linkers) = builder.config.llvm_link_jobs {
if num_linkers > 0 {
cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
}
}
// http://llvm.org/docs/HowToCrossCompileLLVM.html
- if target != build.build && !emscripten {
+ if target != builder.config.build && !emscripten {
builder.ensure(Llvm {
- target: build.build,
+ target: builder.config.build,
emscripten: false,
});
// FIXME: if the llvm root for the build triple is overridden then we
// should use llvm-tblgen from there, also should verify that it
// actually exists most of the time in normal installs of LLVM.
- let host = build.llvm_out(build.build).join("bin/llvm-tblgen");
+ let host = builder.llvm_out(builder.config.build).join("bin/llvm-tblgen");
cfg.define("CMAKE_CROSSCOMPILING", "True")
.define("LLVM_TABLEGEN", &host);
cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD");
}
- cfg.define("LLVM_NATIVE_BUILD", build.llvm_out(build.build).join("build"));
+ cfg.define("LLVM_NATIVE_BUILD", builder.llvm_out(builder.config.build).join("build"));
}
- configure_cmake(build, target, &mut cfg, false);
+ configure_cmake(builder, target, &mut cfg, false);
// FIXME: we don't actually need to build all LLVM tools and all LLVM
// libraries here, e.g. we just want a few components and a few
}
}
-fn check_llvm_version(build: &Build, llvm_config: &Path) {
- if !build.config.llvm_version_check {
+fn check_llvm_version(builder: &Builder, llvm_config: &Path) {
+ if !builder.config.llvm_version_check {
return
}
- if build.config.dry_run {
+ if builder.config.dry_run {
return;
}
panic!("\n\nbad LLVM version: {}, need >=3.9\n\n", version)
}
-fn configure_cmake(build: &Build,
+fn configure_cmake(builder: &Builder,
target: Interned<String>,
cfg: &mut cmake::Config,
building_dist_binaries: bool) {
- if build.config.ninja {
+ if builder.config.ninja {
cfg.generator("Ninja");
}
cfg.target(&target)
- .host(&build.config.build);
+ .host(&builder.config.build);
let sanitize_cc = |cc: &Path| {
if target.contains("msvc") {
// MSVC with CMake uses msbuild by default which doesn't respect these
// vars that we'd otherwise configure. In that case we just skip this
// entirely.
- if target.contains("msvc") && !build.config.ninja {
+ if target.contains("msvc") && !builder.config.ninja {
return
}
- let cc = build.cc(target);
- let cxx = build.cxx(target).unwrap();
+ let cc = builder.cc(target);
+ let cxx = builder.cxx(target).unwrap();
// Handle msvc + ninja + ccache specially (this is what the bots use)
if target.contains("msvc") &&
- build.config.ninja &&
- build.config.ccache.is_some() {
+ builder.config.ninja &&
+ builder.config.ccache.is_some() {
let mut cc = env::current_exe().expect("failed to get cwd");
cc.set_file_name("sccache-plus-cl.exe");
cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
.define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
cfg.env("SCCACHE_PATH",
- build.config.ccache.as_ref().unwrap())
+ builder.config.ccache.as_ref().unwrap())
.env("SCCACHE_TARGET", target);
    // If ccache is configured we inform the build a little differently how
// to invoke ccache while also invoking our compilers.
- } else if let Some(ref ccache) = build.config.ccache {
+ } else if let Some(ref ccache) = builder.config.ccache {
cfg.define("CMAKE_C_COMPILER", ccache)
.define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
.define("CMAKE_CXX_COMPILER", ccache)
.define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
}
- cfg.build_arg("-j").build_arg(build.jobs().to_string());
- cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
- let mut cxxflags = build.cflags(target).join(" ");
+ cfg.build_arg("-j").build_arg(builder.jobs().to_string());
+ cfg.define("CMAKE_C_FLAGS", builder.cflags(target).join(" "));
+ let mut cxxflags = builder.cflags(target).join(" ");
if building_dist_binaries {
- if build.config.llvm_static_stdcpp && !target.contains("windows") {
+ if builder.config.llvm_static_stdcpp && !target.contains("windows") {
cxxflags.push_str(" -static-libstdc++");
}
}
cfg.define("CMAKE_CXX_FLAGS", cxxflags);
- if let Some(ar) = build.ar(target) {
+ if let Some(ar) = builder.ar(target) {
if ar.is_absolute() {
// LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it
// tries to resolve this path in the LLVM build directory.
return PathBuf::from("lld-out-dir-test-gen");
}
let target = self.target;
- let build = builder.build;
let llvm_config = builder.ensure(Llvm {
target: self.target,
emscripten: false,
});
- let out_dir = build.lld_out(target);
+ let out_dir = builder.lld_out(target);
let done_stamp = out_dir.join("lld-finished-building");
if done_stamp.exists() {
return out_dir
}
- let _folder = build.fold_output(|| "lld");
- build.info(&format!("Building LLD for {}", target));
- let _time = util::timeit(&build);
+ let _folder = builder.fold_output(|| "lld");
+ builder.info(&format!("Building LLD for {}", target));
+ let _time = util::timeit(&builder);
t!(fs::create_dir_all(&out_dir));
- let mut cfg = cmake::Config::new(build.src.join("src/tools/lld"));
- configure_cmake(build, target, &mut cfg, true);
+ let mut cfg = cmake::Config::new(builder.src.join("src/tools/lld"));
+ configure_cmake(builder, target, &mut cfg, true);
cfg.out_dir(&out_dir)
.profile("Release")
if builder.config.dry_run {
return;
}
- let build = builder.build;
let target = self.target;
- let dst = build.test_helpers_out(target);
- let src = build.src.join("src/test/auxiliary/rust_test_helpers.c");
+ let dst = builder.test_helpers_out(target);
+ let src = builder.src.join("src/test/auxiliary/rust_test_helpers.c");
if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
return
}
- let _folder = build.fold_output(|| "build_test_helpers");
- build.info(&format!("Building test helpers"));
+ let _folder = builder.fold_output(|| "build_test_helpers");
+ builder.info(&format!("Building test helpers"));
t!(fs::create_dir_all(&dst));
let mut cfg = cc::Build::new();
// extra configuration, so inform gcc of these compilers. Note, though, that
// on MSVC we still need gcc's detection of env vars (ugh).
if !target.contains("msvc") {
- if let Some(ar) = build.ar(target) {
+ if let Some(ar) = builder.ar(target) {
cfg.archiver(ar);
}
- cfg.compiler(build.cc(target));
+ cfg.compiler(builder.cc(target));
}
cfg.cargo_metadata(false)
.out_dir(&dst)
.target(&target)
- .host(&build.build)
+ .host(&builder.config.build)
.opt_level(0)
.warnings(false)
.debug(false)
- .file(build.src.join("src/test/auxiliary/rust_test_helpers.c"))
+ .file(builder.src.join("src/test/auxiliary/rust_test_helpers.c"))
.compile("rust_test_helpers");
}
}
if builder.config.dry_run {
return;
}
- let build = builder.build;
let target = self.target;
- let out = match build.openssl_dir(target) {
+ let out = match builder.openssl_dir(target) {
Some(dir) => dir,
None => return,
};
}
// Ensure the hash is correct.
- let mut shasum = if target.contains("apple") || build.build.contains("netbsd") {
+ let mut shasum = if target.contains("apple") ||
+ builder.config.build.contains("netbsd") {
let mut cmd = Command::new("shasum");
cmd.arg("-a").arg("256");
cmd
t!(fs::rename(&tmp, &tarball));
}
let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
- let dst = build.openssl_install_dir(target).unwrap();
+ let dst = builder.openssl_install_dir(target).unwrap();
drop(fs::remove_dir_all(&obj));
drop(fs::remove_dir_all(&dst));
- build.run(Command::new("tar").arg("zxf").arg(&tarball).current_dir(&out));
+ builder.run(Command::new("tar").arg("zxf").arg(&tarball).current_dir(&out));
let mut configure = Command::new("perl");
configure.arg(obj.join("Configure"));
_ => panic!("don't know how to configure OpenSSL for {}", target),
};
configure.arg(os);
- configure.env("CC", build.cc(target));
- for flag in build.cflags(target) {
+ configure.env("CC", builder.cc(target));
+ for flag in builder.cflags(target) {
configure.arg(flag);
}
// There is no specific os target for android aarch64 or x86_64,
if target == "sparc64-unknown-netbsd" {
// Need -m64 to get assembly generated correctly for sparc64.
configure.arg("-m64");
- if build.build.contains("netbsd") {
+ if builder.config.build.contains("netbsd") {
// Disable sparc64 asm on NetBSD builders, it uses
// m4(1)'s -B flag, which NetBSD m4 does not support.
configure.arg("no-asm");
configure.arg("no-asm");
}
configure.current_dir(&obj);
- build.info(&format!("Configuring openssl for {}", target));
- build.run_quiet(&mut configure);
- build.info(&format!("Building openssl for {}", target));
- build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
- build.info(&format!("Installing openssl for {}", target));
- build.run_quiet(Command::new("make").arg("install").arg("-j1").current_dir(&obj));
+ builder.info(&format!("Configuring openssl for {}", target));
+ builder.run_quiet(&mut configure);
+ builder.info(&format!("Building openssl for {}", target));
+ builder.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
+ builder.info(&format!("Installing openssl for {}", target));
+ builder.run_quiet(Command::new("make").arg("install").arg("-j1").current_dir(&obj));
let mut f = t!(File::create(&stamp));
t!(f.write_all(OPENSSL_VERS.as_bytes()));
continue;
}
- cmd_finder.must_have(build.cc(*target));
- if let Some(ar) = build.ar(*target) {
- cmd_finder.must_have(ar);
+ if !build.config.dry_run {
+ cmd_finder.must_have(build.cc(*target));
+ if let Some(ar) = build.ar(*target) {
+ cmd_finder.must_have(ar);
+ }
}
}
for host in &build.hosts {
- cmd_finder.must_have(build.cxx(*host).unwrap());
+ if !build.config.dry_run {
+ cmd_finder.must_have(build.cxx(*host).unwrap());
+ }
// The msvc hosts don't use jemalloc, turn it off globally to
// avoid packaging the dummy liballoc_jemalloc on that platform.
use native;
use tool::{self, Tool};
use util::{self, dylib_path, dylib_path_var};
-use {Build, Mode};
+use Mode;
use toolstate::ToolState;
const ADB_TEST_DIR: &str = "/data/tmp/work";
}
}
-fn try_run(build: &Build, cmd: &mut Command) -> bool {
- if !build.fail_fast {
- if !build.try_run(cmd) {
- let mut failures = build.delayed_failures.borrow_mut();
+fn try_run(builder: &Builder, cmd: &mut Command) -> bool {
+ if !builder.fail_fast {
+ if !builder.try_run(cmd) {
+ let mut failures = builder.delayed_failures.borrow_mut();
failures.push(format!("{:?}", cmd));
return false;
}
} else {
- build.run(cmd);
+ builder.run(cmd);
}
true
}
-fn try_run_quiet(build: &Build, cmd: &mut Command) -> bool {
- if !build.fail_fast {
- if !build.try_run_quiet(cmd) {
- let mut failures = build.delayed_failures.borrow_mut();
+fn try_run_quiet(builder: &Builder, cmd: &mut Command) -> bool {
+ if !builder.fail_fast {
+ if !builder.try_run_quiet(cmd) {
+ let mut failures = builder.delayed_failures.borrow_mut();
failures.push(format!("{:?}", cmd));
return false;
}
} else {
- build.run_quiet(cmd);
+ builder.run_quiet(cmd);
}
true
}
/// This tool in `src/tools` will verify the validity of all our links in the
/// documentation to ensure we don't have a bunch of dead ones.
fn run(self, builder: &Builder) {
- let build = builder.build;
let host = self.host;
- build.info(&format!("Linkcheck ({})", host));
+ builder.info(&format!("Linkcheck ({})", host));
builder.default_doc(None);
- let _time = util::timeit(&build);
- try_run(build, builder.tool_cmd(Tool::Linkchecker)
- .arg(build.out.join(host).join("doc")));
+ let _time = util::timeit(&builder);
+ try_run(builder, builder.tool_cmd(Tool::Linkchecker)
+ .arg(builder.out.join(host).join("doc")));
}
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("src/tools/linkchecker").default_condition(builder.build.config.docs)
+ run.path("src/tools/linkchecker").default_condition(builder.config.docs)
}
fn make_run(run: RunConfig) {
/// This tool in `src/tools` will check out a few Rust projects and run `cargo
/// test` to ensure that we don't regress the test suites there.
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = builder.compiler(self.stage, self.host);
builder.ensure(compile::Rustc { compiler, target: compiler.host });
// Note that this is a short, cryptic, and not scoped directory name. This
// is currently to minimize the length of path on Windows where we otherwise
// quickly run into path name limit constraints.
- let out_dir = build.out.join("ct");
+ let out_dir = builder.out.join("ct");
t!(fs::create_dir_all(&out_dir));
- let _time = util::timeit(&build);
+ let _time = util::timeit(&builder);
let mut cmd = builder.tool_cmd(Tool::CargoTest);
- try_run(build, cmd.arg(&build.initial_cargo)
+ try_run(builder, cmd.arg(&builder.initial_cargo)
.arg(&out_dir)
.env("RUSTC", builder.rustc(compiler))
.env("RUSTDOC", builder.rustdoc(compiler.host)));
/// Runs `cargo test` for `cargo` packaged with Rust.
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = builder.compiler(self.stage, self.host);
builder.ensure(tool::Cargo { compiler, target: self.host });
let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
- if !build.fail_fast {
+ cargo.arg("--manifest-path").arg(builder.src.join("src/tools/cargo/Cargo.toml"));
+ if !builder.fail_fast {
cargo.arg("--no-fail-fast");
}
// available.
cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
- try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler)));
+ try_run(builder, cargo.env("PATH", &path_for_cargo(builder, compiler)));
}
}
/// Runs `cargo test` for the rls.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let host = self.host;
let compiler = builder.compiler(stage, host);
builder.add_rustc_lib_path(compiler, &mut cargo);
- if try_run(build, &mut cargo) {
- build.save_toolstate("rls", ToolState::TestPass);
+ if try_run(builder, &mut cargo) {
+ builder.save_toolstate("rls", ToolState::TestPass);
}
}
}
/// Runs `cargo test` for rustfmt.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let host = self.host;
let compiler = builder.compiler(stage, host);
builder.add_rustc_lib_path(compiler, &mut cargo);
- if try_run(build, &mut cargo) {
- build.save_toolstate("rustfmt", ToolState::TestPass);
+ if try_run(builder, &mut cargo) {
+ builder.save_toolstate("rustfmt", ToolState::TestPass);
}
}
}
const DEFAULT: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
- let test_miri = run.builder.build.config.test_miri;
+ let test_miri = run.builder.config.test_miri;
run.path("src/tools/miri").default_condition(test_miri)
}
/// Runs `cargo test` for miri.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let host = self.host;
let compiler = builder.compiler(stage, host);
});
if let Some(miri) = miri {
let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml"));
+ cargo.arg("--manifest-path").arg(builder.src.join("src/tools/miri/Cargo.toml"));
// Don't build tests dynamically, just a pain to work with
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
builder.add_rustc_lib_path(compiler, &mut cargo);
- if try_run(build, &mut cargo) {
- build.save_toolstate("miri", ToolState::TestPass);
+ if try_run(builder, &mut cargo) {
+ builder.save_toolstate("miri", ToolState::TestPass);
}
} else {
eprintln!("failed to test miri: could not build");
/// Runs `cargo test` for clippy.
fn run(self, builder: &Builder) {
- let build = builder.build;
let stage = self.stage;
let host = self.host;
let compiler = builder.compiler(stage, host);
});
if let Some(clippy) = clippy {
let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml"));
+ cargo.arg("--manifest-path").arg(builder.src.join("src/tools/clippy/Cargo.toml"));
// Don't build tests dynamically, just a pain to work with
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
builder.add_rustc_lib_path(compiler, &mut cargo);
- if try_run(build, &mut cargo) {
- build.save_toolstate("clippy-driver", ToolState::TestPass);
+ if try_run(builder, &mut cargo) {
+ builder.save_toolstate("clippy-driver", ToolState::TestPass);
}
} else {
eprintln!("failed to test clippy: could not build");
.env("RUSTC_STAGE", self.compiler.stage.to_string())
.env("RUSTC_SYSROOT", builder.sysroot(self.compiler))
.env("RUSTDOC_LIBDIR", builder.sysroot_libdir(self.compiler, self.compiler.host))
- .env("CFG_RELEASE_CHANNEL", &builder.build.config.channel)
+ .env("CFG_RELEASE_CHANNEL", &builder.config.channel)
.env("RUSTDOC_REAL", builder.rustdoc(self.compiler.host))
- .env("RUSTDOC_CRATE_VERSION", builder.build.rust_version())
+ .env("RUSTDOC_CRATE_VERSION", builder.rust_version())
.env("RUSTC_BOOTSTRAP", "1");
- if let Some(linker) = builder.build.linker(self.compiler.host) {
+ if let Some(linker) = builder.linker(self.compiler.host) {
cmd.env("RUSTC_TARGET_LINKER", linker);
}
- try_run(builder.build, &mut cmd);
+ try_run(builder, &mut cmd);
}
}
}
}
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct RustdocUi {
+ pub host: Interned<String>,
+ pub target: Interned<String>,
+ pub compiler: Compiler,
+}
+
+impl Step for RustdocUi {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/test/rustdoc-ui")
+ }
+
+ fn make_run(run: RunConfig) {
+ let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+ run.builder.ensure(RustdocUi {
+ host: run.host,
+ target: run.target,
+ compiler,
+ });
+ }
+
+ fn run(self, builder: &Builder) {
+ builder.ensure(Compiletest {
+ compiler: self.compiler,
+ target: self.target,
+ mode: "ui",
+ suite: "rustdoc-ui",
+ })
+ }
+}
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Tidy;
/// otherwise just implements a few lint-like checks that are specific to the
/// compiler itself.
fn run(self, builder: &Builder) {
- let build = builder.build;
-
let mut cmd = builder.tool_cmd(Tool::Tidy);
- cmd.arg(build.src.join("src"));
- cmd.arg(&build.initial_cargo);
- if !build.config.vendor {
+ cmd.arg(builder.src.join("src"));
+ cmd.arg(&builder.initial_cargo);
+ if !builder.config.vendor {
cmd.arg("--no-vendor");
}
- if build.config.quiet_tests {
+ if builder.config.quiet_tests {
cmd.arg("--quiet");
}
- let _folder = build.fold_output(|| "tidy");
+ let _folder = builder.fold_output(|| "tidy");
builder.info(&format!("tidy check"));
- try_run(build, &mut cmd);
+ try_run(builder, &mut cmd);
}
fn should_run(run: ShouldRun) -> ShouldRun {
}
}
-fn testdir(build: &Build, host: Interned<String>) -> PathBuf {
- build.out.join(host).join("test")
+fn testdir(builder: &Builder, host: Interned<String>) -> PathBuf {
+ builder.out.join(host).join("test")
}
macro_rules! default_test {
/// compiletest `mode` and `suite` arguments. For example `mode` can be
/// "run-pass" or `suite` can be something like `debuginfo`.
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
let mode = self.mode;
let suite = self.suite;
// Skip codegen tests if they aren't enabled in configuration.
- if !build.config.codegen_tests && suite == "codegen" {
+ if !builder.config.codegen_tests && suite == "codegen" {
return;
}
if suite == "debuginfo" {
// Skip debuginfo tests on MSVC
- if build.build.contains("msvc") {
+ if builder.config.build.contains("msvc") {
return;
}
if mode == "debuginfo-XXX" {
- return if build.build.contains("apple") {
+ return if builder.config.build.contains("apple") {
builder.ensure(Compiletest {
mode: "debuginfo-lldb",
..self
cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
cmd.arg("--rustc-path").arg(builder.rustc(compiler));
+ let is_rustdoc_ui = suite.ends_with("rustdoc-ui");
+
// Avoid depending on rustdoc when we don't need it.
- if mode == "rustdoc" || (mode == "run-make" && suite.ends_with("fulldeps")) {
+ if mode == "rustdoc" ||
+ (mode == "run-make" && suite.ends_with("fulldeps")) ||
+ (mode == "ui" && is_rustdoc_ui) {
cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host));
}
- cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
- cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+ cmd.arg("--src-base").arg(builder.src.join("src/test").join(suite));
+ cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite));
cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
cmd.arg("--mode").arg(mode);
cmd.arg("--target").arg(target);
cmd.arg("--host").arg(&*compiler.host);
- cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build));
+ cmd.arg("--llvm-filecheck").arg(builder.llvm_filecheck(builder.config.build));
- if let Some(ref nodejs) = build.config.nodejs {
+ if let Some(ref nodejs) = builder.config.nodejs {
cmd.arg("--nodejs").arg(nodejs);
}
- let mut flags = vec!["-Crpath".to_string()];
- if build.config.rust_optimize_tests {
- flags.push("-O".to_string());
- }
- if build.config.rust_debuginfo_tests {
- flags.push("-g".to_string());
+ let mut flags = if is_rustdoc_ui {
+ Vec::new()
+ } else {
+ vec!["-Crpath".to_string()]
+ };
+ if !is_rustdoc_ui {
+ if builder.config.rust_optimize_tests {
+ flags.push("-O".to_string());
+ }
+ if builder.config.rust_debuginfo_tests {
+ flags.push("-g".to_string());
+ }
}
flags.push("-Zunstable-options".to_string());
- flags.push(build.config.cmd.rustc_args().join(" "));
+ flags.push(builder.config.cmd.rustc_args().join(" "));
- if let Some(linker) = build.linker(target) {
+ if let Some(linker) = builder.linker(target) {
cmd.arg("--linker").arg(linker);
}
let mut targetflags = flags.clone();
targetflags.push(format!("-Lnative={}",
- build.test_helpers_out(target).display()));
+ builder.test_helpers_out(target).display()));
cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
- cmd.arg("--docck-python").arg(build.python());
+ cmd.arg("--docck-python").arg(builder.python());
- if build.build.ends_with("apple-darwin") {
+ if builder.config.build.ends_with("apple-darwin") {
// Force /usr/bin/python on macOS for LLDB tests because we're loading the
// LLDB plugin's compiled module which only works with the system python
// (namely not Homebrew-installed python)
cmd.arg("--lldb-python").arg("/usr/bin/python");
} else {
- cmd.arg("--lldb-python").arg(build.python());
+ cmd.arg("--lldb-python").arg(builder.python());
}
- if let Some(ref gdb) = build.config.gdb {
+ if let Some(ref gdb) = builder.config.gdb {
cmd.arg("--gdb").arg(gdb);
}
- if let Some(ref vers) = build.lldb_version {
+ if let Some(ref vers) = builder.lldb_version {
cmd.arg("--lldb-version").arg(vers);
}
- if let Some(ref dir) = build.lldb_python_dir {
+ if let Some(ref dir) = builder.lldb_python_dir {
cmd.arg("--lldb-python-dir").arg(dir);
}
- cmd.args(&build.config.cmd.test_args());
+ cmd.args(&builder.config.cmd.test_args());
- if build.is_verbose() {
+ if builder.is_verbose() {
cmd.arg("--verbose");
}
- if build.config.quiet_tests {
+ if builder.config.quiet_tests {
cmd.arg("--quiet");
}
- if build.config.llvm_enabled {
+ if builder.config.llvm_enabled {
let llvm_config = builder.ensure(native::Llvm {
- target: build.config.build,
+ target: builder.config.build,
emscripten: false,
});
- if !build.config.dry_run {
+ if !builder.config.dry_run {
let llvm_version = output(Command::new(&llvm_config).arg("--version"));
cmd.arg("--llvm-version").arg(llvm_version);
}
- if !build.is_rust_llvm(target) {
+ if !builder.is_rust_llvm(target) {
cmd.arg("--system-llvm");
}
// Only pass correct values for these flags for the `run-make` suite as it
// requires that a C++ compiler was configured which isn't always the case.
- if !build.config.dry_run && suite == "run-make-fulldeps" {
+ if !builder.config.dry_run && suite == "run-make-fulldeps" {
let llvm_components = output(Command::new(&llvm_config).arg("--components"));
let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
- cmd.arg("--cc").arg(build.cc(target))
- .arg("--cxx").arg(build.cxx(target).unwrap())
- .arg("--cflags").arg(build.cflags(target).join(" "))
+ cmd.arg("--cc").arg(builder.cc(target))
+ .arg("--cxx").arg(builder.cxx(target).unwrap())
+ .arg("--cflags").arg(builder.cflags(target).join(" "))
.arg("--llvm-components").arg(llvm_components.trim())
.arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
- if let Some(ar) = build.ar(target) {
+ if let Some(ar) = builder.ar(target) {
cmd.arg("--ar").arg(ar);
}
}
}
- if suite == "run-make-fulldeps" && !build.config.llvm_enabled {
+ if suite == "run-make-fulldeps" && !builder.config.llvm_enabled {
builder.info(
&format!("Ignoring run-make test suite as they generally don't work without LLVM"));
return;
.arg("--llvm-cxxflags").arg("");
}
- if build.remote_tested(target) {
+ if builder.remote_tested(target) {
cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient));
}
// Note that if we encounter `PATH` we make sure to append to our own `PATH`
// rather than stomp over it.
if target.contains("msvc") {
- for &(ref k, ref v) in build.cc[&target].env() {
+ for &(ref k, ref v) in builder.cc[&target].env() {
if k != "PATH" {
cmd.env(k, v);
}
}
}
cmd.env("RUSTC_BOOTSTRAP", "1");
- build.add_rust_test_threads(&mut cmd);
+ builder.add_rust_test_threads(&mut cmd);
- if build.config.sanitizers {
+ if builder.config.sanitizers {
cmd.env("SANITIZER_SUPPORT", "1");
}
- if build.config.profiler {
+ if builder.config.profiler {
cmd.env("PROFILER_SUPPORT", "1");
}
- cmd.env("RUST_TEST_TMPDIR", build.out.join("tmp"));
+ cmd.env("RUST_TEST_TMPDIR", builder.out.join("tmp"));
cmd.arg("--adb-path").arg("adb");
cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
if target.contains("android") {
// Assume that cc for this target comes from the android sysroot
cmd.arg("--android-cross-path")
- .arg(build.cc(target).parent().unwrap().parent().unwrap());
+ .arg(builder.cc(target).parent().unwrap().parent().unwrap());
} else {
cmd.arg("--android-cross-path").arg("");
}
- build.ci_env.force_coloring_in_ci(&mut cmd);
+ builder.ci_env.force_coloring_in_ci(&mut cmd);
- let _folder = build.fold_output(|| format!("test_{}", suite));
+ let _folder = builder.fold_output(|| format!("test_{}", suite));
builder.info(&format!("Check compiletest suite={} mode={} ({} -> {})",
suite, mode, &compiler.host, target));
- let _time = util::timeit(&build);
- try_run(build, &mut cmd);
+ let _time = util::timeit(&builder);
+ try_run(builder, &mut cmd);
}
}
/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
/// `compiler`.
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
builder.ensure(compile::Test { compiler, target: compiler.host });
// Do a breadth-first traversal of the `src/doc` directory and just run
// tests for all files that end in `*.md`
- let mut stack = vec![build.src.join(self.path)];
- let _time = util::timeit(&build);
- let _folder = build.fold_output(|| format!("test_{}", self.name));
+ let mut stack = vec![builder.src.join(self.path)];
+ let _time = util::timeit(&builder);
+ let _folder = builder.fold_output(|| format!("test_{}", self.name));
let mut files = Vec::new();
while let Some(p) = stack.pop() {
}
// The nostarch directory in the book is for no starch, and so isn't
- // guaranteed to build. We don't care if it doesn't build, so skip it.
+ // guaranteed to build. We don't care if it doesn't build, so skip it.
if p.to_str().map_or(false, |p| p.contains("nostarch")) {
continue;
}
} else {
ToolState::TestFail
};
- build.save_toolstate(self.name, toolstate);
+ builder.save_toolstate(self.name, toolstate);
}
}
}
/// generate a markdown file from the error indexes of the code base which is
/// then passed to `rustdoc --test`.
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
builder.ensure(compile::Std { compiler, target: compiler.host });
- let dir = testdir(build, compiler.host);
+ let dir = testdir(builder, compiler.host);
t!(fs::create_dir_all(&dir));
let output = dir.join("error-index.md");
let mut tool = builder.tool_cmd(Tool::ErrorIndex);
tool.arg("markdown")
.arg(&output)
- .env("CFG_BUILD", &build.build)
- .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir());
+ .env("CFG_BUILD", &builder.config.build)
+ .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir());
- let _folder = build.fold_output(|| "test_error_index");
- build.info(&format!("Testing error-index stage{}", compiler.stage));
- let _time = util::timeit(&build);
- build.run(&mut tool);
+ let _folder = builder.fold_output(|| "test_error_index");
+ builder.info(&format!("Testing error-index stage{}", compiler.stage));
+ let _time = util::timeit(&builder);
+ builder.run(&mut tool);
markdown_test(builder, compiler, &output);
}
}
fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) -> bool {
- let build = builder.build;
match File::open(markdown) {
Ok(mut file) => {
let mut contents = String::new();
Err(_) => {},
}
- build.info(&format!("doc tests for: {}", markdown.display()));
+ builder.info(&format!("doc tests for: {}", markdown.display()));
let mut cmd = builder.rustdoc_cmd(compiler.host);
- build.add_rust_test_threads(&mut cmd);
+ builder.add_rust_test_threads(&mut cmd);
cmd.arg("--test");
cmd.arg(markdown);
cmd.env("RUSTC_BOOTSTRAP", "1");
- let test_args = build.config.cmd.test_args().join(" ");
+ let test_args = builder.config.cmd.test_args().join(" ");
cmd.arg("--test-args").arg(test_args);
- if build.config.quiet_tests {
- try_run_quiet(build, &mut cmd)
+ if builder.config.quiet_tests {
+ try_run_quiet(builder, &mut cmd)
} else {
- try_run(build, &mut cmd)
+ try_run(builder, &mut cmd)
}
}
/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
/// arguments, and those arguments are discovered from `cargo metadata`.
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
let mode = self.mode;
// libstd, then what we're actually testing is the libstd produced in
// stage1. Reflect that here by updating the compiler that we're working
// with automatically.
- let compiler = if build.force_use_stage1(compiler, target) {
+ let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler.clone()
compile::std_cargo(builder, &compiler, target, &mut cargo);
}
Mode::Libtest => {
- compile::test_cargo(build, &compiler, target, &mut cargo);
+ compile::test_cargo(builder, &compiler, target, &mut cargo);
}
Mode::Librustc => {
builder.ensure(compile::Rustc { compiler, target });
- compile::rustc_cargo(build, &mut cargo);
+ compile::rustc_cargo(builder, &mut cargo);
}
_ => panic!("can only test libraries"),
};
// Pass in some standard flags then iterate over the graph we've discovered
// in `cargo metadata` with the maps above and figure out what `-p`
// arguments need to get passed.
- if test_kind.subcommand() == "test" && !build.fail_fast {
+ if test_kind.subcommand() == "test" && !builder.fail_fast {
cargo.arg("--no-fail-fast");
}
- if build.doc_tests {
+ if builder.doc_tests {
cargo.arg("--doc");
}
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
cargo.arg("--");
- cargo.args(&build.config.cmd.test_args());
+ cargo.args(&builder.config.cmd.test_args());
- if build.config.quiet_tests {
+ if builder.config.quiet_tests {
cargo.arg("--quiet");
}
if target.contains("emscripten") {
cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
- build.config.nodejs.as_ref().expect("nodejs not configured"));
+ builder.config.nodejs.as_ref().expect("nodejs not configured"));
} else if target.starts_with("wasm32") {
// Warn about running tests without the `wasm_syscall` feature enabled.
// The javascript shim implements the syscall interface so that test
// output can be correctly reported.
- if !build.config.wasm_syscall {
- build.info(&format!("Libstd was built without `wasm_syscall` feature enabled: \
+ if !builder.config.wasm_syscall {
+ builder.info(&format!("Libstd was built without `wasm_syscall` feature enabled: \
test output may not be visible."));
}
// incompatible with `-C prefer-dynamic`, so disable that here
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
- let node = build.config.nodejs.as_ref()
+ let node = builder.config.nodejs.as_ref()
.expect("nodejs not configured");
let runner = format!("{} {}/src/etc/wasm32-shim.js",
node.display(),
- build.src.display());
+ builder.src.display());
cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner);
- } else if build.remote_tested(target) {
+ } else if builder.remote_tested(target) {
cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
format!("{} run",
builder.tool_exe(Tool::RemoteTestClient).display()));
}
- let _folder = build.fold_output(|| {
+ let _folder = builder.fold_output(|| {
format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, krate)
});
- build.info(&format!("{} {} stage{} ({} -> {})", test_kind, krate, compiler.stage,
+ builder.info(&format!("{} {} stage{} ({} -> {})", test_kind, krate, compiler.stage,
&compiler.host, target));
- let _time = util::timeit(&build);
- try_run(build, &mut cargo);
+ let _time = util::timeit(&builder);
+ try_run(builder, &mut cargo);
}
}
}
fn run(self, builder: &Builder) {
- let build = builder.build;
let test_kind = self.test_kind;
let compiler = builder.compiler(builder.top_stage, self.host);
target,
test_kind.subcommand(),
"src/tools/rustdoc");
- if test_kind.subcommand() == "test" && !build.fail_fast {
+ if test_kind.subcommand() == "test" && !builder.fail_fast {
cargo.arg("--no-fail-fast");
}
cargo.arg("-p").arg("rustdoc:0.0.0");
cargo.arg("--");
- cargo.args(&build.config.cmd.test_args());
+ cargo.args(&builder.config.cmd.test_args());
- if build.config.quiet_tests {
+ if builder.config.quiet_tests {
cargo.arg("--quiet");
}
- let _folder = build.fold_output(|| {
+ let _folder = builder.fold_output(|| {
format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage)
});
- build.info(&format!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage,
+ builder.info(&format!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage,
&compiler.host, target));
- let _time = util::timeit(&build);
+ let _time = util::timeit(&builder);
- try_run(build, &mut cargo);
+ try_run(builder, &mut cargo);
}
}
}
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
- if !build.remote_tested(target) {
+ if !builder.remote_tested(target) {
return
}
builder.ensure(compile::Test { compiler, target });
- build.info(&format!("REMOTE copy libs to emulator ({})", target));
- t!(fs::create_dir_all(build.out.join("tmp")));
+ builder.info(&format!("REMOTE copy libs to emulator ({})", target));
+ t!(fs::create_dir_all(builder.out.join("tmp")));
let server = builder.ensure(tool::RemoteTestServer { compiler, target });
cmd.arg("spawn-emulator")
.arg(target)
.arg(&server)
- .arg(build.out.join("tmp"));
- if let Some(rootfs) = build.qemu_rootfs(target) {
+ .arg(builder.out.join("tmp"));
+ if let Some(rootfs) = builder.qemu_rootfs(target) {
cmd.arg(rootfs);
}
- build.run(&mut cmd);
+ builder.run(&mut cmd);
// Push all our dylibs to the emulator
for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) {
let f = t!(f);
let name = f.file_name().into_string().unwrap();
if util::is_dylib(&name) {
- build.run(Command::new(&tool)
+ builder.run(Command::new(&tool)
.arg("push")
.arg(f.path()));
}
/// Run "distcheck", a 'make check' from a tarball
fn run(self, builder: &Builder) {
- let build = builder.build;
-
- build.info(&format!("Distcheck"));
- let dir = build.out.join("tmp").join("distcheck");
+ builder.info(&format!("Distcheck"));
+ let dir = builder.out.join("tmp").join("distcheck");
let _ = fs::remove_dir_all(&dir);
t!(fs::create_dir_all(&dir));
.arg(builder.ensure(dist::PlainSourceTarball))
.arg("--strip-components=1")
.current_dir(&dir);
- build.run(&mut cmd);
- build.run(Command::new("./configure")
- .args(&build.config.configure_args)
+ builder.run(&mut cmd);
+ builder.run(Command::new("./configure")
+ .args(&builder.config.configure_args)
.arg("--enable-vendor")
.current_dir(&dir));
- build.run(Command::new(build_helper::make(&build.build))
+ builder.run(Command::new(build_helper::make(&builder.config.build))
.arg("check")
.current_dir(&dir));
// Now make sure that rust-src has all of libstd's dependencies
- build.info(&format!("Distcheck rust-src"));
- let dir = build.out.join("tmp").join("distcheck-src");
+ builder.info(&format!("Distcheck rust-src"));
+ let dir = builder.out.join("tmp").join("distcheck-src");
let _ = fs::remove_dir_all(&dir);
t!(fs::create_dir_all(&dir));
.arg(builder.ensure(dist::Src))
.arg("--strip-components=1")
.current_dir(&dir);
- build.run(&mut cmd);
+ builder.run(&mut cmd);
let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
- build.run(Command::new(&build.initial_cargo)
+ builder.run(Command::new(&builder.initial_cargo)
.arg("generate-lockfile")
.arg("--manifest-path")
.arg(&toml)
/// Test the build system itself
fn run(self, builder: &Builder) {
- let build = builder.build;
- let mut cmd = Command::new(&build.initial_cargo);
+ let mut cmd = Command::new(&builder.initial_cargo);
cmd.arg("test")
- .current_dir(build.src.join("src/bootstrap"))
+ .current_dir(builder.src.join("src/bootstrap"))
.env("RUSTFLAGS", "-Cdebuginfo=2")
- .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
+ .env("CARGO_TARGET_DIR", builder.out.join("bootstrap"))
.env("RUSTC_BOOTSTRAP", "1")
- .env("RUSTC", &build.initial_rustc);
+ .env("RUSTC", &builder.initial_rustc);
if let Some(flags) = option_env!("RUSTFLAGS") {
// Use the same rustc flags for testing as for "normal" compilation,
// so that Cargo doesn’t recompile the entire dependency graph every time:
// https://github.com/rust-lang/rust/issues/49215
cmd.env("RUSTFLAGS", flags);
}
- if !build.fail_fast {
+ if !builder.fail_fast {
cmd.arg("--no-fail-fast");
}
- cmd.arg("--").args(&build.config.cmd.test_args());
- try_run(build, &mut cmd);
+ cmd.arg("--").args(&builder.config.cmd.test_args());
+ try_run(builder, &mut cmd);
}
fn should_run(run: ShouldRun) -> ShouldRun {
}
fn run(self, builder: &Builder) {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
let mode = self.mode;
// This is for the original compiler, but if we're forced to use stage 1, then
// std/test/rustc stamps won't exist in stage 2, so we need to get those from stage 1, since
// we copy the libs forward.
- let tools_dir = build.stage_out(compiler, Mode::Tool);
+ let tools_dir = builder.stage_out(compiler, Mode::Tool);
let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
for &cur_mode in &[Mode::Libstd, Mode::Libtest, Mode::Librustc] {
let stamp = match cur_mode {
- Mode::Libstd => libstd_stamp(build, compiler, target),
- Mode::Libtest => libtest_stamp(build, compiler, target),
- Mode::Librustc => librustc_stamp(build, compiler, target),
+ Mode::Libstd => libstd_stamp(builder, compiler, target),
+ Mode::Libtest => libtest_stamp(builder, compiler, target),
+ Mode::Librustc => librustc_stamp(builder, compiler, target),
_ => panic!(),
};
- if build.clear_if_dirty(&tools_dir, &stamp) {
+ if builder.clear_if_dirty(&tools_dir, &stamp) {
break;
}
/// This will build the specified tool with the specified `host` compiler in
/// `stage` into the normal cargo output directory.
fn run(self, builder: &Builder) -> Option<PathBuf> {
- let build = builder.build;
let compiler = self.compiler;
let target = self.target;
let tool = self.tool;
let mut cargo = prepare_tool_cargo(builder, compiler, target, "build", path);
cargo.arg("--features").arg(self.extra_features.join(" "));
- let _folder = build.fold_output(|| format!("stage{}-{}", compiler.stage, tool));
- build.info(&format!("Building stage{} tool {} ({})", compiler.stage, tool, target));
+ let _folder = builder.fold_output(|| format!("stage{}-{}", compiler.stage, tool));
+ builder.info(&format!("Building stage{} tool {} ({})", compiler.stage, tool, target));
let mut duplicates = Vec::new();
- let is_expected = compile::stream_cargo(build, &mut cargo, &mut |msg| {
+ let is_expected = compile::stream_cargo(builder, &mut cargo, &mut |msg| {
// Only care about big things like the RLS/Cargo for now
if tool != "rls" && tool != "cargo" {
return
}
}
- let mut artifacts = build.tool_artifacts.borrow_mut();
+ let mut artifacts = builder.tool_artifacts.borrow_mut();
let prev_artifacts = artifacts
.entry(target)
.or_insert_with(Default::default);
panic!("tools should not compile multiple copies of the same crate");
}
- build.save_toolstate(tool, if is_expected {
+ builder.save_toolstate(tool, if is_expected {
ToolState::TestFail
} else {
ToolState::BuildFail
return None;
}
} else {
- let cargo_out = build.cargo_out(compiler, Mode::Tool, target)
+ let cargo_out = builder.cargo_out(compiler, Mode::Tool, target)
.join(exe(tool, &compiler.host));
- let bin = build.tools_dir(compiler).join(exe(tool, &compiler.host));
- build.copy(&cargo_out, &bin);
+ let bin = builder.tools_dir(compiler).join(exe(tool, &compiler.host));
+ builder.copy(&cargo_out, &bin);
Some(bin)
}
}
command: &'static str,
path: &'static str,
) -> Command {
- let build = builder.build;
let mut cargo = builder.cargo(compiler, Mode::Tool, target, command);
- let dir = build.src.join(path);
+ let dir = builder.src.join(path);
cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
// We don't want to build tools dynamically as they'll be running across
// stages and such and it's just easier if they're not dynamically linked.
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
- if let Some(dir) = build.openssl_install_dir(target) {
+ if let Some(dir) = builder.openssl_install_dir(target) {
cargo.env("OPENSSL_STATIC", "1");
cargo.env("OPENSSL_DIR", dir);
cargo.env("LIBZ_SYS_STATIC", "1");
// own copy
cargo.env("LZMA_API_STATIC", "1");
- cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
- cargo.env("CFG_VERSION", build.rust_version());
+ cargo.env("CFG_RELEASE_CHANNEL", &builder.config.channel);
+ cargo.env("CFG_VERSION", builder.rust_version());
- let info = GitInfo::new(&build.config, &dir);
+ let info = GitInfo::new(&builder.config, &dir);
if let Some(sha) = info.sha() {
cargo.env("CFG_COMMIT_HASH", sha);
}
match tool {
$(Tool::$name =>
self.ensure($name {
- compiler: self.compiler(stage, self.build.build),
- target: self.build.build,
+ compiler: self.compiler(stage, self.config.build),
+ target: self.config.build,
}),
)+
}
fn make_run(run: RunConfig) {
run.builder.ensure($name {
- compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
});
}
fn make_run(run: RunConfig) {
run.builder.ensure(RemoteTestServer {
- compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
});
}
}
fn run(self, builder: &Builder) -> PathBuf {
- let build = builder.build;
let target_compiler = builder.compiler(builder.top_stage, self.host);
let target = target_compiler.host;
let build_compiler = if target_compiler.stage == 0 {
- builder.compiler(0, builder.build.build)
+ builder.compiler(0, builder.config.build)
} else if target_compiler.stage >= 2 {
// Past stage 2, we consider the compiler to be ABI-compatible and hence capable of
// building rustdoc itself.
- builder.compiler(target_compiler.stage, builder.build.build)
+ builder.compiler(target_compiler.stage, builder.config.build)
} else {
// Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise
// we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage
// compilers, which isn't what we want.
- builder.compiler(target_compiler.stage - 1, builder.build.build)
+ builder.compiler(target_compiler.stage - 1, builder.config.build)
};
builder.ensure(compile::Rustc { compiler: build_compiler, target });
builder.ensure(compile::Rustc {
compiler: build_compiler,
- target: builder.build.build,
+ target: builder.config.build,
});
let mut cargo = prepare_tool_cargo(builder,
cargo.env("RUSTC_DEBUGINFO", builder.config.rust_debuginfo.to_string())
.env("RUSTC_DEBUGINFO_LINES", builder.config.rust_debuginfo_lines.to_string());
- let _folder = build.fold_output(|| format!("stage{}-rustdoc", target_compiler.stage));
- build.info(&format!("Building rustdoc for stage{} ({})",
+ let _folder = builder.fold_output(|| format!("stage{}-rustdoc", target_compiler.stage));
+ builder.info(&format!("Building rustdoc for stage{} ({})",
target_compiler.stage, target_compiler.host));
- build.run(&mut cargo);
+ builder.run(&mut cargo);
// Cargo adds a number of paths to the dylib search path on windows, which results in
// the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool"
// rustdoc a different name.
- let tool_rustdoc = build.cargo_out(build_compiler, Mode::Tool, target)
+ let tool_rustdoc = builder.cargo_out(build_compiler, Mode::Tool, target)
.join(exe("rustdoc-tool-binary", &target_compiler.host));
// don't create a stage0-sysroot/bin directory.
t!(fs::create_dir_all(&bindir));
let bin_rustdoc = bindir.join(exe("rustdoc", &*target_compiler.host));
let _ = fs::remove_file(&bin_rustdoc);
- build.copy(&tool_rustdoc, &bin_rustdoc);
+ builder.copy(&tool_rustdoc, &bin_rustdoc);
bin_rustdoc
} else {
tool_rustdoc
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path("src/tools/cargo").default_condition(builder.build.config.extended)
+ run.path("src/tools/cargo").default_condition(builder.config.extended)
}
fn make_run(run: RunConfig) {
run.builder.ensure(Cargo {
- compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
});
}
// compiler to be available, so we need to depend on that.
builder.ensure(compile::Rustc {
compiler: self.compiler,
- target: builder.build.build,
+ target: builder.config.build,
});
builder.ensure(ToolBuild {
compiler: self.compiler,
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
- run.path($path).default_condition(builder.build.config.extended)
+ run.path($path).default_condition(builder.config.extended)
}
fn make_run(run: RunConfig) {
run.builder.ensure($name {
- compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
extra_features: Vec::new(),
});
// compiler to be available, so we need to depend on that.
builder.ensure(compile::Rustc {
compiler: self.compiler,
- target: builder.build.build,
+ target: builder.config.build,
});
};
Miri, miri, "src/tools/miri", "miri", {};
// compiler to be available, so we need to depend on that.
builder.ensure(compile::Rustc {
compiler: self.compiler,
- target: builder.build.build,
+ target: builder.config.build,
});
};
Rustfmt, rustfmt, "src/tools/rustfmt", "rustfmt", {};
/// `host`.
pub fn tool_cmd(&self, tool: Tool) -> Command {
let mut cmd = Command::new(self.tool_exe(tool));
- let compiler = self.compiler(self.tool_default_stage(tool), self.build.build);
+ let compiler = self.compiler(self.tool_default_stage(tool), self.config.build);
self.prepare_tool_cmd(compiler, &mut cmd);
cmd
}
use std::time::{SystemTime, Instant};
use config::Config;
-use Build;
+use builder::Builder;
/// Returns the `name` as the filename of a static library for `target`.
pub fn staticlib(name: &str, target: &str) -> String {
pub struct TimeIt(bool, Instant);
/// Returns an RAII structure that prints out how long it took to drop.
-pub fn timeit(build: &Build) -> TimeIt {
- TimeIt(build.config.dry_run, Instant::now())
+pub fn timeit(builder: &Builder) -> TimeIt {
+ TimeIt(builder.config.dry_run, Instant::now())
}
impl Drop for TimeIt {
--set build.nodejs=/node-v9.2.0-linux-x64/bin/node \
--set rust.lld
+# Some run-make tests have assertions about code size, and enabling debug
+# assertions in libstd causes the binary to be much bigger than it would
+# otherwise normally be. We already test libstd with debug assertions in lots of
+# other contexts as well.
+ENV NO_DEBUG_ASSERTIONS=1
+
ENV SCRIPT python2.7 /checkout/x.py test --target $TARGETS \
src/test/run-make \
src/test/ui \
of a dependency. `--library-path` provides directories to search in, `--extern`
instead lets you specify exactly which dependency is located where.
+## `-C`/`--codegen`: pass codegen options to rustc
+
+Using this flag looks like this:
+
+```bash
+$ rustdoc src/lib.rs -C target_feature=+avx
+$ rustdoc src/lib.rs --codegen target_feature=+avx
+
+$ rustdoc --test src/lib.rs -C target_feature=+avx
+$ rustdoc --test src/lib.rs --codegen target_feature=+avx
+
+$ rustdoc --test README.md -C target_feature=+avx
+$ rustdoc --test README.md --codegen target_feature=+avx
+```
+
+When rustdoc generates documentation, looks for documentation tests, or executes documentation
+tests, it needs to compile some rust code, at least partially. This flag allows you to tell rustdoc
+to provide some extra codegen options to rustc when it runs these compilations. Most of the time,
+these options won't affect a regular documentation run, but if something depends on target features
+being enabled, or documentation tests need to use some additional options, this flag allows you to
+pass those options through to rustc.
+
+The arguments to this flag are the same as those for the `-C` flag on rustc. Run `rustc -C help` to
+get the full list.
+
## `--passes`: add more rustdoc passes
Using this flag looks like this:
we don't want the reader to see every line every time. Here's what we put in
our source code:
-```text
- First, we set `x` to five:
+``````markdown
+First, we set `x` to five:
- ```
- let x = 5;
- # let y = 6;
- # println!("{}", x + y);
- ```
+```
+let x = 5;
+# let y = 6;
+# println!("{}", x + y);
+```
- Next, we set `y` to six:
+Next, we set `y` to six:
- ```
- # let x = 5;
- let y = 6;
- # println!("{}", x + y);
- ```
+```
+# let x = 5;
+let y = 6;
+# println!("{}", x + y);
+```
- Finally, we print the sum of `x` and `y`:
+Finally, we print the sum of `x` and `y`:
- ```
- # let x = 5;
- # let y = 6;
- println!("{}", x + y);
- ```
```
+# let x = 5;
+# let y = 6;
+println!("{}", x + y);
+```
+``````
By repeating all parts of the example, you can ensure that your example still
compiles, while only showing the parts that are relevant to that part of your
#[stable(feature = "box_from_slice", since = "1.17.0")]
impl<'a> From<&'a str> for Box<str> {
+ #[inline]
fn from(s: &'a str) -> Box<str> {
unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
}
#[stable(feature = "boxed_str_conv", since = "1.19.0")]
impl From<Box<str>> for Box<[u8]> {
+ #[inline]
fn from(s: Box<str>) -> Self {
unsafe { Box::from_raw(Box::into_raw(s) as *mut [u8]) }
}
#[stable(feature = "btree_drop", since = "1.7.0")]
impl<K, V> Drop for IntoIter<K, V> {
fn drop(&mut self) {
- for _ in &mut *self {
- }
+ self.for_each(drop);
unsafe {
let leaf_node = ptr::read(&self.front).into_node();
if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
where F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
- for _ in self { }
+ self.for_each(drop);
}
}
unsafe {
let elem_size = mem::size_of::<T>();
- let alloc_size = cap.checked_mul(elem_size).expect("capacity overflow");
- alloc_guard(alloc_size).expect("capacity overflow");
+ let alloc_size = cap.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow());
+ alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());
// handles ZSTs and `cap = 0` alike
let ptr = if alloc_size == 0 {
// `from_size_align_unchecked`.
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
- alloc_guard(new_size).expect("capacity overflow");
+ alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(),
cur,
new_size);
// overflow and the alignment is sufficiently small.
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
- alloc_guard(new_size).expect("capacity overflow");
+ alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) {
Ok(_) => {
// We can't directly divide `size`.
pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
match self.try_reserve_exact(used_cap, needed_extra_cap) {
- Err(CapacityOverflow) => panic!("capacity overflow"),
+ Err(CapacityOverflow) => capacity_overflow(),
Err(AllocErr) => self.a.oom(),
Ok(()) => { /* yay */ }
}
/// The same as try_reserve, but errors are lowered to a call to oom().
pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
match self.try_reserve(used_cap, needed_extra_cap) {
- Err(CapacityOverflow) => panic!("capacity overflow"),
+ Err(CapacityOverflow) => capacity_overflow(),
Err(AllocErr) => self.a.oom(),
Ok(()) => { /* yay */ }
}
}
let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)
- .expect("capacity overflow");
+ .unwrap_or_else(|_| capacity_overflow());
// Here, `cap < used_cap + needed_extra_cap <= new_cap`
// (regardless of whether `self.cap - used_cap` wrapped).
let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
// FIXME: may crash and burn on over-reserve
- alloc_guard(new_layout.size()).expect("capacity overflow");
+ alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
match self.a.grow_in_place(
NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(),
) {
}
}
+// One central function responsible for reporting capacity overflows. This'll
+// ensure that the code generation related to these panics is minimal as there's
+// only one location which panics rather than a bunch throughout the module.
+fn capacity_overflow() -> ! {
+ panic!("capacity overflow")
+}
+
#[cfg(test)]
mod tests {
use super::*;
/// assert_eq!(*boxed_bytes, *s.as_bytes());
/// ```
#[stable(feature = "str_box_extras", since = "1.20.0")]
+ #[inline]
pub fn into_boxed_bytes(self: Box<str>) -> Box<[u8]> {
self.into()
}
/// assert_eq!(boxed_str.into_string(), string);
/// ```
#[stable(feature = "box_str", since = "1.4.0")]
+ #[inline]
pub fn into_string(self: Box<str>) -> String {
let slice = Box::<[u8]>::from(self);
unsafe { String::from_utf8_unchecked(slice.into_vec()) }
/// assert_eq!("☺", &*smile);
/// ```
#[stable(feature = "str_box_extras", since = "1.20.0")]
+#[inline]
pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
Box::from_raw(Box::into_raw(v) as *mut str)
}
/// and replaces it with the given string.
/// The given string doesn't need to be the same length as the range.
///
- /// Note: Unlike [`Vec::splice`], the replacement happens eagerly, and this
- /// method does not return the removed chars.
- ///
/// # Panics
///
/// Panics if the starting point or end point do not lie on a [`char`]
/// let b = s.into_boxed_str();
/// ```
#[stable(feature = "box_str", since = "1.4.0")]
+ #[inline]
pub fn into_boxed_str(self) -> Box<str> {
let slice = self.vec.into_boxed_slice();
unsafe { from_boxed_utf8_unchecked(slice) }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
- self.buf.shrink_to_fit(self.len);
+ if self.capacity() != self.len {
+ self.buf.shrink_to_fit(self.len);
+ }
}
/// Shrinks the capacity of the vector with a lower bound.
impl<'a, T> Drop for Drain<'a, T> {
fn drop(&mut self) {
// exhaust self first
- while let Some(_) = self.next() {}
+ self.for_each(drop);
if self.tail_len > 0 {
unsafe {
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<'a, I: Iterator> Drop for Splice<'a, I> {
fn drop(&mut self) {
- // exhaust drain first
- while let Some(_) = self.drain.next() {}
-
+ self.drain.by_ref().for_each(drop);
unsafe {
if self.drain.tail_len == 0 {
where F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
- for _ in self.by_ref() { }
-
+ self.for_each(drop);
unsafe {
self.vec.set_len(self.old_len - self.del);
}
#[stable(feature = "drain", since = "1.6.0")]
impl<'a, T: 'a> Drop for Drain<'a, T> {
fn drop(&mut self) {
- for _ in self.by_ref() {}
+ self.for_each(drop);
let source_deque = unsafe { self.deque.as_mut() };
name = "arena"
path = "lib.rs"
crate-type = ["dylib"]
+
+[dependencies]
+rustc_data_structures = { path = "../librustc_data_structures" }
\ No newline at end of file
#![allow(deprecated)]
extern crate alloc;
+extern crate rustc_data_structures;
+
+use rustc_data_structures::sync::MTLock;
use std::cell::{Cell, RefCell};
use std::cmp;
chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}
+unsafe impl Send for DroplessArena {}
+
impl DroplessArena {
pub fn new() -> DroplessArena {
DroplessArena {
}
}
+pub struct SyncTypedArena<T> {
+ lock: MTLock<TypedArena<T>>,
+}
+
+impl<T> SyncTypedArena<T> {
+ #[inline(always)]
+ pub fn new() -> SyncTypedArena<T> {
+ SyncTypedArena {
+ lock: MTLock::new(TypedArena::new())
+ }
+ }
+
+ #[inline(always)]
+ pub fn alloc(&self, object: T) -> &mut T {
+ // Extend the lifetime of the result since it's limited to the lock guard
+ unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
+ }
+
+ #[inline(always)]
+ pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
+ where
+ T: Copy,
+ {
+ // Extend the lifetime of the result since it's limited to the lock guard
+ unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
+ }
+
+ #[inline(always)]
+ pub fn clear(&mut self) {
+ self.lock.get_mut().clear();
+ }
+}
+
+pub struct SyncDroplessArena {
+ lock: MTLock<DroplessArena>,
+}
+
+impl SyncDroplessArena {
+ #[inline(always)]
+ pub fn new() -> SyncDroplessArena {
+ SyncDroplessArena {
+ lock: MTLock::new(DroplessArena::new())
+ }
+ }
+
+ #[inline(always)]
+ pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
+ self.lock.lock().in_arena(ptr)
+ }
+
+ #[inline(always)]
+ pub fn alloc<T>(&self, object: T) -> &mut T {
+ // Extend the lifetime of the result since it's limited to the lock guard
+ unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
+ }
+
+ #[inline(always)]
+ pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
+ where
+ T: Copy,
+ {
+ // Extend the lifetime of the result since it's limited to the lock guard
+ unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
+ }
+}
+
#[cfg(test)]
mod tests {
extern crate test;
/// Constructs a `Layout` suitable for holding a value of type `T`.
pub fn new<T>() -> Self {
let (size, align) = size_align::<T>();
- Layout::from_size_align(size, align).unwrap()
+ // Note that the align is guaranteed by rustc to be a power of two and
+ // the size+align combo is guaranteed to fit in our address space. As a
+ // result use the unchecked constructor here to avoid inserting code
+ // that panics if it isn't optimized well enough.
+ debug_assert!(Layout::from_size_align(size, align).is_ok());
+ unsafe {
+ Layout::from_size_align_unchecked(size, align)
+ }
}
/// Produces layout describing a record that could be used to
/// or other unsized type like a slice).
pub fn for_value<T: ?Sized>(t: &T) -> Self {
let (size, align) = (mem::size_of_val(t), mem::align_of_val(t));
- Layout::from_size_align(size, align).unwrap()
+ // See rationale in `new` for why this is using an unsafe variant below
+ debug_assert!(Layout::from_size_align(size, align).is_ok());
+ unsafe {
+ Layout::from_size_align_unchecked(size, align)
+ }
}
/// Creates a layout describing the record that can hold a value
.ok_or(LayoutErr { private: () })?;
let alloc_size = padded_size.checked_mul(n)
.ok_or(LayoutErr { private: () })?;
-
- // We can assume that `self.align` is a power-of-two.
- // Furthermore, `alloc_size` has already been rounded up
- // to a multiple of `self.align`; therefore, the call to
- // `Layout::from_size_align` below should never panic.
- Ok((Layout::from_size_align(alloc_size, self.align).unwrap(), padded_size))
+ Ok((Layout::from_size_align(alloc_size, self.align)?, padded_size))
}
/// Creates a layout describing the record for `self` followed by
// truncation. However other flags like `fill`, `width` and `align`
// must act as always.
if let Some((i, _)) = s.char_indices().skip(max).next() {
- &s[..i]
+ // LLVM here can't prove that slicing with `..i` (`&s[..i]`) will
+ // never panic, but we know that it can't, since `i` comes from
+ // `char_indices` and is therefore in bounds. Use `get` + `unwrap_or`
+ // to avoid `unsafe` and otherwise avoid emitting panic-related code.
+ s.get(..i).unwrap_or(&s)
} else {
&s
}
/// an extra layer of indirection. `flat_map()` will remove this extra layer
/// on its own.
///
- /// You can think of [`flat_map(f)`][flat_map] as the semantic equivalent
+ /// You can think of `flat_map(f)` as the semantic equivalent
/// of [`map`]ping, and then [`flatten`]ing as in `map(f).flatten()`.
///
/// Another way of thinking about `flat_map()`: [`map`]'s closure returns
#![feature(asm)]
#![feature(associated_type_defaults)]
#![feature(attr_literals)]
-#![feature(cfg_target_feature)]
#![feature(cfg_target_has_atomic)]
#![feature(concat_idents)]
#![feature(const_fn)]
#![feature(specialization)]
#![feature(staged_api)]
#![feature(stmt_expr_attributes)]
-#![feature(target_feature)]
#![feature(unboxed_closures)]
#![feature(untagged_unions)]
#![feature(unwind_attributes)]
+#![cfg_attr(not(stage0), feature(mmx_target_feature))]
+#![cfg_attr(not(stage0), feature(tbm_target_feature))]
+#![cfg_attr(not(stage0), feature(sse4a_target_feature))]
+#![cfg_attr(not(stage0), feature(arm_target_feature))]
+#![cfg_attr(not(stage0), feature(powerpc_target_feature))]
+#![cfg_attr(not(stage0), feature(mips_target_feature))]
+#![cfg_attr(not(stage0), feature(aarch64_target_feature))]
+
+#![cfg_attr(stage0, feature(target_feature))]
+#![cfg_attr(stage0, feature(cfg_target_feature))]
+
#[prelude_import]
#[allow(unused)]
use prelude::v1::*;
// things like SIMD and such. Note that the actual source for all this lies in a
// different repository, rust-lang-nursery/stdsimd. That's why the setup here is
// a bit wonky.
+#[allow(unused_macros)]
+macro_rules! test_v16 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v32 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v64 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v128 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v256 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v512 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }
#[path = "../stdsimd/coresimd/mod.rs"]
#[allow(missing_docs, missing_debug_implementations, dead_code)]
#[unstable(feature = "stdsimd", issue = "48556")]
#[unstable(feature = "stdsimd", issue = "48556")]
#[cfg(not(stage0))]
pub use coresimd::simd;
-#[unstable(feature = "stdsimd", issue = "48556")]
+#[stable(feature = "simd_arch", since = "1.27.0")]
#[cfg(not(stage0))]
pub use coresimd::arch;
/// ```
/// #![feature(range_contains)]
///
- /// assert!(!(3..5).contains(2));
- /// assert!( (3..5).contains(3));
- /// assert!( (3..5).contains(4));
- /// assert!(!(3..5).contains(5));
+ /// use std::f32;
///
- /// assert!(!(3..3).contains(3));
- /// assert!(!(3..2).contains(3));
+ /// assert!(!(3..5).contains(&2));
+ /// assert!( (3..5).contains(&3));
+ /// assert!( (3..5).contains(&4));
+ /// assert!(!(3..5).contains(&5));
+ ///
+ /// assert!(!(3..3).contains(&3));
+ /// assert!(!(3..2).contains(&3));
+ ///
+ /// assert!( (0.0..1.0).contains(&0.5));
+ /// assert!(!(0.0..1.0).contains(&f32::NAN));
+ /// assert!(!(0.0..f32::NAN).contains(&0.5));
+ /// assert!(!(f32::NAN..1.0).contains(&0.5));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
- pub fn contains(&self, item: Idx) -> bool {
- (self.start <= item) && (item < self.end)
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
/// Returns `true` if the range contains no items.
}
}
-#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// ```
/// #![feature(range_contains)]
///
- /// assert!(!(3..).contains(2));
- /// assert!( (3..).contains(3));
- /// assert!( (3..).contains(1_000_000_000));
+ /// use std::f32;
+ ///
+ /// assert!(!(3..).contains(&2));
+ /// assert!( (3..).contains(&3));
+ /// assert!( (3..).contains(&1_000_000_000));
+ ///
+ /// assert!( (0.0..).contains(&0.5));
+ /// assert!(!(0.0..).contains(&f32::NAN));
+ /// assert!(!(f32::NAN..).contains(&0.5));
/// ```
- pub fn contains(&self, item: Idx) -> bool {
- (self.start <= item)
+ #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
}
}
}
-#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// ```
/// #![feature(range_contains)]
///
- /// assert!( (..5).contains(-1_000_000_000));
- /// assert!( (..5).contains(4));
- /// assert!(!(..5).contains(5));
+ /// use std::f32;
+ ///
+ /// assert!( (..5).contains(&-1_000_000_000));
+ /// assert!( (..5).contains(&4));
+ /// assert!(!(..5).contains(&5));
+ ///
+ /// assert!( (..1.0).contains(&0.5));
+ /// assert!(!(..1.0).contains(&f32::NAN));
+ /// assert!(!(..f32::NAN).contains(&0.5));
/// ```
- pub fn contains(&self, item: Idx) -> bool {
- (item < self.end)
+ #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
}
/// ```
/// #![feature(range_contains)]
///
- /// assert!(!(3..=5).contains(2));
- /// assert!( (3..=5).contains(3));
- /// assert!( (3..=5).contains(4));
- /// assert!( (3..=5).contains(5));
- /// assert!(!(3..=5).contains(6));
+ /// use std::f32;
+ ///
+ /// assert!(!(3..=5).contains(&2));
+ /// assert!( (3..=5).contains(&3));
+ /// assert!( (3..=5).contains(&4));
+ /// assert!( (3..=5).contains(&5));
+ /// assert!(!(3..=5).contains(&6));
///
- /// assert!( (3..=3).contains(3));
- /// assert!(!(3..=2).contains(3));
+ /// assert!( (3..=3).contains(&3));
+ /// assert!(!(3..=2).contains(&3));
+ ///
+ /// assert!( (0.0..=1.0).contains(&1.0));
+ /// assert!(!(0.0..=1.0).contains(&f32::NAN));
+ /// assert!(!(0.0..=f32::NAN).contains(&0.0));
+ /// assert!(!(f32::NAN..=1.0).contains(&1.0));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
- pub fn contains(&self, item: Idx) -> bool {
- self.start <= item && item <= self.end
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
/// Returns `true` if the range contains no items.
/// ```
/// #![feature(range_contains)]
///
- /// assert!( (..=5).contains(-1_000_000_000));
- /// assert!( (..=5).contains(5));
- /// assert!(!(..=5).contains(6));
+ /// use std::f32;
+ ///
+ /// assert!( (..=5).contains(&-1_000_000_000));
+ /// assert!( (..=5).contains(&5));
+ /// assert!(!(..=5).contains(&6));
+ ///
+ /// assert!( (..=1.0).contains(&1.0));
+ /// assert!(!(..=1.0).contains(&f32::NAN));
+ /// assert!(!(..=f32::NAN).contains(&0.5));
/// ```
- pub fn contains(&self, item: Idx) -> bool {
- (item <= self.end)
+ #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+ pub fn contains<U>(&self, item: &U) -> bool
+ where
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
+ {
+ <Self as RangeBounds<Idx>>::contains(self, item)
}
}
/// # }
/// ```
fn end(&self) -> Bound<&T>;
+
+
+ /// Returns `true` if `item` is contained in the range.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(range_contains)]
+ ///
+ /// use std::f32;
+ ///
+ /// assert!( (3..5).contains(&4));
+ /// assert!(!(3..5).contains(&2));
+ ///
+ /// assert!( (0.0..1.0).contains(&0.5));
+ /// assert!(!(0.0..1.0).contains(&f32::NAN));
+ /// assert!(!(0.0..f32::NAN).contains(&0.5));
+ /// assert!(!(f32::NAN..1.0).contains(&0.5));
+ /// ```
+ #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+ fn contains<U>(&self, item: &U) -> bool
+ where
+ T: PartialOrd<U>,
+ U: ?Sized + PartialOrd<T>,
+ {
+ (match self.start() {
+ Included(ref start) => *start <= item,
+ Excluded(ref start) => *start < item,
+ Unbounded => true,
+ })
+ &&
+ (match self.end() {
+ Included(ref end) => item <= *end,
+ Excluded(ref end) => item < *end,
+ Unbounded => true,
+ })
+ }
}
use self::Bound::{Excluded, Included, Unbounded};
and related macros",
issue = "0")]
#[doc(hidden)]
- pub fn internal_constructor(payload: &'a (Any + Send),
- message: Option<&'a fmt::Arguments<'a>>,
+ #[inline]
+ pub fn internal_constructor(message: Option<&'a fmt::Arguments<'a>>,
location: Location<'a>)
-> Self {
- PanicInfo { payload, location, message }
+ PanicInfo { payload: &(), location, message }
+ }
+
+ #[doc(hidden)]
+ #[inline]
+ pub fn set_payload(&mut self, info: &'a (Any + Send)) {
+ self.payload = info;
}
/// Returns the payload associated with the panic.
write!(formatter, "{}:{}:{}", self.file, self.line, self.col)
}
}
+
+/// An internal trait used by libstd to pass data from libstd to `panic_unwind`
+/// and other panic runtimes. Not intended to be stabilized any time soon, do
+/// not use.
+#[unstable(feature = "std_internals", issue = "0")]
+#[doc(hidden)]
+pub unsafe trait BoxMeUp {
+ fn box_me_up(&mut self) -> *mut (Any + Send);
+ fn get(&mut self) -> &(Any + Send);
+}
/// `finger` is the current byte index of the forward search.
/// Imagine that it exists before the byte at its index, i.e.
- /// haystack[finger] is the first byte of the slice we must inspect during
+ /// `haystack[finger]` is the first byte of the slice we must inspect during
/// forward searching
finger: usize,
/// `finger_back` is the current byte index of the reverse search.
# Examples
```
-", $extra_feature, "#![feature(atomic_nand)]
-
+", $extra_feature, "
use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};
let foo = ", stringify!($atomic_type), "::new(0x13);
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"i8", "../../../std/primitive.i8.html",
"#![feature(integer_atomics)]\n\n",
atomic_min, atomic_max,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"u8", "../../../std/primitive.u8.html",
"#![feature(integer_atomics)]\n\n",
atomic_umin, atomic_umax,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"i16", "../../../std/primitive.i16.html",
"#![feature(integer_atomics)]\n\n",
atomic_min, atomic_max,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"u16", "../../../std/primitive.u16.html",
"#![feature(integer_atomics)]\n\n",
atomic_umin, atomic_umax,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"i32", "../../../std/primitive.i32.html",
"#![feature(integer_atomics)]\n\n",
atomic_min, atomic_max,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"u32", "../../../std/primitive.u32.html",
"#![feature(integer_atomics)]\n\n",
atomic_umin, atomic_umax,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"i64", "../../../std/primitive.i64.html",
"#![feature(integer_atomics)]\n\n",
atomic_min, atomic_max,
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
unstable(feature = "integer_atomics", issue = "32976"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ unstable(feature = "integer_atomics", issue = "32976"),
"u64", "../../../std/primitive.u64.html",
"#![feature(integer_atomics)]\n\n",
atomic_umin, atomic_umax,
stable(feature = "atomic_debug", since = "1.3.0"),
stable(feature = "atomic_access", since = "1.15.0"),
stable(feature = "atomic_from", since = "1.23.0"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ stable(feature = "atomic_nand", since = "1.27.0"),
"isize", "../../../std/primitive.isize.html",
"",
atomic_min, atomic_max,
stable(feature = "atomic_debug", since = "1.3.0"),
stable(feature = "atomic_access", since = "1.15.0"),
stable(feature = "atomic_from", since = "1.23.0"),
- unstable(feature = "atomic_nand", issue = "13226"),
+ stable(feature = "atomic_nand", since = "1.27.0"),
"usize", "../../../std/primitive.usize.html",
"",
atomic_umin, atomic_umax,
#![feature(trusted_len)]
#![feature(try_trait)]
#![feature(exact_chunks)]
-#![feature(atomic_nand)]
+#![cfg_attr(stage0, feature(atomic_nand))]
#![feature(reverse_bits)]
#![feature(inclusive_range_fields)]
#![feature(iterator_find_map)]
// now hopefully.
#[no_mangle]
#[rustc_std_internal_symbol]
-pub unsafe extern fn __rust_start_panic(_data: usize, _vtable: usize) -> u32 {
+pub unsafe extern fn __rust_start_panic(_payload: usize) -> u32 {
abort();
#[cfg(any(unix, target_os = "cloudabi"))]
html_root_url = "https://doc.rust-lang.org/nightly/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")]
+#![feature(allocator_api)]
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(lang_items)]
#![feature(panic_unwind)]
#![feature(raw)]
#![feature(staged_api)]
+#![feature(std_internals)]
#![feature(unwind_attributes)]
#![cfg_attr(target_env = "msvc", feature(raw))]
#[cfg(not(any(target_env = "msvc", all(windows, target_arch = "x86_64", target_env = "gnu"))))]
extern crate unwind;
+use alloc::boxed::Box;
use core::intrinsics;
use core::mem;
use core::raw;
+use core::panic::BoxMeUp;
// Rust runtime's startup objects depend on these symbols, so make them public.
#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))]
// implementation.
#[no_mangle]
#[unwind(allowed)]
-pub unsafe extern "C" fn __rust_start_panic(data: usize, vtable: usize) -> u32 {
- imp::panic(mem::transmute(raw::TraitObject {
- data: data as *mut (),
- vtable: vtable as *mut (),
- }))
+pub unsafe extern "C" fn __rust_start_panic(payload: usize) -> u32 {
+ let payload = payload as *mut &mut BoxMeUp;
+ imp::panic(Box::from_raw((*payload).box_me_up()))
}
use hir::svh::Svh;
use util::nodemap::{DefIdMap, FxHashMap};
-use arena::TypedArena;
+use arena::SyncTypedArena;
use std::io;
use ty::TyCtxt;
pub struct Forest {
krate: Crate,
pub dep_graph: DepGraph,
- inlined_bodies: TypedArena<Body>
+ inlined_bodies: SyncTypedArena<Body>
}
impl Forest {
Forest {
krate,
dep_graph: dep_graph.clone(),
- inlined_bodies: TypedArena::new()
+ inlined_bodies: SyncTypedArena::new()
}
}
const NAKED = 0b0001_0000;
const NO_MANGLE = 0b0010_0000;
const RUSTC_STD_INTERNAL_SYMBOL = 0b0100_0000;
+ const NO_DEBUG = 0b1000_0000;
}
}
});
impl_stable_hash_for!(struct middle::cstore::ExternCrate {
- def_id,
+ src,
span,
- direct,
- path_len
+ path_len,
+ direct
+});
+
+impl_stable_hash_for!(enum middle::cstore::ExternCrateSource {
+ Extern(def_id),
+ Use,
+ Path,
});
impl_stable_hash_for!(struct middle::cstore::CrateSource {
enum AllocDiscriminant {
Alloc,
- ExternStatic,
+ Static,
Function,
}
impl_stable_hash_for!(enum self::AllocDiscriminant {
Alloc,
- ExternStatic,
+ Static,
Function
});
hasher: &mut StableHasher<W>,
) {
ty::tls::with_opt(|tcx| {
+ trace!("hashing {:?}", *self);
let tcx = tcx.expect("can't hash AllocIds during hir lowering");
- if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
+ if let Some(def_id) = tcx.interpret_interner.get_static(*self) {
+ AllocDiscriminant::Static.hash_stable(hcx, hasher);
+ trace!("hashing {:?} as static {:?}", *self, def_id);
+ def_id.hash_stable(hcx, hasher);
+ } else if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
AllocDiscriminant::Alloc.hash_stable(hcx, hasher);
if hcx.alloc_id_recursion_tracker.insert(*self) {
- tcx
- .interpret_interner
- .get_corresponding_static_def_id(*self)
- .hash_stable(hcx, hasher);
+ trace!("hashing {:?} as alloc {:#?}", *self, alloc);
alloc.hash_stable(hcx, hasher);
assert!(hcx.alloc_id_recursion_tracker.remove(self));
+ } else {
+ trace!("skipping hashing of {:?} due to recursion", *self);
}
} else if let Some(inst) = tcx.interpret_interner.get_fn(*self) {
+ trace!("hashing {:?} as fn {:#?}", *self, inst);
AllocDiscriminant::Function.hash_stable(hcx, hasher);
inst.hash_stable(hcx, hasher);
- } else if let Some(def_id) = tcx.interpret_interner
- .get_corresponding_static_def_id(*self) {
- AllocDiscriminant::ExternStatic.hash_stable(hcx, hasher);
- def_id.hash_stable(hcx, hasher);
} else {
bug!("no allocation for {}", self);
}
InvalidPointerMath |
ReadUndefBytes |
DeadLocal |
- ExecutionTimeLimitReached |
StackFrameLimitReached |
OutOfTls |
TlsOutOfBounds |
FromEnv(where_clause) => where_clause.hash_stable(hcx, hasher),
WellFormedTy(ty) => ty.hash_stable(hcx, hasher),
+ Normalize(projection) => projection.hash_stable(hcx, hasher),
FromEnvTy(ty) => ty.hash_stable(hcx, hasher),
RegionOutlives(predicate) => predicate.hash_stable(hcx, hasher),
TypeOutlives(predicate) => predicate.hash_stable(hcx, hasher),
) {
debug!("report_region_errors(): {} errors to start", errors.len());
- if will_later_be_reported_by_nll && self.tcx.nll() {
+ if will_later_be_reported_by_nll && self.tcx.use_mir_borrowck() {
// With `#![feature(nll)]`, we want to present a nice user
// experience, so don't even mention the errors from the
// AST checker.
return;
}
- // But with -Znll, it's nice to have some note for later.
+ // But with nll, it's nice to have some note for later.
for error in errors {
match *error {
RegionResolutionError::ConcreteFailure(ref origin, ..)
| RegionResolutionError::GenericBoundFailure(ref origin, ..) => {
self.tcx
.sess
- .span_warn(origin.span(), "not reporting region error due to -Znll");
+ .span_warn(origin.span(), "not reporting region error due to nll");
}
RegionResolutionError::SubSupConflict(ref rvo, ..) => {
self.tcx
.sess
- .span_warn(rvo.span(), "not reporting region error due to -Znll");
+ .span_warn(rvo.span(), "not reporting region error due to nll");
}
}
}
use syntax_pos::{self, Span};
use syntax_pos::symbol::InternedString;
use util::nodemap::FxHashMap;
-use arena::DroplessArena;
+use arena::SyncDroplessArena;
use self::combine::CombineFields;
use self::higher_ranked::HrMatchResult;
/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>).
pub struct InferCtxtBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
global_tcx: TyCtxt<'a, 'gcx, 'gcx>,
- arena: DroplessArena,
+ arena: SyncDroplessArena,
fresh_tables: Option<RefCell<ty::TypeckTables<'tcx>>>,
}
pub fn infer_ctxt(self) -> InferCtxtBuilder<'a, 'gcx, 'tcx> {
InferCtxtBuilder {
global_tcx: self,
- arena: DroplessArena::new(),
+ arena: SyncDroplessArena::new(),
fresh_tables: None,
}
use self::TargetLint::*;
use std::slice;
+use rustc_data_structures::sync::{RwLock, ReadGuard};
use lint::{EarlyLintPassObject, LateLintPassObject};
use lint::{Level, Lint, LintId, LintPass, LintBuffer};
use lint::builtin::BuiltinLintDiagnostics;
use util::nodemap::FxHashMap;
use std::default::Default as StdDefault;
-use std::cell::{Ref, RefCell};
use syntax::ast;
use syntax::edition;
use syntax_pos::{MultiSpan, Span};
pub struct LintSession<'a, PassObject> {
/// Reference to the store of registered lints.
- lints: Ref<'a, LintStore>,
+ lints: ReadGuard<'a, LintStore>,
/// Trait objects for each lint pass.
passes: Option<Vec<PassObject>>,
/// Creates a new `LintSession`, by moving out the `LintStore`'s initial
/// lint levels and pass objects. These can be restored using the `restore`
/// method.
- fn new(store: &'a RefCell<LintStore>) -> LintSession<'a, PassObject> {
+ fn new(store: &'a RwLock<LintStore>) -> LintSession<'a, PassObject> {
let mut s = store.borrow_mut();
let passes = PassObject::take_passes(&mut *s);
drop(s);
}
/// Restores the levels back to the original lint store.
- fn restore(self, store: &RefCell<LintStore>) {
+ fn restore(self, store: &RwLock<LintStore>) {
drop(self.lints);
let mut s = store.borrow_mut();
PassObject::restore_passes(&mut *s, self.passes);
#[derive(Copy, Clone, Debug)]
pub struct ExternCrate {
- /// def_id of an `extern crate` in the current crate that caused
- /// this crate to be loaded; note that there could be multiple
- /// such ids
- pub def_id: DefId,
+ pub src: ExternCrateSource,
/// span of the extern crate that caused this to be loaded
pub span: Span,
+ /// Number of links to reach the extern;
+ /// used to select the extern with the shortest path
+ pub path_len: usize,
+
/// If true, then this crate is the crate named by the extern
/// crate referenced above. If false, then this crate is a dep
/// of the crate.
pub direct: bool,
+}
- /// Number of links to reach the extern crate `def_id`
- /// declaration; used to select the extern crate with the shortest
- /// path
- pub path_len: usize,
+#[derive(Copy, Clone, Debug)]
+pub enum ExternCrateSource {
+ /// Crate is loaded by `extern crate`.
+ Extern(
+ /// def_id of the item in the current crate that caused
+ /// this crate to be loaded; note that there could be multiple
+ /// such ids
+ DefId,
+ ),
+ /// Crate is loaded by `use`.
+ Use,
+ /// Crate is implicitly loaded by an absolute or an `extern::` path.
+ Path,
}
pub struct EncodedMetadata {
}
pub trait CrateLoader {
- fn process_item(&mut self, item: &ast::Item, defs: &Definitions);
+ fn process_extern_crate(&mut self, item: &ast::Item, defs: &Definitions) -> CrateNum;
+
+ fn process_path_extern(
+ &mut self,
+ name: Symbol,
+ span: Span,
+ ) -> CrateNum;
+
+ fn process_use_extern(
+ &mut self,
+ name: Symbol,
+ span: Span,
+ id: ast::NodeId,
+ defs: &Definitions,
+ ) -> CrateNum;
+
fn postprocess(&mut self, krate: &ast::Crate);
- fn resolve_crate_from_path(&mut self, name: Symbol, span: Span) -> CrateNum;
}
// This method is used when generating the command line to pass through to
Intrinsic(String),
OverflowingMath,
InvalidChar(u128),
- ExecutionTimeLimitReached,
StackFrameLimitReached,
OutOfTls,
TlsOutOfBounds,
"mir not found",
InvalidChar(..) =>
"tried to interpret an invalid 32-bit value as a char",
- ExecutionTimeLimitReached =>
- "the expression was too complex to be evaluated or resulted in an infinite loop",
StackFrameLimitReached =>
"reached the configured maximum number of stack frames",
OutOfTls =>
impl ::rustc_serialize::UseSpecializedEncodable for AllocId {}
impl ::rustc_serialize::UseSpecializedDecodable for AllocId {}
-pub const ALLOC_DISCRIMINANT: usize = 0;
-pub const FN_DISCRIMINANT: usize = 1;
-pub const EXTERN_STATIC_DISCRIMINANT: usize = 2;
-pub const SHORTHAND_START: usize = 3;
+#[derive(RustcDecodable, RustcEncodable)]
+enum AllocKind {
+ Alloc,
+ Fn,
+ Static,
+}
pub fn specialized_encode_alloc_id<
'a, 'tcx,
encoder: &mut E,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
alloc_id: AllocId,
- shorthand: Option<usize>,
) -> Result<(), E::Error> {
- if let Some(shorthand) = shorthand {
- return shorthand.encode(encoder);
- }
if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
trace!("encoding {:?} with {:#?}", alloc_id, alloc);
- ALLOC_DISCRIMINANT.encode(encoder)?;
+ AllocKind::Alloc.encode(encoder)?;
alloc.encode(encoder)?;
- // encode whether this allocation is the root allocation of a static
- tcx.interpret_interner
- .get_corresponding_static_def_id(alloc_id)
- .encode(encoder)?;
} else if let Some(fn_instance) = tcx.interpret_interner.get_fn(alloc_id) {
trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
- FN_DISCRIMINANT.encode(encoder)?;
+ AllocKind::Fn.encode(encoder)?;
fn_instance.encode(encoder)?;
- } else if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(alloc_id) {
- // extern "C" statics don't have allocations, just encode its def_id
- EXTERN_STATIC_DISCRIMINANT.encode(encoder)?;
+ } else if let Some(did) = tcx.interpret_interner.get_static(alloc_id) {
+ // referring to statics doesn't need to know about their allocations, just about their DefId
+ AllocKind::Static.encode(encoder)?;
did.encode(encoder)?;
} else {
bug!("alloc id without corresponding allocation: {}", alloc_id);
pub fn specialized_decode_alloc_id<
'a, 'tcx,
D: Decoder,
- CACHE: FnOnce(&mut D, usize, AllocId),
- SHORT: FnOnce(&mut D, usize) -> Result<AllocId, D::Error>
+ CACHE: FnOnce(&mut D, AllocId),
>(
decoder: &mut D,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pos: usize,
cache: CACHE,
- short: SHORT,
) -> Result<AllocId, D::Error> {
- match usize::decode(decoder)? {
- ALLOC_DISCRIMINANT => {
+ match AllocKind::decode(decoder)? {
+ AllocKind::Alloc => {
let alloc_id = tcx.interpret_interner.reserve();
- trace!("creating alloc id {:?} at {}", alloc_id, pos);
+ trace!("creating alloc id {:?}", alloc_id);
// insert early to allow recursive allocs
- cache(decoder, pos, alloc_id);
+ cache(decoder, alloc_id);
let allocation = Allocation::decode(decoder)?;
trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
let allocation = tcx.intern_const_alloc(allocation);
tcx.interpret_interner.intern_at_reserved(alloc_id, allocation);
- if let Some(glob) = Option::<DefId>::decode(decoder)? {
- tcx.interpret_interner.cache(glob, alloc_id);
- }
-
Ok(alloc_id)
},
- FN_DISCRIMINANT => {
- trace!("creating fn alloc id at {}", pos);
+ AllocKind::Fn => {
+ trace!("creating fn alloc id");
let instance = ty::Instance::decode(decoder)?;
trace!("decoded fn alloc instance: {:?}", instance);
let id = tcx.interpret_interner.create_fn_alloc(instance);
trace!("created fn alloc id: {:?}", id);
- cache(decoder, pos, id);
+ cache(decoder, id);
Ok(id)
},
- EXTERN_STATIC_DISCRIMINANT => {
- trace!("creating extern static alloc id at {}", pos);
+ AllocKind::Static => {
+ trace!("creating extern static alloc id at");
let did = DefId::decode(decoder)?;
- let alloc_id = tcx.interpret_interner.reserve();
- tcx.interpret_interner.cache(did, alloc_id);
+ let alloc_id = tcx.interpret_interner.cache_static(did);
+ cache(decoder, alloc_id);
Ok(alloc_id)
},
- shorthand => {
- trace!("loading shorthand {}", shorthand);
- short(decoder, shorthand)
- },
}
}
Location { block: self.block, statement_index: self.statement_index + 1 }
}
- pub fn dominates(&self, other: &Location, dominators: &Dominators<BasicBlock>) -> bool {
+ pub fn dominates(&self, other: Location, dominators: &Dominators<BasicBlock>) -> bool {
if self.block == other.block {
self.statement_index <= other.statement_index
} else {
self.0.values()
}
+ pub fn len(&self) -> usize {
+ self.0.len()
+ }
+
// True if any of the output types require codegen or linking.
pub fn should_trans(&self) -> bool {
self.0.keys().any(|k| match *k {
useful for profiling / PGO."),
relro_level: Option<RelroLevel> = (None, parse_relro_level, [TRACKED],
"choose which RELRO level to use"),
- nll: bool = (false, parse_bool, [UNTRACKED],
- "run the non-lexical lifetimes MIR pass"),
disable_nll_user_type_assert: bool = (false, parse_bool, [UNTRACKED],
"disable user provided type assertion in NLL"),
trans_time_graph: bool = (false, parse_bool, [UNTRACKED],
use util::common::{duration_to_secs_str, ErrorReported};
use util::common::ProfileQueriesMsg;
-use rustc_data_structures::sync::{Lrc, Lock, LockCell, OneThread, Once};
+use rustc_data_structures::sync::{self, Lrc, Lock, LockCell, OneThread, Once, RwLock};
use syntax::ast::NodeId;
use errors::{self, DiagnosticBuilder, DiagnosticId};
// FIXME: lint_store and buffered_lints are not thread-safe,
// but are only used in a single thread
- pub lint_store: OneThread<RefCell<lint::LintStore>>,
- pub buffered_lints: OneThread<RefCell<Option<lint::LintBuffer>>>,
+ pub lint_store: RwLock<lint::LintStore>,
+ pub buffered_lints: Lock<Option<lint::LintBuffer>>,
/// Set of (DiagnosticId, Option<Span>, message) tuples tracking
/// (sub)diagnostics that have been set once, but should not be set again,
/// in order to avoid redundantly verbose output (Issue #24690, #44953).
- pub one_time_diagnostics: RefCell<FxHashSet<(DiagnosticMessageId, Option<Span>, String)>>,
+ pub one_time_diagnostics: Lock<FxHashSet<(DiagnosticMessageId, Option<Span>, String)>>,
pub plugin_llvm_passes: OneThread<RefCell<Vec<String>>>,
pub plugin_attributes: OneThread<RefCell<Vec<(String, AttributeType)>>>,
pub crate_types: Once<Vec<config::CrateType>>,
/// The maximum number of stackframes allowed in const eval
pub const_eval_stack_frame_limit: usize,
- /// The maximum number miri steps per constant
- pub const_eval_step_limit: usize,
/// The metadata::creader module may inject an allocator/panic_runtime
/// dependency if it didn't already find one, and this tracks what was
}
pub fn teach(&self, code: &DiagnosticId) -> bool {
- self.opts.debugging_opts.teach && !self.parse_sess.span_diagnostic.code_emitted(code)
+ self.opts.debugging_opts.teach && self.parse_sess.span_diagnostic.must_teach(code)
}
/// Are we allowed to use features from the Rust 2018 edition?
let external_macro_backtrace = sopts.debugging_opts.external_macro_backtrace;
- let emitter: Box<dyn Emitter> =
+ let emitter: Box<dyn Emitter + sync::Send> =
match (sopts.error_format, emitter_dest) {
(config::ErrorOutputType::HumanReadable(color_config), None) => Box::new(
EmitterWriter::stderr(
default_sysroot,
local_crate_source_file,
working_dir,
- lint_store: OneThread::new(RefCell::new(lint::LintStore::new())),
- buffered_lints: OneThread::new(RefCell::new(Some(lint::LintBuffer::new()))),
- one_time_diagnostics: RefCell::new(FxHashSet()),
+ lint_store: RwLock::new(lint::LintStore::new()),
+ buffered_lints: Lock::new(Some(lint::LintBuffer::new())),
+ one_time_diagnostics: Lock::new(FxHashSet()),
plugin_llvm_passes: OneThread::new(RefCell::new(Vec::new())),
plugin_attributes: OneThread::new(RefCell::new(Vec::new())),
crate_types: Once::new(),
recursion_limit: Once::new(),
type_length_limit: Once::new(),
const_eval_stack_frame_limit: 100,
- const_eval_step_limit: 1_000_000,
next_node_id: OneThread::new(Cell::new(NodeId::new(1))),
injected_allocator: Once::new(),
allocator_kind: Once::new(),
}
pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
- let emitter: Box<dyn Emitter> = match output {
+ let emitter: Box<dyn Emitter + sync::Send> = match output {
config::ErrorOutputType::HumanReadable(color_config) => {
Box::new(EmitterWriter::stderr(color_config, None, false, false))
}
}
pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
- let emitter: Box<dyn Emitter> = match output {
+ let emitter: Box<dyn Emitter + sync::Send> = match output {
config::ErrorOutputType::HumanReadable(color_config) => {
Box::new(EmitterWriter::stderr(color_config, None, false, false))
}
WellFormed(WhereClauseAtom<'tcx>),
FromEnv(WhereClauseAtom<'tcx>),
WellFormedTy(Ty<'tcx>),
+ Normalize(ty::ProjectionPredicate<'tcx>),
FromEnvTy(Ty<'tcx>),
RegionOutlives(ty::RegionOutlivesPredicate<'tcx>),
TypeOutlives(ty::TypeOutlivesPredicate<'tcx>),
FromEnv(Implemented(trait_ref)) => write!(fmt, "FromEnv({})", trait_ref),
FromEnv(ProjectionEq(projection)) => write!(fmt, "FromEnv({})", projection),
WellFormedTy(ty) => write!(fmt, "WellFormed({})", ty),
+ Normalize(projection) => write!(fmt, "Normalize({})", projection),
FromEnvTy(ty) => write!(fmt, "FromEnv({})", ty),
RegionOutlives(predicate) => write!(fmt, "RegionOutlives({})", predicate),
TypeOutlives(predicate) => write!(fmt, "TypeOutlives({})", predicate),
(traits::DomainGoal::WellFormed)(wc),
(traits::DomainGoal::FromEnv)(wc),
(traits::DomainGoal::WellFormedTy)(ty),
+ (traits::DomainGoal::Normalize)(projection),
(traits::DomainGoal::FromEnvTy)(ty),
(traits::DomainGoal::RegionOutlives)(predicate),
(traits::DomainGoal::TypeOutlives)(predicate),
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
StableHasher, StableHasherResult,
StableVec};
-use arena::{TypedArena, DroplessArena};
+use arena::{TypedArena, SyncDroplessArena};
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::sync::{Lrc, Lock};
use std::any::Any;
use std::borrow::Borrow;
-use std::cell::Cell;
use std::cmp::Ordering;
use std::collections::hash_map::{self, Entry};
use std::hash::{Hash, Hasher};
pub struct AllArenas<'tcx> {
pub global: GlobalArenas<'tcx>,
- pub interner: DroplessArena,
+ pub interner: SyncDroplessArena,
}
impl<'tcx> AllArenas<'tcx> {
pub fn new() -> Self {
AllArenas {
global: GlobalArenas::new(),
- interner: DroplessArena::new(),
+ interner: SyncDroplessArena::new(),
}
}
}
pub struct CtxtInterners<'tcx> {
/// The arena that types, regions, etc are allocated from
- arena: &'tcx DroplessArena,
+ arena: &'tcx SyncDroplessArena,
/// Specifically use a speedy hash algorithm for these hash sets,
/// they're accessed quite often.
}
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
- fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
+ fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
CtxtInterners {
arena,
type_: Default::default(),
return ty;
}
let global_interner = global_interners.map(|interners| {
- interners.type_.borrow_mut()
+ (interners.type_.borrow_mut(), &interners.arena)
});
- if let Some(ref interner) = global_interner {
- if let Some(&Interned(ty)) = interner.get(&st) {
+ if let Some((ref type_, _)) = global_interner {
+ if let Some(&Interned(ty)) = type_.get(&st) {
return ty;
}
}
// determine that all contents are in the global tcx.
// See comments on Lift for why we can't use that.
if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
- if let Some(interner) = global_interners {
+ if let Some((mut type_, arena)) = global_interner {
let ty_struct: TyS<'gcx> = unsafe {
mem::transmute(ty_struct)
};
- let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
- global_interner.unwrap().insert(Interned(ty));
+ let ty: Ty<'gcx> = arena.alloc(ty_struct);
+ type_.insert(Interned(ty));
return ty;
}
} else {
// Make sure we don't end up with inference
// types/regions in the global tcx.
- if global_interners.is_none() {
+ if global_interner.is_none() {
drop(interner);
bug!("Attempted to intern `{:?}` which contains \
inference types/regions in the global type context",
/// Data layout specification for the current target.
pub data_layout: TargetDataLayout,
- /// Used to prevent layout from recursing too deeply.
- pub layout_depth: Cell<usize>,
-
stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
pub interpret_interner: InterpretInterner<'tcx>,
/// Allows obtaining const allocs via a unique identifier
alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,
- /// Reverse map of `alloc_cache`
- global_cache: FxHashMap<interpret::AllocId, DefId>,
+ /// Allows obtaining static def ids via a unique id
+ statics: FxHashMap<interpret::AllocId, DefId>,
/// The AllocId to assign to the next new regular allocation.
/// Always incremented, never gets smaller.
next_id: interpret::AllocId,
- /// Allows checking whether a static already has an allocation
- ///
- /// This is only important for detecting statics referring to themselves
- // FIXME(oli-obk) move it to the EvalContext?
- alloc_cache: FxHashMap<DefId, interpret::AllocId>,
+ /// Inverse map of `statics`
+ /// Used so we don't allocate a new pointer every time we need one
+ static_cache: FxHashMap<DefId, interpret::AllocId>,
/// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
/// allocations for string and bytestring literals.
self.inner.borrow().alloc_by_id.get(&id).cloned()
}
- pub fn get_cached(
- &self,
- static_id: DefId,
- ) -> Option<interpret::AllocId> {
- self.inner.borrow().alloc_cache.get(&static_id).cloned()
- }
-
- pub fn cache(
+ pub fn cache_static(
&self,
static_id: DefId,
- alloc_id: interpret::AllocId,
- ) {
- let mut inner = self.inner.borrow_mut();
- inner.global_cache.insert(alloc_id, static_id);
- if let Some(old) = inner.alloc_cache.insert(static_id, alloc_id) {
- bug!("tried to cache {:?}, but was already existing as {:#?}", static_id, old);
+ ) -> interpret::AllocId {
+ if let Some(alloc_id) = self.inner.borrow().static_cache.get(&static_id).cloned() {
+ return alloc_id;
}
+ let alloc_id = self.reserve();
+ let mut inner = self.inner.borrow_mut();
+ inner.static_cache.insert(static_id, alloc_id);
+ inner.statics.insert(alloc_id, static_id);
+ alloc_id
}
- pub fn get_corresponding_static_def_id(
+ pub fn get_static(
&self,
ptr: interpret::AllocId,
) -> Option<DefId> {
- self.inner.borrow().global_cache.get(&ptr).cloned()
+ self.inner.borrow().statics.get(&ptr).cloned()
}
pub fn intern_at_reserved(
crate_name: Symbol::intern(crate_name),
data_layout,
layout_interner: Lock::new(FxHashSet()),
- layout_depth: Cell::new(0),
stability_interner: Lock::new(FxHashSet()),
interpret_interner: Default::default(),
tx_to_llvm_workers: Lock::new(tx),
self.on_disk_query_result_cache.serialize(self.global_tcx(), encoder)
}
- /// If true, we should use NLL-style region checking instead of
- /// lexical style.
- pub fn nll(self) -> bool {
- self.features().nll || self.sess.opts.debugging_opts.nll
- }
-
/// If true, we should use the MIR-based borrowck (we may *also* use
/// the AST-based borrowck).
- pub fn use_mir(self) -> bool {
+ pub fn use_mir_borrowck(self) -> bool {
self.borrowck_mode().use_mir()
}
mode @ BorrowckMode::Compare => mode,
mode @ BorrowckMode::Ast => {
- if self.nll() {
+ if self.features().nll {
BorrowckMode::Mir
} else {
mode
/// MIR borrowck, but not when NLL is used. They are also consumed
/// by the validation stuff.
pub fn emit_end_regions(self) -> bool {
- // FIXME(#46875) -- we should not emit end regions when NLL is enabled,
- // but for now we can't stop doing so because it causes false positives
self.sess.opts.debugging_opts.emit_end_regions ||
self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
- self.use_mir()
+ self.use_mir_borrowck()
}
#[inline]
/// Call the closure with a local `TyCtxt` using the given arena.
pub fn enter_local<F, R>(
&self,
- arena: &'tcx DroplessArena,
+ arena: &'tcx SyncDroplessArena,
f: F
) -> R
where
let new_icx = ty::tls::ImplicitCtxt {
tcx,
query: icx.query.clone(),
+ layout_depth: icx.layout_depth,
};
ty::tls::enter_context(&new_icx, |new_icx| {
f(new_icx.tcx)
/// The current query job, if any. This is updated by start_job in
/// ty::maps::plumbing when executing a query
pub query: Option<Lrc<maps::QueryJob<'gcx>>>,
+
+ /// Used to prevent layout from recursing too deeply.
+ pub layout_depth: usize,
}
// A thread local value which stores a pointer to the current ImplicitCtxt
let icx = ImplicitCtxt {
tcx,
query: None,
+ layout_depth: 0,
};
enter_context(&icx, |_| {
f(tcx)
use hir::map::DefPathData;
use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use ty::{self, Ty, TyCtxt};
+use middle::cstore::{ExternCrate, ExternCrateSource};
use syntax::ast;
use syntax::symbol::Symbol;
use syntax::symbol::InternedString;
// `extern crate` manually, we put the `extern
// crate` as the parent. So you wind up with
// something relative to the current crate.
- // 2. for an indirect crate, where there is no extern
- // crate, we just prepend the crate name.
+ // 2. for an extern inferred from a path or an indirect crate,
+ // where there is no explicit `extern crate`, we just prepend
+ // the crate name.
//
// Returns `None` for the local crate.
if cnum != LOCAL_CRATE {
let opt_extern_crate = self.extern_crate(cnum.as_def_id());
- let opt_extern_crate = opt_extern_crate.and_then(|extern_crate| {
- if extern_crate.direct {
- Some(extern_crate.def_id)
- } else {
- None
- }
- });
- if let Some(extern_crate_def_id) = opt_extern_crate {
- self.push_item_path(buffer, extern_crate_def_id);
+ if let Some(ExternCrate {
+ src: ExternCrateSource::Extern(def_id),
+ direct: true,
+ ..
+ }) = *opt_extern_crate
+ {
+ self.push_item_path(buffer, def_id);
} else {
buffer.push(&self.crate_name(cnum).as_str());
}
// followed by the path to the item within the crate and return.
if cur_def.index == CRATE_DEF_INDEX {
match *self.extern_crate(cur_def) {
- Some(ref extern_crate) if extern_crate.direct => {
- self.push_item_path(buffer, extern_crate.def_id);
- cur_path.iter().rev().map(|segment| buffer.push(&segment)).count();
+ Some(ExternCrate {
+ src: ExternCrateSource::Extern(def_id),
+ direct: true,
+ ..
+ }) => {
+ self.push_item_path(buffer, def_id);
+ cur_path.iter().rev().for_each(|segment| buffer.push(&segment));
return true;
}
None => {
buffer.push(&self.crate_name(cur_def.krate).as_str());
- cur_path.iter().rev().map(|segment| buffer.push(&segment)).count();
+ cur_path.iter().rev().for_each(|segment| buffer.push(&segment));
return true;
}
_ => {},
query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
-> Result<&'tcx LayoutDetails, LayoutError<'tcx>>
{
- let (param_env, ty) = query.into_parts();
+ ty::tls::with_related_context(tcx, move |icx| {
+ let rec_limit = *tcx.sess.recursion_limit.get();
+ let (param_env, ty) = query.into_parts();
- let rec_limit = *tcx.sess.recursion_limit.get();
- let depth = tcx.layout_depth.get();
- if depth > rec_limit {
- tcx.sess.fatal(
- &format!("overflow representing the type `{}`", ty));
- }
+ if icx.layout_depth > rec_limit {
+ tcx.sess.fatal(
+ &format!("overflow representing the type `{}`", ty));
+ }
- tcx.layout_depth.set(depth+1);
- let cx = LayoutCx { tcx, param_env };
- let layout = cx.layout_raw_uncached(ty);
- tcx.layout_depth.set(depth);
+ // Update the ImplicitCtxt to increase the layout_depth
+ let icx = ty::tls::ImplicitCtxt {
+ layout_depth: icx.layout_depth + 1,
+ ..icx.clone()
+ };
- layout
+ ty::tls::enter_context(&icx, |_| {
+ let cx = LayoutCx { tcx, param_env };
+ cx.layout_raw_uncached(ty)
+ })
+ })
}
pub fn provide(providers: &mut ty::maps::Providers) {
/// A span and a query key
#[derive(Clone, Debug)]
pub struct QueryInfo<'tcx> {
+ /// The span for the reason this query was required
pub span: Span,
pub query: Query<'tcx>,
}
cycle.insert(0, job.info.clone());
if &*job as *const _ == self as *const _ {
- break;
+ // This is the end of the cycle.
+ // The span entry we included was for the usage of the
+ // cycle itself, and not part of the cycle.
+ // Replace it with the span which caused the cycle to form.
+ cycle[0].span = span;
+ // Find out why the cycle itself was used
+ let usage = job.parent.as_ref().map(|parent| {
+ (job.info.span, parent.info.query.clone())
+ });
+ return Err(CycleError { usage, cycle });
}
current_job = job.parent.clone();
}
- Err(CycleError { span, cycle })
+ panic!("did not find a cycle")
}
/// Signals to waiters that the query is complete.
substitute_normalize_and_test_predicates_node((DefId, &'tcx Substs<'tcx>)) -> bool,
[] fn target_features_whitelist:
- target_features_whitelist_node(CrateNum) -> Lrc<FxHashSet<String>>,
+ target_features_whitelist_node(CrateNum) -> Lrc<FxHashMap<String, Option<String>>>,
// Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.
[] fn instance_def_size_estimate: instance_def_size_estimate_dep_node(ty::InstanceDef<'tcx>)
use ty::maps::job::QueryResult;
use ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
use ty::context::TyCtxt;
+use util::common::time;
const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
// `serialized_data`.
prev_diagnostics_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
- // A cache to ensure we don't read allocations twice
- interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
+ // Alloc indices to memory location map
+ prev_interpret_alloc_index: Vec<AbsoluteBytePos>,
- // A map from positions to size of the serialized allocation
- // so we can skip over already processed allocations
- interpret_alloc_size: RefCell<FxHashMap<usize, usize>>,
+ /// Deserialization: A cache to ensure we don't read allocations twice
+ interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
}
// This type is used only for (de-)serialization.
prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
query_result_index: EncodedQueryResultIndex,
diagnostics_index: EncodedQueryResultIndex,
+ // The byte position of each interpreter allocation in the serialized data
+ interpret_alloc_index: Vec<AbsoluteBytePos>,
}
type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
query_result_index: footer.query_result_index.into_iter().collect(),
prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
synthetic_expansion_infos: Lock::new(FxHashMap()),
+ prev_interpret_alloc_index: footer.interpret_alloc_index,
interpret_alloc_cache: RefCell::new(FxHashMap::default()),
- interpret_alloc_size: RefCell::new(FxHashMap::default()),
}
}
query_result_index: FxHashMap(),
prev_diagnostics_index: FxHashMap(),
synthetic_expansion_infos: Lock::new(FxHashMap()),
+ prev_interpret_alloc_index: Vec::new(),
interpret_alloc_cache: RefCell::new(FxHashMap::default()),
- interpret_alloc_size: RefCell::new(FxHashMap::default()),
}
}
type_shorthands: FxHashMap(),
predicate_shorthands: FxHashMap(),
expn_info_shorthands: FxHashMap(),
- interpret_alloc_shorthands: FxHashMap(),
+ interpret_allocs: FxHashMap(),
+ interpret_allocs_inverse: Vec::new(),
codemap: CachingCodemapView::new(tcx.sess.codemap()),
file_to_file_index,
};
// Encode query results
let mut query_result_index = EncodedQueryResultIndex::new();
- {
+ time(tcx.sess, "encode query results", || {
use ty::maps::queries::*;
let enc = &mut encoder;
let qri = &mut query_result_index;
}
}
}
- }
+
+ Ok(())
+ })?;
// Encode diagnostics
let diagnostics_index = {
diagnostics_index
};
+ let interpret_alloc_index = {
+ let mut interpret_alloc_index = Vec::new();
+ let mut n = 0;
+ loop {
+ let new_n = encoder.interpret_allocs_inverse.len();
+ // if we have found new ids, serialize those, too
+ if n == new_n {
+ // no new ids were found since the last pass, so we are done
+ break;
+ }
+ for idx in n..new_n {
+ let id = encoder.interpret_allocs_inverse[idx];
+ let pos = AbsoluteBytePos::new(encoder.position());
+ interpret_alloc_index.push(pos);
+ interpret::specialized_encode_alloc_id(
+ &mut encoder,
+ tcx,
+ id,
+ )?;
+ }
+ n = new_n;
+ }
+ interpret_alloc_index
+ };
+
let sorted_cnums = sorted_cnums_including_local_crate(tcx);
let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| {
let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
prev_cnums,
query_result_index,
diagnostics_index,
+ interpret_alloc_index,
})?;
// Encode the position of the footer as the last 8 bytes of the
file_index_to_file: &self.file_index_to_file,
file_index_to_stable_id: &self.file_index_to_stable_id,
synthetic_expansion_infos: &self.synthetic_expansion_infos,
+ prev_interpret_alloc_index: &self.prev_interpret_alloc_index,
interpret_alloc_cache: &self.interpret_alloc_cache,
- interpret_alloc_size: &self.interpret_alloc_size,
};
match decode_tagged(&mut decoder, dep_node_index) {
file_index_to_file: &'x Lock<FxHashMap<FileMapIndex, Lrc<FileMap>>>,
file_index_to_stable_id: &'x FxHashMap<FileMapIndex, StableFilemapId>,
interpret_alloc_cache: &'x RefCell<FxHashMap<usize, interpret::AllocId>>,
- interpret_alloc_size: &'x RefCell<FxHashMap<usize, usize>>,
+ /// Maps from an index in the cache file to its byte position in the cache file
+ prev_interpret_alloc_index: &'x [AbsoluteBytePos],
}
impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> {
impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx, 'x> {
fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
let tcx = self.tcx;
- let pos = TyDecoder::position(self);
- trace!("specialized_decode_alloc_id: {:?}", pos);
- if let Some(cached) = self.interpret_alloc_cache.borrow().get(&pos).cloned() {
- // if there's no end position we are currently deserializing a recursive
- // allocation
- if let Some(end) = self.interpret_alloc_size.borrow().get(&pos).cloned() {
- trace!("{} already cached as {:?}", pos, cached);
- // skip ahead
- self.opaque.set_position(end);
- return Ok(cached)
- }
+ let idx = usize::decode(self)?;
+ trace!("loading index {}", idx);
+
+ if let Some(cached) = self.interpret_alloc_cache.borrow().get(&idx).cloned() {
+ trace!("loading alloc id {:?} from alloc_cache", cached);
+ return Ok(cached);
}
- let id = interpret::specialized_decode_alloc_id(
- self,
- tcx,
- pos,
- |this, pos, alloc_id| {
- assert!(this.interpret_alloc_cache.borrow_mut().insert(pos, alloc_id).is_none());
- },
- |this, shorthand| {
- // need to load allocation
- this.with_position(shorthand, |this| interpret::AllocId::decode(this))
- }
- )?;
- assert!(self
- .interpret_alloc_size
- .borrow_mut()
- .insert(pos, TyDecoder::position(self))
- .is_none());
- Ok(id)
+ let pos = self.prev_interpret_alloc_index[idx].to_usize();
+ trace!("loading position {}", pos);
+ self.with_position(pos, |this| {
+ interpret::specialized_decode_alloc_id(
+ this,
+ tcx,
+ |this, alloc_id| {
+ trace!("caching idx {} for alloc id {} at position {}", idx, alloc_id, pos);
+ assert!(this
+ .interpret_alloc_cache
+ .borrow_mut()
+ .insert(idx, alloc_id)
+ .is_none());
+ },
+ )
+ })
}
}
impl<'a, 'tcx, 'x> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx, 'x> {
type_shorthands: FxHashMap<ty::Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
expn_info_shorthands: FxHashMap<Mark, AbsoluteBytePos>,
- interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs_inverse: Vec<interpret::AllocId>,
codemap: CachingCodemapView<'tcx>,
file_to_file_index: FxHashMap<*const FileMap, FileMapIndex>,
}
{
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
use std::collections::hash_map::Entry;
- let tcx = self.tcx;
- let pos = self.position();
- let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
- Entry::Occupied(entry) => Some(entry.get().clone()),
- Entry::Vacant(entry) => {
- // ensure that we don't place any AllocIds at the very beginning
- // of the metadata file, because that would end up making our indices
- // not special. It is essentially impossible for that to happen,
- // but let's make sure
- assert!(pos >= interpret::SHORTHAND_START);
- entry.insert(pos);
- None
+ let index = match self.interpret_allocs.entry(*alloc_id) {
+ Entry::Occupied(e) => *e.get(),
+ Entry::Vacant(e) => {
+ let idx = self.interpret_allocs_inverse.len();
+ self.interpret_allocs_inverse.push(*alloc_id);
+ e.insert(idx);
+ idx
},
};
- interpret::specialized_encode_alloc_id(
- self,
- tcx,
- *alloc_id,
- shorthand,
- )
+
+ index.encode(self)
}
}
E: 'enc + TyEncoder,
Q::Value: Encodable,
{
+ let desc = &format!("encode_query_results for {}",
+ unsafe { ::std::intrinsics::type_name::<Q>() });
+
+ time(tcx.sess, desc, || {
+
for (key, entry) in Q::get_cache_internal(tcx).map.iter() {
if Q::cache_on_disk(key.clone()) {
let entry = match *entry {
}
Ok(())
+ })
}
use errors::Level;
use ty::tls;
use ty::{TyCtxt};
+use ty::maps::Query;
use ty::maps::config::QueryDescription;
use ty::maps::job::{QueryResult, QueryInfo};
use ty::item_path;
#[derive(Clone)]
pub(super) struct CycleError<'tcx> {
- pub(super) span: Span,
+ /// The query and related span which uses the cycle
+ pub(super) usage: Option<(Span, Query<'tcx>)>,
pub(super) cycle: Vec<QueryInfo<'tcx>>,
}
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- pub(super) fn report_cycle(self, CycleError { span, cycle: stack }: CycleError)
+ pub(super) fn report_cycle(self, CycleError { usage, cycle: stack }: CycleError<'gcx>)
-> DiagnosticBuilder<'a>
{
assert!(!stack.is_empty());
+ let fix_span = |span: Span, query: &Query<'gcx>| {
+ self.sess.codemap().def_span(query.default_span(self, span))
+ };
+
// Disable naming impls with types in this path, since that
// sometimes cycles itself, leading to extra cycle errors.
// (And cycle errors around impls tend to occur during the
// collect/coherence phases anyhow.)
item_path::with_forced_impl_filename_line(|| {
- let span = self.sess.codemap().def_span(span);
- let mut err =
- struct_span_err!(self.sess, span, E0391,
- "cyclic dependency detected");
- err.span_label(span, "cyclic reference");
-
- err.span_note(self.sess.codemap().def_span(stack[0].span),
- &format!("the cycle begins when {}...", stack[0].query.describe(self)));
-
- for &QueryInfo { span, ref query, .. } in &stack[1..] {
- err.span_note(self.sess.codemap().def_span(span),
- &format!("...which then requires {}...", query.describe(self)));
+ let span = fix_span(stack[1 % stack.len()].span, &stack[0].query);
+ let mut err = struct_span_err!(self.sess,
+ span,
+ E0391,
+ "cycle detected when {}",
+ stack[0].query.describe(self));
+
+ for i in 1..stack.len() {
+ let query = &stack[i].query;
+ let span = fix_span(stack[(i + 1) % stack.len()].span, query);
+ err.span_note(span, &format!("...which requires {}...", query.describe(self)));
}
- err.note(&format!("...which then again requires {}, completing the cycle.",
+ err.note(&format!("...which again requires {}, completing the cycle",
stack[0].query.describe(self)));
+ if let Some((span, query)) = usage {
+ err.span_note(fix_span(span, &query),
+ &format!("cycle used when {}", query.describe(self)));
+ }
+
return err
})
}
r
}
}
+
+ // FIXME(eddyb) Get more valid Span's on queries.
+ pub fn default_span(&self, tcx: TyCtxt<'_, $tcx, '_>, span: Span) -> Span {
+ if span != DUMMY_SP {
+ return span;
+ }
+ // The def_span query is used to calculate default_span,
+ // so exit to avoid infinite recursion
+ match *self {
+ Query::def_span(..) => return span,
+ _ => ()
+ }
+ match *self {
+ $(Query::$name(key) => key.default_span(tcx),)*
+ }
+ }
}
pub mod queries {
/// If the query already executed and panicked, this will fatal error / silently panic
fn try_get_lock(
tcx: TyCtxt<'a, $tcx, 'lcx>,
- mut span: Span,
+ span: Span,
key: &$K
) -> TryGetLock<'a, $tcx, $V, Self>
{
};
mem::drop(lock);
- // This just matches the behavior of `try_get_with` so the span when
- // we await matches the span we would use when executing.
- // See the FIXME there.
- if span == DUMMY_SP && stringify!($name) != "def_span" {
- span = key.default_span(tcx);
- }
-
if let Err(cycle) = job.await(tcx, span) {
return TryGetLock::JobCompleted(Err(cycle));
}
}
fn try_get_with(tcx: TyCtxt<'a, $tcx, 'lcx>,
- mut span: Span,
+ span: Span,
key: $K)
-> Result<$V, CycleError<$tcx>>
{
let mut lock = get_lock_or_return!();
- // FIXME(eddyb) Get more valid Span's on queries.
- // def_span guard is necessary to prevent a recursive loop,
- // default_span calls def_span query internally.
- if span == DUMMY_SP && stringify!($name) != "def_span" {
- // This might deadlock if we hold the map lock since we might be
- // waiting for the def_span query and switch to some other fiber
- // So we drop the lock here and reacquire it
- mem::drop(lock);
- span = key.default_span(tcx);
- lock = get_lock_or_return!();
- }
-
// Fast path for when incr. comp. is off. `to_dep_node` is
// expensive for some DepKinds.
if !tcx.dep_graph.is_fully_enabled() {
let icx = ty::tls::ImplicitCtxt {
tcx,
query: Some(job.clone()),
+ layout_depth: icx.layout_depth,
};
// Use the ImplicitCtxt while we execute the query
Intrinsic(ref s) => Intrinsic(s.clone()),
OverflowingMath => OverflowingMath,
InvalidChar(c) => InvalidChar(c),
- ExecutionTimeLimitReached => ExecutionTimeLimitReached,
StackFrameLimitReached => StackFrameLimitReached,
OutOfTls => OutOfTls,
TlsOutOfBounds => TlsOutOfBounds,
}
}
- /// Returns the type of ty[i]
+ /// Returns the type of `ty[i]`.
pub fn builtin_index(&self) -> Option<Ty<'tcx>> {
match self.sty {
TyArray(ty, _) | TySlice(ty) => Some(ty),
use rustc::middle::allocator::AllocatorKind;
use rustc_errors;
use syntax::abi::Abi;
-use syntax::ast::{Crate, Attribute, LitKind, StrStyle};
-use syntax::ast::{Unsafety, Constness, Generics, Mutability, Ty, Mac, Arg};
-use syntax::ast::{self, Ident, Item, ItemKind, TyKind, VisibilityKind, Expr};
+use syntax::ast::{Attribute, Crate, LitKind, StrStyle};
+use syntax::ast::{Arg, Constness, Generics, Mac, Mutability, Ty, Unsafety};
+use syntax::ast::{self, Expr, Ident, Item, ItemKind, TyKind, VisibilityKind};
use syntax::attr;
use syntax::codemap::{dummy_spanned, respan};
-use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute};
+use syntax::codemap::{ExpnInfo, MacroAttribute, NameAndSpan};
use syntax::ext::base::ExtCtxt;
use syntax::ext::base::Resolver;
use syntax::ext::build::AstBuilder;
use {AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS};
-pub fn modify(sess: &ParseSess,
- resolver: &mut Resolver,
- krate: Crate,
- handler: &rustc_errors::Handler) -> ast::Crate {
+pub fn modify(
+ sess: &ParseSess,
+ resolver: &mut Resolver,
+ krate: Crate,
+ handler: &rustc_errors::Handler,
+) -> ast::Crate {
ExpandAllocatorDirectives {
handler,
sess,
let name = if attr::contains_name(&item.attrs, "global_allocator") {
"global_allocator"
} else {
- return fold::noop_fold_item(item, self)
+ return fold::noop_fold_item(item, self);
};
match item.node {
ItemKind::Static(..) => {}
_ => {
- self.handler.span_err(item.span, "allocators must be statics");
- return SmallVector::one(item)
+ self.handler
+ .span_err(item.span, "allocators must be statics");
+ return SmallVector::one(item);
}
}
if self.found {
- self.handler.span_err(item.span, "cannot define more than one \
- #[global_allocator]");
- return SmallVector::one(item)
+ self.handler.span_err(
+ item.span,
+ "cannot define more than one \
+ #[global_allocator]",
+ );
+ return SmallVector::one(item);
}
self.found = true;
span: None,
allow_internal_unstable: true,
allow_internal_unsafe: false,
- }
+ },
});
let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(mark));
let ecfg = ExpansionConfig::default(name.to_string());
core: Ident::from_str("core"),
cx: ExtCtxt::new(self.sess, ecfg, self.resolver),
};
- let super_path = f.cx.path(f.span, vec![
- Ident::from_str("super"),
- f.global,
- ]);
+ let super_path = f.cx.path(f.span, vec![Ident::from_str("super"), f.global]);
let mut items = vec![
f.cx.item_extern_crate(f.span, f.core),
f.cx.item_use_simple(
let mut ret = SmallVector::new();
ret.push(item);
ret.push(module);
- return ret
+ return ret;
}
fn fold_mac(&mut self, mac: Mac) -> Mac {
i += 1;
name
};
- let args = method.inputs.iter().map(|ty| {
- self.arg_ty(ty, &mut abi_args, mk)
- }).collect();
+ let args = method
+ .inputs
+ .iter()
+ .map(|ty| self.arg_ty(ty, &mut abi_args, mk))
+ .collect();
let result = self.call_allocator(method.name, args);
let (output_ty, output_expr) = self.ret_ty(&method.output, result);
- let kind = ItemKind::Fn(self.cx.fn_decl(abi_args, ast::FunctionRetTy::Ty(output_ty)),
- Unsafety::Unsafe,
- dummy_spanned(Constness::NotConst),
- Abi::Rust,
- Generics::default(),
- self.cx.block_expr(output_expr));
- self.cx.item(self.span,
- Ident::from_str(&self.kind.fn_name(method.name)),
- self.attrs(),
- kind)
+ let kind = ItemKind::Fn(
+ self.cx.fn_decl(abi_args, ast::FunctionRetTy::Ty(output_ty)),
+ Unsafety::Unsafe,
+ dummy_spanned(Constness::NotConst),
+ Abi::Rust,
+ Generics::default(),
+ self.cx.block_expr(output_expr),
+ );
+ self.cx.item(
+ self.span,
+ Ident::from_str(&self.kind.fn_name(method.name)),
+ self.attrs(),
+ kind,
+ )
}
fn call_allocator(&self, method: &str, mut args: Vec<P<Expr>>) -> P<Expr> {
- let method = self.cx.path(self.span, vec![
- self.core,
- Ident::from_str("alloc"),
- Ident::from_str("GlobalAlloc"),
- Ident::from_str(method),
- ]);
+ let method = self.cx.path(
+ self.span,
+ vec![
+ self.core,
+ Ident::from_str("alloc"),
+ Ident::from_str("GlobalAlloc"),
+ Ident::from_str(method),
+ ],
+ );
let method = self.cx.expr_path(method);
let allocator = self.cx.path_ident(self.span, self.global);
let allocator = self.cx.expr_path(allocator);
]
}
- fn arg_ty(&self,
- ty: &AllocatorTy,
- args: &mut Vec<Arg>,
- ident: &mut FnMut() -> Ident) -> P<Expr> {
+ fn arg_ty(
+ &self,
+ ty: &AllocatorTy,
+ args: &mut Vec<Arg>,
+ ident: &mut FnMut() -> Ident,
+ ) -> P<Expr> {
match *ty {
AllocatorTy::Layout => {
let usize = self.cx.path_ident(self.span, Ident::from_str("usize"));
args.push(self.cx.arg(self.span, size, ty_usize.clone()));
args.push(self.cx.arg(self.span, align, ty_usize));
- let layout_new = self.cx.path(self.span, vec![
- self.core,
- Ident::from_str("alloc"),
- Ident::from_str("Layout"),
- Ident::from_str("from_size_align_unchecked"),
- ]);
+ let layout_new = self.cx.path(
+ self.span,
+ vec![
+ self.core,
+ Ident::from_str("alloc"),
+ Ident::from_str("Layout"),
+ Ident::from_str("from_size_align_unchecked"),
+ ],
+ );
let layout_new = self.cx.expr_path(layout_new);
let size = self.cx.expr_ident(self.span, size);
let align = self.cx.expr_ident(self.span, align);
- let layout = self.cx.expr_call(self.span,
- layout_new,
- vec![size, align]);
+ let layout = self.cx.expr_call(self.span, layout_new, vec![size, align]);
layout
}
self.cx.expr_ident(self.span, ident)
}
- AllocatorTy::ResultPtr |
- AllocatorTy::Bang |
- AllocatorTy::Unit => {
+ AllocatorTy::ResultPtr | AllocatorTy::Bang | AllocatorTy::Unit => {
panic!("can't convert AllocatorTy to an argument")
}
}
(self.ptr_u8(), expr)
}
- AllocatorTy::Bang => {
- (self.cx.ty(self.span, TyKind::Never), expr)
- }
+ AllocatorTy::Bang => (self.cx.ty(self.span, TyKind::Never), expr),
- AllocatorTy::Unit => {
- (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr)
- }
+ AllocatorTy::Unit => (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr),
- AllocatorTy::Layout |
- AllocatorTy::Usize |
- AllocatorTy::Ptr => {
+ AllocatorTy::Layout | AllocatorTy::Usize | AllocatorTy::Ptr => {
panic!("can't convert AllocatorTy to an output")
}
}
}
fn ptr_opaque(&self) -> P<Ty> {
- let opaque = self.cx.path(self.span, vec![
- self.core,
- Ident::from_str("alloc"),
- Ident::from_str("Opaque"),
- ]);
+ let opaque = self.cx.path(
+ self.span,
+ vec![
+ self.core,
+ Ident::from_str("alloc"),
+ Ident::from_str("Opaque"),
+ ],
+ );
let ty_opaque = self.cx.ty_path(opaque);
self.cx.ty_ptr(self.span, ty_opaque, Mutability::Mutable)
}
impl<A: Array> Drop for Iter<A> {
fn drop(&mut self) {
- for _ in self {}
+ self.for_each(drop);
}
}
impl<'a, A: Array> Drop for Drain<'a, A> {
fn drop(&mut self) {
// exhaust self first
- while let Some(_) = self.next() {}
+ self.for_each(drop);
if self.tail_len > 0 {
unsafe {
use rustc::ich::Fingerprint;
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_mir as mir;
-use rustc::session::{Session, CompileResult, CrateDisambiguator};
+use rustc::session::{CompileResult, CrateDisambiguator, Session};
use rustc::session::CompileIncomplete;
use rustc::session::config::{self, Input, OutputFilenames, OutputType};
use rustc::session::search_paths::PathKind;
use rustc::lint;
-use rustc::middle::{self, stability, reachable, resolve_lifetime};
+use rustc::middle::{self, reachable, resolve_lifetime, stability};
use rustc::middle::cstore::CrateStore;
use rustc::middle::privacy::AccessLevels;
-use rustc::ty::{self, TyCtxt, Resolutions, AllArenas};
+use rustc::ty::{self, AllArenas, Resolutions, TyCtxt};
use rustc::traits;
-use rustc::util::common::{ErrorReported, time, install_panic_hook};
+use rustc::util::common::{install_panic_hook, time, ErrorReported};
use rustc_allocator as allocator;
use rustc_borrowck as borrowck;
use rustc_incremental;
use rustc_privacy;
use rustc_plugin::registry::Registry;
use rustc_plugin as plugin;
-use rustc_passes::{self, ast_validation, loops, rvalue_promotion, hir_stats};
+use rustc_passes::{self, ast_validation, hir_stats, loops, rvalue_promotion};
use super::Compilation;
use serialize::json;
use std::any::Any;
use std::env;
-use std::ffi::{OsString, OsStr};
+use std::ffi::{OsStr, OsString};
use std::fs;
use std::io::{self, Write};
use std::iter;
use profile;
-pub fn compile_input(trans: Box<TransCrate>,
- sess: &Session,
- cstore: &CStore,
- input_path: &Option<PathBuf>,
- input: &Input,
- outdir: &Option<PathBuf>,
- output: &Option<PathBuf>,
- addl_plugins: Option<Vec<String>>,
- control: &CompileController) -> CompileResult {
+pub fn compile_input(
+ trans: Box<TransCrate>,
+ sess: &Session,
+ cstore: &CStore,
+ input_path: &Option<PathBuf>,
+ input: &Input,
+ outdir: &Option<PathBuf>,
+ output: &Option<PathBuf>,
+ addl_plugins: Option<Vec<String>>,
+ control: &CompileController,
+) -> CompileResult {
macro_rules! controller_entry_point {
($point: ident, $tsess: expr, $make_state: expr, $phase_result: expr) => {{
let state = &mut $make_state;
};
let (krate, registry) = {
- let mut compile_state = CompileState::state_after_parse(input,
- sess,
- outdir,
- output,
- krate,
- &cstore);
- controller_entry_point!(after_parse,
- sess,
- compile_state,
- Ok(()));
+ let mut compile_state =
+ CompileState::state_after_parse(input, sess, outdir, output, krate, &cstore);
+ controller_entry_point!(after_parse, sess, compile_state, Ok(()));
(compile_state.krate.unwrap(), compile_state.registry)
};
::rustc_trans_utils::link::find_crate_name(Some(sess), &krate.attrs, input);
install_panic_hook();
- let ExpansionResult { expanded_crate, defs, analysis, resolutions, mut hir_forest } = {
+ let ExpansionResult {
+ expanded_crate,
+ defs,
+ analysis,
+ resolutions,
+ mut hir_forest,
+ } = {
phase_2_configure_and_expand(
sess,
&cstore,
control.make_glob_map,
|expanded_crate| {
let mut state = CompileState::state_after_expand(
- input, sess, outdir, output, &cstore, expanded_crate, &crate_name,
+ input,
+ sess,
+ outdir,
+ output,
+ &cstore,
+ expanded_crate,
+ &crate_name,
);
controller_entry_point!(after_expand, sess, state, Ok(()));
Ok(())
- }
+ },
)?
};
if output_contains_path(&output_paths, input_path) {
sess.err(&format!(
"the input file \"{}\" would be overwritten by the generated \
- executable",
- input_path.display()));
+ executable",
+ input_path.display()
+ ));
return Err(CompileIncomplete::Stopped);
}
if let Some(dir_path) = output_conflicts_with_dir(&output_paths) {
sess.err(&format!(
"the generated executable for the input file \"{}\" conflicts with the \
- existing directory \"{}\"",
- input_path.display(), dir_path.display()));
+ existing directory \"{}\"",
+ input_path.display(),
+ dir_path.display()
+ ));
return Err(CompileIncomplete::Stopped);
}
}
}
write_out_deps(sess, &outputs, &output_paths);
- if sess.opts.output_types.contains_key(&OutputType::DepInfo) &&
- sess.opts.output_types.keys().count() == 1 {
- return Ok(())
+ if sess.opts.output_types.contains_key(&OutputType::DepInfo)
+ && sess.opts.output_types.len() == 1
+ {
+ return Ok(());
}
if let &Some(ref dir) = outdir {
let arenas = AllArenas::new();
// Construct the HIR map
- let hir_map = time(sess,
- "indexing hir",
- || hir_map::map_crate(sess, cstore, &mut hir_forest, &defs));
+ let hir_map = time(sess, "indexing hir", || {
+ hir_map::map_crate(sess, cstore, &mut hir_forest, &defs)
+ });
{
hir_map.dep_graph.assert_ignored();
- controller_entry_point!(after_hir_lowering,
- sess,
- CompileState::state_after_hir_lowering(input,
- sess,
- outdir,
- output,
- &arenas,
- &cstore,
- &hir_map,
- &analysis,
- &resolutions,
- &expanded_crate,
- &hir_map.krate(),
- &outputs,
- &crate_name),
- Ok(()));
+ controller_entry_point!(
+ after_hir_lowering,
+ sess,
+ CompileState::state_after_hir_lowering(
+ input,
+ sess,
+ outdir,
+ output,
+ &arenas,
+ &cstore,
+ &hir_map,
+ &analysis,
+ &resolutions,
+ &expanded_crate,
+ &hir_map.krate(),
+ &outputs,
+ &crate_name
+ ),
+ Ok(())
+ );
}
let opt_crate = if control.keep_ast {
None
};
- phase_3_run_analysis_passes(&*trans,
- control,
- sess,
- cstore,
- hir_map,
- analysis,
- resolutions,
- &arenas,
- &crate_name,
- &outputs,
- |tcx, analysis, rx, result| {
- {
- // Eventually, we will want to track plugins.
- tcx.dep_graph.with_ignore(|| {
- let mut state = CompileState::state_after_analysis(input,
- sess,
- outdir,
- output,
- opt_crate,
- tcx.hir.krate(),
- &analysis,
- tcx,
- &crate_name);
- (control.after_analysis.callback)(&mut state);
- });
-
- if control.after_analysis.stop == Compilation::Stop {
- return result.and_then(|_| Err(CompileIncomplete::Stopped));
+ phase_3_run_analysis_passes(
+ &*trans,
+ control,
+ sess,
+ cstore,
+ hir_map,
+ analysis,
+ resolutions,
+ &arenas,
+ &crate_name,
+ &outputs,
+ |tcx, analysis, rx, result| {
+ {
+ // Eventually, we will want to track plugins.
+ tcx.dep_graph.with_ignore(|| {
+ let mut state = CompileState::state_after_analysis(
+ input,
+ sess,
+ outdir,
+ output,
+ opt_crate,
+ tcx.hir.krate(),
+ &analysis,
+ tcx,
+ &crate_name,
+ );
+ (control.after_analysis.callback)(&mut state);
+ });
+
+ if control.after_analysis.stop == Compilation::Stop {
+ return result.and_then(|_| Err(CompileIncomplete::Stopped));
+ }
}
- }
- result?;
+ result?;
- if log_enabled!(::log::Level::Info) {
- println!("Pre-trans");
- tcx.print_debug_stats();
- }
+ if log_enabled!(::log::Level::Info) {
+ println!("Pre-trans");
+ tcx.print_debug_stats();
+ }
- let ongoing_trans = phase_4_translate_to_llvm(&*trans, tcx, rx);
+ let ongoing_trans = phase_4_translate_to_llvm(&*trans, tcx, rx);
- if log_enabled!(::log::Level::Info) {
- println!("Post-trans");
- tcx.print_debug_stats();
- }
+ if log_enabled!(::log::Level::Info) {
+ println!("Post-trans");
+ tcx.print_debug_stats();
+ }
- if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
- if let Err(e) = mir::transform::dump_mir::emit_mir(tcx, &outputs) {
- sess.err(&format!("could not emit MIR: {}", e));
- sess.abort_if_errors();
+ if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
+ if let Err(e) = mir::transform::dump_mir::emit_mir(tcx, &outputs) {
+ sess.err(&format!("could not emit MIR: {}", e));
+ sess.abort_if_errors();
+ }
}
- }
- Ok((outputs.clone(), ongoing_trans, tcx.dep_graph.clone()))
- })??
+ Ok((outputs.clone(), ongoing_trans, tcx.dep_graph.clone()))
+ },
+ )??
};
if sess.opts.debugging_opts.print_type_sizes {
}
impl<'a, 'tcx> CompileState<'a, 'tcx> {
- fn empty(input: &'a Input,
- session: &'tcx Session,
- out_dir: &'a Option<PathBuf>)
- -> Self {
+ fn empty(input: &'a Input, session: &'tcx Session, out_dir: &'a Option<PathBuf>) -> Self {
CompileState {
input,
session,
}
}
- fn state_after_parse(input: &'a Input,
- session: &'tcx Session,
- out_dir: &'a Option<PathBuf>,
- out_file: &'a Option<PathBuf>,
- krate: ast::Crate,
- cstore: &'tcx CStore)
- -> Self {
+ fn state_after_parse(
+ input: &'a Input,
+ session: &'tcx Session,
+ out_dir: &'a Option<PathBuf>,
+ out_file: &'a Option<PathBuf>,
+ krate: ast::Crate,
+ cstore: &'tcx CStore,
+ ) -> Self {
CompileState {
// Initialize the registry before moving `krate`
registry: Some(Registry::new(&session, krate.span)),
}
}
- fn state_after_expand(input: &'a Input,
- session: &'tcx Session,
- out_dir: &'a Option<PathBuf>,
- out_file: &'a Option<PathBuf>,
- cstore: &'tcx CStore,
- expanded_crate: &'a ast::Crate,
- crate_name: &'a str)
- -> Self {
+ fn state_after_expand(
+ input: &'a Input,
+ session: &'tcx Session,
+ out_dir: &'a Option<PathBuf>,
+ out_file: &'a Option<PathBuf>,
+ cstore: &'tcx CStore,
+ expanded_crate: &'a ast::Crate,
+ crate_name: &'a str,
+ ) -> Self {
CompileState {
crate_name: Some(crate_name),
cstore: Some(cstore),
}
}
- fn state_after_hir_lowering(input: &'a Input,
- session: &'tcx Session,
- out_dir: &'a Option<PathBuf>,
- out_file: &'a Option<PathBuf>,
- arenas: &'tcx AllArenas<'tcx>,
- cstore: &'tcx CStore,
- hir_map: &'a hir_map::Map<'tcx>,
- analysis: &'a ty::CrateAnalysis,
- resolutions: &'a Resolutions,
- krate: &'a ast::Crate,
- hir_crate: &'a hir::Crate,
- output_filenames: &'a OutputFilenames,
- crate_name: &'a str)
- -> Self {
+ fn state_after_hir_lowering(
+ input: &'a Input,
+ session: &'tcx Session,
+ out_dir: &'a Option<PathBuf>,
+ out_file: &'a Option<PathBuf>,
+ arenas: &'tcx AllArenas<'tcx>,
+ cstore: &'tcx CStore,
+ hir_map: &'a hir_map::Map<'tcx>,
+ analysis: &'a ty::CrateAnalysis,
+ resolutions: &'a Resolutions,
+ krate: &'a ast::Crate,
+ hir_crate: &'a hir::Crate,
+ output_filenames: &'a OutputFilenames,
+ crate_name: &'a str,
+ ) -> Self {
CompileState {
crate_name: Some(crate_name),
arenas: Some(arenas),
}
}
- fn state_after_analysis(input: &'a Input,
- session: &'tcx Session,
- out_dir: &'a Option<PathBuf>,
- out_file: &'a Option<PathBuf>,
- krate: Option<&'a ast::Crate>,
- hir_crate: &'a hir::Crate,
- analysis: &'a ty::CrateAnalysis,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- crate_name: &'a str)
- -> Self {
+ fn state_after_analysis(
+ input: &'a Input,
+ session: &'tcx Session,
+ out_dir: &'a Option<PathBuf>,
+ out_file: &'a Option<PathBuf>,
+ krate: Option<&'a ast::Crate>,
+ hir_crate: &'a hir::Crate,
+ analysis: &'a ty::CrateAnalysis,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ crate_name: &'a str,
+ ) -> Self {
CompileState {
analysis: Some(analysis),
tcx: Some(tcx),
}
}
- fn state_when_compilation_done(input: &'a Input,
- session: &'tcx Session,
- out_dir: &'a Option<PathBuf>,
- out_file: &'a Option<PathBuf>)
- -> Self {
+ fn state_when_compilation_done(
+ input: &'a Input,
+ session: &'tcx Session,
+ out_dir: &'a Option<PathBuf>,
+ out_file: &'a Option<PathBuf>,
+ ) -> Self {
CompileState {
out_file: out_file.as_ref().map(|s| &**s),
..CompileState::empty(input, session, out_dir)
}
}
-pub fn phase_1_parse_input<'a>(control: &CompileController,
- sess: &'a Session,
- input: &Input)
- -> PResult<'a, ast::Crate> {
- sess.diagnostic().set_continue_after_error(control.continue_parse_after_error);
+pub fn phase_1_parse_input<'a>(
+ control: &CompileController,
+ sess: &'a Session,
+ input: &Input,
+) -> PResult<'a, ast::Crate> {
+ sess.diagnostic()
+ .set_continue_after_error(control.continue_parse_after_error);
if sess.profile_queries() {
profile::begin(sess);
}
- let krate = time(sess, "parsing", || {
- match *input {
- Input::File(ref file) => {
- parse::parse_crate_from_file(file, &sess.parse_sess)
- }
- Input::Str { ref input, ref name } => {
- parse::parse_crate_from_source_str(name.clone(),
- input.clone(),
- &sess.parse_sess)
- }
- }
+ let krate = time(sess, "parsing", || match *input {
+ Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
+ Input::Str {
+ ref input,
+ ref name,
+ } => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
})?;
sess.diagnostic().set_continue_after_error(true);
}
if sess.opts.debugging_opts.input_stats {
- println!("Lines of code: {}", sess.codemap().count_lines());
+ println!(
+ "Lines of code: {}",
+ sess.codemap().count_lines()
+ );
println!("Pre-expansion node count: {}", count_nodes(&krate));
}
/// standard library and prelude, and name resolution.
///
/// Returns `None` if we're aborting after handling -W help.
-pub fn phase_2_configure_and_expand<F>(sess: &Session,
- cstore: &CStore,
- krate: ast::Crate,
- registry: Option<Registry>,
- crate_name: &str,
- addl_plugins: Option<Vec<String>>,
- make_glob_map: MakeGlobMap,
- after_expand: F)
- -> Result<ExpansionResult, CompileIncomplete>
- where F: FnOnce(&ast::Crate) -> CompileResult {
+pub fn phase_2_configure_and_expand<F>(
+ sess: &Session,
+ cstore: &CStore,
+ krate: ast::Crate,
+ registry: Option<Registry>,
+ crate_name: &str,
+ addl_plugins: Option<Vec<String>>,
+ make_glob_map: MakeGlobMap,
+ after_expand: F,
+) -> Result<ExpansionResult, CompileIncomplete>
+where
+ F: FnOnce(&ast::Crate) -> CompileResult,
+{
// Currently, we ignore the name resolution data structures for the purposes of dependency
// tracking. Instead we will run name resolution and include its output in the hash of each
// item, much like we do for macro expansion. In other words, the hash reflects not just
// this back at some point.
let mut crate_loader = CrateLoader::new(sess, &cstore, &crate_name);
let resolver_arenas = Resolver::arenas();
- let result = phase_2_configure_and_expand_inner(sess, cstore, krate, registry, crate_name,
- addl_plugins, make_glob_map, &resolver_arenas,
- &mut crate_loader, after_expand);
+ let result = phase_2_configure_and_expand_inner(
+ sess,
+ cstore,
+ krate,
+ registry,
+ crate_name,
+ addl_plugins,
+ make_glob_map,
+ &resolver_arenas,
+ &mut crate_loader,
+ after_expand,
+ );
match result {
- Ok(InnerExpansionResult {expanded_crate, resolver, hir_forest}) => {
- Ok(ExpansionResult {
- expanded_crate,
- defs: resolver.definitions,
- hir_forest,
- resolutions: Resolutions {
- freevars: resolver.freevars,
- export_map: resolver.export_map,
- trait_map: resolver.trait_map,
- maybe_unused_trait_imports: resolver.maybe_unused_trait_imports,
- maybe_unused_extern_crates: resolver.maybe_unused_extern_crates,
- },
-
- analysis: ty::CrateAnalysis {
- access_levels: Lrc::new(AccessLevels::default()),
- name: crate_name.to_string(),
- glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None },
+ Ok(InnerExpansionResult {
+ expanded_crate,
+ resolver,
+ hir_forest,
+ }) => Ok(ExpansionResult {
+ expanded_crate,
+ defs: resolver.definitions,
+ hir_forest,
+ resolutions: Resolutions {
+ freevars: resolver.freevars,
+ export_map: resolver.export_map,
+ trait_map: resolver.trait_map,
+ maybe_unused_trait_imports: resolver.maybe_unused_trait_imports,
+ maybe_unused_extern_crates: resolver.maybe_unused_extern_crates,
+ },
+
+ analysis: ty::CrateAnalysis {
+ access_levels: Lrc::new(AccessLevels::default()),
+ name: crate_name.to_string(),
+ glob_map: if resolver.make_glob_map {
+ Some(resolver.glob_map)
+ } else {
+ None
},
- })
- }
- Err(x) => Err(x)
+ },
+ }),
+ Err(x) => Err(x),
}
}
/// Same as phase_2_configure_and_expand, but doesn't let you keep the resolver
/// around
-pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
- cstore: &'a CStore,
- krate: ast::Crate,
- registry: Option<Registry>,
- crate_name: &str,
- addl_plugins: Option<Vec<String>>,
- make_glob_map: MakeGlobMap,
- resolver_arenas: &'a ResolverArenas<'a>,
- crate_loader: &'a mut CrateLoader,
- after_expand: F)
- -> Result<InnerExpansionResult<'a>, CompileIncomplete>
- where F: FnOnce(&ast::Crate) -> CompileResult,
+pub fn phase_2_configure_and_expand_inner<'a, F>(
+ sess: &'a Session,
+ cstore: &'a CStore,
+ krate: ast::Crate,
+ registry: Option<Registry>,
+ crate_name: &str,
+ addl_plugins: Option<Vec<String>>,
+ make_glob_map: MakeGlobMap,
+ resolver_arenas: &'a ResolverArenas<'a>,
+ crate_loader: &'a mut CrateLoader,
+ after_expand: F,
+) -> Result<InnerExpansionResult<'a>, CompileIncomplete>
+where
+ F: FnOnce(&ast::Crate) -> CompileResult,
{
- let (mut krate, features) = syntax::config::features(krate, &sess.parse_sess,
- sess.opts.test,
- sess.opts.debugging_opts.edition);
+ let (mut krate, features) = syntax::config::features(
+ krate,
+ &sess.parse_sess,
+ sess.opts.test,
+ sess.opts.debugging_opts.edition,
+ );
// these need to be set "early" so that expansion sees `quote` if enabled.
sess.init_features(features);
let disambiguator = compute_crate_disambiguator(sess);
sess.crate_disambiguator.set(disambiguator);
- rustc_incremental::prepare_session_directory(
- sess,
- &crate_name,
- disambiguator,
- );
+ rustc_incremental::prepare_session_directory(sess, &crate_name, disambiguator);
if sess.opts.incremental.is_some() {
time(sess, "garbage collect incremental cache directory", || {
if let Err(e) = rustc_incremental::garbage_collect_session_directories(sess) {
- warn!("Error while trying to garbage collect incremental \
- compilation cache directory: {}", e);
+ warn!(
+ "Error while trying to garbage collect incremental \
+ compilation cache directory: {}",
+ e
+ );
}
});
}
let mut addl_plugins = Some(addl_plugins);
let registrars = time(sess, "plugin loading", || {
- plugin::load::load_plugins(sess,
- &cstore,
- &krate,
- crate_name,
- addl_plugins.take().unwrap())
+ plugin::load::load_plugins(
+ sess,
+ &cstore,
+ &krate,
+ crate_name,
+ addl_plugins.take().unwrap(),
+ )
});
let mut registry = registry.unwrap_or(Registry::new(sess, krate.span));
time(sess, "plugin registration", || {
if sess.features_untracked().rustc_diagnostic_macros {
- registry.register_macro("__diagnostic_used",
- diagnostics::plugin::expand_diagnostic_used);
- registry.register_macro("__register_diagnostic",
- diagnostics::plugin::expand_register_diagnostic);
- registry.register_macro("__build_diagnostic_array",
- diagnostics::plugin::expand_build_diagnostic_array);
+ registry.register_macro(
+ "__diagnostic_used",
+ diagnostics::plugin::expand_diagnostic_used,
+ );
+ registry.register_macro(
+ "__register_diagnostic",
+ diagnostics::plugin::expand_register_diagnostic,
+ );
+ registry.register_macro(
+ "__build_diagnostic_array",
+ diagnostics::plugin::expand_build_diagnostic_array,
+ );
}
for registrar in registrars {
});
let whitelisted_legacy_custom_derives = registry.take_whitelisted_custom_derives();
- let Registry { syntax_exts, early_lint_passes, late_lint_passes, lint_groups,
- llvm_passes, attributes, .. } = registry;
+ let Registry {
+ syntax_exts,
+ early_lint_passes,
+ late_lint_passes,
+ lint_groups,
+ llvm_passes,
+ attributes,
+ ..
+ } = registry;
sess.track_errors(|| {
let mut ls = sess.lint_store.borrow_mut();
return Err(CompileIncomplete::Stopped);
}
- let mut resolver = Resolver::new(sess,
- cstore,
- &krate,
- crate_name,
- make_glob_map,
- crate_loader,
- &resolver_arenas);
+ let mut resolver = Resolver::new(
+ sess,
+ cstore,
+ &krate,
+ crate_name,
+ make_glob_map,
+ crate_loader,
+ &resolver_arenas,
+ );
resolver.whitelisted_legacy_custom_derives = whitelisted_legacy_custom_derives;
syntax_ext::register_builtins(&mut resolver, syntax_exts, sess.features_untracked().quote);
+ // Expand all macros
krate = time(sess, "expansion", || {
// Windows dlls do not have rpaths, so they don't know how to find their
// dependencies. It's up to us to tell the system where to find all the
let mut old_path = OsString::new();
if cfg!(windows) {
old_path = env::var_os("PATH").unwrap_or(old_path);
- let mut new_path = sess.host_filesearch(PathKind::All)
- .get_dylib_search_paths();
+ let mut new_path = sess.host_filesearch(PathKind::All).get_dylib_search_paths();
for path in env::split_paths(&old_path) {
if !new_path.contains(&path) {
new_path.push(path);
}
}
- env::set_var("PATH",
- &env::join_paths(new_path.iter()
- .filter(|p| env::join_paths(iter::once(p)).is_ok()))
- .unwrap());
+ env::set_var(
+ "PATH",
+ &env::join_paths(
+ new_path
+ .iter()
+ .filter(|p| env::join_paths(iter::once(p)).is_ok()),
+ ).unwrap(),
+ );
}
+
+ // Create the config for macro expansion
let features = sess.features_untracked();
let cfg = syntax::ext::expand::ExpansionConfig {
features: Some(&features),
let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver);
let err_count = ecx.parse_sess.span_diagnostic.err_count();
- let krate = ecx.monotonic_expander().expand_crate(krate);
+ // Expand macros now!
+ let krate = time(sess, "expand crate", || {
+ ecx.monotonic_expander().expand_crate(krate)
+ });
- ecx.check_unused_macros();
+ // The rest is error reporting
- let mut missing_fragment_specifiers: Vec<_> =
- ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
+ time(sess, "check unused macros", || {
+ ecx.check_unused_macros();
+ });
+
+ let mut missing_fragment_specifiers: Vec<_> = ecx.parse_sess
+ .missing_fragment_specifiers
+ .borrow()
+ .iter()
+ .cloned()
+ .collect();
missing_fragment_specifiers.sort();
for span in missing_fragment_specifiers {
let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
});
krate = time(sess, "maybe building test harness", || {
- syntax::test::modify_for_testing(&sess.parse_sess,
- &mut resolver,
- sess.opts.test,
- krate,
- sess.diagnostic(),
- &sess.features_untracked())
+ syntax::test::modify_for_testing(
+ &sess.parse_sess,
+ &mut resolver,
+ sess.opts.test,
+ krate,
+ sess.diagnostic(),
+ &sess.features_untracked(),
+ )
});
// If we're actually rustdoc then there's no need to actually compile
let num_crate_types = crate_types.len();
let is_proc_macro_crate = crate_types.contains(&config::CrateTypeProcMacro);
let is_test_crate = sess.opts.test;
- syntax_ext::proc_macro_registrar::modify(&sess.parse_sess,
- &mut resolver,
- krate,
- is_proc_macro_crate,
- is_test_crate,
- num_crate_types,
- sess.diagnostic())
+ syntax_ext::proc_macro_registrar::modify(
+ &sess.parse_sess,
+ &mut resolver,
+ krate,
+ is_proc_macro_crate,
+ is_test_crate,
+ num_crate_types,
+ sess.diagnostic(),
+ )
});
}
krate = time(sess, "creating allocators", || {
- allocator::expand::modify(&sess.parse_sess,
- &mut resolver,
- krate,
- sess.diagnostic())
+ allocator::expand::modify(&sess.parse_sess, &mut resolver, krate, sess.diagnostic())
});
after_expand(&krate)?;
println!("{}", json::as_json(&krate));
}
- time(sess,
- "AST validation",
- || ast_validation::check_crate(sess, &krate));
+ time(sess, "AST validation", || {
+ ast_validation::check_crate(sess, &krate)
+ });
time(sess, "name resolution", || -> CompileResult {
resolver.resolve_crate(&krate);
// Needs to go *after* expansion to be able to check the results of macro expansion.
time(sess, "complete gated feature checking", || {
sess.track_errors(|| {
- syntax::feature_gate::check_crate(&krate,
- &sess.parse_sess,
- &sess.features_untracked(),
- &attributes,
- sess.opts.unstable_features);
+ syntax::feature_gate::check_crate(
+ &krate,
+ &sess.parse_sess,
+ &sess.features_untracked(),
+ &attributes,
+ sess.opts.unstable_features,
+ );
})
})?;
None => DepGraph::new_disabled(),
Some(future) => {
let prev_graph = time(sess, "blocked while dep-graph loading finishes", || {
- future.open()
- .unwrap_or_else(|e| rustc_incremental::LoadResult::Error {
- message: format!("could not decode incremental cache: {:?}", e)
- })
- .open(sess)
+ future
+ .open()
+ .unwrap_or_else(|e| rustc_incremental::LoadResult::Error {
+ message: format!("could not decode incremental cache: {:?}", e),
+ })
+ .open(sess)
});
DepGraph::new(prev_graph)
}
hir_map::Forest::new(hir_crate, &dep_graph)
});
- time(sess,
- "early lint checks",
- || lint::check_ast_crate(sess, &krate));
+ time(sess, "early lint checks", || {
+ lint::check_ast_crate(sess, &krate)
+ });
// Discard hygiene data, which isn't required after lowering to HIR.
if !sess.opts.debugging_opts.keep_hygiene_data {
/// Run the resolution, typechecking, region checking and other
/// miscellaneous analysis passes on the crate. Return various
/// structures carrying the results of the analysis.
-pub fn phase_3_run_analysis_passes<'tcx, F, R>(trans: &TransCrate,
- control: &CompileController,
- sess: &'tcx Session,
- cstore: &'tcx CrateStore,
- hir_map: hir_map::Map<'tcx>,
- mut analysis: ty::CrateAnalysis,
- resolutions: Resolutions,
- arenas: &'tcx AllArenas<'tcx>,
- name: &str,
- output_filenames: &OutputFilenames,
- f: F)
- -> Result<R, CompileIncomplete>
- where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
- ty::CrateAnalysis,
- mpsc::Receiver<Box<Any + Send>>,
- CompileResult) -> R
+pub fn phase_3_run_analysis_passes<'tcx, F, R>(
+ trans: &TransCrate,
+ control: &CompileController,
+ sess: &'tcx Session,
+ cstore: &'tcx CrateStore,
+ hir_map: hir_map::Map<'tcx>,
+ mut analysis: ty::CrateAnalysis,
+ resolutions: Resolutions,
+ arenas: &'tcx AllArenas<'tcx>,
+ name: &str,
+ output_filenames: &OutputFilenames,
+ f: F,
+) -> Result<R, CompileIncomplete>
+where
+ F: for<'a> FnOnce(
+ TyCtxt<'a, 'tcx, 'tcx>,
+ ty::CrateAnalysis,
+ mpsc::Receiver<Box<Any + Send>>,
+ CompileResult,
+ ) -> R,
{
- let query_result_on_disk_cache = time(sess,
- "load query result cache",
- || rustc_incremental::load_query_result_cache(sess));
+ let query_result_on_disk_cache = time(sess, "load query result cache", || {
+ rustc_incremental::load_query_result_cache(sess)
+ });
- time(sess,
- "looking for entry point",
- || middle::entry::find_entry_point(sess, &hir_map, name));
+ time(sess, "looking for entry point", || {
+ middle::entry::find_entry_point(sess, &hir_map, name)
+ });
- sess.plugin_registrar_fn.set(time(sess, "looking for plugin registrar", || {
- plugin::build::find_plugin_registrar(sess.diagnostic(), &hir_map)
- }));
- sess.derive_registrar_fn.set(derive_registrar::find(&hir_map));
+ sess.plugin_registrar_fn
+ .set(time(sess, "looking for plugin registrar", || {
+ plugin::build::find_plugin_registrar(sess.diagnostic(), &hir_map)
+ }));
+ sess.derive_registrar_fn
+ .set(derive_registrar::find(&hir_map));
- time(sess,
- "loop checking",
- || loops::check_crate(sess, &hir_map));
+ time(sess, "loop checking", || loops::check_crate(sess, &hir_map));
let mut local_providers = ty::maps::Providers::default();
default_provide(&mut local_providers);
let (tx, rx) = mpsc::channel();
- TyCtxt::create_and_enter(sess,
- cstore,
- local_providers,
- extern_providers,
- arenas,
- resolutions,
- hir_map,
- query_result_on_disk_cache,
- name,
- tx,
- output_filenames,
- |tcx| {
- // Do some initialization of the DepGraph that can only be done with the
- // tcx available.
- rustc_incremental::dep_graph_tcx_init(tcx);
-
- time(sess, "attribute checking", || {
- hir::check_attr::check_crate(tcx)
- });
+ TyCtxt::create_and_enter(
+ sess,
+ cstore,
+ local_providers,
+ extern_providers,
+ arenas,
+ resolutions,
+ hir_map,
+ query_result_on_disk_cache,
+ name,
+ tx,
+ output_filenames,
+ |tcx| {
+ // Do some initialization of the DepGraph that can only be done with the
+ // tcx available.
+ rustc_incremental::dep_graph_tcx_init(tcx);
+
+ time(sess, "attribute checking", || {
+ hir::check_attr::check_crate(tcx)
+ });
- time(sess,
- "stability checking",
- || stability::check_unstable_api_usage(tcx));
+ time(sess, "stability checking", || {
+ stability::check_unstable_api_usage(tcx)
+ });
- // passes are timed inside typeck
- match typeck::check_crate(tcx) {
- Ok(x) => x,
- Err(x) => {
- f(tcx, analysis, rx, Err(x));
- return Err(x);
+ // passes are timed inside typeck
+ match typeck::check_crate(tcx) {
+ Ok(x) => x,
+ Err(x) => {
+ f(tcx, analysis, rx, Err(x));
+ return Err(x);
+ }
}
- }
- time(sess,
- "rvalue promotion",
- || rvalue_promotion::check_crate(tcx));
+ time(sess, "rvalue promotion", || {
+ rvalue_promotion::check_crate(tcx)
+ });
- analysis.access_levels =
- time(sess, "privacy checking", || rustc_privacy::check_crate(tcx));
+ analysis.access_levels =
+ time(sess, "privacy checking", || rustc_privacy::check_crate(tcx));
- time(sess,
- "intrinsic checking",
- || middle::intrinsicck::check_crate(tcx));
+ time(sess, "intrinsic checking", || {
+ middle::intrinsicck::check_crate(tcx)
+ });
- time(sess,
- "match checking",
- || mir::matchck_crate(tcx));
+ time(sess, "match checking", || mir::matchck_crate(tcx));
- // this must run before MIR dump, because
- // "not all control paths return a value" is reported here.
- //
- // maybe move the check to a MIR pass?
- time(sess,
- "liveness checking",
- || middle::liveness::check_crate(tcx));
-
- time(sess,
- "borrow checking",
- || borrowck::check_crate(tcx));
-
- time(sess,
- "MIR borrow checking",
- || for def_id in tcx.body_owners() { tcx.mir_borrowck(def_id); });
-
- time(sess,
- "MIR effect checking",
- || for def_id in tcx.body_owners() {
- mir::transform::check_unsafety::check_unsafety(tcx, def_id)
- });
- // Avoid overwhelming user with errors if type checking failed.
- // I'm not sure how helpful this is, to be honest, but it avoids
- // a
- // lot of annoying errors in the compile-fail tests (basically,
- // lint warnings and so on -- kindck used to do this abort, but
- // kindck is gone now). -nmatsakis
- if sess.err_count() > 0 {
- return Ok(f(tcx, analysis, rx, sess.compile_status()));
- }
+ // this must run before MIR dump, because
+ // "not all control paths return a value" is reported here.
+ //
+ // maybe move the check to a MIR pass?
+ time(sess, "liveness checking", || {
+ middle::liveness::check_crate(tcx)
+ });
- time(sess, "death checking", || middle::dead::check_crate(tcx));
+ time(sess, "borrow checking", || borrowck::check_crate(tcx));
- time(sess, "unused lib feature checking", || {
- stability::check_unused_or_stable_features(tcx)
- });
+ time(sess, "MIR borrow checking", || {
+ for def_id in tcx.body_owners() {
+ tcx.mir_borrowck(def_id);
+ }
+ });
- time(sess, "lint checking", || lint::check_crate(tcx));
+ time(sess, "MIR effect checking", || {
+ for def_id in tcx.body_owners() {
+ mir::transform::check_unsafety::check_unsafety(tcx, def_id)
+ }
+ });
+ // Avoid overwhelming user with errors if type checking failed.
+ // I'm not sure how helpful this is, to be honest, but it avoids
+ // a
+ // lot of annoying errors in the compile-fail tests (basically,
+ // lint warnings and so on -- kindck used to do this abort, but
+ // kindck is gone now). -nmatsakis
+ if sess.err_count() > 0 {
+ return Ok(f(tcx, analysis, rx, sess.compile_status()));
+ }
- time(sess,
- "dumping chalk-like clauses",
- || rustc_traits::lowering::dump_program_clauses(tcx));
+ time(sess, "death checking", || middle::dead::check_crate(tcx));
- return Ok(f(tcx, analysis, rx, tcx.sess.compile_status()));
- })
+ time(sess, "unused lib feature checking", || {
+ stability::check_unused_or_stable_features(tcx)
+ });
+
+ time(sess, "lint checking", || lint::check_crate(tcx));
+
+ time(sess, "dumping chalk-like clauses", || {
+ rustc_traits::lowering::dump_program_clauses(tcx)
+ });
+
+ return Ok(f(tcx, analysis, rx, tcx.sess.compile_status()));
+ },
+ )
}
/// Run the translation phase to LLVM, after which the AST and analysis can
/// be discarded.
-pub fn phase_4_translate_to_llvm<'a, 'tcx>(trans: &TransCrate,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- rx: mpsc::Receiver<Box<Any + Send>>)
- -> Box<Any> {
- time(tcx.sess,
- "resolving dependency formats",
- || ::rustc::middle::dependency_format::calculate(tcx));
-
- let translation =
- time(tcx.sess, "translation", move || {
- trans.trans_crate(tcx, rx)
- });
+pub fn phase_4_translate_to_llvm<'a, 'tcx>(
+ trans: &TransCrate,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ rx: mpsc::Receiver<Box<Any + Send>>,
+) -> Box<Any> {
+ time(tcx.sess, "resolving dependency formats", || {
+ ::rustc::middle::dependency_format::calculate(tcx)
+ });
+
+ let translation = time(tcx.sess, "translation", move || trans.trans_crate(tcx, rx));
if tcx.sess.profile_queries() {
profile::dump(&tcx.sess, "profile_queries".to_string())
}
}
// Returns all the paths that correspond to generated files.
-fn generated_output_paths(sess: &Session,
- outputs: &OutputFilenames,
- exact_name: bool,
- crate_name: &str) -> Vec<PathBuf> {
+fn generated_output_paths(
+ sess: &Session,
+ outputs: &OutputFilenames,
+ exact_name: bool,
+ crate_name: &str,
+) -> Vec<PathBuf> {
let mut out_filenames = Vec::new();
for output_type in sess.opts.output_types.keys() {
let file = outputs.path(*output_type);
match *output_type {
// If the filename has been overridden using `-o`, it will not be modified
// by appending `.rlib`, `.exe`, etc., so we can skip this transformation.
- OutputType::Exe if !exact_name => {
- for crate_type in sess.crate_types.borrow().iter() {
- let p = ::rustc_trans_utils::link::filename_for_input(
- sess,
- *crate_type,
- crate_name,
- outputs
- );
- out_filenames.push(p);
- }
- }
+ OutputType::Exe if !exact_name => for crate_type in sess.crate_types.borrow().iter() {
+ let p = ::rustc_trans_utils::link::filename_for_input(
+ sess,
+ *crate_type,
+ crate_name,
+ outputs,
+ );
+ out_filenames.push(p);
+ },
OutputType::DepInfo if sess.opts.debugging_opts.dep_info_omit_d_target => {
// Don't add the dep-info output when omitting it from dep-info targets
}
// Runs `f` on every output file path and returns the first non-None result, or None if `f`
// returns None for every file path.
fn check_output<F, T>(output_paths: &Vec<PathBuf>, f: F) -> Option<T>
- where F: Fn(&PathBuf) -> Option<T> {
- for output_path in output_paths {
- if let Some(result) = f(output_path) {
- return Some(result);
- }
- }
- None
+where
+ F: Fn(&PathBuf) -> Option<T>,
+{
+ for output_path in output_paths {
+ if let Some(result) = f(output_path) {
+ return Some(result);
+ }
+ }
+ None
}
pub fn output_contains_path(output_paths: &Vec<PathBuf>, input_path: &PathBuf) -> bool {
let input_path = input_path.canonicalize().ok();
if input_path.is_none() {
- return false
+ return false;
}
let check = |output_path: &PathBuf| {
if output_path.canonicalize().ok() == input_path {
Some(())
- } else { None }
+ } else {
+ None
+ }
};
check_output(output_paths, check).is_some()
}
let check = |output_path: &PathBuf| {
if output_path.is_dir() {
Some(output_path.clone())
- } else { None }
+ } else {
+ None
+ }
};
check_output(output_paths, check)
}
-fn write_out_deps(sess: &Session,
- outputs: &OutputFilenames,
- out_filenames: &Vec<PathBuf>) {
+fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames: &Vec<PathBuf>) {
// Write out dependency rules to the dep-info file if requested
if !sess.opts.output_types.contains_key(&OutputType::DepInfo) {
return;
}
let deps_filename = outputs.path(OutputType::DepInfo);
- let result =
- (|| -> io::Result<()> {
- // Build a list of files used to compile the output and
- // write Makefile-compatible dependency rules
- let files: Vec<String> = sess.codemap()
- .files()
- .iter()
- .filter(|fmap| fmap.is_real_file())
- .filter(|fmap| !fmap.is_imported())
- .map(|fmap| escape_dep_filename(&fmap.name))
- .collect();
- let mut file = fs::File::create(&deps_filename)?;
- for path in out_filenames {
- write!(file, "{}: {}\n\n", path.display(), files.join(" "))?;
- }
+ let result = (|| -> io::Result<()> {
+ // Build a list of files used to compile the output and
+ // write Makefile-compatible dependency rules
+ let files: Vec<String> = sess.codemap()
+ .files()
+ .iter()
+ .filter(|fmap| fmap.is_real_file())
+ .filter(|fmap| !fmap.is_imported())
+ .map(|fmap| escape_dep_filename(&fmap.name))
+ .collect();
+ let mut file = fs::File::create(&deps_filename)?;
+ for path in out_filenames {
+ write!(file, "{}: {}\n\n", path.display(), files.join(" "))?;
+ }
- // Emit a fake target for each input file to the compilation. This
- // prevents `make` from spitting out an error if a file is later
- // deleted. For more info see #28735
- for path in files {
- writeln!(file, "{}:", path)?;
- }
- Ok(())
- })();
+ // Emit a fake target for each input file to the compilation. This
+ // prevents `make` from spitting out an error if a file is later
+ // deleted. For more info see #28735
+ for path in files {
+ writeln!(file, "{}:", path)?;
+ }
+ Ok(())
+ })();
match result {
Ok(()) => {}
Err(e) => {
- sess.fatal(&format!("error writing dependencies to `{}`: {}",
- deps_filename.display(),
- e));
+ sess.fatal(&format!(
+ "error writing dependencies to `{}`: {}",
+ deps_filename.display(),
+ e
+ ));
}
}
}
pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<config::CrateType> {
// Unconditionally collect crate types from attributes to make them used
- let attr_types: Vec<config::CrateType> =
- attrs.iter()
- .filter_map(|a| {
- if a.check_name("crate_type") {
- match a.value_str() {
- Some(ref n) if *n == "rlib" => {
- Some(config::CrateTypeRlib)
- }
- Some(ref n) if *n == "dylib" => {
- Some(config::CrateTypeDylib)
- }
- Some(ref n) if *n == "cdylib" => {
- Some(config::CrateTypeCdylib)
- }
- Some(ref n) if *n == "lib" => {
- Some(config::default_lib_output())
- }
- Some(ref n) if *n == "staticlib" => {
- Some(config::CrateTypeStaticlib)
- }
- Some(ref n) if *n == "proc-macro" => {
- Some(config::CrateTypeProcMacro)
- }
- Some(ref n) if *n == "bin" => Some(config::CrateTypeExecutable),
- Some(_) => {
- session.buffer_lint(lint::builtin::UNKNOWN_CRATE_TYPES,
- ast::CRATE_NODE_ID,
- a.span,
- "invalid `crate_type` value");
- None
- }
- _ => {
- session.struct_span_err(a.span, "`crate_type` requires a value")
- .note("for example: `#![crate_type=\"lib\"]`")
- .emit();
- None
- }
- }
- } else {
- None
- }
- })
- .collect();
+ let attr_types: Vec<config::CrateType> = attrs
+ .iter()
+ .filter_map(|a| {
+ if a.check_name("crate_type") {
+ match a.value_str() {
+ Some(ref n) if *n == "rlib" => Some(config::CrateTypeRlib),
+ Some(ref n) if *n == "dylib" => Some(config::CrateTypeDylib),
+ Some(ref n) if *n == "cdylib" => Some(config::CrateTypeCdylib),
+ Some(ref n) if *n == "lib" => Some(config::default_lib_output()),
+ Some(ref n) if *n == "staticlib" => Some(config::CrateTypeStaticlib),
+ Some(ref n) if *n == "proc-macro" => Some(config::CrateTypeProcMacro),
+ Some(ref n) if *n == "bin" => Some(config::CrateTypeExecutable),
+ Some(_) => {
+ session.buffer_lint(
+ lint::builtin::UNKNOWN_CRATE_TYPES,
+ ast::CRATE_NODE_ID,
+ a.span,
+ "invalid `crate_type` value",
+ );
+ None
+ }
+ _ => {
+ session
+ .struct_span_err(a.span, "`crate_type` requires a value")
+ .note("for example: `#![crate_type=\"lib\"]`")
+ .emit();
+ None
+ }
+ }
+ } else {
+ None
+ }
+ })
+ .collect();
// If we're generating a test executable, then ignore all other output
// styles at all other locations
if base.is_empty() {
base.extend(attr_types);
if base.is_empty() {
- base.push(::rustc_trans_utils::link::default_output_for_target(session));
+ base.push(::rustc_trans_utils::link::default_output_for_target(
+ session,
+ ));
}
base.sort();
base.dedup();
let res = !::rustc_trans_utils::link::invalid_output_for_target(session, *crate_type);
if !res {
- session.warn(&format!("dropping unsupported crate type `{}` for target `{}`",
- *crate_type,
- session.opts.target_triple));
+ session.warn(&format!(
+ "dropping unsupported crate type `{}` for target `{}`",
+ *crate_type, session.opts.target_triple
+ ));
}
res
// Also incorporate crate type, so that we don't get symbol conflicts when
// linking against a library of the same name, if this is an executable.
- let is_exe = session.crate_types.borrow().contains(&config::CrateTypeExecutable);
+ let is_exe = session
+ .crate_types
+ .borrow()
+ .contains(&config::CrateTypeExecutable);
hasher.write(if is_exe { b"exe" } else { b"lib" });
CrateDisambiguator::from(hasher.finish())
-
}
-pub fn build_output_filenames(input: &Input,
- odir: &Option<PathBuf>,
- ofile: &Option<PathBuf>,
- attrs: &[ast::Attribute],
- sess: &Session)
- -> OutputFilenames {
+pub fn build_output_filenames(
+ input: &Input,
+ odir: &Option<PathBuf>,
+ ofile: &Option<PathBuf>,
+ attrs: &[ast::Attribute],
+ sess: &Session,
+) -> OutputFilenames {
match *ofile {
None => {
// "-" as input file will cause the parser to read from stdin so we
// If a crate name is present, we use it as the link name
let stem = sess.opts
- .crate_name
- .clone()
- .or_else(|| attr::find_crate_name(attrs).map(|n| n.to_string()))
- .unwrap_or(input.filestem());
+ .crate_name
+ .clone()
+ .or_else(|| attr::find_crate_name(attrs).map(|n| n.to_string()))
+ .unwrap_or(input.filestem());
OutputFilenames {
out_directory: dirpath,
Some(ref out_file) => {
let unnamed_output_types = sess.opts
- .output_types
- .values()
- .filter(|a| a.is_none())
- .count();
+ .output_types
+ .values()
+ .filter(|a| a.is_none())
+ .count();
let ofile = if unnamed_output_types > 1 {
- sess.warn("due to multiple output types requested, the explicitly specified \
- output file name will be adapted for each output type");
+ sess.warn(
+ "due to multiple output types requested, the explicitly specified \
+ output file name will be adapted for each output type",
+ );
None
} else {
Some(out_file.clone())
OutputFilenames {
out_directory: out_file.parent().unwrap_or(cur_dir).to_path_buf(),
- out_filestem: out_file.file_stem()
- .unwrap_or(OsStr::new(""))
- .to_str()
- .unwrap()
- .to_string(),
+ out_filestem: out_file
+ .file_stem()
+ .unwrap_or(OsStr::new(""))
+ .to_str()
+ .unwrap()
+ .to_string(),
single_output_file: ofile,
extra: sess.opts.cg.extra_filename.clone(),
outputs: sess.opts.output_types.clone(),
#![feature(slice_sort_by_cached_key)]
#![feature(set_stdio)]
#![feature(rustc_stack_internals)]
+#![feature(no_debug)]
extern crate arena;
extern crate getopts;
pub fn get_trans(sess: &Session) -> Box<TransCrate> {
static INIT: Once = ONCE_INIT;
+
+ #[allow(deprecated)]
+ #[no_debug]
static mut LOAD: fn() -> Box<TransCrate> = || unreachable!();
INIT.call_once(|| {
(result, Some(sess))
}
+#[cfg(unix)]
+pub fn set_sigpipe_handler() {
+ unsafe {
+ // Set the SIGPIPE signal handler, so that an EPIPE
+ // will cause rustc to terminate, as expected.
+ assert!(libc::signal(libc::SIGPIPE, libc::SIG_DFL) != libc::SIG_ERR);
+ }
+}
+
+#[cfg(windows)]
+pub fn set_sigpipe_handler() {}
+
// Extract output directory and file from matches.
fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<PathBuf>) {
let odir = matches.opt_str("out-dir").map(|o| PathBuf::from(&o));
use rustc::hir::map as hir_map;
use rustc::session::{self, config};
use rustc::session::config::{OutputFilenames, OutputTypes};
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{self, Lrc};
use syntax;
use syntax::ast;
use syntax::abi::Abi;
}
}
-fn errors(msgs: &[&str]) -> (Box<Emitter + Send>, usize) {
+fn errors(msgs: &[&str]) -> (Box<Emitter + sync::Send>, usize) {
let v = msgs.iter().map(|m| m.to_string()).collect();
- (box ExpectErrorEmitter { messages: v } as Box<Emitter + Send>, msgs.len())
+ (box ExpectErrorEmitter { messages: v } as Box<Emitter + sync::Send>, msgs.len())
}
fn test_env<F>(source_string: &str,
- args: (Box<Emitter + Send>, usize),
+ args: (Box<Emitter + sync::Send>, usize),
body: F)
where F: FnOnce(Env)
{
}
fn test_env_impl<F>(source_string: &str,
- (emitter, expected_err_count): (Box<Emitter + Send>, usize),
+ (emitter, expected_err_count): (Box<Emitter + sync::Send>, usize),
body: F)
where F: FnOnce(Env)
{
} else {
0
};
- (b_start..b_end + extra).contains(a_start) ||
- (a_start..a_end + extra).contains(b_start)
+ (b_start..b_end + extra).contains(&a_start) ||
+ (a_start..a_end + extra).contains(&b_start)
}
fn overlaps(a1: &Annotation, a2: &Annotation, padding: usize) -> bool {
num_overlap(a1.start_col, a1.end_col + padding, a2.start_col, a2.end_col, false)
use emitter::{Emitter, EmitterWriter};
-use rustc_data_structures::sync::{self, Lrc};
+use rustc_data_structures::sync::{self, Lrc, Lock, LockCell};
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::stable_hasher::StableHasher;
use std::borrow::Cow;
-use std::cell::{RefCell, Cell};
+use std::cell::Cell;
use std::{error, fmt};
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
pub flags: HandlerFlags,
err_count: AtomicUsize,
- emitter: RefCell<Box<Emitter>>,
- continue_after_error: Cell<bool>,
- delayed_span_bug: RefCell<Option<Diagnostic>>,
+ emitter: Lock<Box<Emitter + sync::Send>>,
+ continue_after_error: LockCell<bool>,
+ delayed_span_bug: Lock<Option<Diagnostic>>,
// This set contains the `DiagnosticId` of all emitted diagnostics to avoid
// emitting the same diagnostic with extended help (`--teach`) twice, which
// would be uneccessary repetition.
- tracked_diagnostic_codes: RefCell<FxHashSet<DiagnosticId>>,
+ taught_diagnostics: Lock<FxHashSet<DiagnosticId>>,
+
+ /// Used to suggest rustc --explain <error code>
+ emitted_diagnostic_codes: Lock<FxHashSet<DiagnosticId>>,
// This set contains a hash of every diagnostic that has been emitted by
// this handler. These hashes is used to avoid emitting the same error
// twice.
- emitted_diagnostics: RefCell<FxHashSet<u128>>,
+ emitted_diagnostics: Lock<FxHashSet<u128>>,
}
fn default_track_diagnostic(_: &Diagnostic) {}
pub fn with_emitter(can_emit_warnings: bool,
treat_err_as_bug: bool,
- e: Box<Emitter>)
+ e: Box<Emitter + sync::Send>)
-> Handler {
Handler::with_emitter_and_flags(
e,
})
}
- pub fn with_emitter_and_flags(e: Box<Emitter>, flags: HandlerFlags) -> Handler {
+ pub fn with_emitter_and_flags(e: Box<Emitter + sync::Send>, flags: HandlerFlags) -> Handler {
Handler {
flags,
err_count: AtomicUsize::new(0),
- emitter: RefCell::new(e),
- continue_after_error: Cell::new(true),
- delayed_span_bug: RefCell::new(None),
- tracked_diagnostic_codes: RefCell::new(FxHashSet()),
- emitted_diagnostics: RefCell::new(FxHashSet()),
+ emitter: Lock::new(e),
+ continue_after_error: LockCell::new(true),
+ delayed_span_bug: Lock::new(None),
+ taught_diagnostics: Lock::new(FxHashSet()),
+ emitted_diagnostic_codes: Lock::new(FxHashSet()),
+ emitted_diagnostics: Lock::new(FxHashSet()),
}
}
/// tools that want to reuse a `Parser` cleaning the previously emitted diagnostics as well as
/// the overall count of emitted error diagnostics.
pub fn reset_err_count(&self) {
- self.emitted_diagnostics.replace(FxHashSet());
+ *self.emitted_diagnostics.borrow_mut() = FxHashSet();
self.err_count.store(0, SeqCst);
}
let _ = self.fatal(&s);
let can_show_explain = self.emitter.borrow().should_show_explain();
- let are_there_diagnostics = !self.tracked_diagnostic_codes.borrow().is_empty();
+ let are_there_diagnostics = !self.emitted_diagnostic_codes.borrow().is_empty();
if can_show_explain && are_there_diagnostics {
let mut error_codes =
- self.tracked_diagnostic_codes.borrow()
+ self.emitted_diagnostic_codes.borrow()
.clone()
.into_iter()
.filter_map(|x| match x {
}
}
- /// `true` if a diagnostic with this code has already been emitted in this handler.
+ /// `true` if we haven't taught a diagnostic with this code already.
+ /// The caller must then teach the user about such a diagnostic.
///
/// Used to suppress emitting the same error multiple times with extended explanation when
/// calling `-Zteach`.
- pub fn code_emitted(&self, code: &DiagnosticId) -> bool {
- self.tracked_diagnostic_codes.borrow().contains(code)
+ pub fn must_teach(&self, code: &DiagnosticId) -> bool {
+ self.taught_diagnostics.borrow_mut().insert(code.clone())
}
pub fn force_print_db(&self, mut db: DiagnosticBuilder) {
});
if let Some(ref code) = diagnostic.code {
- self.tracked_diagnostic_codes.borrow_mut().insert(code.clone());
+ self.emitted_diagnostic_codes.borrow_mut().insert(code.clone());
}
let diagnostic_hash = {
time(sess, "persist dep-graph", || {
save_in(sess,
dep_graph_path(sess),
- |e| encode_dep_graph(tcx, e));
+ |e| {
+ time(sess, "encode dep-graph", || {
+ encode_dep_graph(tcx, e)
+ })
+ });
});
}
tcx.sess.opts.dep_tracking_hash().encode(encoder)?;
// Encode the graph data.
- let serialized_graph = tcx.dep_graph.serialize();
+ let serialized_graph = time(tcx.sess, "getting serialized graph", || {
+ tcx.dep_graph.serialize()
+ });
if tcx.sess.opts.debugging_opts.incremental_info {
#[derive(Clone)]
println!("[incremental]");
}
- serialized_graph.encode(encoder)?;
+ time(tcx.sess, "encoding serialized graph", || {
+ serialized_graph.encode(encoder)
+ })?;
Ok(())
}
fn encode_query_cache(tcx: TyCtxt,
encoder: &mut Encoder)
-> io::Result<()> {
- tcx.serialize_query_result_cache(encoder)
+ time(tcx.sess, "serialize query result cache", || {
+ tcx.serialize_query_result_cache(encoder)
+ })
}
use rustc_back::target::TargetTriple;
use rustc::session::search_paths::PathKind;
use rustc::middle;
-use rustc::middle::cstore::{validate_crate_name, ExternCrate};
+use rustc::middle::cstore::{validate_crate_name, ExternCrate, ExternCrateSource};
use rustc::util::common::record_time;
use rustc::util::nodemap::FxHashSet;
use rustc::hir::map::Definitions;
// - something over nothing (tuple.0);
// - direct extern crate to indirect (tuple.1);
// - shorter paths to longer (tuple.2).
- let new_rank = (true, extern_crate.direct, !extern_crate.path_len);
+ let new_rank = (
+ true,
+ extern_crate.direct,
+ cmp::Reverse(extern_crate.path_len),
+ );
let old_rank = match *old_extern_crate {
- None => (false, false, !0),
- Some(ref c) => (true, c.direct, !c.path_len),
+ None => (false, false, cmp::Reverse(usize::max_value())),
+ Some(ref c) => (
+ true,
+ c.direct,
+ cmp::Reverse(c.path_len),
+ ),
};
-
if old_rank >= new_rank {
return; // no change needed
}
}
}
- fn process_item(&mut self, item: &ast::Item, definitions: &Definitions) {
+ fn process_extern_crate(&mut self, item: &ast::Item, definitions: &Definitions) -> CrateNum {
match item.node {
ast::ItemKind::ExternCrate(orig_name) => {
debug!("resolving extern crate stmt. ident: {} orig_name: {:?}",
let def_id = definitions.opt_local_def_id(item.id).unwrap();
let path_len = definitions.def_path(def_id.index).data.len();
-
- let extern_crate = ExternCrate { def_id, span: item.span, direct: true, path_len };
- self.update_extern_crate(cnum, extern_crate, &mut FxHashSet());
+ self.update_extern_crate(
+ cnum,
+ ExternCrate {
+ src: ExternCrateSource::Extern(def_id),
+ span: item.span,
+ path_len,
+ direct: true,
+ },
+ &mut FxHashSet(),
+ );
self.cstore.add_extern_mod_stmt_cnum(item.id, cnum);
+ cnum
}
- _ => {}
+ _ => bug!(),
}
}
- fn resolve_crate_from_path(&mut self, name: Symbol, span: Span) -> CrateNum {
- self.resolve_crate(&None, name, name, None, None, span, PathKind::Crate,
- DepKind::Explicit).0
+ fn process_path_extern(
+ &mut self,
+ name: Symbol,
+ span: Span,
+ ) -> CrateNum {
+ let cnum = self.resolve_crate(
+ &None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit
+ ).0;
+
+ self.update_extern_crate(
+ cnum,
+ ExternCrate {
+ src: ExternCrateSource::Path,
+ span,
+ // to have the least priority in `update_extern_crate`
+ path_len: usize::max_value(),
+ direct: true,
+ },
+ &mut FxHashSet(),
+ );
+
+ cnum
+ }
+
+ fn process_use_extern(
+ &mut self,
+ name: Symbol,
+ span: Span,
+ id: ast::NodeId,
+ definitions: &Definitions,
+ ) -> CrateNum {
+ let cnum = self.resolve_crate(
+ &None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit
+ ).0;
+
+ let def_id = definitions.opt_local_def_id(id).unwrap();
+ let path_len = definitions.def_path(def_id.index).data.len();
+
+ self.update_extern_crate(
+ cnum,
+ ExternCrate {
+ src: ExternCrateSource::Use,
+ span,
+ path_len,
+ direct: true,
+ },
+ &mut FxHashSet(),
+ );
+
+ cnum
}
}
// interpreter allocation cache
interpret_alloc_cache: FxHashMap<usize, interpret::AllocId>,
- // a cache for sizes of interpreter allocations
- // needed to skip already deserialized allocations
- interpret_alloc_size: FxHashMap<usize, usize>,
+
+ // Read from the LazySeq CrateRoot::interpret_alloc_index on demand
+ interpret_alloc_index: Option<Vec<u32>>,
}
/// Abstract over the various ways one can create metadata decoders.
last_filemap_index: 0,
lazy_state: LazyState::NoNode,
interpret_alloc_cache: FxHashMap::default(),
- interpret_alloc_size: FxHashMap::default(),
+ interpret_alloc_index: None,
}
}
}
self.lazy_state = LazyState::Previous(position + min_size);
Ok(position)
}
+
+ fn interpret_alloc(&mut self, idx: usize) -> usize {
+ if let Some(index) = self.interpret_alloc_index.as_mut() {
+ return index[idx] as usize;
+ }
+ let cdata = self.cdata();
+ let index: Vec<u32> = cdata.root.interpret_alloc_index.decode(cdata).collect();
+ let pos = index[idx];
+ self.interpret_alloc_index = Some(index);
+ pos as usize
+ }
}
impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> {
impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
- let tcx = self.tcx.expect("need tcx for AllocId decoding");
- let pos = self.position();
- if let Some(cached) = self.interpret_alloc_cache.get(&pos).cloned() {
- // if there's no end position we are currently deserializing a recursive
- // allocation
- if let Some(end) = self.interpret_alloc_size.get(&pos).cloned() {
- trace!("{} already cached as {:?}", pos, cached);
- // skip ahead
- self.opaque.set_position(end);
- return Ok(cached)
- }
+ let tcx = self.tcx.unwrap();
+ let idx = usize::decode(self)?;
+
+ if let Some(cached) = self.interpret_alloc_cache.get(&idx).cloned() {
+ return Ok(cached);
}
- let id = interpret::specialized_decode_alloc_id(
- self,
- tcx,
- pos,
- |this, pos, alloc_id| { this.interpret_alloc_cache.insert(pos, alloc_id); },
- |this, shorthand| {
- // need to load allocation
- this.with_position(shorthand, |this| interpret::AllocId::decode(this))
- }
- )?;
- let end_pos = self.position();
- assert!(self
- .interpret_alloc_size
- .insert(pos, end_pos)
- .is_none());
- Ok(id)
+ let pos = self.interpret_alloc(idx);
+ self.with_position(pos, |this| {
+ interpret::specialized_decode_alloc_id(
+ this,
+ tcx,
+ |this, alloc_id| {
+ assert!(this.interpret_alloc_cache.insert(idx, alloc_id).is_none());
+ },
+ )
+ })
}
}
lazy_state: LazyState,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
- interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+
+ interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs_inverse: Vec<interpret::AllocId>,
// This is used to speed up Span encoding.
filemap_cache: Lrc<FileMap>,
impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
use std::collections::hash_map::Entry;
- let tcx = self.tcx;
- let pos = self.position();
- let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
- Entry::Occupied(entry) => Some(entry.get().clone()),
- Entry::Vacant(entry) => {
- // ensure that we don't place any AllocIds at the very beginning
- // of the metadata file, because that would end up making our indices
- // not special. This is essentially impossible, but let's make sure
- assert!(pos >= interpret::SHORTHAND_START);
- entry.insert(pos);
- None
+ let index = match self.interpret_allocs.entry(*alloc_id) {
+ Entry::Occupied(e) => *e.get(),
+ Entry::Vacant(e) => {
+ let idx = self.interpret_allocs_inverse.len();
+ self.interpret_allocs_inverse.push(*alloc_id);
+ e.insert(idx);
+ idx
},
};
- interpret::specialized_encode_alloc_id(
- self,
- tcx,
- *alloc_id,
- shorthand,
- )
+
+ index.encode(self)
}
}
start - min_end
}
LazyState::Previous(last_min_end) => {
- assert!(last_min_end <= position);
+ assert!(
+ last_min_end <= position,
+ "make sure that the calls to `lazy*` \
+ are in the same order as the metadata fields",
+ );
position - last_min_end
}
};
IsolatedEncoder::encode_wasm_custom_sections,
&wasm_custom_sections);
- // Encode and index the items.
+ let tcx = self.tcx;
+
+ // Encode the items.
i = self.position();
let items = self.encode_info_for_items();
let item_bytes = self.position() - i;
+ // Encode the allocation index
+ let interpret_alloc_index = {
+ let mut interpret_alloc_index = Vec::new();
+ let mut n = 0;
+ trace!("beginning to encode alloc ids");
+ loop {
+ let new_n = self.interpret_allocs_inverse.len();
+ // if we have found new ids, serialize those, too
+ if n == new_n {
+ // otherwise, abort
+ break;
+ }
+ trace!("encoding {} further alloc ids", new_n - n);
+ for idx in n..new_n {
+ let id = self.interpret_allocs_inverse[idx];
+ let pos = self.position() as u32;
+ interpret_alloc_index.push(pos);
+ interpret::specialized_encode_alloc_id(
+ self,
+ tcx,
+ id,
+ ).unwrap();
+ }
+ n = new_n;
+ }
+ self.lazy_seq(interpret_alloc_index)
+ };
+
+ // Index the items
i = self.position();
let index = items.write_index(&mut self.opaque.cursor);
let index_bytes = self.position() - i;
- let tcx = self.tcx;
let link_meta = self.link_meta;
let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeProcMacro);
let has_default_lib_allocator =
attr::contains_name(tcx.hir.krate_attrs(), "default_lib_allocator");
let has_global_allocator = *tcx.sess.has_global_allocator.get();
+
let root = self.lazy(&CrateRoot {
name: tcx.crate_name(LOCAL_CRATE),
extra_filename: tcx.sess.opts.cg.extra_filename.clone(),
impls,
exported_symbols,
wasm_custom_sections,
+ interpret_alloc_index,
index,
});
}
}
+ fn metadata_output_only(&self) -> bool {
+ // MIR optimisation can be skipped when we're just interested in the metadata.
+ !self.tcx.sess.opts.output_types.should_trans()
+ }
+
fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
debug!("IsolatedEncoder::encode_info_for_impl_item({:?})", def_id);
let tcx = self.tcx;
} else if let hir::ImplItemKind::Method(ref sig, body) = ast_item.node {
let generics = self.tcx.generics_of(def_id);
let types = generics.parent_types as usize + generics.types.len();
- let needs_inline = types > 0 || tcx.trans_fn_attrs(def_id).requests_inline();
+ let needs_inline = (types > 0 || tcx.trans_fn_attrs(def_id).requests_inline()) &&
+ !self.metadata_output_only();
let is_const_fn = sig.constness == hir::Constness::Const;
let ast = if is_const_fn { Some(body) } else { None };
let always_encode_mir = self.tcx.sess.opts.debugging_opts.always_encode_mir;
hir::ItemConst(..) => self.encode_optimized_mir(def_id),
hir::ItemFn(_, _, constness, _, ref generics, _) => {
let has_tps = generics.ty_params().next().is_some();
- let needs_inline = has_tps || tcx.trans_fn_attrs(def_id).requests_inline();
+ let needs_inline = (has_tps || tcx.trans_fn_attrs(def_id).requests_inline()) &&
+ !self.metadata_output_only();
let always_encode_mir = self.tcx.sess.opts.debugging_opts.always_encode_mir;
if needs_inline || constness == hir::Constness::Const || always_encode_mir {
self.encode_optimized_mir(def_id)
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
filemap_cache: tcx.sess.codemap().files()[0].clone(),
- interpret_alloc_shorthands: Default::default(),
+ interpret_allocs: Default::default(),
+ interpret_allocs_inverse: Default::default(),
};
// Encode the rustc version string in a predictable location.
pub impls: LazySeq<TraitImpls>,
pub exported_symbols: EncodedExportedSymbols,
pub wasm_custom_sections: LazySeq<DefIndex>,
+ pub interpret_alloc_index: LazySeq<u32>,
pub index: LazySeq<index::Index>,
}
--- /dev/null
+// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use borrow_check::place_ext::PlaceExt;
+use dataflow::indexes::BorrowIndex;
+use rustc::mir::traversal;
+use rustc::mir::visit::{PlaceContext, Visitor};
+use rustc::mir::{self, Location, Mir, Place};
+use rustc::ty::{Region, TyCtxt};
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
+use rustc_data_structures::indexed_vec::IndexVec;
+use std::fmt;
+use std::hash::Hash;
+use std::ops::Index;
+
+crate struct BorrowSet<'tcx> {
+ /// The fundamental map relating bitvector indexes to the borrows
+ /// in the MIR.
+ crate borrows: IndexVec<BorrowIndex, BorrowData<'tcx>>,
+
+ /// Each borrow is also uniquely identified in the MIR by the
+ /// `Location` of the assignment statement in which it appears on
+ /// the right hand side; we map each such location to the
+ /// corresponding `BorrowIndex`.
+ crate location_map: FxHashMap<Location, BorrowIndex>,
+
+ /// Locations which activate borrows.
+ /// NOTE: A given location may activate more than one borrow in the future
+ /// when more general two-phase borrow support is introduced, but for now we
+ /// only need to store one borrow index
+ crate activation_map: FxHashMap<Location, Vec<BorrowIndex>>,
+
+ /// Every borrow has a region; this maps each such regions back to
+ /// its borrow-indexes.
+ crate region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
+
+ /// Map from local to all the borrows on that local
+ crate local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
+}
+
+impl<'tcx> Index<BorrowIndex> for BorrowSet<'tcx> {
+ type Output = BorrowData<'tcx>;
+
+ fn index(&self, index: BorrowIndex) -> &BorrowData<'tcx> {
+ &self.borrows[index]
+ }
+}
+
+#[derive(Debug)]
+crate struct BorrowData<'tcx> {
+ /// Location where the borrow reservation starts.
+ /// In many cases, this will be equal to the activation location but not always.
+ crate reserve_location: Location,
+ /// Location where the borrow is activated. None if this is not a
+ /// 2-phase borrow.
+ crate activation_location: Option<Location>,
+ /// What kind of borrow this is
+ crate kind: mir::BorrowKind,
+ /// The region for which this borrow is live
+ crate region: Region<'tcx>,
+ /// Place from which we are borrowing
+ crate borrowed_place: mir::Place<'tcx>,
+ /// Place to which the borrow was stored
+ crate assigned_place: mir::Place<'tcx>,
+}
+
+impl<'tcx> fmt::Display for BorrowData<'tcx> {
+ fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
+ let kind = match self.kind {
+ mir::BorrowKind::Shared => "",
+ mir::BorrowKind::Unique => "uniq ",
+ mir::BorrowKind::Mut { .. } => "mut ",
+ };
+ let region = format!("{}", self.region);
+ let region = if region.len() > 0 {
+ format!("{} ", region)
+ } else {
+ region
+ };
+ write!(w, "&{}{}{:?}", region, kind, self.borrowed_place)
+ }
+}
+
+impl<'tcx> BorrowSet<'tcx> {
+ pub fn build(tcx: TyCtxt<'_, '_, 'tcx>, mir: &Mir<'tcx>) -> Self {
+ let mut visitor = GatherBorrows {
+ tcx,
+ mir,
+ idx_vec: IndexVec::new(),
+ location_map: FxHashMap(),
+ activation_map: FxHashMap(),
+ region_map: FxHashMap(),
+ local_map: FxHashMap(),
+ pending_activations: FxHashMap(),
+ };
+
+ for (block, block_data) in traversal::preorder(mir) {
+ visitor.visit_basic_block_data(block, block_data);
+ }
+
+ // Double check: We should have found an activation for every pending
+ // activation.
+ assert_eq!(
+ visitor
+ .pending_activations
+ .iter()
+ .find(|&(_local, &borrow_index)| visitor.idx_vec[borrow_index]
+ .activation_location
+ .is_none()),
+ None,
+ "never found an activation for this borrow!",
+ );
+
+ BorrowSet {
+ borrows: visitor.idx_vec,
+ location_map: visitor.location_map,
+ activation_map: visitor.activation_map,
+ region_map: visitor.region_map,
+ local_map: visitor.local_map,
+ }
+ }
+
+ crate fn activations_at_location(&self, location: Location) -> &[BorrowIndex] {
+ self.activation_map
+ .get(&location)
+ .map(|activations| &activations[..])
+ .unwrap_or(&[])
+ }
+}
+
+struct GatherBorrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ mir: &'a Mir<'tcx>,
+ idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
+ location_map: FxHashMap<Location, BorrowIndex>,
+ activation_map: FxHashMap<Location, Vec<BorrowIndex>>,
+ region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
+ local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
+
+ /// When we encounter a 2-phase borrow statement, it will always
+ /// be assigning into a temporary TEMP:
+ ///
+ /// TEMP = &foo
+ ///
+ /// We add TEMP into this map with `b`, where `b` is the index of
+ /// the borrow. When we find a later use of this activation, we
+ /// remove from the map (and add to the "tombstone" set below).
+ pending_activations: FxHashMap<mir::Local, BorrowIndex>,
+}
+
+impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {
+ fn visit_assign(
+ &mut self,
+ block: mir::BasicBlock,
+ assigned_place: &mir::Place<'tcx>,
+ rvalue: &mir::Rvalue<'tcx>,
+ location: mir::Location,
+ ) {
+ if let mir::Rvalue::Ref(region, kind, ref borrowed_place) = *rvalue {
+ if borrowed_place.is_unsafe_place(self.tcx, self.mir) {
+ return;
+ }
+
+ let borrow = BorrowData {
+ kind,
+ region,
+ reserve_location: location,
+ activation_location: None,
+ borrowed_place: borrowed_place.clone(),
+ assigned_place: assigned_place.clone(),
+ };
+ let idx = self.idx_vec.push(borrow);
+ self.location_map.insert(location, idx);
+
+ self.insert_as_pending_if_two_phase(location, &assigned_place, region, kind, idx);
+
+ insert(&mut self.region_map, &region, idx);
+ if let Some(local) = borrowed_place.root_local() {
+ insert(&mut self.local_map, &local, idx);
+ }
+ }
+
+ return self.super_assign(block, assigned_place, rvalue, location);
+
+ fn insert<'a, K, V>(map: &'a mut FxHashMap<K, FxHashSet<V>>, k: &K, v: V)
+ where
+ K: Clone + Eq + Hash,
+ V: Eq + Hash,
+ {
+ map.entry(k.clone()).or_insert(FxHashSet()).insert(v);
+ }
+ }
+
+ fn visit_place(
+ &mut self,
+ place: &mir::Place<'tcx>,
+ context: PlaceContext<'tcx>,
+ location: Location,
+ ) {
+ self.super_place(place, context, location);
+
+ // We found a use of some temporary TEMP...
+ if let Place::Local(temp) = place {
+ // ... check whether we (earlier) saw a 2-phase borrow like
+ //
+ // TMP = &mut place
+ match self.pending_activations.get(temp) {
+ Some(&borrow_index) => {
+ let borrow_data = &mut self.idx_vec[borrow_index];
+
+ // Watch out: the use of TMP in the borrow
+ // itself doesn't count as an
+ // activation. =)
+ if borrow_data.reserve_location == location && context == PlaceContext::Store {
+ return;
+ }
+
+ if let Some(other_activation) = borrow_data.activation_location {
+ span_bug!(
+ self.mir.source_info(location).span,
+ "found two activations for 2-phase borrow temporary {:?}: \
+ {:?} and {:?}",
+ temp,
+ location,
+ other_activation,
+ );
+ }
+
+ // Otherwise, this is the unique later use
+ // that we expect.
+ borrow_data.activation_location = Some(location);
+ self.activation_map
+ .entry(location)
+ .or_insert(Vec::new())
+ .push(borrow_index);
+ }
+
+ None => {}
+ }
+ }
+ }
+
+ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: mir::Location) {
+ if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {
+ // double-check that we already registered a BorrowData for this
+
+ let borrow_index = self.location_map[&location];
+ let borrow_data = &self.idx_vec[borrow_index];
+ assert_eq!(borrow_data.reserve_location, location);
+ assert_eq!(borrow_data.kind, kind);
+ assert_eq!(borrow_data.region, region);
+ assert_eq!(borrow_data.borrowed_place, *place);
+ }
+
+ return self.super_rvalue(rvalue, location);
+ }
+
+ fn visit_statement(
+ &mut self,
+ block: mir::BasicBlock,
+ statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ return self.super_statement(block, statement, location);
+ }
+}
+
+impl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> {
+ /// Returns true if the borrow represented by `kind` is
+ /// allowed to be split into separate Reservation and
+ /// Activation phases.
+ fn allow_two_phase_borrow(&self, kind: mir::BorrowKind) -> bool {
+ self.tcx.two_phase_borrows()
+ && (kind.allows_two_phase_borrow()
+ || self.tcx.sess.opts.debugging_opts.two_phase_beyond_autoref)
+ }
+
+ /// If this is a two-phase borrow, then we will record it
+ /// as "pending" until we find the activating use.
+ fn insert_as_pending_if_two_phase(
+ &mut self,
+ start_location: Location,
+ assigned_place: &mir::Place<'tcx>,
+ region: Region<'tcx>,
+ kind: mir::BorrowKind,
+ borrow_index: BorrowIndex,
+ ) {
+ debug!(
+ "Borrows::insert_as_pending_if_two_phase({:?}, {:?}, {:?}, {:?})",
+ start_location, assigned_place, region, borrow_index,
+ );
+
+ if !self.allow_two_phase_borrow(kind) {
+ debug!(" -> {:?}", start_location);
+ return;
+ }
+
+ // When we encounter a 2-phase borrow statement, it will always
+ // be assigning into a temporary TEMP:
+ //
+ // TEMP = &foo
+ //
+ // so extract `temp`.
+ let temp = if let &mir::Place::Local(temp) = assigned_place {
+ temp
+ } else {
+ span_bug!(
+ self.mir.source_info(start_location).span,
+ "expected 2-phase borrow to assign to a local, not `{:?}`",
+ assigned_place,
+ );
+ };
+
+ // Insert `temp` into the list of pending activations. From
+ // now on, we'll be on the lookout for a use of it. Note that
+ // we are guaranteed that this use will come after the
+ // assignment.
+ let old_value = self.pending_activations.insert(temp, borrow_index);
+ assert!(old_value.is_none());
+ }
+}
use super::{Context, MirBorrowckCtxt};
use super::{InitializationRequiringAction, PrefixSet};
-use dataflow::{Borrows, BorrowData, FlowAtLocation, MovingOutStatements};
+use super::borrow_set::BorrowData;
+
+use dataflow::{FlowAtLocation, MovingOutStatements};
use dataflow::move_paths::MovePathIndex;
use util::borrowck_errors::{BorrowckErrors, Origin};
(place, span): (&Place<'tcx>, Span),
gen_borrow_kind: BorrowKind,
issued_borrow: &BorrowData<'tcx>,
- end_issued_loan_span: Option<Span>,
) {
let issued_span = self.retrieve_borrow_span(issued_borrow);
"it",
rgt,
"",
- end_issued_loan_span,
+ None,
Origin::Mir,
)
}
"",
issued_span,
"",
- end_issued_loan_span,
+ None,
Origin::Mir,
)
}
span,
&desc_place,
issued_span,
- end_issued_loan_span,
+ None,
Origin::Mir,
)
}
issued_span,
"it",
"",
- end_issued_loan_span,
+ None,
Origin::Mir,
),
lft,
issued_span,
"",
- end_issued_loan_span,
+ None,
Origin::Mir,
)
}
lft,
issued_span,
"",
- end_issued_loan_span,
+ None,
Origin::Mir,
)
}
context: Context,
borrow: &BorrowData<'tcx>,
drop_span: Span,
- borrows: &Borrows<'cx, 'gcx, 'tcx>
) {
- let end_span = borrows.opt_region_end_span(&borrow.region);
- let scope_tree = borrows.scope_tree();
+ let scope_tree = self.tcx.region_scope_tree(self.mir_def_id);
let root_place = self.prefixes(&borrow.borrowed_place, PrefixSet::All)
.last()
.unwrap();
drop_span,
borrow_span,
proper_span,
- end_span,
);
}
(RegionKind::ReScope(_), None) => {
drop_span,
borrow_span,
proper_span,
- end_span,
);
}
(RegionKind::ReEarlyBound(_), Some(name))
drop_span,
borrow_span,
proper_span,
- end_span,
);
}
(RegionKind::ReEarlyBound(_), None)
drop_span,
borrow_span,
proper_span,
- end_span,
);
}
(RegionKind::ReLateBound(_, _), _)
drop_span: Span,
borrow_span: Span,
_proper_span: Span,
- end_span: Option<Span>,
) {
let tcx = self.tcx;
let mut err =
drop_span,
format!("`{}` dropped here while still borrowed", name),
);
- if let Some(end) = end_span {
- err.span_label(end, "borrowed value needs to live until here");
- }
self.explain_why_borrow_contains_point(context, borrow, &mut err);
err.emit();
}
drop_span: Span,
_borrow_span: Span,
proper_span: Span,
- end_span: Option<Span>,
) {
let tcx = self.tcx;
let mut err =
"temporary value dropped here while still borrowed",
);
err.note("consider using a `let` binding to increase its lifetime");
- if let Some(end) = end_span {
- err.span_label(end, "temporary value needs to live until here");
- }
self.explain_why_borrow_contains_point(context, borrow, &mut err);
err.emit();
}
drop_span: Span,
borrow_span: Span,
_proper_span: Span,
- _end_span: Option<Span>,
) {
debug!(
"report_unscoped_local_value_does_not_live_long_enough(\
err.span_label(borrow_span, "borrowed value does not live long enough");
err.span_label(drop_span, "borrowed value only lives until here");
- if !tcx.nll() {
- tcx.note_and_explain_region(
- scope_tree,
- &mut err,
- "borrowed value must be valid for ",
- borrow.region,
- "...",
- );
- }
-
self.explain_why_borrow_contains_point(context, borrow, &mut err);
err.emit();
}
drop_span: Span,
_borrow_span: Span,
proper_span: Span,
- _end_span: Option<Span>,
) {
debug!(
"report_unscoped_temporary_value_does_not_live_long_enough(\
err.span_label(proper_span, "temporary value does not live long enough");
err.span_label(drop_span, "temporary value only lives until here");
- if !tcx.nll() {
- tcx.note_and_explain_region(
- scope_tree,
- &mut err,
- "borrowed value must be valid for ",
- borrow.region,
- "...",
- );
- }
-
self.explain_why_borrow_contains_point(context, borrow, &mut err);
err.emit();
}
}
// Retrieve span of given borrow from the current MIR representation
- pub fn retrieve_borrow_span(&self, borrow: &BorrowData) -> Span {
+ crate fn retrieve_borrow_span(&self, borrow: &BorrowData) -> Span {
self.mir.source_info(borrow.reserve_location).span
}
//! but is not as ugly as it is right now.
use rustc::mir::{BasicBlock, Location};
+use rustc_data_structures::indexed_set::Iter;
use dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use dataflow::{EverInitializedPlaces, MovingOutStatements};
use dataflow::{Borrows};
use dataflow::{FlowAtLocation, FlowsAtLocation};
use dataflow::move_paths::HasMoveData;
+use dataflow::move_paths::indexes::BorrowIndex;
use std::fmt;
// (forced to be `pub` due to its use as an associated type below.)
-pub(crate) struct Flows<'b, 'gcx: 'tcx, 'tcx: 'b> {
- pub borrows: FlowAtLocation<Borrows<'b, 'gcx, 'tcx>>,
+crate struct Flows<'b, 'gcx: 'tcx, 'tcx: 'b> {
+ borrows: FlowAtLocation<Borrows<'b, 'gcx, 'tcx>>,
pub inits: FlowAtLocation<MaybeInitializedPlaces<'b, 'gcx, 'tcx>>,
pub uninits: FlowAtLocation<MaybeUninitializedPlaces<'b, 'gcx, 'tcx>>,
pub move_outs: FlowAtLocation<MovingOutStatements<'b, 'gcx, 'tcx>>,
}
impl<'b, 'gcx, 'tcx> Flows<'b, 'gcx, 'tcx> {
- pub fn new(
+ crate fn new(
borrows: FlowAtLocation<Borrows<'b, 'gcx, 'tcx>>,
inits: FlowAtLocation<MaybeInitializedPlaces<'b, 'gcx, 'tcx>>,
uninits: FlowAtLocation<MaybeUninitializedPlaces<'b, 'gcx, 'tcx>>,
ever_inits,
}
}
+
+ crate fn borrows_in_scope(&self) -> impl Iterator<Item = BorrowIndex> + '_ {
+ self.borrows.iter_incoming()
+ }
+
+ crate fn with_outgoing_borrows(&self, op: impl FnOnce(Iter<BorrowIndex>)) {
+ self.borrows.with_iter_outgoing(op)
+ }
}
macro_rules! each_flow {
s.push_str(", ");
};
saw_one = true;
- let borrow_data = &self.borrows.operator().borrows()[borrow.borrow_index()];
- s.push_str(&format!("{}{}", borrow_data,
- if borrow.is_activation() { "@active" } else { "" }));
+ let borrow_data = &self.borrows.operator().borrows()[borrow];
+ s.push_str(&format!("{}", borrow_data));
});
s.push_str("] ");
s.push_str(", ");
};
saw_one = true;
- let borrow_data = &self.borrows.operator().borrows()[borrow.borrow_index()];
+ let borrow_data = &self.borrows.operator().borrows()[borrow];
s.push_str(&format!("{}", borrow_data));
});
s.push_str("] ");
use rustc::mir::{Field, Statement, StatementKind, Terminator, TerminatorKind};
use rustc::mir::ClosureRegionRequirements;
+use rustc_data_structures::control_flow_graph::dominators::Dominators;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::indexed_set::IdxSetBuf;
use rustc_data_structures::indexed_vec::Idx;
use std::rc::Rc;
-use syntax::ast;
use syntax_pos::Span;
use dataflow::{do_dataflow, DebugFormatted};
use dataflow::{DataflowResultsConsumer};
use dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use dataflow::{EverInitializedPlaces, MovingOutStatements};
-use dataflow::{BorrowData, Borrows, ReserveOrActivateIndex};
+use dataflow::Borrows;
use dataflow::indexes::BorrowIndex;
use dataflow::move_paths::{IllegalMoveOriginKind, MoveError};
use dataflow::move_paths::{HasMoveData, LookupResult, MoveData, MovePathIndex};
use std::iter;
+use self::borrow_set::{BorrowSet, BorrowData};
use self::flows::Flows;
use self::prefixes::PrefixSet;
use self::MutateMode::{JustWrite, WriteAndRead};
+crate mod borrow_set;
mod error_reporting;
mod flows;
+crate mod place_ext;
mod prefixes;
pub(crate) mod nll;
let input_mir = tcx.mir_validated(def_id);
debug!("run query mir_borrowck: {}", tcx.item_path_str(def_id));
- if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir() {
+ if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir_borrowck() {
return None;
}
.as_local_node_id(def_id)
.expect("do_mir_borrowck: non-local DefId");
- // Make our own copy of the MIR. This copy will be modified (in place) to
- // contain non-lexical lifetimes. It will have a lifetime tied
- // to the inference context.
+ // Replace all regions with fresh inference variables. This
+ // requires first making our own copy of the MIR. This copy will
+ // be modified (in place) to contain non-lexical lifetimes. It
+ // will have a lifetime tied to the inference context.
let mut mir: Mir<'tcx> = input_mir.clone();
- let free_regions = if !tcx.nll() {
- None
- } else {
- let mir = &mut mir;
-
- // Replace all regions with fresh inference variables.
- Some(nll::replace_regions_in_mir(infcx, def_id, param_env, mir))
- };
- let mir = &mir;
+ let free_regions = nll::replace_regions_in_mir(infcx, def_id, param_env, &mut mir);
+ let mir = &mir; // no further changes
let move_data: MoveData<'tcx> = match MoveData::gather_moves(mir, tcx) {
Ok(move_data) => move_data,
|bd, i| DebugFormatted::new(&bd.move_data().inits[i]),
));
+ let borrow_set = Rc::new(BorrowSet::build(tcx, mir));
+
// If we are in non-lexical mode, compute the non-lexical lifetimes.
- let (opt_regioncx, opt_closure_req) = if let Some(free_regions) = free_regions {
- let (regioncx, opt_closure_req) = nll::compute_regions(
- infcx,
- def_id,
- free_regions,
- mir,
- param_env,
- &mut flow_inits,
- &mdpe.move_data,
- );
- (Some(Rc::new(regioncx)), opt_closure_req)
- } else {
- assert!(!tcx.nll());
- (None, None)
- };
+ let (regioncx, opt_closure_req) = nll::compute_regions(
+ infcx,
+ def_id,
+ free_regions,
+ mir,
+ param_env,
+ &mut flow_inits,
+ &mdpe.move_data,
+ &borrow_set,
+ );
+ let regioncx = Rc::new(regioncx);
let flow_inits = flow_inits; // remove mut
let flow_borrows = FlowAtLocation::new(do_dataflow(
id,
&attributes,
&dead_unwinds,
- Borrows::new(tcx, mir, opt_regioncx.clone(), def_id, body_id),
- |rs, i| {
- DebugFormatted::new(&(i.kind(), rs.location(i.borrow_index())))
- }
+ Borrows::new(tcx, mir, regioncx.clone(), def_id, body_id, &borrow_set),
+ |rs, i| DebugFormatted::new(&rs.location(i)),
));
- let movable_generator = !match tcx.hir.get(id) {
+ let movable_generator = match tcx.hir.get(id) {
hir::map::Node::NodeExpr(&hir::Expr {
node: hir::ExprClosure(.., Some(hir::GeneratorMovability::Static)),
..
- }) => true,
- _ => false,
+ }) => false,
+ _ => true,
};
+ let dominators = mir.dominators();
+
let mut mbcx = MirBorrowckCtxt {
tcx: tcx,
mir: mir,
- node_id: id,
+ mir_def_id: def_id,
move_data: &mdpe.move_data,
param_env: param_env,
movable_generator,
access_place_error_reported: FxHashSet(),
reservation_error_reported: FxHashSet(),
moved_error_reported: FxHashSet(),
- nonlexical_regioncx: opt_regioncx,
+ nonlexical_regioncx: regioncx,
nonlexical_cause_info: None,
+ borrow_set,
+ dominators,
};
let mut state = Flows::new(
pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
tcx: TyCtxt<'cx, 'gcx, 'tcx>,
mir: &'cx Mir<'tcx>,
- node_id: ast::NodeId,
+ mir_def_id: DefId,
move_data: &'cx MoveData<'tcx>,
param_env: ParamEnv<'gcx>,
movable_generator: bool,
/// Non-lexical region inference context, if NLL is enabled. This
/// contains the results from region inference and lets us e.g.
/// find out which CFG points are contained in each borrow region.
- nonlexical_regioncx: Option<Rc<RegionInferenceContext<'tcx>>>,
+ nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
nonlexical_cause_info: Option<RegionCausalInfo>,
+
+ /// The set of borrows extracted from the MIR
+ borrow_set: Rc<BorrowSet<'tcx>>,
+
+ /// Dominators for MIR
+ dominators: Dominators<BasicBlock>,
}
// Check that:
if self.movable_generator {
// Look for any active borrows to locals
- let domain = flow_state.borrows.operator();
- let data = domain.borrows();
- flow_state.borrows.with_iter_outgoing(|borrows| {
+ let borrow_set = self.borrow_set.clone();
+ flow_state.with_outgoing_borrows(|borrows| {
for i in borrows {
- let borrow = &data[i.borrow_index()];
+ let borrow = &borrow_set[i];
self.check_for_local_borrow(borrow, span);
}
});
// Often, the storage will already have been killed by an explicit
// StorageDead, but we don't always emit those (notably on unwind paths),
// so this "extra check" serves as a kind of backup.
- let domain = flow_state.borrows.operator();
- let data = domain.borrows();
- flow_state.borrows.with_iter_outgoing(|borrows| {
+ let borrow_set = self.borrow_set.clone();
+ flow_state.with_outgoing_borrows(|borrows| {
for i in borrows {
- let borrow = &data[i.borrow_index()];
+ let borrow = &borrow_set[i];
let context = ContextKind::StorageDead.new(loc);
- self.check_for_invalidation_at_exit(context, borrow, span, flow_state);
+ self.check_for_invalidation_at_exit(context, borrow, span);
}
});
}
rw: ReadOrWrite,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) -> bool {
+ debug!(
+ "check_access_for_conflict(context={:?}, place_span={:?}, sd={:?}, rw={:?})",
+ context,
+ place_span,
+ sd,
+ rw,
+ );
+
let mut error_reported = false;
self.each_borrow_involving_path(
context,
(sd, place_span.0),
flow_state,
- |this, index, borrow| match (rw, borrow.kind) {
+ |this, borrow_index, borrow| match (rw, borrow.kind) {
// Obviously an activation is compatible with its own
// reservation (or even prior activating uses of same
// borrow); so don't check if they interfere.
//
// NOTE: *reservations* do conflict with themselves;
// thus aren't injecting unsoundenss w/ this check.)
- (Activation(_, activating), _) if activating == index.borrow_index() => {
+ (Activation(_, activating), _) if activating == borrow_index => {
debug!(
"check_access_for_conflict place_span: {:?} sd: {:?} rw: {:?} \
- skipping {:?} b/c activation of same borrow_index: {:?}",
+ skipping {:?} b/c activation of same borrow_index",
place_span,
sd,
rw,
- (index, borrow),
- index.borrow_index()
+ (borrow_index, borrow),
);
Control::Continue
}
(Read(kind), BorrowKind::Unique) | (Read(kind), BorrowKind::Mut { .. }) => {
// Reading from mere reservations of mutable-borrows is OK.
- if this.allow_two_phase_borrow(borrow.kind) && index.is_reservation() {
+ if !this.is_active(borrow, context.loc) {
+ assert!(this.allow_two_phase_borrow(borrow.kind));
return Control::Continue;
}
this.report_use_while_mutably_borrowed(context, place_span, borrow)
}
ReadKind::Borrow(bk) => {
- let end_issued_loan_span = flow_state
- .borrows
- .operator()
- .opt_region_end_span(&borrow.region);
error_reported = true;
this.report_conflicting_borrow(
context,
place_span,
bk,
&borrow,
- end_issued_loan_span,
)
}
}
match kind {
WriteKind::MutableBorrow(bk) => {
- let end_issued_loan_span = flow_state
- .borrows
- .operator()
- .opt_region_end_span(&borrow.region);
-
error_reported = true;
this.report_conflicting_borrow(
context,
place_span,
bk,
&borrow,
- end_issued_loan_span,
)
}
WriteKind::StorageDeadOrDrop => {
context,
borrow,
place_span.1,
- flow_state.borrows.operator(),
);
}
WriteKind::Mutate => {
context: Context,
borrow: &BorrowData<'tcx>,
span: Span,
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
debug!("check_for_invalidation_at_exit({:?})", borrow);
let place = &borrow.borrowed_place;
context,
borrow,
span,
- flow_state.borrows.operator(),
)
}
}
// Two-phase borrow support: For each activation that is newly
// generated at this statement, check if it interferes with
// another borrow.
- let domain = flow_state.borrows.operator();
- let data = domain.borrows();
- flow_state.borrows.each_gen_bit(|gen| {
- if gen.is_activation() {
- let borrow_index = gen.borrow_index();
- let borrow = &data[borrow_index];
- // currently the flow analysis registers
- // activations for both mutable and immutable
- // borrows. So make sure we are talking about a
- // mutable borrow before we check it.
- match borrow.kind {
- BorrowKind::Shared => return,
- BorrowKind::Unique | BorrowKind::Mut { .. } => {}
- }
-
- self.access_place(
- ContextKind::Activation.new(location),
- (&borrow.borrowed_place, span),
- (
- Deep,
- Activation(WriteKind::MutableBorrow(borrow.kind), borrow_index),
- ),
- LocalMutationIsAllowed::No,
- flow_state,
- );
- // We do not need to call `check_if_path_or_subpath_is_moved`
- // again, as we already called it when we made the
- // initial reservation.
- }
- });
+ let borrow_set = self.borrow_set.clone();
+ for &borrow_index in borrow_set.activations_at_location(location) {
+ let borrow = &borrow_set[borrow_index];
+
+ // only mutable borrows should be 2-phase
+ assert!(match borrow.kind {
+ BorrowKind::Shared => false,
+ BorrowKind::Unique | BorrowKind::Mut { .. } => true,
+ });
+
+ self.access_place(
+ ContextKind::Activation.new(location),
+ (&borrow.borrowed_place, span),
+ (
+ Deep,
+ Activation(WriteKind::MutableBorrow(borrow.kind), borrow_index),
+ ),
+ LocalMutationIsAllowed::No,
+ flow_state,
+ );
+ // We do not need to call `check_if_path_or_subpath_is_moved`
+ // again, as we already called it when we made the
+ // initial reservation.
+ }
}
}
} else {
self.get_default_err_msg(place)
};
+ let sp = self.mir.source_info(locations[0]).span;
+ let mut to_suggest_span = String::new();
+ if let Ok(src) =
+ self.tcx.sess.codemap().span_to_snippet(sp) {
+ to_suggest_span = src[1..].to_string();
+ };
err_info = Some((
- self.mir.source_info(locations[0]).span,
+ sp,
"consider changing this to be a \
- mutable reference: `&mut`", item_msg,
+ mutable reference",
+ to_suggest_span,
+ item_msg,
self.get_primary_err_msg(base)));
}
},
_ => {},
}
- if let Some((err_help_span, err_help_stmt, item_msg, sec_span)) = err_info {
+ if let Some((err_help_span,
+ err_help_stmt,
+ to_suggest_span,
+ item_msg,
+ sec_span)) = err_info {
let mut err = self.tcx.cannot_assign(span, &item_msg, Origin::Mir);
- err.span_suggestion(err_help_span, err_help_stmt, format!(""));
+ err.span_suggestion(err_help_span,
+ err_help_stmt,
+ format!("&mut {}", to_suggest_span));
if place != place_err {
err.span_label(span, sec_span);
}
unreachable!("iter::repeat returned None")
}
- /// This function iterates over all of the current borrows
- /// (represented by 1-bits in `flow_state.borrows`) that conflict
- /// with an access to a place, invoking the `op` callback for each
- /// one.
+ /// This function iterates over all of the in-scope borrows that
+ /// conflict with an access to a place, invoking the `op` callback
+ /// for each one.
///
/// "Current borrow" here means a borrow that reaches the point in
/// the control-flow where the access occurs.
///
- /// The borrow's phase is represented by the ReserveOrActivateIndex
- /// passed to the callback: one can call `is_reservation()` and
- /// `is_activation()` to determine what phase the borrow is
- /// currently in, when such distinction matters.
+ /// The borrow's phase is represented by the IsActive parameter
+ /// passed to the callback.
fn each_borrow_involving_path<F>(
&mut self,
_context: Context,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
mut op: F,
) where
- F: FnMut(&mut Self, ReserveOrActivateIndex, &BorrowData<'tcx>) -> Control,
+ F: FnMut(&mut Self, BorrowIndex, &BorrowData<'tcx>) -> Control,
{
let (access, place) = access_place;
// FIXME: analogous code in check_loans first maps `place` to
// its base_path.
- let data = flow_state.borrows.operator().borrows();
-
// check for loan restricting path P being used. Accounts for
// borrows of P, P.a.b, etc.
- let mut iter_incoming = flow_state.borrows.iter_incoming();
- while let Some(i) = iter_incoming.next() {
- let borrowed = &data[i.borrow_index()];
+ let borrow_set = self.borrow_set.clone();
+ for i in flow_state.borrows_in_scope() {
+ let borrowed = &borrow_set[i];
if self.places_conflict(&borrowed.borrowed_place, place, access) {
debug!(
}
}
}
+
+ fn is_active(
+ &self,
+ borrow_data: &BorrowData<'tcx>,
+ location: Location
+ ) -> bool {
+ debug!("is_active(borrow_data={:?}, location={:?})", borrow_data, location);
+
+ // If this is not a 2-phase borrow, it is always active.
+ let activation_location = match borrow_data.activation_location {
+ Some(v) => v,
+ None => return true,
+ };
+
+ // Otherwise, it is active for every location *except* in between
+ // the reservation and the activation:
+ //
+ // X
+ // /
+ // R <--+ Except for this
+ // / \ | diamond
+ // \ / |
+ // A <------+
+ // |
+ // Z
+ //
+ // Note that we assume that:
+ // - the reservation R dominates the activation A
+ // - the activation A post-dominates the reservation R (ignoring unwinding edges).
+ //
+ // This means that there can't be an edge that leaves A and
+ // comes back into that diamond unless it passes through R.
+ //
+ // Suboptimal: In some cases, this code walks the dominator
+ // tree twice when it only has to be walked once. I am
+ // lazy. -nmatsakis
+
+ // If dominated by the activation A, then it is active. The
+ // activation occurs upon entering the point A, so this is
+ // also true if location == activation_location.
+ if activation_location.dominates(location, &self.dominators) {
+ return true;
+ }
+
+ // The reservation starts *on exiting* the reservation block,
+ // so check if the location is dominated by R.successor. If so,
+ // this point falls in between the reservation and location.
+ let reserve_location = borrow_data.reserve_location.successor_within_block();
+ if reserve_location.dominates(location, &self.dominators) {
+ false
+ } else {
+ // Otherwise, this point is outside the diamond, so
+ // consider the borrow active. This could happen for
+ // example if the borrow remains active around a loop (in
+ // which case it would be active also for the point R,
+ // which would generate an error).
+ true
+ }
+ }
}
impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
use borrow_check::nll::region_infer::{Cause, RegionInferenceContext};
use borrow_check::{Context, MirBorrowckCtxt};
-use dataflow::BorrowData;
+use borrow_check::borrow_set::BorrowData;
use rustc::mir::visit::{MirVisitable, PlaceContext, Visitor};
use rustc::mir::{Local, Location, Mir};
use rustc_data_structures::fx::FxHashSet;
borrow: &BorrowData<'tcx>,
err: &mut DiagnosticBuilder<'_>,
) {
- if let Some(regioncx) = &self.nonlexical_regioncx {
- let mir = self.mir;
+ let regioncx = &&self.nonlexical_regioncx;
+ let mir = self.mir;
- if self.nonlexical_cause_info.is_none() {
- self.nonlexical_cause_info = Some(regioncx.compute_causal_info(mir));
- }
+ if self.nonlexical_cause_info.is_none() {
+ self.nonlexical_cause_info = Some(regioncx.compute_causal_info(mir));
+ }
+
+ let cause_info = self.nonlexical_cause_info.as_ref().unwrap();
+ if let Some(cause) = cause_info.why_region_contains_point(borrow.region, context.loc) {
+ match *cause.root_cause() {
+ Cause::LiveVar(local, location) => {
+ match find_regular_use(mir, regioncx, borrow, location, local) {
+ Some(p) => {
+ err.span_label(
+ mir.source_info(p).span,
+ format!("borrow later used here"),
+ );
+ }
+
+ None => {
+ span_bug!(
+ mir.source_info(context.loc).span,
+ "Cause should end in a LiveVar"
+ );
+ }
+ }
+ }
- let cause_info = self.nonlexical_cause_info.as_ref().unwrap();
- if let Some(cause) = cause_info.why_region_contains_point(borrow.region, context.loc) {
- match *cause.root_cause() {
- Cause::LiveVar(local, location) => {
- match find_regular_use(mir, regioncx, borrow, location, local) {
- Some(p) => {
+ Cause::DropVar(local, location) => {
+ match find_drop_use(mir, regioncx, borrow, location, local) {
+ Some(p) => match &mir.local_decls[local].name {
+ Some(local_name) => {
err.span_label(
mir.source_info(p).span,
- format!("borrow later used here"),
+ format!(
+ "borrow later used here, when `{}` is dropped",
+ local_name
+ ),
);
}
-
None => {
- span_bug!(
- mir.source_info(context.loc).span,
- "Cause should end in a LiveVar"
+ err.span_label(
+ mir.local_decls[local].source_info.span,
+ "borrow may end up in a temporary, created here",
);
- }
- }
- }
- Cause::DropVar(local, location) => {
- match find_drop_use(mir, regioncx, borrow, location, local) {
- Some(p) => match &mir.local_decls[local].name {
- Some(local_name) => {
- err.span_label(
- mir.source_info(p).span,
- format!(
- "borrow later used here, when `{}` is dropped",
- local_name
- ),
- );
- }
- None => {
- err.span_label(
- mir.local_decls[local].source_info.span,
- "borrow may end up in a temporary, created here",
- );
-
- err.span_label(
- mir.source_info(p).span,
- "temporary later dropped here, \
- potentially using the reference",
- );
- }
- },
-
- None => {
- span_bug!(
- mir.source_info(context.loc).span,
- "Cause should end in a DropVar"
+ err.span_label(
+ mir.source_info(p).span,
+ "temporary later dropped here, \
+ potentially using the reference",
);
}
- }
- }
+ },
- Cause::UniversalRegion(region_vid) => {
- if let Some(region) = regioncx.to_error_region(region_vid) {
- self.tcx.note_and_explain_free_region(
- err,
- "borrowed value must be valid for ",
- region,
- "...",
+ None => {
+ span_bug!(
+ mir.source_info(context.loc).span,
+ "Cause should end in a DropVar"
);
}
}
+ }
- _ => {}
+ Cause::UniversalRegion(region_vid) => {
+ if let Some(region) = regioncx.to_error_region(region_vid) {
+ self.tcx.note_and_explain_free_region(
+ err,
+ "borrowed value must be valid for ",
+ region,
+ "...",
+ );
+ }
}
+
+ _ => {}
}
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use borrow_check::borrow_set::BorrowSet;
use rustc::hir::def_id::DefId;
use rustc::mir::{ClosureRegionRequirements, ClosureOutlivesSubject, Mir};
use rustc::infer::InferCtxt;
param_env: ty::ParamEnv<'gcx>,
flow_inits: &mut FlowAtLocation<MaybeInitializedPlaces<'cx, 'gcx, 'tcx>>,
move_data: &MoveData<'tcx>,
+ _borrow_set: &BorrowSet<'tcx>,
) -> (
RegionInferenceContext<'tcx>,
Option<ClosureRegionRequirements<'gcx>>,
use borrow_check::nll::region_infer::Cause;
use borrow_check::nll::region_infer::ClosureRegionRequirementsExt;
use borrow_check::nll::universal_regions::UniversalRegions;
+use dataflow::move_paths::MoveData;
use dataflow::FlowAtLocation;
use dataflow::MaybeInitializedPlaces;
-use dataflow::move_paths::MoveData;
use rustc::hir::def_id::DefId;
-use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
use rustc::infer::region_constraints::{GenericKind, RegionConstraintData};
-use rustc::traits::{self, Normalized, TraitEngine};
+use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
+use rustc::mir::tcx::PlaceTy;
+use rustc::mir::visit::{PlaceContext, Visitor};
+use rustc::mir::*;
use rustc::traits::query::NoSolution;
+use rustc::traits::{self, Normalized, TraitEngine};
use rustc::ty::error::TypeError;
use rustc::ty::fold::TypeFoldable;
use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, TypeVariants};
-use rustc::mir::*;
-use rustc::mir::tcx::PlaceTy;
-use rustc::mir::visit::{PlaceContext, Visitor};
use std::fmt;
use syntax::ast;
use syntax_pos::{Span, DUMMY_SP};
})
}
-mod liveness;
mod input_output;
+mod liveness;
/// Type checks the given `mir` in the context of the inference
/// context `infcx`. Returns any region constraints that have yet to
tcx.predicates_of(def_id).instantiate(tcx, substs);
let predicates =
type_checker.normalize(&instantiated_predicates.predicates, location);
- type_checker.prove_predicates(&predicates, location);
+ type_checker.prove_predicates(predicates.iter().cloned(), location);
}
value.ty
}
StatementKind::UserAssertTy(ref c_ty, ref local) => {
let local_ty = mir.local_decls()[*local].ty;
- let (ty, _) = self.infcx.instantiate_canonical_with_fresh_inference_vars(
- stmt.source_info.span, c_ty);
- debug!("check_stmt: user_assert_ty ty={:?} local_ty={:?}", ty, local_ty);
+ let (ty, _) = self.infcx
+ .instantiate_canonical_with_fresh_inference_vars(stmt.source_info.span, c_ty);
+ debug!(
+ "check_stmt: user_assert_ty ty={:?} local_ty={:?}",
+ ty, local_ty
+ );
if let Err(terr) = self.eq_types(ty, local_ty, location.at_self()) {
span_mirbug!(
self,
let sig = self.normalize(&sig, term_location);
self.check_call_dest(mir, term, &sig, destination, term_location);
+ self.prove_predicates(
+ sig.inputs().iter().map(|ty| ty::Predicate::WellFormed(ty)),
+ term_location,
+ );
+
// The ordinary liveness rules will ensure that all
// regions in the type of the callee are live here. We
// then further constrain the late-bound regions that
let predicates = self.normalize(&instantiated_predicates.predicates, location);
debug!("prove_aggregate_predicates: predicates={:?}", predicates);
- self.prove_predicates(&predicates, location);
+ self.prove_predicates(predicates.iter().cloned(), location);
}
fn prove_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>, location: Location) {
self.prove_predicates(
- &[
- ty::Predicate::Trait(trait_ref.to_poly_trait_ref().to_poly_trait_predicate()),
- ],
+ [ty::Predicate::Trait(
+ trait_ref.to_poly_trait_ref().to_poly_trait_predicate(),
+ )].iter()
+ .cloned(),
location,
);
}
- fn prove_predicates(&mut self, predicates: &[ty::Predicate<'tcx>], location: Location) {
+ fn prove_predicates(
+ &mut self,
+ predicates: impl IntoIterator<Item = ty::Predicate<'tcx>>,
+ location: Location,
+ ) {
+ let mut predicates_iter = predicates.into_iter();
+
debug!(
"prove_predicates(predicates={:?}, location={:?})",
- predicates, location
+ predicates_iter.by_ref().collect::<Vec<_>>(),
+ location
);
self.fully_perform_op(location.at_self(), |this| {
let cause = this.misc(this.last_span);
- let obligations = predicates
- .iter()
- .map(|&p| traits::Obligation::new(cause.clone(), this.param_env, p))
+ let obligations = predicates_iter
+ .map(|p| traits::Obligation::new(cause.clone(), this.param_env, p))
.collect();
Ok(InferOk {
value: (),
// When NLL is enabled, the borrow checker runs the typeck
// itself, so we don't need this MIR pass anymore.
- if tcx.nll() {
+ if tcx.use_mir_borrowck() {
return;
}
/// True if `r` is a member of this set of universal regions.
pub fn is_universal_region(&self, r: RegionVid) -> bool {
- (FIRST_GLOBAL_INDEX..self.num_universals).contains(r.index())
+ (FIRST_GLOBAL_INDEX..self.num_universals).contains(&r.index())
}
/// Classifies `r` as a universal region, returning `None` if this
/// is not a member of this set of universal regions.
pub fn region_classification(&self, r: RegionVid) -> Option<RegionClassification> {
let index = r.index();
- if (FIRST_GLOBAL_INDEX..self.first_extern_index).contains(index) {
+ if (FIRST_GLOBAL_INDEX..self.first_extern_index).contains(&index) {
Some(RegionClassification::Global)
- } else if (self.first_extern_index..self.first_local_index).contains(index) {
+ } else if (self.first_extern_index..self.first_local_index).contains(&index) {
Some(RegionClassification::External)
- } else if (self.first_local_index..self.num_universals).contains(index) {
+ } else if (self.first_local_index..self.num_universals).contains(&index) {
Some(RegionClassification::Local)
} else {
None
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir;
+use rustc::mir::ProjectionElem;
+use rustc::mir::{Local, Mir, Place};
+use rustc::ty::{self, TyCtxt};
+
+/// Extension methods for the `Place` type.
+crate trait PlaceExt<'tcx> {
+ /// True if this is a deref of a raw pointer.
+ fn is_unsafe_place(&self, tcx: TyCtxt<'_, '_, 'tcx>, mir: &Mir<'tcx>) -> bool;
+
+ /// If this is a place like `x.f.g`, returns the local
+ /// `x`. Returns `None` if this is based in a static.
+ fn root_local(&self) -> Option<Local>;
+}
+
+impl<'tcx> PlaceExt<'tcx> for Place<'tcx> {
+ fn is_unsafe_place(&self, tcx: TyCtxt<'_, '_, 'tcx>, mir: &Mir<'tcx>) -> bool {
+ match self {
+ Place::Local(_) => false,
+ Place::Static(static_) => {
+ tcx.is_static(static_.def_id) == Some(hir::Mutability::MutMutable)
+ }
+ Place::Projection(proj) => match proj.elem {
+ ProjectionElem::Field(..)
+ | ProjectionElem::Downcast(..)
+ | ProjectionElem::Subslice { .. }
+ | ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Index(_) => proj.base.is_unsafe_place(tcx, mir),
+ ProjectionElem::Deref => {
+ let ty = proj.base.ty(mir, tcx).to_ty(tcx);
+ match ty.sty {
+ ty::TyRawPtr(..) => true,
+ _ => proj.base.is_unsafe_place(tcx, mir),
+ }
+ }
+ },
+ }
+ }
+
+ fn root_local(&self) -> Option<Local> {
+ let mut p = self;
+ loop {
+ match p {
+ Place::Projection(pi) => p = &pi.base,
+ Place::Static(_) => return None,
+ Place::Local(l) => return Some(*l),
+ }
+ }
+ }
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use borrow_check::borrow_set::{BorrowSet, BorrowData};
+use borrow_check::place_ext::PlaceExt;
+
use rustc;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::middle::region;
use rustc::mir::{self, Location, Place, Mir};
-use rustc::mir::visit::{PlaceContext, Visitor};
-use rustc::ty::{self, Region, TyCtxt};
+use rustc::ty::TyCtxt;
use rustc::ty::RegionKind;
use rustc::ty::RegionKind::ReScope;
-use rustc::util::nodemap::{FxHashMap, FxHashSet};
-use rustc_data_structures::bitslice::{BitwiseOperator};
-use rustc_data_structures::indexed_set::{IdxSet};
-use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use rustc_data_structures::bitslice::BitwiseOperator;
+use rustc_data_structures::indexed_set::IdxSet;
+use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::sync::Lrc;
use dataflow::{BitDenotation, BlockSets, InitialFlow};
-pub use dataflow::indexes::{BorrowIndex, ReserveOrActivateIndex};
+pub use dataflow::indexes::BorrowIndex;
use borrow_check::nll::region_infer::RegionInferenceContext;
use borrow_check::nll::ToRegionVid;
-use syntax_pos::Span;
-
-use std::fmt;
-use std::hash::Hash;
use std::rc::Rc;
/// `Borrows` stores the data used in the analyses that track the flow
scope_tree: Lrc<region::ScopeTree>,
root_scope: Option<region::Scope>,
- /// The fundamental map relating bitvector indexes to the borrows
- /// in the MIR.
- borrows: IndexVec<BorrowIndex, BorrowData<'tcx>>,
-
- /// Each borrow is also uniquely identified in the MIR by the
- /// `Location` of the assignment statement in which it appears on
- /// the right hand side; we map each such location to the
- /// corresponding `BorrowIndex`.
- location_map: FxHashMap<Location, BorrowIndex>,
-
- /// Every borrow in MIR is immediately stored into a place via an
- /// assignment statement. This maps each such assigned place back
- /// to its borrow-indexes.
- assigned_map: FxHashMap<Place<'tcx>, FxHashSet<BorrowIndex>>,
-
- /// Locations which activate borrows.
- activation_map: FxHashMap<Location, FxHashSet<BorrowIndex>>,
-
- /// Every borrow has a region; this maps each such regions back to
- /// its borrow-indexes.
- region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
-
- /// Map from local to all the borrows on that local
- local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
-
- /// Maps regions to their corresponding source spans
- /// Only contains ReScope()s as keys
- region_span_map: FxHashMap<RegionKind, Span>,
+ borrow_set: Rc<BorrowSet<'tcx>>,
/// NLL region inference context with which NLL queries should be resolved
- nonlexical_regioncx: Option<Rc<RegionInferenceContext<'tcx>>>,
-}
-
-// temporarily allow some dead fields: `kind` and `region` will be
-// needed by borrowck; `borrowed_place` will probably be a MovePathIndex when
-// that is extended to include borrowed data paths.
-#[allow(dead_code)]
-#[derive(Debug)]
-pub struct BorrowData<'tcx> {
- /// Location where the borrow reservation starts.
- /// In many cases, this will be equal to the activation location but not always.
- pub(crate) reserve_location: Location,
- /// What kind of borrow this is
- pub(crate) kind: mir::BorrowKind,
- /// The region for which this borrow is live
- pub(crate) region: Region<'tcx>,
- /// Place from which we are borrowing
- pub(crate) borrowed_place: mir::Place<'tcx>,
- /// Place to which the borrow was stored
- pub(crate) assigned_place: mir::Place<'tcx>,
-}
-
-impl<'tcx> fmt::Display for BorrowData<'tcx> {
- fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
- let kind = match self.kind {
- mir::BorrowKind::Shared => "",
- mir::BorrowKind::Unique => "uniq ",
- mir::BorrowKind::Mut { .. } => "mut ",
- };
- let region = format!("{}", self.region);
- let region = if region.len() > 0 { format!("{} ", region) } else { region };
- write!(w, "&{}{}{:?}", region, kind, self.borrowed_place)
- }
-}
-
-impl ReserveOrActivateIndex {
- fn reserved(i: BorrowIndex) -> Self { ReserveOrActivateIndex::new(i.index() * 2) }
- fn active(i: BorrowIndex) -> Self { ReserveOrActivateIndex::new((i.index() * 2) + 1) }
-
- pub(crate) fn is_reservation(self) -> bool { self.index() % 2 == 0 }
- pub(crate) fn is_activation(self) -> bool { self.index() % 2 == 1}
-
- pub(crate) fn kind(self) -> &'static str {
- if self.is_reservation() { "reserved" } else { "active" }
- }
- pub(crate) fn borrow_index(self) -> BorrowIndex {
- BorrowIndex::new(self.index() / 2)
- }
+ nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
}
impl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- mir: &'a Mir<'tcx>,
- nonlexical_regioncx: Option<Rc<RegionInferenceContext<'tcx>>>,
- def_id: DefId,
- body_id: Option<hir::BodyId>)
- -> Self {
+ crate fn new(
+ tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ mir: &'a Mir<'tcx>,
+ nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
+ def_id: DefId,
+ body_id: Option<hir::BodyId>,
+ borrow_set: &Rc<BorrowSet<'tcx>>
+ ) -> Self {
let scope_tree = tcx.region_scope_tree(def_id);
let root_scope = body_id.map(|body_id| {
region::Scope::CallSite(tcx.hir.body(body_id).value.hir_id.local_id)
});
- let mut visitor = GatherBorrows {
- tcx,
- mir,
- idx_vec: IndexVec::new(),
- location_map: FxHashMap(),
- assigned_map: FxHashMap(),
- activation_map: FxHashMap(),
- region_map: FxHashMap(),
- local_map: FxHashMap(),
- region_span_map: FxHashMap(),
- nonlexical_regioncx: nonlexical_regioncx.clone()
- };
- visitor.visit_mir(mir);
- return Borrows { tcx: tcx,
- mir: mir,
- borrows: visitor.idx_vec,
- scope_tree,
- root_scope,
- location_map: visitor.location_map,
- assigned_map: visitor.assigned_map,
- activation_map: visitor.activation_map,
- region_map: visitor.region_map,
- local_map: visitor.local_map,
- region_span_map: visitor.region_span_map,
- nonlexical_regioncx };
-
- struct GatherBorrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- mir: &'a Mir<'tcx>,
- idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
- location_map: FxHashMap<Location, BorrowIndex>,
- assigned_map: FxHashMap<Place<'tcx>, FxHashSet<BorrowIndex>>,
- activation_map: FxHashMap<Location, FxHashSet<BorrowIndex>>,
- region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
- local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
- region_span_map: FxHashMap<RegionKind, Span>,
- nonlexical_regioncx: Option<Rc<RegionInferenceContext<'tcx>>>,
- }
-
- impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {
- fn visit_assign(&mut self,
- block: mir::BasicBlock,
- assigned_place: &mir::Place<'tcx>,
- rvalue: &mir::Rvalue<'tcx>,
- location: mir::Location) {
- fn root_local(mut p: &mir::Place<'_>) -> Option<mir::Local> {
- loop { match p {
- mir::Place::Projection(pi) => p = &pi.base,
- mir::Place::Static(_) => return None,
- mir::Place::Local(l) => return Some(*l)
- }}
- }
-
- if let mir::Rvalue::Ref(region, kind, ref borrowed_place) = *rvalue {
- if is_unsafe_place(self.tcx, self.mir, borrowed_place) { return; }
-
- let activate_location = self.compute_activation_location(location,
- &assigned_place,
- region,
- kind);
- let borrow = BorrowData {
- kind, region,
- reserve_location: location,
- borrowed_place: borrowed_place.clone(),
- assigned_place: assigned_place.clone(),
- };
- let idx = self.idx_vec.push(borrow);
- self.location_map.insert(location, idx);
-
- insert(&mut self.activation_map, &activate_location, idx);
- insert(&mut self.assigned_map, assigned_place, idx);
- insert(&mut self.region_map, ®ion, idx);
- if let Some(local) = root_local(borrowed_place) {
- insert(&mut self.local_map, &local, idx);
- }
- }
-
- return self.super_assign(block, assigned_place, rvalue, location);
-
- fn insert<'a, K, V>(map: &'a mut FxHashMap<K, FxHashSet<V>>,
- k: &K,
- v: V)
- where K: Clone+Eq+Hash, V: Eq+Hash
- {
- map.entry(k.clone())
- .or_insert(FxHashSet())
- .insert(v);
- }
- }
-
- fn visit_rvalue(&mut self,
- rvalue: &mir::Rvalue<'tcx>,
- location: mir::Location) {
- if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {
- // double-check that we already registered a BorrowData for this
-
- let mut found_it = false;
- for idx in &self.region_map[region] {
- let bd = &self.idx_vec[*idx];
- if bd.reserve_location == location &&
- bd.kind == kind &&
- bd.region == region &&
- bd.borrowed_place == *place
- {
- found_it = true;
- break;
- }
- }
- assert!(found_it, "Ref {:?} at {:?} missing BorrowData", rvalue, location);
- }
-
- return self.super_rvalue(rvalue, location);
- }
-
- fn visit_statement(&mut self,
- block: mir::BasicBlock,
- statement: &mir::Statement<'tcx>,
- location: Location) {
- if let mir::StatementKind::EndRegion(region_scope) = statement.kind {
- self.region_span_map.insert(ReScope(region_scope), statement.source_info.span);
- }
- return self.super_statement(block, statement, location);
- }
- }
-
- /// A MIR visitor that determines if a specific place is used in a two-phase activating
- /// manner in a given chunk of MIR.
- struct ContainsUseOfPlace<'b, 'tcx: 'b> {
- target: &'b Place<'tcx>,
- use_found: bool,
- }
-
- impl<'b, 'tcx: 'b> ContainsUseOfPlace<'b, 'tcx> {
- fn new(place: &'b Place<'tcx>) -> Self {
- Self { target: place, use_found: false }
- }
-
- /// return whether `context` should be considered a "use" of a
- /// place found in that context. "Uses" activate associated
- /// borrows (at least when such uses occur while the borrow also
- /// has a reservation at the time).
- fn is_potential_use(context: PlaceContext) -> bool {
- match context {
- // storage effects on a place do not activate it
- PlaceContext::StorageLive | PlaceContext::StorageDead => false,
-
- // validation effects do not activate a place
- //
- // FIXME: Should they? Is it just another read? Or can we
- // guarantee it won't dereference the stored address? How
- // "deep" does validation go?
- PlaceContext::Validate => false,
-
- // FIXME: This is here to not change behaviour from before
- // AsmOutput existed, but it's not necessarily a pure overwrite.
- // so it's possible this should activate the place.
- PlaceContext::AsmOutput |
- // pure overwrites of a place do not activate it. (note
- // PlaceContext::Call is solely about dest place)
- PlaceContext::Store | PlaceContext::Call => false,
-
- // reads of a place *do* activate it
- PlaceContext::Move |
- PlaceContext::Copy |
- PlaceContext::Drop |
- PlaceContext::Inspect |
- PlaceContext::Borrow { .. } |
- PlaceContext::Projection(..) => true,
- }
- }
- }
-
- impl<'b, 'tcx: 'b> Visitor<'tcx> for ContainsUseOfPlace<'b, 'tcx> {
- fn visit_place(&mut self,
- place: &mir::Place<'tcx>,
- context: PlaceContext<'tcx>,
- location: Location) {
- if Self::is_potential_use(context) && place == self.target {
- self.use_found = true;
- return;
- // There is no need to keep checking the statement, we already found a use
- }
-
- self.super_place(place, context, location);
- }
- }
-
- impl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> {
- /// Returns true if the borrow represented by `kind` is
- /// allowed to be split into separate Reservation and
- /// Activation phases.
- fn allow_two_phase_borrow(&self, kind: mir::BorrowKind) -> bool {
- self.tcx.two_phase_borrows() &&
- (kind.allows_two_phase_borrow() ||
- self.tcx.sess.opts.debugging_opts.two_phase_beyond_autoref)
- }
-
- /// Returns true if the given location contains an NLL-activating use of the given place
- fn location_contains_use(&self, location: Location, place: &Place) -> bool {
- let mut use_checker = ContainsUseOfPlace::new(place);
- let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {
- panic!("could not find block at location {:?}", location);
- });
- if location.statement_index != block.statements.len() {
- // This is a statement
- let stmt = block.statements.get(location.statement_index).unwrap_or_else(|| {
- panic!("could not find statement at location {:?}");
- });
- use_checker.visit_statement(location.block, stmt, location);
- } else {
- // This is a terminator
- match block.terminator {
- Some(ref term) => {
- use_checker.visit_terminator(location.block, term, location);
- }
- None => {
- // There is no way for Place to be used by the terminator if there is no
- // terminator
- }
- }
- }
-
- use_checker.use_found
- }
-
- /// Determines if the provided region is terminated after the provided location.
- /// EndRegion statements terminate their enclosed region::Scope.
- /// We also consult with the NLL region inference engine, should one be available
- fn region_terminated_after(&self, region: Region<'tcx>, location: Location) -> bool {
- let block_data = &self.mir[location.block];
- if location.statement_index != block_data.statements.len() {
- let stmt = &block_data.statements[location.statement_index];
- if let mir::StatementKind::EndRegion(region_scope) = stmt.kind {
- if &ReScope(region_scope) == region {
- // We encountered an EndRegion statement that terminates the provided
- // region
- return true;
- }
- }
- }
- if let Some(ref regioncx) = self.nonlexical_regioncx {
- if !regioncx.region_contains_point(region, location) {
- // NLL says the region has ended already
- return true;
- }
- }
-
- false
- }
-
- /// Computes the activation location of a borrow.
- /// The general idea is to start at the beginning of the region and perform a DFS
- /// until we exit the region, either via an explicit EndRegion or because NLL tells
- /// us so. If we find more than one valid activation point, we currently panic the
- /// compiler since two-phase borrows are only currently supported for compiler-
- /// generated code. More precisely, we only allow two-phase borrows for:
- /// - Function calls (fn some_func(&mut self, ....))
- /// - *Assign operators (a += b -> fn add_assign(&mut self, other: Self))
- /// See
- /// - https://github.com/rust-lang/rust/issues/48431
- /// for detailed design notes.
- /// See the FIXME in the body of the function for notes on extending support to more
- /// general two-phased borrows.
- fn compute_activation_location(&self,
- start_location: Location,
- assigned_place: &mir::Place<'tcx>,
- region: Region<'tcx>,
- kind: mir::BorrowKind) -> Location {
- debug!("Borrows::compute_activation_location({:?}, {:?}, {:?})",
- start_location,
- assigned_place,
- region);
- if !self.allow_two_phase_borrow(kind) {
- debug!(" -> {:?}", start_location);
- return start_location;
- }
-
- // Perform the DFS.
- // `stack` is the stack of locations still under consideration
- // `visited` is the set of points we have already visited
- // `found_use` is an Option that becomes Some when we find a use
- let mut stack = vec![start_location];
- let mut visited = FxHashSet();
- let mut found_use = None;
- while let Some(curr_loc) = stack.pop() {
- let block_data = &self.mir.basic_blocks()
- .get(curr_loc.block)
- .unwrap_or_else(|| {
- panic!("could not find block at location {:?}", curr_loc);
- });
-
- if self.region_terminated_after(region, curr_loc) {
- // No need to process this statement.
- // It's either an EndRegion (and thus couldn't use assigned_place) or not
- // contained in the NLL region and thus a use would be invalid
- continue;
- }
-
- if !visited.insert(curr_loc) {
- debug!(" Already visited {:?}", curr_loc);
- continue;
- }
-
- if self.location_contains_use(curr_loc, assigned_place) {
- // FIXME: Handle this case a little more gracefully. Perhaps collect
- // all uses in a vector, and find the point in the CFG that dominates
- // all of them?
- // Right now this is sufficient though since there should only be exactly
- // one borrow-activating use of the borrow.
- assert!(found_use.is_none(), "Found secondary use of place");
- found_use = Some(curr_loc);
- }
-
- // Push the points we should consider next.
- if curr_loc.statement_index < block_data.statements.len() {
- stack.push(curr_loc.successor_within_block());
- } else {
- stack.extend(block_data.terminator().successors().iter().map(
- |&basic_block| {
- Location {
- statement_index: 0,
- block: basic_block
- }
- }
- ))
- }
- }
- let found_use = found_use.expect("Did not find use of two-phase place");
- debug!(" -> {:?}", found_use);
- found_use
- }
- }
- }
-
- /// Returns the span for the "end point" given region. This will
- /// return `None` if NLL is enabled, since that concept has no
- /// meaning there. Otherwise, return region span if it exists and
- /// span for end of the function if it doesn't exist.
- pub(crate) fn opt_region_end_span(&self, region: &Region) -> Option<Span> {
- match self.nonlexical_regioncx {
- Some(_) => None,
- None => {
- match self.region_span_map.get(region) {
- Some(span) => Some(self.tcx.sess.codemap().end_point(*span)),
- None => Some(self.tcx.sess.codemap().end_point(self.mir.span))
- }
- }
+ Borrows {
+ tcx: tcx,
+ mir: mir,
+ borrow_set: borrow_set.clone(),
+ scope_tree,
+ root_scope,
+ nonlexical_regioncx,
}
}
- pub fn borrows(&self) -> &IndexVec<BorrowIndex, BorrowData<'tcx>> { &self.borrows }
+ crate fn borrows(&self) -> &IndexVec<BorrowIndex, BorrowData<'tcx>> { &self.borrow_set.borrows }
pub fn scope_tree(&self) -> &Lrc<region::ScopeTree> { &self.scope_tree }
pub fn location(&self, idx: BorrowIndex) -> &Location {
- &self.borrows[idx].reserve_location
+ &self.borrow_set.borrows[idx].reserve_location
}
/// Add all borrows to the kill set, if those borrows are out of scope at `location`.
/// That means either they went out of either a nonlexical scope, if we care about those
/// at the moment, or the location represents a lexical EndRegion
fn kill_loans_out_of_scope_at_location(&self,
- sets: &mut BlockSets<ReserveOrActivateIndex>,
+ sets: &mut BlockSets<BorrowIndex>,
location: Location) {
- if let Some(ref regioncx) = self.nonlexical_regioncx {
- // NOTE: The state associated with a given `location`
- // reflects the dataflow on entry to the statement. If it
- // does not contain `borrow_region`, then then that means
- // that the statement at `location` kills the borrow.
- //
- // We are careful always to call this function *before* we
- // set up the gen-bits for the statement or
- // termanator. That way, if the effect of the statement or
- // terminator *does* introduce a new loan of the same
- // region, then setting that gen-bit will override any
- // potential kill introduced here.
- for (borrow_index, borrow_data) in self.borrows.iter_enumerated() {
- let borrow_region = borrow_data.region.to_region_vid();
- if !regioncx.region_contains_point(borrow_region, location) {
- sets.kill(&ReserveOrActivateIndex::reserved(borrow_index));
- sets.kill(&ReserveOrActivateIndex::active(borrow_index));
- }
+ let regioncx = &self.nonlexical_regioncx;
+
+ // NOTE: The state associated with a given `location`
+ // reflects the dataflow on entry to the statement. If it
+ // does not contain `borrow_region`, then that means
+ // that the statement at `location` kills the borrow.
+ //
+ // We are careful always to call this function *before* we
+ // set up the gen-bits for the statement or
+ // terminator. That way, if the effect of the statement or
+ // terminator *does* introduce a new loan of the same
+ // region, then setting that gen-bit will override any
+ // potential kill introduced here.
+ for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {
+ let borrow_region = borrow_data.region.to_region_vid();
+ if !regioncx.region_contains_point(borrow_region, location) {
+ sets.kill(&borrow_index);
}
}
}
fn kill_borrows_on_local(&self,
- sets: &mut BlockSets<ReserveOrActivateIndex>,
+ sets: &mut BlockSets<BorrowIndex>,
local: &rustc::mir::Local)
{
- if let Some(borrow_indexes) = self.local_map.get(local) {
- sets.kill_all(borrow_indexes.iter()
- .map(|b| ReserveOrActivateIndex::reserved(*b)));
- sets.kill_all(borrow_indexes.iter()
- .map(|b| ReserveOrActivateIndex::active(*b)));
- }
- }
-
- /// Performs the activations for a given location
- fn perform_activations_at_location(&self,
- sets: &mut BlockSets<ReserveOrActivateIndex>,
- location: Location) {
- // Handle activations
- match self.activation_map.get(&location) {
- Some(activations) => {
- for activated in activations {
- debug!("activating borrow {:?}", activated);
- sets.gen(&ReserveOrActivateIndex::active(*activated))
- }
- }
- None => {}
+ if let Some(borrow_indexes) = self.borrow_set.local_map.get(local) {
+ sets.kill_all(borrow_indexes);
}
}
}
impl<'a, 'gcx, 'tcx> BitDenotation for Borrows<'a, 'gcx, 'tcx> {
- type Idx = ReserveOrActivateIndex;
+ type Idx = BorrowIndex;
fn name() -> &'static str { "borrows" }
fn bits_per_block(&self) -> usize {
- self.borrows.len() * 2
+ self.borrow_set.borrows.len() * 2
}
- fn start_block_effect(&self, _entry_set: &mut IdxSet<ReserveOrActivateIndex>) {
+ fn start_block_effect(&self, _entry_set: &mut IdxSet<BorrowIndex>) {
// no borrows of code region_scopes have been taken prior to
// function execution, so this method has no effect on
// `_sets`.
}
fn before_statement_effect(&self,
- sets: &mut BlockSets<ReserveOrActivateIndex>,
+ sets: &mut BlockSets<BorrowIndex>,
location: Location) {
debug!("Borrows::before_statement_effect sets: {:?} location: {:?}", sets, location);
self.kill_loans_out_of_scope_at_location(sets, location);
}
- fn statement_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, location: Location) {
+ fn statement_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {
debug!("Borrows::statement_effect sets: {:?} location: {:?}", sets, location);
let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {
panic!("could not find statement at location {:?}");
});
- self.perform_activations_at_location(sets, location);
self.kill_loans_out_of_scope_at_location(sets, location);
match stmt.kind {
- // EndRegion kills any borrows (reservations and active borrows both)
- mir::StatementKind::EndRegion(region_scope) => {
- if let Some(borrow_indexes) = self.region_map.get(&ReScope(region_scope)) {
- assert!(self.nonlexical_regioncx.is_none());
- for idx in borrow_indexes {
- sets.kill(&ReserveOrActivateIndex::reserved(*idx));
- sets.kill(&ReserveOrActivateIndex::active(*idx));
- }
- } else {
- // (if there is no entry, then there are no borrows to be tracked)
- }
+ mir::StatementKind::EndRegion(_) => {
}
mir::StatementKind::Assign(ref lhs, ref rhs) => {
// propagate_call_return method.
if let mir::Rvalue::Ref(region, _, ref place) = *rhs {
- if is_unsafe_place(self.tcx, self.mir, place) { return; }
- let index = self.location_map.get(&location).unwrap_or_else(|| {
+ if place.is_unsafe_place(self.tcx, self.mir) { return; }
+ let index = self.borrow_set.location_map.get(&location).unwrap_or_else(|| {
panic!("could not find BorrowIndex for location {:?}", location);
});
if let RegionKind::ReEmpty = region {
// If the borrowed value dies before the borrow is used, the region for
// the borrow can be empty. Don't track the borrow in that case.
- sets.kill(&ReserveOrActivateIndex::active(*index));
+ sets.kill(&index);
return
}
- assert!(self.region_map.get(region).unwrap_or_else(|| {
+ assert!(self.borrow_set.region_map.get(region).unwrap_or_else(|| {
panic!("could not find BorrowIndexs for region {:?}", region);
}).contains(&index));
- sets.gen(&ReserveOrActivateIndex::reserved(*index));
+ sets.gen(&index);
// Issue #46746: Two-phase borrows handles
// stmts of form `Tmp = &mut Borrow` ...
// e.g. `box (&mut _)`. Current
// conservative solution: force
// immediate activation here.
- sets.gen(&ReserveOrActivateIndex::active(*index));
+ sets.gen(&index);
}
}
}
}
fn before_terminator_effect(&self,
- sets: &mut BlockSets<ReserveOrActivateIndex>,
+ sets: &mut BlockSets<BorrowIndex>,
location: Location) {
debug!("Borrows::before_terminator_effect sets: {:?} location: {:?}", sets, location);
self.kill_loans_out_of_scope_at_location(sets, location);
}
- fn terminator_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, location: Location) {
+ fn terminator_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {
debug!("Borrows::terminator_effect sets: {:?} location: {:?}", sets, location);
let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {
});
let term = block.terminator();
- self.perform_activations_at_location(sets, location);
self.kill_loans_out_of_scope_at_location(sets, location);
// and hence most of these loans will already be dead -- but, in some cases
// like unwind paths, we do not always emit `EndRegion` statements, so we
// add some kills here as a "backup" and to avoid spurious error messages.
- for (borrow_index, borrow_data) in self.borrows.iter_enumerated() {
+ for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {
if let ReScope(scope) = borrow_data.region {
// Check that the scope is not actually a scope from a function that is
// a parent of our closure. Note that the CallSite scope itself is
if *scope != root_scope &&
self.scope_tree.is_subscope_of(*scope, root_scope)
{
- sets.kill(&ReserveOrActivateIndex::reserved(borrow_index));
- sets.kill(&ReserveOrActivateIndex::active(borrow_index));
+ sets.kill(&borrow_index);
}
}
}
}
fn propagate_call_return(&self,
- _in_out: &mut IdxSet<ReserveOrActivateIndex>,
+ _in_out: &mut IdxSet<BorrowIndex>,
_call_bb: mir::BasicBlock,
_dest_bb: mir::BasicBlock,
_dest_place: &mir::Place) {
}
}
-fn is_unsafe_place<'a, 'gcx: 'tcx, 'tcx: 'a>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- mir: &'a Mir<'tcx>,
- place: &mir::Place<'tcx>
-) -> bool {
- use self::mir::Place::*;
- use self::mir::ProjectionElem;
-
- match *place {
- Local(_) => false,
- Static(ref static_) => tcx.is_static(static_.def_id) == Some(hir::Mutability::MutMutable),
- Projection(ref proj) => {
- match proj.elem {
- ProjectionElem::Field(..) |
- ProjectionElem::Downcast(..) |
- ProjectionElem::Subslice { .. } |
- ProjectionElem::ConstantIndex { .. } |
- ProjectionElem::Index(_) => {
- is_unsafe_place(tcx, mir, &proj.base)
- }
- ProjectionElem::Deref => {
- let ty = proj.base.ty(mir, tcx).to_ty(tcx);
- match ty.sty {
- ty::TyRawPtr(..) => true,
- _ => is_unsafe_place(tcx, mir, &proj.base),
- }
- }
- }
- }
- }
-}
use rustc_data_structures::indexed_vec::Idx;
use super::MoveDataParamEnv;
+
use util::elaborate_drops::DropFlagState;
use super::move_paths::{HasMoveData, MoveData, MoveOutIndex, MovePathIndex, InitIndex};
pub use self::borrowed_locals::*;
-#[allow(dead_code)]
pub(super) mod borrows;
/// `MaybeInitializedPlaces` tracks all places that might be
pub use self::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
pub use self::impls::{DefinitelyInitializedPlaces, MovingOutStatements};
pub use self::impls::EverInitializedPlaces;
-pub use self::impls::borrows::{Borrows, BorrowData};
+pub use self::impls::borrows::Borrows;
pub use self::impls::HaveBeenBorrowedLocals;
-pub(crate) use self::impls::borrows::{ReserveOrActivateIndex};
pub use self::at_location::{FlowAtLocation, FlowsAtLocation};
pub(crate) use self::drop_flag_effects::*;
/// Index into Borrows.locations
new_index!(BorrowIndex, "bw");
-
- /// Index into Reservations/Activations bitvector
- new_index!(ReserveOrActivateIndex, "ra");
}
pub use self::indexes::MovePathIndex;
use rustc::ty::{self, TyCtxt, Ty, Instance};
use rustc::ty::layout::{self, LayoutOf};
use rustc::ty::subst::Subst;
-use rustc::util::nodemap::FxHashSet;
use syntax::ast::Mutability;
use syntax::codemap::Span;
}
span = mir.span;
let layout = ecx.layout_of(mir.return_ty().subst(tcx, cid.instance.substs))?;
- let alloc = tcx.interpret_interner.get_cached(cid.instance.def_id());
- let is_static = tcx.is_static(cid.instance.def_id()).is_some();
- let alloc = match alloc {
- Some(alloc) => {
- assert!(cid.promoted.is_none());
- assert!(param_env.caller_bounds.is_empty());
- alloc
- },
- None => {
- assert!(!layout.is_unsized());
- let ptr = ecx.memory.allocate(
- layout.size.bytes(),
- layout.align,
- None,
- )?;
- if is_static {
- tcx.interpret_interner.cache(cid.instance.def_id(), ptr.alloc_id);
- }
- let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
- let mutability = tcx.is_static(cid.instance.def_id());
- let mutability = if mutability == Some(hir::Mutability::MutMutable) || internally_mutable {
- Mutability::Mutable
- } else {
- Mutability::Immutable
- };
- let cleanup = StackPopCleanup::MarkStatic(mutability);
- let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
- let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
- trace!("const_eval: pushing stack frame for global: {}{}", name, prom);
- assert!(mir.arg_count == 0);
- ecx.push_stack_frame(
- cid.instance,
- mir.span,
- mir,
- Place::from_ptr(ptr, layout.align),
- cleanup,
- )?;
-
- while ecx.step()? {}
- ptr.alloc_id
- }
+ assert!(!layout.is_unsized());
+ let ptr = ecx.memory.allocate(
+ layout.size.bytes(),
+ layout.align,
+ None,
+ )?;
+ let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
+ let mutability = tcx.is_static(cid.instance.def_id());
+ let mutability = if mutability == Some(hir::Mutability::MutMutable) || internally_mutable {
+ Mutability::Mutable
+ } else {
+ Mutability::Immutable
};
- let ptr = MemoryPointer::new(alloc, 0).into();
+ let cleanup = StackPopCleanup::MarkStatic(mutability);
+ let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
+ let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
+ trace!("const_eval: pushing stack frame for global: {}{}", name, prom);
+ assert!(mir.arg_count == 0);
+ ecx.push_stack_frame(
+ cid.instance,
+ mir.span,
+ mir,
+ Place::from_ptr(ptr, layout.align),
+ cleanup,
+ )?;
+
+ while ecx.step()? {}
+ let ptr = ptr.into();
// always try to read the value and report errors
let value = match ecx.try_read_value(ptr, layout.align, layout.ty)? {
// if it's a constant (so it needs no address, directly compute its value)
- Some(val) if !is_static => val,
+ Some(val) if tcx.is_static(cid.instance.def_id()).is_none() => val,
// point at the allocation
_ => Value::ByRef(ptr, layout.align),
};
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
cid: GlobalId<'tcx>,
) -> EvalResult<'tcx, AllocId> {
- let alloc = ecx
- .tcx
- .interpret_interner
- .get_cached(cid.instance.def_id());
- // Don't evaluate when already cached to prevent cycles
- if let Some(alloc) = alloc {
- return Ok(alloc)
- }
- // ensure the static is computed
- ecx.const_eval(cid)?;
Ok(ecx
.tcx
.interpret_interner
- .get_cached(cid.instance.def_id())
- .expect("uncached static"))
+ .cache_static(cid.instance.def_id()))
}
fn box_alloc<'a>(
let def_id = cid.instance.def.def_id();
if tcx.is_foreign_item(def_id) {
- let id = tcx.interpret_interner.get_cached(def_id);
- let id = match id {
- // FIXME: due to caches this shouldn't happen, add some assertions
- Some(id) => id,
- None => {
- let id = tcx.interpret_interner.reserve();
- tcx.interpret_interner.cache(def_id, id);
- id
- },
- };
+ let id = tcx.interpret_interner.cache_static(def_id);
let ty = tcx.type_of(def_id);
let layout = tcx.layout_of(key.param_env.and(ty)).unwrap();
let ptr = MemoryPointer::new(id, 0);
};
let (res, ecx) = eval_body_and_ecx(tcx, cid, None, key.param_env);
- res.map(|(miri_value, ptr, miri_ty)| {
- if tcx.is_static(def_id).is_some() {
- if let Ok(ptr) = ptr.primval.to_ptr() {
- let mut seen = FxHashSet::default();
- create_depgraph_edges(tcx, ptr.alloc_id, &mut seen);
- }
- }
+ res.map(|(miri_value, _, miri_ty)| {
tcx.mk_const(ty::Const {
val: ConstVal::Value(miri_value),
ty: miri_ty,
}
})
}
-
-// This function creates dep graph edges from statics to all referred to statics.
-// This is necessary, because the `const_eval` query cannot directly call itself
-// for other statics, because we cannot prevent recursion in queries.
-//
-// see test/incremental/static_refering_to_other_static2/issue.rs for an example
-// where not creating those edges would cause static A, which refers to static B
-// to point to the old allocation of static B, even though B has changed.
-//
-// In the future we will want to remove this funcion in favour of a system that
-// makes sure that statics don't need to have edges to other statics as long as
-// they are only referring by reference and not inspecting the other static's body.
-fn create_depgraph_edges<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- alloc_id: AllocId,
- seen: &mut FxHashSet<AllocId>,
-) {
- trace!("create_depgraph_edges: {:?}, {:?}", alloc_id, seen);
- if seen.insert(alloc_id) {
- trace!("seen: {:?}, {:?}", alloc_id, seen);
- if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
- trace!("get_alloc: {:?}, {:?}, {:?}", alloc_id, seen, alloc);
- for (_, &reloc) in &alloc.relocations {
- if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(reloc) {
- trace!("get_corresponding: {:?}, {:?}, {:?}, {:?}, {:?}", alloc_id, seen, alloc, did, reloc);
- let _ = tcx.maybe_optimized_mir(did);
- }
- create_depgraph_edges(tcx, reloc, seen);
- }
- }
- }
-}
/// The maximum number of terminators that may be evaluated.
/// This prevents infinite loops and huge computations from freezing up const eval.
/// Remove once halting problem is solved.
- pub(crate) steps_remaining: usize,
+ pub(crate) terminators_remaining: usize,
}
/// A stack frame.
memory: Memory::new(tcx, memory_data),
stack: Vec::new(),
stack_limit: tcx.sess.const_eval_stack_frame_limit,
- steps_remaining: tcx.sess.const_eval_step_limit,
+ terminators_remaining: 1_000_000,
}
}
}
Aggregate(ref kind, ref operands) => {
- self.inc_step_counter_and_check_limit(operands.len())?;
+ self.inc_step_counter_and_check_limit(operands.len());
let (dest, active_field_index) = match **kind {
mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
}
pub fn read_global_as_value(&self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
- if gid.promoted.is_none() {
- let cached = self
+ if self.tcx.is_static(gid.instance.def_id()).is_some() {
+ let alloc_id = self
.tcx
.interpret_interner
- .get_cached(gid.instance.def_id());
- if let Some(alloc_id) = cached {
- let layout = self.layout_of(ty)?;
- let ptr = MemoryPointer::new(alloc_id, 0);
- return Ok(Value::ByRef(ptr.into(), layout.align))
- }
+ .cache_static(gid.instance.def_id());
+ let layout = self.layout_of(ty)?;
+ let ptr = MemoryPointer::new(alloc_id, 0);
+ return Ok(Value::ByRef(ptr.into(), layout.align))
}
let cv = self.const_eval(gid)?;
self.const_to_value(&cv.val, ty)
use super::{EvalContext, Machine};
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
- pub fn inc_step_counter_and_check_limit(&mut self, n: usize) -> EvalResult<'tcx> {
- self.steps_remaining = self.steps_remaining.saturating_sub(n);
- if self.steps_remaining > 0 {
- Ok(())
- } else {
- err!(ExecutionTimeLimitReached)
+ pub fn inc_step_counter_and_check_limit(&mut self, n: usize) {
+ self.terminators_remaining = self.terminators_remaining.saturating_sub(n);
+ if self.terminators_remaining == 0 {
+ // FIXME(#49980): make this warning a lint
+ self.tcx.sess.span_warn(self.frame().span, "Constant evaluating a complex constant, this might take some time");
+ self.terminators_remaining = 1_000_000;
}
}
return Ok(true);
}
- self.inc_step_counter_and_check_limit(1)?;
+ self.inc_step_counter_and_check_limit(1);
let terminator = basic_block.terminator();
assert_eq!(old_frames, self.cur_frame());
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(catch_expr)]
+#![feature(crate_visibility_modifier)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(decl_macro)]
alloc_id: AllocId,
output: &mut Vec<MonoItem<'tcx>>,
) {
- if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(alloc_id) {
+ if let Some(did) = tcx.interpret_interner.get_static(alloc_id) {
let instance = Instance::mono(tcx, did);
if should_monomorphize_locally(tcx, &instance) {
trace!("collecting static {:?}", did);
}
ItemKind::ExternCrate(orig_name) => {
- self.crate_loader.process_item(item, &self.definitions);
-
- // n.b. we don't need to look at the path option here, because cstore already did
- let crate_id = self.cstore.extern_mod_stmt_cnum_untracked(item.id).unwrap();
+ let crate_id = self.crate_loader.process_extern_crate(item, &self.definitions);
let module =
self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
self.populate_module_if_necessary(module);
self.current_module = module;
}
- ItemKind::ForeignMod(..) => self.crate_loader.process_item(item, &self.definitions),
+ // Handled in `rustc_metadata::{native_libs,link_args}`
+ ItemKind::ForeignMod(..) => {}
// These items live in the value namespace.
ItemKind::Static(_, m, _) => {
RibKind<'a>),
}
-// The rib kind controls the translation of local
-// definitions (`Def::Local`) to upvars (`Def::Upvar`).
+/// The rib kind controls the translation of local
+/// definitions (`Def::Local`) to upvars (`Def::Upvar`).
#[derive(Copy, Clone, Debug)]
enum RibKind<'a> {
- // No translation needs to be applied.
+ /// No translation needs to be applied.
NormalRibKind,
- // We passed through a closure scope at the given node ID.
- // Translate upvars as appropriate.
+ /// We passed through a closure scope at the given node ID.
+ /// Translate upvars as appropriate.
ClosureRibKind(NodeId /* func id */),
- // We passed through an impl or trait and are now in one of its
- // methods or associated types. Allow references to ty params that impl or trait
- // binds. Disallow any other upvars (including other ty params that are
- // upvars).
+ /// We passed through an impl or trait and are now in one of its
+ /// methods or associated types. Allow references to ty params that impl or trait
+ /// binds. Disallow any other upvars (including other ty params that are
+ /// upvars).
TraitOrImplItemRibKind,
- // We passed through an item scope. Disallow upvars.
+ /// We passed through an item scope. Disallow upvars.
ItemRibKind,
- // We're in a constant item. Can't refer to dynamic stuff.
+ /// We're in a constant item. Can't refer to dynamic stuff.
ConstantItemRibKind,
- // We passed through a module.
+ /// We passed through a module.
ModuleRibKind(Module<'a>),
- // We passed through a `macro_rules!` statement
+ /// We passed through a `macro_rules!` statement
MacroDefinition(DefId),
- // All bindings in this rib are type parameters that can't be used
- // from the default of a type parameter because they're not declared
- // before said type parameter. Also see the `visit_generics` override.
+ /// All bindings in this rib are type parameters that can't be used
+ /// from the default of a type parameter because they're not declared
+ /// before said type parameter. Also see the `visit_generics` override.
ForwardTyParamBanRibKind,
}
}
}
-// Records a possibly-private value, type, or module definition.
+/// Records a possibly-private value, type, or module definition.
#[derive(Clone, Debug)]
pub struct NameBinding<'a> {
kind: NameBindingKind<'a>,
prelude: Option<Module<'a>>,
- // n.b. This is used only for better diagnostics, not name resolution itself.
+ /// n.b. This is used only for better diagnostics, not name resolution itself.
has_self: FxHashSet<DefId>,
- // Names of fields of an item `DefId` accessible with dot syntax.
- // Used for hints during error reporting.
+ /// Names of fields of an item `DefId` accessible with dot syntax.
+ /// Used for hints during error reporting.
field_names: FxHashMap<DefId, Vec<Name>>,
- // All imports known to succeed or fail.
+ /// All imports known to succeed or fail.
determined_imports: Vec<&'a ImportDirective<'a>>,
- // All non-determined imports.
+ /// All non-determined imports.
indeterminate_imports: Vec<&'a ImportDirective<'a>>,
- // The module that represents the current item scope.
+ /// The module that represents the current item scope.
current_module: Module<'a>,
- // The current set of local scopes for types and values.
- // FIXME #4948: Reuse ribs to avoid allocation.
+ /// The current set of local scopes for types and values.
+ /// FIXME #4948: Reuse ribs to avoid allocation.
ribs: PerNS<Vec<Rib<'a>>>,
- // The current set of local scopes, for labels.
+ /// The current set of local scopes, for labels.
label_ribs: Vec<Rib<'a>>,
- // The trait that the current context can refer to.
+ /// The trait that the current context can refer to.
current_trait_ref: Option<(Module<'a>, TraitRef)>,
- // The current self type if inside an impl (used for better errors).
+ /// The current self type if inside an impl (used for better errors).
current_self_type: Option<Ty>,
- // The idents for the primitive types.
+ /// The idents for the primitive types.
primitive_type_table: PrimitiveTypeTable,
def_map: DefMap,
pub export_map: ExportMap,
pub trait_map: TraitMap,
- // A map from nodes to anonymous modules.
- // Anonymous modules are pseudo-modules that are implicitly created around items
- // contained within blocks.
- //
- // For example, if we have this:
- //
- // fn f() {
- // fn g() {
- // ...
- // }
- // }
- //
- // There will be an anonymous module created around `g` with the ID of the
- // entry block for `f`.
+ /// A map from nodes to anonymous modules.
+ /// Anonymous modules are pseudo-modules that are implicitly created around items
+ /// contained within blocks.
+ ///
+ /// For example, if we have this:
+ ///
+ /// fn f() {
+ /// fn g() {
+ /// ...
+ /// }
+ /// }
+ ///
+ /// There will be an anonymous module created around `g` with the ID of the
+ /// entry block for `f`.
block_map: NodeMap<Module<'a>>,
module_map: FxHashMap<DefId, Module<'a>>,
extern_module_map: FxHashMap<(DefId, bool /* MacrosOnly? */), Module<'a>>,
arenas: &'a ResolverArenas<'a>,
dummy_binding: &'a NameBinding<'a>,
- use_extern_macros: bool, // true if `#![feature(use_extern_macros)]`
+ /// true if `#![feature(use_extern_macros)]`
+ use_extern_macros: bool,
crate_loader: &'a mut CrateLoader,
macro_names: FxHashSet<Ident>,
pub whitelisted_legacy_custom_derives: Vec<Name>,
pub found_unresolved_macro: bool,
- // List of crate local macros that we need to warn about as being unused.
- // Right now this only includes macro_rules! macros, and macros 2.0.
+ /// List of crate local macros that we need to warn about as being unused.
+ /// Right now this only includes macro_rules! macros, and macros 2.0.
unused_macros: FxHashSet<DefId>,
- // Maps the `Mark` of an expansion to its containing module or block.
+ /// Maps the `Mark` of an expansion to its containing module or block.
invocations: FxHashMap<Mark, &'a InvocationData<'a>>,
- // Avoid duplicated errors for "name already defined".
+ /// Avoid duplicated errors for "name already defined".
name_already_seen: FxHashMap<Name, Span>,
- // If `#![feature(proc_macro)]` is set
+ /// If `#![feature(proc_macro)]` is set
proc_macro_enabled: bool,
- // A set of procedural macros imported by `#[macro_use]` that have already been warned about
+ /// A set of procedural macros imported by `#[macro_use]` that have already been warned about
warned_proc_macros: FxHashSet<Name>,
potentially_unused_imports: Vec<&'a ImportDirective<'a>>,
- // This table maps struct IDs into struct constructor IDs,
- // it's not used during normal resolution, only for better error reporting.
+ /// This table maps struct IDs into struct constructor IDs,
+ /// it's not used during normal resolution, only for better error reporting.
struct_constructors: DefIdMap<(Def, ty::Visibility)>,
- // Only used for better errors on `fn(): fn()`
+ /// Only used for better errors on `fn(): fn()`
current_type_ascription: Vec<Span>,
injected_crate: Option<Module<'a>>,
prev_name == keywords::CrateRoot.name() &&
self.session.features_untracked().extern_absolute_paths {
// `::extern_crate::a::b`
- let crate_id = self.crate_loader.resolve_crate_from_path(name, ident.span);
+ let crate_id = self.crate_loader.process_path_extern(name, ident.span);
let crate_root =
self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
self.populate_module_if_necessary(crate_root);
}
} else if is_extern && !token::is_path_segment_keyword(source) {
let crate_id =
- self.crate_loader.resolve_crate_from_path(source.name, directive.span);
+ self.resolver.crate_loader.process_use_extern(
+ source.name,
+ directive.span,
+ directive.id,
+ &self.resolver.definitions,
+ );
let crate_root =
self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
self.populate_module_if_necessary(crate_root);
use rustc::hir::def::Def as HirDef;
use rustc::hir::map::{Node, NodeItem};
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
+use rustc::middle::cstore::ExternCrate;
use rustc::session::config::CrateType::CrateTypeExecutable;
use rustc::ty::{self, TyCtxt};
use rustc_typeck::hir_ty_to_ty;
for &n in self.tcx.crates().iter() {
let span = match *self.tcx.extern_crate(n.as_def_id()) {
- Some(ref c) => c.span,
+ Some(ExternCrate { span, .. }) => span,
None => {
debug!("Skipping crate {}, no data", n);
continue;
FromEnv(..) |
WellFormedTy(..) |
FromEnvTy(..) |
+ Normalize(..) |
RegionOutlives(..) |
TypeOutlives(..) => self,
}
-> Lrc<&'tcx Slice<Clause<'tcx>>>
{
let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
- let item = tcx.hir.expect_item(node_id);
- match item.node {
- hir::ItemTrait(..) => program_clauses_for_trait(tcx, def_id),
- hir::ItemImpl(..) => program_clauses_for_impl(tcx, def_id),
+ let node = tcx.hir.find(node_id).unwrap();
+ match node {
+ hir::map::Node::NodeItem(item) => match item.node {
+ hir::ItemTrait(..) => program_clauses_for_trait(tcx, def_id),
+ hir::ItemImpl(..) => program_clauses_for_impl(tcx, def_id),
+ _ => Lrc::new(tcx.mk_clauses(iter::empty::<Clause>())),
+ }
+ hir::map::Node::NodeImplItem(item) => {
+ if let hir::ImplItemKind::Type(..) = item.node {
+ program_clauses_for_associated_type_value(tcx, def_id)
+ } else {
+ Lrc::new(tcx.mk_clauses(iter::empty::<Clause>()))
+ }
+ },
// FIXME: other constructions e.g. traits, associated types...
_ => Lrc::new(tcx.mk_clauses(iter::empty::<Clause>())),
Lrc::new(tcx.mk_clauses(iter::once(Clause::ForAll(ty::Binder::dummy(clause)))))
}
+/// Lowers the associated type value of an impl into its `Normalize(..)`
+/// program clause, following rule Normalize-From-Impl (see the diagram
+/// comment just below).
+///
+/// `item_id` must refer to an associated type (`debug_assert`ed below) whose
+/// container is an impl; an associated type in a trait container hits
+/// `bug!()`.
+pub fn program_clauses_for_associated_type_value<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ item_id: DefId,
+) -> Lrc<&'tcx Slice<Clause<'tcx>>> {
+ // Rule Normalize-From-Impl (see rustc guide)
+ //
+ // ```impl<P0..Pn> Trait<A1..An> for A0
+ // {
+ // type AssocType<Pn+1..Pm> where WC = T;
+ // }```
+ //
+ // ```
+ // forall<P0..Pm> {
+ // forall<Pn+1..Pm> {
+ // Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T) :-
+ // Implemented(A0: Trait<A1..An>) && WC
+ // }
+ // }
+ // ```
+
+ let item = tcx.associated_item(item_id);
+ debug_assert_eq!(item.kind, ty::AssociatedKind::Type);
+ let impl_id = if let ty::AssociatedItemContainer::ImplContainer(impl_id) = item.container {
+ impl_id
+ } else {
+ bug!()
+ };
+ // `A0 as Trait<A1..An>`
+ let trait_ref = tcx.impl_trait_ref(impl_id).unwrap();
+ // `T`
+ let ty = tcx.type_of(item_id);
+ // `Implemented(A0: Trait<A1..An>)`
+ let trait_implemented = ty::Binder::dummy(ty::TraitPredicate { trait_ref }.lower());
+ // `WC`
+ let item_where_clauses = tcx.predicates_of(item_id).predicates.lower();
+ // `Implemented(A0: Trait<A1..An>) && WC`
+ let mut where_clauses = vec![trait_implemented];
+ where_clauses.extend(item_where_clauses);
+ // `<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm>`
+ let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.name);
+ // `Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T)`
+ let normalize_goal = DomainGoal::Normalize(ty::ProjectionPredicate { projection_ty, ty });
+ // `Normalize(... -> T) :- ...`
+ let clause = ProgramClause {
+ goal: normalize_goal,
+ hypotheses: tcx.mk_goals(
+ where_clauses.into_iter().map(|wc| Goal::from_poly_domain_goal(wc, tcx))
+ ),
+ };
+ Lrc::new(tcx.mk_clauses(iter::once(Clause::ForAll(ty::Binder::dummy(clause)))))
+}
+
pub fn dump_program_clauses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
if !tcx.features().rustc_attrs {
return;
// rustdoc needs to be able to document functions that use all the features, so
// whitelist them all
Lrc::new(llvm_util::all_known_features()
- .map(|c| c.to_string())
+ .map(|(a, b)| (a.to_string(), b.map(|s| s.to_string())))
.collect())
} else {
Lrc::new(llvm_util::target_feature_whitelist(tcx.sess)
.iter()
- .map(|c| c.to_string())
+ .map(|&(a, b)| (a.to_string(), b.map(|s| s.to_string())))
.collect())
}
};
use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor,
DICompositeType, DILexicalBlock, DIFlags};
+use rustc::hir::TransFnAttrFlags;
use rustc::hir::def::CtorKind;
use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
use rustc::ty::fold::TypeVisitor;
use std::fmt::Write;
use std::ptr;
use std::path::{Path, PathBuf};
-use syntax::{ast, attr};
+use syntax::ast;
use syntax::symbol::{Interner, InternedString, Symbol};
use syntax_pos::{self, Span, FileName};
}
let tcx = cx.tcx;
- let no_mangle = attr::contains_name(&tcx.get_attrs(def_id), "no_mangle");
+ let attrs = tcx.trans_fn_attrs(def_id);
+
+ if attrs.flags.contains(TransFnAttrFlags::NO_DEBUG) {
+ return;
+ }
+
+ let no_mangle = attrs.flags.contains(TransFnAttrFlags::NO_MANGLE);
// We may want to remove the namespace scope if we're in an extern block, see:
// https://github.com/rust-lang/rust/pull/46457#issuecomment-351750952
let var_scope = get_namespace_for_item(cx, def_id);
- let span = cx.tcx.def_span(def_id);
+ let span = tcx.def_span(def_id);
let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP {
let loc = span_start(cx, span);
use llvm;
use llvm::{ModuleRef, ContextRef, ValueRef};
use llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilderRef, DISubprogram, DIArray, DIFlags};
+use rustc::hir::TransFnAttrFlags;
use rustc::hir::def_id::{DefId, CrateNum};
use rustc::ty::subst::Substs;
use common::CodegenCx;
use builder::Builder;
use monomorphize::Instance;
-use rustc::ty::{self, ParamEnv, Ty};
+use rustc::ty::{self, ParamEnv, Ty, InstanceDef};
use rustc::mir;
use rustc::session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
return FunctionDebugContext::DebugInfoDisabled;
}
- for attr in instance.def.attrs(cx.tcx).iter() {
- if attr.check_name("no_debug") {
+ if let InstanceDef::Item(def_id) = instance.def {
+ if cx.tcx.trans_fn_attrs(def_id).flags.contains(TransFnAttrFlags::NO_DEBUG) {
return FunctionDebugContext::FunctionWithoutDebugInfo;
}
}
- let containing_scope = get_containing_scope(cx, instance);
let span = mir.span;
// This can be the case for functions inlined from another crate
}
let def_id = instance.def_id();
+ let containing_scope = get_containing_scope(cx, instance);
let loc = span_start(cx, span);
let file_metadata = file_metadata(cx, &loc.file.name, def_id.krate);
#![feature(slice_sort_by_cached_key)]
#![feature(optin_builtin_traits)]
#![feature(inclusive_range_fields)]
-#![feature(underscore_lifetimes)]
use rustc::dep_graph::WorkProduct;
use syntax_pos::symbol::Symbol;
use rustc::session::config::PrintRequest;
use libc::c_int;
use std::ffi::CString;
+use syntax::feature_gate::UnstableFeatures;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Once;
// to LLVM or the feature detection code will walk past the end of the feature
// array, leading to crashes.
-const ARM_WHITELIST: &'static [&'static str] = &["neon", "v7", "vfp2", "vfp3", "vfp4"];
-
-const AARCH64_WHITELIST: &'static [&'static str] = &["fp", "neon", "sve", "crc", "crypto",
- "ras", "lse", "rdm", "fp16", "rcpc",
- "dotprod", "v8.1a", "v8.2a", "v8.3a"];
-
-const X86_WHITELIST: &'static [&'static str] = &["aes", "avx", "avx2", "avx512bw",
- "avx512cd", "avx512dq", "avx512er",
- "avx512f", "avx512ifma", "avx512pf",
- "avx512vbmi", "avx512vl", "avx512vpopcntdq",
- "bmi1", "bmi2", "fma", "fxsr",
- "lzcnt", "mmx", "pclmulqdq",
- "popcnt", "rdrand", "rdseed",
- "sha",
- "sse", "sse2", "sse3", "sse4.1",
- "sse4.2", "sse4a", "ssse3",
- "tbm", "xsave", "xsavec",
- "xsaveopt", "xsaves"];
-
-const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx", "hvx-double"];
-
-const POWERPC_WHITELIST: &'static [&'static str] = &["altivec",
- "power8-altivec", "power9-altivec",
- "power8-vector", "power9-vector",
- "vsx"];
-
-const MIPS_WHITELIST: &'static [&'static str] = &["fp64", "msa"];
+// Each whitelist entry is `(llvm_feature_name, Option<rust_feature_gate>)`:
+// a `None` gate means the feature may be used on stable, while `Some(gate)`
+// requires the named `#![feature(..)]` gate to be enabled (see the
+// `None => true` / gate-flag checks in `from_target_feature`).
+
+// 32-bit ARM: every feature is gated behind `arm_target_feature`.
+const ARM_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("neon", Some("arm_target_feature")),
+ ("v7", Some("arm_target_feature")),
+ ("vfp2", Some("arm_target_feature")),
+ ("vfp3", Some("arm_target_feature")),
+ ("vfp4", Some("arm_target_feature")),
+];
+
+// AArch64: every feature is gated behind `aarch64_target_feature`.
+const AARCH64_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("fp", Some("aarch64_target_feature")),
+ ("neon", Some("aarch64_target_feature")),
+ ("sve", Some("aarch64_target_feature")),
+ ("crc", Some("aarch64_target_feature")),
+ ("crypto", Some("aarch64_target_feature")),
+ ("ras", Some("aarch64_target_feature")),
+ ("lse", Some("aarch64_target_feature")),
+ ("rdm", Some("aarch64_target_feature")),
+ ("fp16", Some("aarch64_target_feature")),
+ ("rcpc", Some("aarch64_target_feature")),
+ ("dotprod", Some("aarch64_target_feature")),
+ ("v8.1a", Some("aarch64_target_feature")),
+ ("v8.2a", Some("aarch64_target_feature")),
+ ("v8.3a", Some("aarch64_target_feature")),
+];
+
+// x86/x86_64: `None` entries are usable without a feature gate; the rest
+// carry their individual gates (avx512, mmx, sse4a, tbm).
+const X86_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("aes", None),
+ ("avx", None),
+ ("avx2", None),
+ ("avx512bw", Some("avx512_target_feature")),
+ ("avx512cd", Some("avx512_target_feature")),
+ ("avx512dq", Some("avx512_target_feature")),
+ ("avx512er", Some("avx512_target_feature")),
+ ("avx512f", Some("avx512_target_feature")),
+ ("avx512ifma", Some("avx512_target_feature")),
+ ("avx512pf", Some("avx512_target_feature")),
+ ("avx512vbmi", Some("avx512_target_feature")),
+ ("avx512vl", Some("avx512_target_feature")),
+ ("avx512vpopcntdq", Some("avx512_target_feature")),
+ ("bmi1", None),
+ ("bmi2", None),
+ ("fma", None),
+ ("fxsr", None),
+ ("lzcnt", None),
+ ("mmx", Some("mmx_target_feature")),
+ ("pclmulqdq", None),
+ ("popcnt", None),
+ ("rdrand", None),
+ ("rdseed", None),
+ ("sha", None),
+ ("sse", None),
+ ("sse2", None),
+ ("sse3", None),
+ ("sse4.1", None),
+ ("sse4.2", None),
+ ("sse4a", Some("sse4a_target_feature")),
+ ("ssse3", None),
+ ("tbm", Some("tbm_target_feature")),
+ ("xsave", None),
+ ("xsavec", None),
+ ("xsaveopt", None),
+ ("xsaves", None),
+];
+
+// Hexagon: gated behind `hexagon_target_feature`.
+const HEXAGON_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("hvx", Some("hexagon_target_feature")),
+ ("hvx-double", Some("hexagon_target_feature")),
+];
+
+// PowerPC: gated behind `powerpc_target_feature`.
+const POWERPC_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("altivec", Some("powerpc_target_feature")),
+ ("power8-altivec", Some("powerpc_target_feature")),
+ ("power9-altivec", Some("powerpc_target_feature")),
+ ("power8-vector", Some("powerpc_target_feature")),
+ ("power9-vector", Some("powerpc_target_feature")),
+ ("vsx", Some("powerpc_target_feature")),
+];
+
+// MIPS: gated behind `mips_target_feature`.
+const MIPS_WHITELIST: &[(&str, Option<&str>)] = &[
+ ("fp64", Some("mips_target_feature")),
+ ("msa", Some("mips_target_feature")),
+];
/// When rustdoc is running, provide a list of all known features so that all their respective
/// primitives may be documented.
///
/// IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this
/// iterator!
-pub fn all_known_features() -> impl Iterator<Item=&'static str> {
+pub fn all_known_features() -> impl Iterator<Item=(&'static str, Option<&'static str>)> {
ARM_WHITELIST.iter().cloned()
.chain(AARCH64_WHITELIST.iter().cloned())
.chain(X86_WHITELIST.iter().cloned())
let target_machine = create_target_machine(sess, true);
target_feature_whitelist(sess)
.iter()
+ .filter_map(|&(feature, gate)| {
+ if UnstableFeatures::from_environment().is_nightly_build() || gate.is_none() {
+ Some(feature)
+ } else {
+ None
+ }
+ })
.filter(|feature| {
let llvm_feature = to_llvm_feature(sess, feature);
let cstr = CString::new(llvm_feature).unwrap();
.map(|feature| Symbol::intern(feature)).collect()
}
-pub fn target_feature_whitelist(sess: &Session) -> &'static [&'static str] {
+pub fn target_feature_whitelist(sess: &Session)
+ -> &'static [(&'static str, Option<&'static str>)]
+{
match &*sess.target.target.arch {
"arm" => ARM_WHITELIST,
"aarch64" => AARCH64_WHITELIST,
let static_ = cx
.tcx
.interpret_interner
- .get_corresponding_static_def_id(ptr.alloc_id);
+ .get_static(ptr.alloc_id);
let base_addr = if let Some(def_id) = static_ {
assert!(cx.tcx.is_static(def_id).is_some());
consts::get_static(cx, def_id)
promoted: None
};
let param_env = ty::ParamEnv::reveal_all();
- cx.tcx.const_eval(param_env.and(cid))?;
+ let static_ = cx.tcx.const_eval(param_env.and(cid))?;
- let alloc_id = cx
- .tcx
- .interpret_interner
- .get_cached(def_id)
- .expect("global not cached");
+ let ptr = match static_.val {
+ ConstVal::Value(MiriValue::ByRef(ptr, _)) => ptr,
+ _ => bug!("static const eval returned {:#?}", static_),
+ };
let alloc = cx
.tcx
.interpret_interner
- .get_alloc(alloc_id)
+ .get_alloc(ptr.primval.to_ptr().expect("static has integer pointer").alloc_id)
.expect("miri allocation never successfully created");
Ok(global_initializer(cx, alloc))
}
use rustc::middle::cstore::MetadataLoader;
use rustc::dep_graph::DepGraph;
use rustc_back::target::Target;
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxHashMap;
use rustc_mir::monomorphize::collector;
use link::{build_link_meta, out_filename};
::symbol_names::provide(providers);
providers.target_features_whitelist = |_tcx, _cnum| {
- Lrc::new(FxHashSet()) // Just a dummy
+ Lrc::new(FxHashMap()) // Just a dummy
};
}
fn provide_extern(&self, _providers: &mut Providers) {}
&format!("{}, producing the closest possible value",
msg),
cast_suggestion);
- err.warn("casting here will cause undefined behavior if the value is \
- finite but larger or smaller than the largest or smallest \
- finite value representable by `f32` (this is a bug and will be \
- fixed)");
}
true
}
use rustc::ty::util::IntTypeExt;
use rustc::ty::util::Discr;
use rustc::util::captures::Captures;
-use rustc::util::nodemap::{FxHashSet, FxHashMap};
+use rustc::util::nodemap::FxHashMap;
use syntax::{abi, ast};
use syntax::ast::MetaItemKind;
use syntax::attr::{InlineAttr, list_contains_name, mark_used};
use syntax::codemap::Spanned;
use syntax::symbol::{Symbol, keywords};
+use syntax::feature_gate;
use syntax_pos::{Span, DUMMY_SP};
use rustc::hir::{self, map as hir_map, TransFnAttrs, TransFnAttrFlags, Unsafety};
fn from_target_feature(
tcx: TyCtxt,
attr: &ast::Attribute,
- whitelist: &FxHashSet<String>,
+ whitelist: &FxHashMap<String, Option<String>>,
target_features: &mut Vec<Symbol>,
) {
let list = match attr.meta_item_list() {
return
}
};
-
+ let rust_features = tcx.features();
for item in list {
+ // Only `enable = ...` is accepted in the meta item list
if !item.check_name("enable") {
let msg = "#[target_feature(..)] only accepts sub-keys of `enable` \
currently";
tcx.sess.span_err(item.span, &msg);
continue
}
+
+ // Must be of the form `enable = "..."` ( a string)
let value = match item.value_str() {
- Some(list) => list,
+ Some(value) => value,
None => {
let msg = "#[target_feature] attribute must be of the form \
#[target_feature(enable = \"..\")]";
continue
}
};
- let value = value.as_str();
- for feature in value.split(',') {
- if whitelist.contains(feature) {
- target_features.push(Symbol::intern(feature));
- continue
- }
-
- let msg = format!("the feature named `{}` is not valid for \
- this target", feature);
- let mut err = tcx.sess.struct_span_err(item.span, &msg);
- if feature.starts_with("+") {
- let valid = whitelist.contains(&feature[1..]);
- if valid {
- err.help("consider removing the leading `+` in the feature name");
+ // We allow comma separation to enable multiple features
+ for feature in value.as_str().split(',') {
+
+ // Only allow whitelisted features per platform
+ let feature_gate = match whitelist.get(feature) {
+ Some(g) => g,
+ None => {
+ let msg = format!("the feature named `{}` is not valid for \
+ this target", feature);
+ let mut err = tcx.sess.struct_span_err(item.span, &msg);
+
+ if feature.starts_with("+") {
+ let valid = whitelist.contains_key(&feature[1..]);
+ if valid {
+ err.help("consider removing the leading `+` in the feature name");
+ }
+ }
+ err.emit();
+ continue
}
+ };
+
+ // Only allow features whose feature gates have been enabled
+ let allowed = match feature_gate.as_ref().map(|s| &**s) {
+ Some("arm_target_feature") => rust_features.arm_target_feature,
+ Some("aarch64_target_feature") => rust_features.aarch64_target_feature,
+ Some("hexagon_target_feature") => rust_features.hexagon_target_feature,
+ Some("powerpc_target_feature") => rust_features.powerpc_target_feature,
+ Some("mips_target_feature") => rust_features.mips_target_feature,
+ Some("avx512_target_feature") => rust_features.avx512_target_feature,
+ Some("mmx_target_feature") => rust_features.mmx_target_feature,
+ Some("sse4a_target_feature") => rust_features.sse4a_target_feature,
+ Some("tbm_target_feature") => rust_features.tbm_target_feature,
+ Some(name) => bug!("unknown target feature gate {}", name),
+ None => true,
+ };
+ if !allowed {
+ feature_gate::emit_feature_err(
+ &tcx.sess.parse_sess,
+ feature_gate.as_ref().unwrap(),
+ item.span,
+ feature_gate::GateIssue::Language,
+ &format!("the target feature `{}` is currently unstable",
+ feature),
+ );
+ continue
}
- err.emit();
+ target_features.push(Symbol::intern(feature));
}
}
}
trans_fn_attrs.flags |= TransFnAttrFlags::NO_MANGLE;
} else if attr.check_name("rustc_std_internal_symbol") {
trans_fn_attrs.flags |= TransFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL;
+ } else if attr.check_name("no_debug") {
+ trans_fn_attrs.flags |= TransFnAttrFlags::NO_DEBUG;
} else if attr.check_name("inline") {
trans_fn_attrs.inline = attrs.iter().fold(InlineAttr::None, |ia, attr| {
if attr.path != "inline" {
.emit();
}
} else if attr.check_name("target_feature") {
- // handle deprecated #[target_feature = "..."]
- if let Some(val) = attr.value_str() {
- for feat in val.as_str().split(",").map(|f| f.trim()) {
- if !feat.is_empty() && !feat.contains('\0') {
- trans_fn_attrs.target_features.push(Symbol::intern(feat));
- }
- }
- let msg = "#[target_feature = \"..\"] is deprecated and will \
- eventually be removed, use \
- #[target_feature(enable = \"..\")] instead";
- tcx.sess.span_warn(attr.span, &msg);
- continue
- }
-
if tcx.fn_sig(id).unsafety() == Unsafety::Normal {
let msg = "#[target_feature(..)] can only be applied to \
`unsafe` function";
#![feature(slice_patterns)]
#![feature(slice_sort_by_cached_key)]
#![feature(dyn_trait)]
-#![feature(underscore_lifetimes)]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
Type,
}
+/// Emits a session-level warning that an intra-doc link path could not be
+/// resolved; callers then either fall back to another resolution attempt or
+/// leave the link to be rendered as a plain (possibly broken) link.
+// NOTE(review): this is a plain warning rather than a lint, so users cannot
+// `allow`/`deny` it per item — confirm that is intended.
+fn resolution_failure(cx: &DocContext, path_str: &str) {
+ cx.sess().warn(&format!("[{}] cannot be resolved, ignoring it...", path_str));
+}
+
impl Clean<Attributes> for [ast::Attribute] {
fn clean(&self, cx: &DocContext) -> Attributes {
let mut attrs = Attributes::from_ast(cx.sess().diagnostic(), self);
if let Ok(def) = resolve(cx, path_str, true) {
def
} else {
+ resolution_failure(cx, path_str);
// this could just be a normal link or a broken link
// we could potentially check if something is
// "intra-doc-link-like" and warn in that case
if let Ok(def) = resolve(cx, path_str, false) {
def
} else {
+ resolution_failure(cx, path_str);
// this could just be a normal link
continue;
}
} else if let Ok(value_def) = resolve(cx, path_str, true) {
value_def
} else {
+ resolution_failure(cx, path_str);
// this could just be a normal link
continue;
}
if let Some(def) = macro_resolve(cx, path_str) {
(def, None)
} else {
+ resolution_failure(cx, path_str);
continue
}
}
use rustc::ty::{self, TyCtxt, AllArenas};
use rustc::hir::map as hir_map;
use rustc::lint;
+use rustc::session::config::ErrorOutputType;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_resolve as resolve;
use rustc_metadata::creader::CrateLoader;
use syntax::codemap;
use syntax::edition::Edition;
use syntax::feature_gate::UnstableFeatures;
+use syntax::json::JsonEmitter;
use errors;
-use errors::emitter::ColorConfig;
+use errors::emitter::{Emitter, EmitterWriter};
use std::cell::{RefCell, Cell};
use std::mem;
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{self, Lrc};
use std::rc::Rc;
use std::path::PathBuf;
use clean::Clean;
use html::render::RenderInfo;
-pub use rustc::session::config::Input;
+pub use rustc::session::config::{Input, CodegenOptions};
pub use rustc::session::search_paths::SearchPaths;
pub type ExternalPaths = FxHashMap<DefId, (Vec<String>, clean::TypeKind)>;
}
}
-
pub fn run_core(search_paths: SearchPaths,
cfgs: Vec<String>,
externs: config::Externs,
allow_warnings: bool,
crate_name: Option<String>,
force_unstable_if_unmarked: bool,
- edition: Edition) -> (clean::Crate, RenderInfo)
+ edition: Edition,
+ cg: CodegenOptions,
+ error_format: ErrorOutputType) -> (clean::Crate, RenderInfo)
{
// Parse, resolve, and typecheck the given crate.
let warning_lint = lint::builtin::WARNINGS.name_lower();
let host_triple = TargetTriple::from_triple(config::host_triple());
+ // NOTE: rustdoc builds its own diagnostic handler below, so the requested error output format (human-readable, JSON, or short) must be honored here as well.
let sessopts = config::Options {
maybe_sysroot,
search_paths,
crate_types: vec![config::CrateTypeRlib],
lint_opts: if !allow_warnings { vec![(warning_lint, lint::Allow)] } else { vec![] },
lint_cap: Some(lint::Allow),
+ cg,
externs,
target_triple: triple.unwrap_or(host_triple),
// Ensure that rustdoc works even if rustc is feature-staged
edition,
..config::basic_debugging_options()
},
+ error_format,
..config::basic_options().clone()
};
let codemap = Lrc::new(codemap::CodeMap::new(sessopts.file_path_mapping()));
- let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
- true,
- false,
- Some(codemap.clone()));
+ let emitter: Box<dyn Emitter + sync::Send> = match error_format {
+ ErrorOutputType::HumanReadable(color_config) => Box::new(
+ EmitterWriter::stderr(
+ color_config,
+ Some(codemap.clone()),
+ false,
+ sessopts.debugging_opts.teach,
+ ).ui_testing(sessopts.debugging_opts.ui_testing)
+ ),
+ ErrorOutputType::Json(pretty) => Box::new(
+ JsonEmitter::stderr(
+ None,
+ codemap.clone(),
+ pretty,
+ sessopts.debugging_opts.approximate_suggestions,
+ ).ui_testing(sessopts.debugging_opts.ui_testing)
+ ),
+ ErrorOutputType::Short(color_config) => Box::new(
+ EmitterWriter::stderr(color_config, Some(codemap.clone()), true, false)
+ ),
+ };
+
+ let diagnostic_handler = errors::Handler::with_emitter_and_flags(
+ emitter,
+ errors::HandlerFlags {
+ can_emit_warnings: true,
+ treat_err_as_bug: false,
+ external_macro_backtrace: false,
+ ..Default::default()
+ },
+ );
let mut sess = session::build_session_(
sessopts, cpath, diagnostic_handler, codemap,
-> io::Result<()>
{
write!(dst,
-r##"<!DOCTYPE html>
-<html lang="en">
-<head>
- <meta charset="utf-8">
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <meta name="generator" content="rustdoc">
- <meta name="description" content="{description}">
- <meta name="keywords" content="{keywords}">
-
- <title>{title}</title>
-
- <link rel="stylesheet" type="text/css" href="{root_path}normalize{suffix}.css">
- <link rel="stylesheet" type="text/css" href="{root_path}rustdoc{suffix}.css"
- id="mainThemeStyle">
- {themes}
- <link rel="stylesheet" type="text/css" href="{root_path}dark{suffix}.css">
- <link rel="stylesheet" type="text/css" href="{root_path}light{suffix}.css" id="themeStyle">
- <script src="{root_path}storage{suffix}.js"></script>
- {css_extension}
-
- {favicon}
- {in_header}
-</head>
-<body class="rustdoc {css_class}">
- <!--[if lte IE 8]>
- <div class="warning">
- This old browser is unsupported and will most likely display funky
- things.
- </div>
- <![endif]-->
-
- {before_content}
-
- <nav class="sidebar">
- <div class="sidebar-menu">☰</div>
- {logo}
- {sidebar}
- </nav>
-
- <div class="theme-picker">
- <button id="theme-picker" aria-label="Pick another theme!">
- <img src="{root_path}brush{suffix}.svg" width="18" alt="Pick another theme!">
- </button>
- <div id="theme-choices"></div>
- </div>
- <script src="{root_path}theme{suffix}.js"></script>
- <nav class="sub">
- <form class="search-form js-only">
- <div class="search-container">
- <input class="search-input" name="search"
- autocomplete="off"
- placeholder="Click or press ‘S’ to search, ‘?’ for more options…"
- type="search">
- </div>
- </form>
- </nav>
-
- <section id='main' class="content">{content}</section>
- <section id='search' class="content hidden"></section>
-
- <section class="footer"></section>
-
- <aside id="help" class="hidden">
- <div>
- <h1 class="hidden">Help</h1>
-
- <div class="shortcuts">
- <h2>Keyboard Shortcuts</h2>
-
- <dl>
- <dt><kbd>?</kbd></dt>
- <dd>Show this help dialog</dd>
- <dt><kbd>S</kbd></dt>
- <dd>Focus the search field</dd>
- <dt><kbd>↑</kbd></dt>
- <dd>Move up in search results</dd>
- <dt><kbd>↓</kbd></dt>
- <dd>Move down in search results</dd>
- <dt><kbd>↹</kbd></dt>
- <dd>Switch tab</dd>
- <dt><kbd>⏎</kbd></dt>
- <dd>Go to active search result</dd>
- <dt><kbd>+</kbd></dt>
- <dd>Expand all sections</dd>
- <dt><kbd>-</kbd></dt>
- <dd>Collapse all sections</dd>
- </dl>
- </div>
-
- <div class="infos">
- <h2>Search Tricks</h2>
-
- <p>
- Prefix searches with a type followed by a colon (e.g.
- <code>fn:</code>) to restrict the search to a given type.
- </p>
-
- <p>
- Accepted types are: <code>fn</code>, <code>mod</code>,
- <code>struct</code>, <code>enum</code>,
- <code>trait</code>, <code>type</code>, <code>macro</code>,
- and <code>const</code>.
- </p>
-
- <p>
- Search functions by type signature (e.g.
- <code>vec -> usize</code> or <code>* -> vec</code>)
- </p>
- </div>
- </div>
- </aside>
-
- {after_content}
-
- <script>
- window.rootPath = "{root_path}";
- window.currentCrate = "{krate}";
- </script>
- <script src="{root_path}main{suffix}.js"></script>
- <script defer src="{root_path}search-index.js"></script>
-</body>
-</html>"##,
+"<!DOCTYPE html>\
+<html lang=\"en\">\
+<head>\
+ <meta charset=\"utf-8\">\
+ <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\
+ <meta name=\"generator\" content=\"rustdoc\">\
+ <meta name=\"description\" content=\"{description}\">\
+ <meta name=\"keywords\" content=\"{keywords}\">\
+ <title>{title}</title>\
+ <link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}normalize{suffix}.css\">\
+ <link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}rustdoc{suffix}.css\" \
+ id=\"mainThemeStyle\">\
+ {themes}\
+ <link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}dark{suffix}.css\">\
+ <link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}light{suffix}.css\" \
+ id=\"themeStyle\">\
+ <script src=\"{root_path}storage{suffix}.js\"></script>\
+ {css_extension}\
+ {favicon}\
+ {in_header}\
+</head>\
+<body class=\"rustdoc {css_class}\">\
+ <!--[if lte IE 8]>\
+ <div class=\"warning\">\
+ This old browser is unsupported and will most likely display funky \
+ things.\
+ </div>\
+ <![endif]-->\
+ {before_content}\
+ <nav class=\"sidebar\">\
+ <div class=\"sidebar-menu\">☰</div>\
+ {logo}\
+ {sidebar}\
+ </nav>\
+ <div class=\"theme-picker\">\
+ <button id=\"theme-picker\" aria-label=\"Pick another theme!\">\
+ <img src=\"{root_path}brush{suffix}.svg\" width=\"18\" alt=\"Pick another theme!\">\
+ </button>\
+ <div id=\"theme-choices\"></div>\
+ </div>\
+ <script src=\"{root_path}theme{suffix}.js\"></script>\
+ <nav class=\"sub\">\
+ <form class=\"search-form js-only\">\
+ <div class=\"search-container\">\
+ <input class=\"search-input\" name=\"search\" \
+ autocomplete=\"off\" \
+ placeholder=\"Click or press ‘S’ to search, ‘?’ for more options…\" \
+ type=\"search\">\
+ </div>\
+ </form>\
+ </nav>\
+ <section id=\"main\" class=\"content\">{content}</section>\
+ <section id=\"search\" class=\"content hidden\"></section>\
+ <section class=\"footer\"></section>\
+ <aside id=\"help\" class=\"hidden\">\
+ <div>\
+ <h1 class=\"hidden\">Help</h1>\
+ <div class=\"shortcuts\">\
+ <h2>Keyboard Shortcuts</h2>\
+ <dl>\
+ <dt><kbd>?</kbd></dt>\
+ <dd>Show this help dialog</dd>\
+ <dt><kbd>S</kbd></dt>\
+ <dd>Focus the search field</dd>\
+ <dt><kbd>↑</kbd></dt>\
+ <dd>Move up in search results</dd>\
+ <dt><kbd>↓</kbd></dt>\
+ <dd>Move down in search results</dd>\
+ <dt><kbd>↹</kbd></dt>\
+ <dd>Switch tab</dd>\
+ <dt><kbd>⏎</kbd></dt>\
+ <dd>Go to active search result</dd>\
+ <dt><kbd>+</kbd></dt>\
+ <dd>Expand all sections</dd>\
+ <dt><kbd>-</kbd></dt>\
+ <dd>Collapse all sections</dd>\
+ </dl>\
+ </div>\
+ <div class=\"infos\">\
+ <h2>Search Tricks</h2>\
+ <p>\
+ Prefix searches with a type followed by a colon (e.g. \
+ <code>fn:</code>) to restrict the search to a given type.\
+ </p>\
+ <p>\
+ Accepted types are: <code>fn</code>, <code>mod</code>, \
+ <code>struct</code>, <code>enum</code>, \
+ <code>trait</code>, <code>type</code>, <code>macro</code>, \
+ and <code>const</code>.\
+ </p>\
+ <p>\
+ Search functions by type signature (e.g. \
+ <code>vec -> usize</code> or <code>* -> vec</code>)\
+ </p>\
+ <p>\
+ Search multiple things at once by splitting your query with comma (e.g. \
+ <code>str,u8</code> or <code>String,struct:Vec,test</code>)\
+ </p>\
+ </div>\
+ </div>\
+ </aside>\
+ {after_content}\
+ <script>\
+ window.rootPath = \"{root_path}\";\
+ window.currentCrate = \"{krate}\";\
+ </script>\
+ <script src=\"{root_path}main{suffix}.js\"></script>\
+ <script defer src=\"{root_path}search-index.js\"></script>\
+</body>\
+</html>",
css_extension = if css_file_extension {
format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}theme{suffix}.css\">",
root_path = page.root_path,
/// This describes the layout of each page, and is not modified after
/// creation of the context (contains info like the favicon and added html).
pub layout: layout::Layout,
- /// This flag indicates whether [src] links should be generated or not. If
+ /// This flag indicates whether `[src]` links should be generated or not. If
/// the source files are present in the html rendering, then this will be
/// `true`.
pub include_sources: bool,
let mut dst = try_err!(File::create(&joint_dst), &joint_dst);
try_err!(dst.write_all(&buf), &joint_dst);
- all.append(full_path(self, &item), &item_type);
+ if !self.render_redirect_pages {
+ all.append(full_path(self, &item), &item_type);
+ }
// Redirect from a sane URL using the namespace to Rustdoc's
// URL for the page.
let redir_name = format!("{}.{}.html", name, item_type.name_space());
printTab(currentTab);
}
+ function execSearch(query, searchWords) {
+ var queries = query.raw.split(",");
+ var results = {
+ 'in_args': [],
+ 'returned': [],
+ 'others': [],
+ };
+
+ for (var i = 0; i < queries.length; ++i) {
+ var query = queries[i].trim();
+ if (query.length !== 0) {
+ var tmp = execQuery(getQuery(query), searchWords);
+
+ results['in_args'].push(tmp['in_args']);
+ results['returned'].push(tmp['returned']);
+ results['others'].push(tmp['others']);
+ }
+ }
+ if (queries.length > 1) {
+ function getSmallest(arrays, positions) {
+ var start = null;
+
+ for (var it = 0; it < positions.length; ++it) {
+ if (arrays[it].length > positions[it] &&
+ (start === null || start > arrays[it][positions[it]].lev)) {
+ start = arrays[it][positions[it]].lev;
+ }
+ }
+ return start;
+ }
+
+ function mergeArrays(arrays) {
+ var ret = [];
+ var positions = [];
+
+ for (var x = 0; x < arrays.length; ++x) {
+ positions.push(0);
+ }
+ while (ret.length < MAX_RESULTS) {
+ var smallest = getSmallest(arrays, positions);
+ if (smallest === null) {
+ break;
+ }
+ for (x = 0; x < arrays.length && ret.length < MAX_RESULTS; ++x) {
+ if (arrays[x].length > positions[x] &&
+ arrays[x][positions[x]].lev === smallest) {
+ ret.push(arrays[x][positions[x]]);
+ positions[x] += 1;
+ }
+ }
+ }
+ return ret;
+ }
+
+ return {
+ 'in_args': mergeArrays(results['in_args']),
+ 'returned': mergeArrays(results['returned']),
+ 'others': mergeArrays(results['others']),
+ };
+ } else {
+ return {
+ 'in_args': results['in_args'][0],
+ 'returned': results['returned'][0],
+ 'others': results['others'][0],
+ };
+ }
+ }
+
function search(e) {
- var query,
- obj, i, len,
- results = {"in_args": [], "returned": [], "others": []},
- resultIndex;
var params = getQueryStringParams();
+ var query = getQuery(document.getElementsByClassName('search-input')[0].value);
- query = getQuery(document.getElementsByClassName('search-input')[0].value);
if (e) {
e.preventDefault();
}
}
}
- results = execQuery(query, index);
- showResults(results);
+ showResults(execSearch(query, index));
}
function buildIndex(rawSearchIndex) {
#![feature(test)]
#![feature(vec_remove_item)]
#![feature(entry_and_modify)]
+#![feature(dyn_trait)]
extern crate arena;
extern crate getopts;
extern crate serialize as rustc_serialize; // used by deriving
+use errors::ColorConfig;
+
use std::collections::{BTreeMap, BTreeSet};
use std::default::Default;
use std::env;
use syntax::edition::Edition;
use externalfiles::ExternalHtml;
use rustc::session::search_paths::SearchPaths;
-use rustc::session::config::{ErrorOutputType, RustcOptGroup, nightly_options, Externs};
+use rustc::session::config::{ErrorOutputType, RustcOptGroup, Externs, CodegenOptions};
+use rustc::session::config::{nightly_options, build_codegen_options};
use rustc_back::target::TargetTriple;
#[macro_use]
pub fn main() {
const STACK_SIZE: usize = 32_000_000; // 32MB
+ rustc_driver::set_sigpipe_handler();
env_logger::init();
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
syntax::with_globals(move || {
stable("plugin-path", |o| {
o.optmulti("", "plugin-path", "directory to load plugins from", "DIR")
}),
+ stable("C", |o| {
+ o.optmulti("C", "codegen", "pass a codegen option to rustc", "OPT[=VALUE]")
+ }),
stable("passes", |o| {
o.optmulti("", "passes",
"list of passes to also run, you might want \
"edition to use when compiling rust code (default: 2015)",
"EDITION")
}),
+ unstable("color", |o| {
+ o.optopt("",
+ "color",
+ "Configure coloring of output:
+ auto = colorize, if output goes to a tty (default);
+ always = always colorize output;
+ never = never colorize output",
+ "auto|always|never")
+ }),
+ unstable("error-format", |o| {
+ o.optopt("",
+ "error-format",
+ "How errors and other messages are produced",
+ "human|json|short")
+ }),
]
}
}
let input = &matches.free[0];
+ let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) {
+ Some("auto") => ColorConfig::Auto,
+ Some("always") => ColorConfig::Always,
+ Some("never") => ColorConfig::Never,
+ None => ColorConfig::Auto,
+ Some(arg) => {
+ print_error(&format!("argument for --color must be `auto`, `always` or `never` \
+ (instead was `{}`)", arg));
+ return 1;
+ }
+ };
+ let error_format = match matches.opt_str("error-format").as_ref().map(|s| &s[..]) {
+ Some("human") => ErrorOutputType::HumanReadable(color),
+ Some("json") => ErrorOutputType::Json(false),
+ Some("pretty-json") => ErrorOutputType::Json(true),
+ Some("short") => ErrorOutputType::Short(color),
+ None => ErrorOutputType::HumanReadable(color),
+ Some(arg) => {
+ print_error(&format!("argument for --error-format must be `human`, `json` or \
+ `short` (instead was `{}`)", arg));
+ return 1;
+ }
+ };
+
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
- libs.add_path(s, ErrorOutputType::default());
+ libs.add_path(s, error_format);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
}
};
+ let cg = build_codegen_options(&matches, ErrorOutputType::default());
+
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, cfgs, libs, externs, test_args, maybe_sysroot,
- display_warnings, linker, edition)
+ display_warnings, linker, edition, cg)
}
(true, false) => {
return test::run(Path::new(input), cfgs, libs, externs, test_args, crate_name,
- maybe_sysroot, display_warnings, linker, edition)
+ maybe_sysroot, display_warnings, linker, edition, cg)
}
(false, true) => return markdown::render(Path::new(input),
output.unwrap_or(PathBuf::from("doc")),
}
let output_format = matches.opt_str("w");
- let res = acquire_input(PathBuf::from(input), externs, edition, &matches, move |out| {
+
+ let res = acquire_input(PathBuf::from(input), externs, edition, cg, &matches, error_format,
+ move |out| {
let Output { krate, passes, renderinfo } = out;
info!("going to format");
match output_format.as_ref().map(|s| &**s) {
fn acquire_input<R, F>(input: PathBuf,
externs: Externs,
edition: Edition,
+ cg: CodegenOptions,
matches: &getopts::Matches,
+ error_format: ErrorOutputType,
f: F)
-> Result<R, String>
where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
match matches.opt_str("r").as_ref().map(|s| &**s) {
- Some("rust") => Ok(rust_input(input, externs, edition, matches, f)),
+ Some("rust") => Ok(rust_input(input, externs, edition, cg, matches, error_format, f)),
Some(s) => Err(format!("unknown input format: {}", s)),
- None => Ok(rust_input(input, externs, edition, matches, f))
+ None => Ok(rust_input(input, externs, edition, cg, matches, error_format, f))
}
}
fn rust_input<R, F>(cratefile: PathBuf,
externs: Externs,
edition: Edition,
+ cg: CodegenOptions,
matches: &getopts::Matches,
+ error_format: ErrorOutputType,
f: F) -> R
where R: 'static + Send,
F: 'static + Send + FnOnce(Output) -> R
let (mut krate, renderinfo) =
core::run_core(paths, cfgs, externs, Input::File(cratefile), triple, maybe_sysroot,
display_warnings, crate_name.clone(),
- force_unstable_if_unmarked, edition);
+ force_unstable_if_unmarked, edition, cg, error_format);
info!("finished with rustc");
use getopts;
use testing;
use rustc::session::search_paths::SearchPaths;
-use rustc::session::config::Externs;
+use rustc::session::config::{Externs, CodegenOptions};
use syntax::codemap::DUMMY_SP;
use syntax::edition::Edition;
/// Run any tests/code examples in the markdown file `input`.
pub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
mut test_args: Vec<String>, maybe_sysroot: Option<PathBuf>,
- display_warnings: bool, linker: Option<PathBuf>, edition: Edition) -> isize {
+ display_warnings: bool, linker: Option<PathBuf>, edition: Edition,
+ cg: CodegenOptions) -> isize {
let input_str = match load_string(input) {
Ok(s) => s,
Err(LoadStringError::ReadFail) => return 1,
let mut opts = TestOptions::default();
opts.no_crate_inject = true;
opts.display_warnings = display_warnings;
- let mut collector = Collector::new(input.to_owned(), cfgs, libs, externs,
+ let mut collector = Collector::new(input.to_owned(), cfgs, libs, cg, externs,
true, opts, maybe_sysroot, None,
Some(PathBuf::from(input)),
linker, edition);
use rustc::hir;
use rustc::hir::intravisit;
use rustc::session::{self, CompileIncomplete, config};
-use rustc::session::config::{OutputType, OutputTypes, Externs};
+use rustc::session::config::{OutputType, OutputTypes, Externs, CodegenOptions};
use rustc::session::search_paths::{SearchPaths, PathKind};
use rustc_metadata::dynamic_lib::DynamicLibrary;
use tempdir::TempDir;
maybe_sysroot: Option<PathBuf>,
display_warnings: bool,
linker: Option<PathBuf>,
- edition: Edition)
+ edition: Edition,
+ cg: CodegenOptions)
-> isize {
let input = config::Input::File(input_path.to_owned());
|| Some(env::current_exe().unwrap().parent().unwrap().parent().unwrap().to_path_buf())),
search_paths: libs.clone(),
crate_types: vec![config::CrateTypeDylib],
+ cg: cg.clone(),
externs: externs.clone(),
unstable_features: UnstableFeatures::from_environment(),
lint_cap: Some(::rustc::lint::Level::Allow),
let mut collector = Collector::new(crate_name,
cfgs,
libs,
+ cg,
externs,
false,
opts,
fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
cfgs: Vec<String>, libs: SearchPaths,
- externs: Externs,
+ cg: CodegenOptions, externs: Externs,
should_panic: bool, no_run: bool, as_test_harness: bool,
compile_fail: bool, mut error_codes: Vec<String>, opts: &TestOptions,
maybe_sysroot: Option<PathBuf>, linker: Option<PathBuf>, edition: Edition) {
cg: config::CodegenOptions {
prefer_dynamic: true,
linker,
- .. config::basic_codegen_options()
+ ..cg
},
test: as_test_harness,
unstable_features: UnstableFeatures::from_environment(),
cfgs: Vec<String>,
libs: SearchPaths,
+ cg: CodegenOptions,
externs: Externs,
use_headers: bool,
cratename: String,
}
impl Collector {
- pub fn new(cratename: String, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
- use_headers: bool, opts: TestOptions, maybe_sysroot: Option<PathBuf>,
- codemap: Option<Lrc<CodeMap>>, filename: Option<PathBuf>,
- linker: Option<PathBuf>, edition: Edition) -> Collector {
+ pub fn new(cratename: String, cfgs: Vec<String>, libs: SearchPaths, cg: CodegenOptions,
+ externs: Externs, use_headers: bool, opts: TestOptions,
+ maybe_sysroot: Option<PathBuf>, codemap: Option<Lrc<CodeMap>>,
+ filename: Option<PathBuf>, linker: Option<PathBuf>, edition: Edition) -> Collector {
Collector {
tests: Vec::new(),
names: Vec::new(),
cfgs,
libs,
+ cg,
externs,
use_headers,
cratename,
let name = self.generate_name(line, &filename);
let cfgs = self.cfgs.clone();
let libs = self.libs.clone();
+ let cg = self.cg.clone();
let externs = self.externs.clone();
let cratename = self.cratename.to_string();
let opts = self.opts.clone();
line,
cfgs,
libs,
+ cg,
externs,
should_panic,
no_run,
impl<'a, K: 'a, V: 'a> Drop for Drain<'a, K, V> {
fn drop(&mut self) {
- for _ in self {}
+ self.for_each(drop);
}
}
/// # Conversions
///
/// See the [module's toplevel documentation about conversions][conversions] for a discussion on
-/// the traits which `OsString` implements for conversions from/to native representations.
+/// the traits which `OsString` implements for [conversions] from/to native representations.
///
/// [`OsStr`]: struct.OsStr.html
/// [`&OsStr`]: struct.OsStr.html
/// [`new`]: #method.new
/// [`push`]: #method.push
/// [`as_os_str`]: #method.as_os_str
+/// [conversions]: index.html#conversions
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OsString {
/// references; the latter are owned strings.
///
/// See the [module's toplevel documentation about conversions][conversions] for a discussion on
-/// the traits which `OsStr` implements for conversions from/to native representations.
+/// the traits which `OsStr` implements for [conversions] from/to native representations.
///
/// [`OsString`]: struct.OsString.html
/// [`&str`]: ../primitive.str.html
//!
//! Once you are familiar with the contents of the standard library you may
//! begin to find the verbosity of the prose distracting. At this stage in your
-//! development you may want to press the **[-]** button near the top of the
+//! development you may want to press the `[-]` button near the top of the
//! page to collapse it into a more skimmable view.
//!
-//! While you are looking at that **[-]** button also notice the **[src]**
+//! While you are looking at that `[-]` button also notice the `[src]`
//! button. Rust's API documentation comes with the source code and you are
//! encouraged to read it. The standard library source is generally high
//! quality and a peek behind the curtains is often enlightening.
#![feature(rand)]
#![feature(raw)]
#![feature(rustc_attrs)]
+#![feature(std_internals)]
#![feature(stdsimd)]
#![feature(shrink_to)]
#![feature(slice_bytes)]
#[unstable(feature = "stdsimd", issue = "48556")]
#[cfg(all(not(stage0), not(test)))]
pub use stdsimd::simd;
-#[unstable(feature = "stdsimd", issue = "48556")]
+#[stable(feature = "simd_arch", since = "1.27.0")]
#[cfg(all(not(stage0), not(test)))]
pub use stdsimd::arch;
}
}
-/// A macro for defining #[cfg] if-else statements.
+/// A macro for defining `#[cfg]` if-else statements.
///
/// This is similar to the `if/elif` C preprocessor macro by allowing definition
/// of a cascade of `#[cfg]` cases, emitting the implementation which matches
/// first.
///
-/// This allows you to conveniently provide a long list #[cfg]'d blocks of code
+/// This allows you to conveniently provide a long list of `#[cfg]`'d blocks of code
/// without having to rewrite each clause multiple times.
macro_rules! cfg_if {
($(
//! * Executing a panic up to doing the actual implementation
//! * Shims around "try"
+use core::panic::BoxMeUp;
+
use io::prelude::*;
use any::Any;
use mem;
use ptr;
use raw;
-use sys::stdio::Stderr;
+use sys::stdio::{Stderr, stderr_prints_nothing};
use sys_common::rwlock::RWLock;
use sys_common::thread_info;
use sys_common::util;
data_ptr: *mut usize,
vtable_ptr: *mut usize) -> u32;
#[unwind(allowed)]
- fn __rust_start_panic(data: usize, vtable: usize) -> u32;
+ fn __rust_start_panic(payload: usize) -> u32;
}
#[derive(Copy, Clone)]
};
let location = info.location().unwrap(); // The current implementation always returns Some
- let file = location.file();
- let line = location.line();
- let col = location.column();
let msg = match info.payload().downcast_ref::<&'static str>() {
Some(s) => *s,
let name = thread.as_ref().and_then(|t| t.name()).unwrap_or("<unnamed>");
let write = |err: &mut ::io::Write| {
- let _ = writeln!(err, "thread '{}' panicked at '{}', {}:{}:{}",
- name, msg, file, line, col);
+ let _ = writeln!(err, "thread '{}' panicked at '{}', {}",
+ name, msg, location);
#[cfg(feature = "backtrace")]
{
let prev = LOCAL_STDERR.with(|s| s.borrow_mut().take());
match (prev, err.as_mut()) {
- (Some(mut stderr), _) => {
- write(&mut *stderr);
- let mut s = Some(stderr);
- LOCAL_STDERR.with(|slot| {
- *slot.borrow_mut() = s.take();
- });
- }
- (None, Some(ref mut err)) => { write(err) }
- _ => {}
+ (Some(mut stderr), _) => {
+ write(&mut *stderr);
+ let mut s = Some(stderr);
+ LOCAL_STDERR.with(|slot| {
+ *slot.borrow_mut() = s.take();
+ });
+ }
+ (None, Some(ref mut err)) => { write(err) }
+ _ => {}
}
}
// panic + OOM properly anyway (see comment in begin_panic
// below).
- let mut s = String::new();
- let _ = s.write_fmt(*msg);
- rust_panic_with_hook(Box::new(s), Some(msg), file_line_col)
+ rust_panic_with_hook(&mut PanicPayload::new(msg), Some(msg), file_line_col);
+
+ struct PanicPayload<'a> {
+ inner: &'a fmt::Arguments<'a>,
+ string: Option<String>,
+ }
+
+ impl<'a> PanicPayload<'a> {
+ fn new(inner: &'a fmt::Arguments<'a>) -> PanicPayload<'a> {
+ PanicPayload { inner, string: None }
+ }
+
+ fn fill(&mut self) -> &mut String {
+ let inner = self.inner;
+ self.string.get_or_insert_with(|| {
+ let mut s = String::new();
+ drop(s.write_fmt(*inner));
+ s
+ })
+ }
+ }
+
+ unsafe impl<'a> BoxMeUp for PanicPayload<'a> {
+ fn box_me_up(&mut self) -> *mut (Any + Send) {
+ let contents = mem::replace(self.fill(), String::new());
+ Box::into_raw(Box::new(contents))
+ }
+
+ fn get(&mut self) -> &(Any + Send) {
+ self.fill()
+ }
+ }
}
/// This is the entry point of panicking for panic!() and assert!().
// be performed in the parent of this thread instead of the thread that's
// panicking.
- rust_panic_with_hook(Box::new(msg), None, file_line_col)
+ rust_panic_with_hook(&mut PanicPayload::new(msg), None, file_line_col);
+
+ struct PanicPayload<A> {
+ inner: Option<A>,
+ }
+
+ impl<A: Send + 'static> PanicPayload<A> {
+ fn new(inner: A) -> PanicPayload<A> {
+ PanicPayload { inner: Some(inner) }
+ }
+ }
+
+ unsafe impl<A: Send + 'static> BoxMeUp for PanicPayload<A> {
+ fn box_me_up(&mut self) -> *mut (Any + Send) {
+ let data = match self.inner.take() {
+ Some(a) => Box::new(a) as Box<Any + Send>,
+ None => Box::new(()),
+ };
+ Box::into_raw(data)
+ }
+
+ fn get(&mut self) -> &(Any + Send) {
+ match self.inner {
+ Some(ref a) => a,
+ None => &(),
+ }
+ }
+ }
}
-/// Executes the primary logic for a panic, including checking for recursive
-/// panics and panic hooks.
+/// Central point for dispatching panics.
///
-/// This is the entry point or panics from libcore, formatted panics, and
-/// `Box<Any>` panics. Here we'll verify that we're not panicking recursively,
-/// run panic hooks, and then delegate to the actual implementation of panics.
-#[inline(never)]
-#[cold]
-fn rust_panic_with_hook(payload: Box<Any + Send>,
+/// Executes the primary logic for a panic, including checking for recursive
+/// panics, panic hooks, and finally dispatching to the panic runtime to either
+/// abort or unwind.
+fn rust_panic_with_hook(payload: &mut BoxMeUp,
message: Option<&fmt::Arguments>,
file_line_col: &(&'static str, u32, u32)) -> ! {
let (file, line, col) = *file_line_col;
}
unsafe {
- let info = PanicInfo::internal_constructor(
- &*payload,
+ let mut info = PanicInfo::internal_constructor(
message,
Location::internal_constructor(file, line, col),
);
HOOK_LOCK.read();
match HOOK {
- Hook::Default => default_hook(&info),
- Hook::Custom(ptr) => (*ptr)(&info),
+ // Some platforms know that printing to stderr won't ever actually
+ // print anything, and if that's the case we can skip the default
+ // hook.
+ Hook::Default if stderr_prints_nothing() => {}
+ Hook::Default => {
+ info.set_payload(payload.get());
+ default_hook(&info);
+ }
+ Hook::Custom(ptr) => {
+ info.set_payload(payload.get());
+ (*ptr)(&info);
+ }
}
HOOK_LOCK.read_unlock();
}
/// Shim around rust_panic. Called by resume_unwind.
pub fn update_count_then_panic(msg: Box<Any + Send>) -> ! {
update_panic_count(1);
- rust_panic(msg)
+
+ struct RewrapBox(Box<Any + Send>);
+
+ unsafe impl BoxMeUp for RewrapBox {
+ fn box_me_up(&mut self) -> *mut (Any + Send) {
+ Box::into_raw(mem::replace(&mut self.0, Box::new(())))
+ }
+
+ fn get(&mut self) -> &(Any + Send) {
+ &*self.0
+ }
+ }
+
+ rust_panic(&mut RewrapBox(msg))
}
/// A private no-mangle function on which to slap yer breakpoints.
#[no_mangle]
#[allow(private_no_mangle_fns)] // yes we get it, but we like breakpoints
-pub fn rust_panic(msg: Box<Any + Send>) -> ! {
+pub fn rust_panic(mut msg: &mut BoxMeUp) -> ! {
let code = unsafe {
- let obj = mem::transmute::<_, raw::TraitObject>(msg);
- __rust_start_panic(obj.data as usize, obj.vtable as usize)
+ let obj = &mut msg as *mut &mut BoxMeUp;
+ __rust_start_panic(obj as usize)
};
rtabort!("failed to initiate panic, error {}", code)
}
}
pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+ false
+}
}
pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+ false
+}
reset_sigpipe();
}
- #[cfg(not(any(target_os = "emscripten", target_os="fuchsia")))]
+ #[cfg(not(any(target_os = "emscripten", target_os = "fuchsia")))]
unsafe fn reset_sigpipe() {
assert!(signal(libc::SIGPIPE, libc::SIG_IGN) != libc::SIG_ERR);
}
- #[cfg(any(target_os = "emscripten", target_os="fuchsia"))]
+ #[cfg(any(target_os = "emscripten", target_os = "fuchsia"))]
unsafe fn reset_sigpipe() {}
}
}
pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+ false
+}
if *mode >= 0 {
*mode += 1;
} else {
- panic!("rwlock locked for writing");
+ rtabort!("rwlock locked for writing");
}
}
if *mode == 0 {
*mode = -1;
} else {
- panic!("rwlock locked for reading")
+ rtabort!("rwlock locked for reading")
}
}
pub fn is_ebadf(_err: &io::Error) -> bool {
true
}
+
+pub fn stderr_prints_nothing() -> bool {
+ !cfg!(feature = "wasm_syscall")
+}
0 => {}
n => return n as *mut _,
}
- let mut re = Box::new(ReentrantMutex::uninitialized());
+ let mut re = box ReentrantMutex::uninitialized();
re.init();
let re = Box::into_raw(re);
match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {
// idea is that on windows we use a slightly smaller buffer that's
// been seen to be acceptable.
pub const STDIN_BUF_SIZE: usize = 8 * 1024;
+
+pub fn stderr_prints_nothing() -> bool {
+ false
+}
/// Controls how the backtrace should be formatted.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum PrintFormat {
- /// Show all the frames with absolute path for files.
- Full = 2,
/// Show only relevant data from the backtrace.
- Short = 3,
+ Short = 2,
+ /// Show all the frames with absolute path for files.
+ Full = 3,
}
// For now logging is turned off by default, and this function checks to see
pub fn log_enabled() -> Option<PrintFormat> {
static ENABLED: atomic::AtomicIsize = atomic::AtomicIsize::new(0);
match ENABLED.load(Ordering::SeqCst) {
- 0 => {},
+ 0 => {}
1 => return None,
- 2 => return Some(PrintFormat::Full),
- 3 => return Some(PrintFormat::Short),
- _ => unreachable!(),
+ 2 => return Some(PrintFormat::Short),
+ _ => return Some(PrintFormat::Full),
}
let val = match env::var_os("RUST_BACKTRACE") {
use sync::Once;
use sys;
+macro_rules! rtabort {
+ ($($t:tt)*) => (::sys_common::util::abort(format_args!($($t)*)))
+}
+
+macro_rules! rtassert {
+ ($e:expr) => (if !$e {
+ rtabort!(concat!("assertion failed: ", stringify!($e)));
+ })
+}
+
pub mod at_exit_imp;
#[cfg(feature = "backtrace")]
pub mod backtrace;
if at_exit_imp::push(Box::new(f)) {Ok(())} else {Err(())}
}
-macro_rules! rtabort {
- ($($t:tt)*) => (::sys_common::util::abort(format_args!($($t)*)))
-}
-
/// One-time runtime cleanup.
pub fn cleanup() {
static CLEANUP: Once = Once::new();
self.key.store(key, Ordering::SeqCst);
}
INIT_LOCK.unlock();
- assert!(key != 0);
+ rtassert!(key != 0);
return key
}
imp::destroy(key1);
key2
};
- assert!(key != 0);
+ rtassert!(key != 0);
match self.key.compare_and_swap(0, key as usize, Ordering::SeqCst) {
// The CAS succeeded, so we've created the actual key
0 => key as usize,
use fmt;
use io::prelude::*;
-use sys::stdio::Stderr;
+use sys::stdio::{Stderr, stderr_prints_nothing};
use thread;
pub fn dumb_print(args: fmt::Arguments) {
+ if stderr_prints_nothing() {
+ return
+ }
let _ = Stderr::new().map(|mut stderr| stderr.write_fmt(args));
}
&'static $crate::cell::UnsafeCell<
$crate::option::Option<$t>>>
{
+ #[cfg(target_arch = "wasm32")]
+ static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
+ $crate::thread::__StaticLocalKeyInner::new();
+
#[thread_local]
- #[cfg(target_thread_local)]
+ #[cfg(all(target_thread_local, not(target_arch = "wasm32")))]
static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
$crate::thread::__FastLocalKeyInner::new();
- #[cfg(not(target_thread_local))]
+ #[cfg(all(not(target_thread_local), not(target_arch = "wasm32")))]
static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
$crate::thread::__OsLocalKeyInner::new();
}
}
+/// On some platforms like wasm32 there's no threads, so no need to generate
+/// thread locals and we can instead just use plain statics!
+#[doc(hidden)]
+#[cfg(target_arch = "wasm32")]
+pub mod statik {
+ use cell::UnsafeCell;
+ use fmt;
+
+ pub struct Key<T> {
+ inner: UnsafeCell<Option<T>>,
+ }
+
+ unsafe impl<T> ::marker::Sync for Key<T> { }
+
+ impl<T> fmt::Debug for Key<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.pad("Key { .. }")
+ }
+ }
+
+ impl<T> Key<T> {
+ pub const fn new() -> Key<T> {
+ Key {
+ inner: UnsafeCell::new(None),
+ }
+ }
+
+ pub unsafe fn get(&self) -> Option<&'static UnsafeCell<Option<T>>> {
+ Some(&*(&self.inner as *const _))
+ }
+ }
+}
+
#[doc(hidden)]
#[cfg(target_thread_local)]
pub mod fast {
// where fast TLS was not available; end-user code is compiled with fast TLS
// where available, but both are needed.
+#[unstable(feature = "libstd_thread_internals", issue = "0")]
+#[cfg(target_arch = "wasm32")]
+#[doc(hidden)] pub use self::local::statik::Key as __StaticLocalKeyInner;
#[unstable(feature = "libstd_thread_internals", issue = "0")]
#[cfg(target_thread_local)]
#[doc(hidden)] pub use self::local::fast::Key as __FastLocalKeyInner;
{
// We basically look at two token trees here, denoted as #1 and #2 below
let span = match parse_kleene_op(input, span) {
- // #1 is a `+` or `*` KleeneOp
- //
- // `?` is ambiguous: it could be a separator or a Kleene::ZeroOrOne, so we need to look
- // ahead one more token to be sure.
- Ok(Ok(op)) if op != KleeneOp::ZeroOrOne => return (None, op),
-
- // #1 is `?` token, but it could be a Kleene::ZeroOrOne without a separator or it could
- // be a `?` separator followed by any Kleene operator. We need to look ahead 1 token to
- // find out which.
- Ok(Ok(op)) => {
- assert_eq!(op, KleeneOp::ZeroOrOne);
-
- // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
- let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
- kleene_op(tok2).is_some()
- } else {
- false
- };
-
- if is_1_sep {
- // #1 is a separator and #2 should be a KleepeOp::*
- // (N.B. We need to advance the input iterator.)
- match parse_kleene_op(input, span) {
- // #2 is a KleeneOp (this is the only valid option) :)
- Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
- if !features.macro_at_most_once_rep
- && !attr::contains_name(attrs, "allow_internal_unstable")
- {
- let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
- emit_feature_err(
- sess,
- "macro_at_most_once_rep",
- span,
- GateIssue::Language,
- explain,
- );
- }
- return (Some(token::Question), op);
- }
- Ok(Ok(op)) => return (Some(token::Question), op),
-
- // #2 is a random token (this is an error) :(
- Ok(Err((_, span))) => span,
-
- // #2 is not even a token at all :(
- Err(span) => span,
- }
- } else {
- if !features.macro_at_most_once_rep
- && !attr::contains_name(attrs, "allow_internal_unstable")
- {
- let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
- emit_feature_err(
- sess,
- "macro_at_most_once_rep",
- span,
- GateIssue::Language,
- explain,
- );
- }
-
- // #2 is a random tree and #1 is KleeneOp::ZeroOrOne
- return (None, op);
+ // #1 is any KleeneOp (`?`)
+ Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
+ if !features.macro_at_most_once_rep
+ && !attr::contains_name(attrs, "allow_internal_unstable")
+ {
+ let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
+ emit_feature_err(
+ sess,
+ "macro_at_most_once_rep",
+ span,
+ GateIssue::Language,
+ explain,
+ );
}
+ return (None, op);
}
+ // #1 is any KleeneOp (`+`, `*`)
+ Ok(Ok(op)) => return (None, op),
+
// #1 is a separator followed by #2, a KleeneOp
Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
// #2 is a KleeneOp :D
GateIssue::Language,
explain,
);
+ } else {
+ sess.span_diagnostic
+ .span_err(span, "`?` macro repetition does not allow a separator");
}
- return (Some(tok), op);
+ return (None, op);
}
Ok(Ok(op)) => return (Some(tok), op),
Err(span) => span,
};
- if !features.macro_at_most_once_rep
- && !attr::contains_name(attrs, "allow_internal_unstable")
- {
+ if !features.macro_at_most_once_rep && !attr::contains_name(attrs, "allow_internal_unstable") {
sess.span_diagnostic
.span_err(span, "expected one of: `*`, `+`, or `?`");
} else {
// allow `repr(simd)`, and importing the various simd intrinsics
(active, repr_simd, "1.4.0", Some(27731), None),
- // Allows cfg(target_feature = "...").
- (active, cfg_target_feature, "1.4.0", Some(29717), None),
-
// allow `extern "platform-intrinsic" { ... }`
(active, platform_intrinsics, "1.4.0", Some(27731), None),
(active, use_extern_macros, "1.15.0", Some(35896), None),
- // Allows #[target_feature(...)]
- (active, target_feature, "1.15.0", None, None),
-
// `extern "ptx-*" fn()`
(active, abi_ptx, "1.15.0", None, None),
// Future-proofing enums/structs with #[non_exhaustive] attribute (RFC 2008)
(active, non_exhaustive, "1.22.0", Some(44109), None),
- // allow `'_` placeholder lifetimes
- (active, underscore_lifetimes, "1.22.0", Some(44524), None),
-
- // Default match binding modes (RFC 2005)
- (active, match_default_bindings, "1.22.0", Some(42640), None),
-
// Trait object syntax with `dyn` prefix
(active, dyn_trait, "1.22.0", Some(44662), Some(Edition::Edition2018)),
// Allows macro invocations in `extern {}` blocks
(active, macros_in_extern, "1.27.0", Some(49476), None),
+
+ // unstable #[target_feature] directives
+ (active, arm_target_feature, "1.27.0", None, None),
+ (active, aarch64_target_feature, "1.27.0", None, None),
+ (active, hexagon_target_feature, "1.27.0", None, None),
+ (active, powerpc_target_feature, "1.27.0", None, None),
+ (active, mips_target_feature, "1.27.0", None, None),
+ (active, avx512_target_feature, "1.27.0", None, None),
+ (active, mmx_target_feature, "1.27.0", None, None),
+ (active, sse4a_target_feature, "1.27.0", None, None),
+ (active, tbm_target_feature, "1.27.0", None, None),
);
declare_features! (
(accepted, underscore_lifetimes, "1.26.0", Some(44524), None),
// Allows attributes on lifetime/type formal parameters in generics (RFC 1327)
(accepted, generic_param_attrs, "1.26.0", Some(48848), None),
+ // Allows cfg(target_feature = "...").
+ (accepted, cfg_target_feature, "1.27.0", Some(29717), None),
+ // Allows #[target_feature(...)]
+ (accepted, target_feature, "1.27.0", None, None),
);
// If you change this, please modify src/doc/unstable-book as well. You must
"the `#[naked]` attribute \
is an experimental feature",
cfg_fn!(naked_functions))),
- ("target_feature", Whitelisted, Gated(
- Stability::Unstable, "target_feature",
- "the `#[target_feature]` attribute is an experimental feature",
- cfg_fn!(target_feature))),
+ ("target_feature", Normal, Ungated),
("export_name", Whitelisted, Ungated),
("inline", Whitelisted, Ungated),
("link", Whitelisted, Ungated),
// cfg(...)'s that are feature gated
const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
// (name in cfg, feature, function to check if the feature is enabled)
- ("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
("target_thread_local", "cfg_target_thread_local", cfg_fn!(cfg_target_thread_local)),
("target_has_atomic", "cfg_target_has_atomic", cfg_fn!(cfg_target_has_atomic)),
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Machinery for hygienic macros, inspired by the MTWT[1] paper.
+//! Machinery for hygienic macros, inspired by the `MTWT[1]` paper.
//!
-//! [1] Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. 2012.
+//! `[1]` Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. 2012.
//! *Macros that work together: Compile-time bindings, partial expansion,
//! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216.
//! DOI=10.1017/S0956796812000093 <http://dx.doi.org/10.1017/S0956796812000093>
(46, Offsetof, "offsetof")
(47, Override, "override")
(48, Priv, "priv")
- (49, Proc, "proc")
- (50, Pure, "pure")
- (51, Sizeof, "sizeof")
- (52, Typeof, "typeof")
- (53, Unsized, "unsized")
- (54, Virtual, "virtual")
- (55, Yield, "yield")
+ (49, Pure, "pure")
+ (50, Sizeof, "sizeof")
+ (51, Typeof, "typeof")
+ (52, Unsized, "unsized")
+ (53, Virtual, "virtual")
+ (54, Yield, "yield")
// Special lifetime names
- (56, UnderscoreLifetime, "'_")
- (57, StaticLifetime, "'static")
+ (55, UnderscoreLifetime, "'_")
+ (56, StaticLifetime, "'static")
// Weak keywords, have special meaning only in specific contexts.
- (58, Auto, "auto")
- (59, Catch, "catch")
- (60, Default, "default")
- (61, Dyn, "dyn")
- (62, Union, "union")
+ (57, Auto, "auto")
+ (58, Catch, "catch")
+ (59, Default, "default")
+ (60, Dyn, "dyn")
+ (61, Union, "union")
}
// If an interner exists, return it. Otherwise, prepare a fresh one.
pub fn filter_tests(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
let mut filtered = tests;
-
// Remove tests that don't match the test filter
filtered = match opts.filter {
None => filtered,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/// A macro for defining #[cfg] if-else statements.
+/// A macro for defining `#[cfg]` if-else statements.
///
/// This is similar to the `if/elif` C preprocessor macro by allowing definition
/// of a cascade of `#[cfg]` cases, emitting the implementation which matches
/// first.
///
-/// This allows you to conveniently provide a long list #[cfg]'d blocks of code
+/// This allows you to conveniently provide a long list `#[cfg]`'d blocks of code
/// without having to rewrite each clause multiple times.
macro_rules! cfg_if {
($(
extern crate rustc_driver;
-fn main() { rustc_driver::main() }
+fn main() {
+ rustc_driver::set_sigpipe_handler();
+ rustc_driver::main()
+}
-Subproject commit bcb720e55861c38db47f2ebdf26b7198338cb39d
+Subproject commit effdcd0132d17b6c4badc67b4b6d3fdf749a2d22
inside_closure(a)
};
outside_closure_1(a); //[ast]~ ERROR cannot borrow `*a` as mutable because previous closure requires unique access
- //[mir]~^ ERROR cannot borrow `*a` as mutable because previous closure requires unique access
+ //[mir]~^ ERROR cannot borrow `*a` as mutable because previous closure requires unique access
outside_closure_2(a); //[ast]~ ERROR cannot borrow `*a` as immutable because previous closure requires unique access
- //[mir]~^ ERROR cannot borrow `*a` as immutable because previous closure requires unique access
+ //[mir]~^ ERROR cannot borrow `*a` as immutable because previous closure requires unique access
+
+ drop(bar);
}
fn main() {
// ignore-sparc
// revisions: ast mir
-//[mir]compile-flags: -Z borrowck=mir -Z nll
+//[mir]compile-flags: -Z borrowck=mir
#![feature(asm)]
let mut x = 3;
let c1 = || x = 4;
let c2 = || x * 5; //[ast]~ ERROR cannot borrow `x`
- //[mir]~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
+ //[mir]~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
+ drop(c1);
}
fn b() {
let c1 = || set(&mut x);
let c2 = || get(&x); //[ast]~ ERROR cannot borrow `x`
//[mir]~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
+ drop(c1);
}
fn c() {
let c1 = || set(&mut x);
let c2 = || x * 5; //[ast]~ ERROR cannot borrow `x`
//[mir]~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
+ drop(c1);
}
fn d() {
let c2 = || x * 5;
x = 5; //[ast]~ ERROR cannot assign
//[mir]~^ ERROR cannot assign to `x` because it is borrowed
+ drop(c2);
}
fn e() {
let c1 = || get(&x);
x = 5; //[ast]~ ERROR cannot assign
//[mir]~^ ERROR cannot assign to `x` because it is borrowed
+ drop(c1);
}
fn f() {
let c1 = || get(&*x);
*x = 5; //[ast]~ ERROR cannot assign to `*x`
//[mir]~^ ERROR cannot assign to `*x` because it is borrowed
+ drop(c1);
}
fn g() {
let c1 = || get(&*x.f);
*x.f = 5; //[ast]~ ERROR cannot assign to `*x.f`
//[mir]~^ ERROR cannot assign to `*x.f` because it is borrowed
+ drop(c1);
}
fn h() {
let c1 = || get(&*x.f);
let c2 = || *x.f = 5; //[ast]~ ERROR cannot borrow `x` as mutable
//[mir]~^ ERROR cannot borrow `x` as mutable because it is also borrowed as immutable
+ drop(c1);
}
fn main() {
// Local and field from struct
{
let mut f = Foo { x: 22 };
- let _x = f.x();
+ let x = f.x();
f.x; //[ast]~ ERROR cannot use `f.x` because it was mutably borrowed
- //[mir]~^ ERROR cannot use `f.x` because it was mutably borrowed
+ //[mir]~^ ERROR cannot use `f.x` because it was mutably borrowed
+ drop(x);
}
// Local and field from tuple-struct
{
let mut g = Bar(22);
- let _0 = g.x();
+ let x = g.x();
g.0; //[ast]~ ERROR cannot use `g.0` because it was mutably borrowed
//[mir]~^ ERROR cannot use `g.0` because it was mutably borrowed
+ drop(x);
}
// Local and field from tuple
{
let mut h = (22, 23);
- let _0 = &mut h.0;
+ let x = &mut h.0;
h.0; //[ast]~ ERROR cannot use `h.0` because it was mutably borrowed
//[mir]~^ ERROR cannot use `h.0` because it was mutably borrowed
+ drop(x);
}
// Local and field from enum
{
let mut e = Baz::X(2);
- let _e0 = e.x();
+ let x = e.x();
match e { //[mir]~ ERROR cannot use `e` because it was mutably borrowed
Baz::X(value) => value
//[ast]~^ ERROR cannot use `e.0` because it was mutably borrowed
//[mir]~^^ ERROR cannot use `e.0` because it was mutably borrowed
};
+ drop(x);
}
// Local and field from union
unsafe {
let mut u = U { b: 0 };
- let _ra = &mut u.a;
+ let x = &mut u.a;
u.a; //[ast]~ ERROR cannot use `u.a` because it was mutably borrowed
//[mir]~^ ERROR cannot use `u.a` because it was mutably borrowed
+ drop(x);
}
// Deref and field from struct
{
let mut f = Box::new(Foo { x: 22 });
- let _x = f.x();
+ let x = f.x();
f.x; //[ast]~ ERROR cannot use `f.x` because it was mutably borrowed
//[mir]~^ ERROR cannot use `f.x` because it was mutably borrowed
+ drop(x);
}
// Deref and field from tuple-struct
{
let mut g = Box::new(Bar(22));
- let _0 = g.x();
+ let x = g.x();
g.0; //[ast]~ ERROR cannot use `g.0` because it was mutably borrowed
//[mir]~^ ERROR cannot use `g.0` because it was mutably borrowed
+ drop(x);
}
// Deref and field from tuple
{
let mut h = Box::new((22, 23));
- let _0 = &mut h.0;
+ let x = &mut h.0;
h.0; //[ast]~ ERROR cannot use `h.0` because it was mutably borrowed
//[mir]~^ ERROR cannot use `h.0` because it was mutably borrowed
+ drop(x);
}
// Deref and field from enum
{
let mut e = Box::new(Baz::X(3));
- let _e0 = e.x();
+ let x = e.x();
match *e { //[mir]~ ERROR cannot use `*e` because it was mutably borrowed
Baz::X(value) => value
//[ast]~^ ERROR cannot use `e.0` because it was mutably borrowed
//[mir]~^^ ERROR cannot use `e.0` because it was mutably borrowed
};
+ drop(x);
}
// Deref and field from union
unsafe {
let mut u = Box::new(U { b: 0 });
- let _ra = &mut u.a;
+ let x = &mut u.a;
u.a; //[ast]~ ERROR cannot use `u.a` because it was mutably borrowed
//[mir]~^ ERROR cannot use `u.a` because it was mutably borrowed
+ drop(x);
}
// Constant index
{
let mut v = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
- let _v = &mut v;
+ let x = &mut v;
match v { //[mir]~ ERROR cannot use `v` because it was mutably borrowed
&[x, _, .., _, _] => println!("{}", x),
//[ast]~^ ERROR cannot use `v[..]` because it was mutably borrowed
//[mir]~^^ ERROR cannot use `v[..]` because it was mutably borrowed
_ => panic!("other case"),
}
+ drop(x);
}
// Subslices
{
let mut v = &[1, 2, 3, 4, 5];
- let _v = &mut v;
+ let x = &mut v;
match v { //[mir]~ ERROR cannot use `v` because it was mutably borrowed
&[x..] => println!("{:?}", x),
//[ast]~^ ERROR cannot use `v[..]` because it was mutably borrowed
//[mir]~^^ ERROR cannot use `v[..]` because it was mutably borrowed
_ => panic!("other case"),
}
+ drop(x);
}
// Downcasted field
{
enum E<X> { A(X), B { x: X } }
let mut e = E::A(3);
- let _e = &mut e;
+ let x = &mut e;
match e { //[mir]~ ERROR cannot use `e` because it was mutably borrowed
E::A(ref ax) =>
//[ast]~^ ERROR cannot borrow `e.0` as immutable because `e` is also borrowed as mutable
//[mir]~^^ ERROR cannot borrow `e.x` as immutable because it is also borrowed as mutable
println!("e.bx: {:?}", bx),
}
+ drop(x);
}
// Field in field
{
struct F { x: u32, y: u32 };
struct S { x: F, y: (u32, u32), };
let mut s = S { x: F { x: 1, y: 2}, y: (999, 998) };
- let _s = &mut s;
+ let x = &mut s;
match s { //[mir]~ ERROR cannot use `s` because it was mutably borrowed
S { y: (ref y0, _), .. } =>
//[ast]~^ ERROR cannot borrow `s.y.0` as immutable because `s` is also borrowed as mutable
println!("x0: {:?}", x0),
_ => panic!("other case"),
}
+ drop(x);
}
// Field of ref
{
let p: &'a u8 = &*block.current;
//[mir]~^ ERROR cannot borrow `*block.current` as immutable because it is also borrowed as mutable
// No errors in AST because of issue rust#38899
+ drop(x);
}
}
// Field of ptr
let p : *const u8 = &*(*block).current;
//[mir]~^ ERROR cannot borrow `*block.current` as immutable because it is also borrowed as mutable
// No errors in AST because of issue rust#38899
+ drop(x);
}
}
// Field of index
{
struct F {x: u32, y: u32};
let mut v = &[F{x: 1, y: 2}, F{x: 3, y: 4}];
- let _v = &mut v;
+ let x = &mut v;
v[0].y;
//[ast]~^ ERROR cannot use `v[..].y` because it was mutably borrowed
//[mir]~^^ ERROR cannot use `v[..].y` because it was mutably borrowed
//[mir]~| ERROR cannot use `*v` because it was mutably borrowed
+ drop(x);
}
// Field of constant index
{
struct F {x: u32, y: u32};
let mut v = &[F{x: 1, y: 2}, F{x: 3, y: 4}];
- let _v = &mut v;
+ let x = &mut v;
match v { //[mir]~ ERROR cannot use `v` because it was mutably borrowed
&[_, F {x: ref xf, ..}] => println!("{}", xf),
//[mir]~^ ERROR cannot borrow `v[..].x` as immutable because it is also borrowed as mutable
// No errors in AST
_ => panic!("other case")
}
+ drop(x);
}
// Field from upvar
{
}
// Field from upvar nested
{
+ // FIXME(#49824) -- the free region error below should probably not be there
let mut x = 0;
|| {
- || {
- let y = &mut x;
- &mut x; //[ast]~ ERROR cannot borrow `**x` as mutable more than once at a time
- //[mir]~^ ERROR cannot borrow `x` as mutable more than once at a time
- *y = 1;
+ || { //[mir]~ ERROR free region `` does not outlive
+ let y = &mut x;
+ &mut x; //[ast]~ ERROR cannot borrow `**x` as mutable more than once at a time
+ //[mir]~^ ERROR cannot borrow `x` as mutable more than once at a time
+ *y = 1;
+ drop(y);
}
};
}
fn main() {
let mut _a = 3;
- let _b = &mut _a;
+ let b = &mut _a;
{
- let _c = &*_b;
+ let c = &*b;
_a = 4; //[ast]~ ERROR cannot assign to `_a`
- //[mir]~^ ERROR cannot assign to `_a` because it is borrowed
+ //[mir]~^ ERROR cannot assign to `_a` because it is borrowed
+ drop(c);
}
+ drop(b);
}
// fact no outstanding loan of x!
x = Some(0);
}
- Some(ref __isize) => {
+ Some(ref r) => {
x = Some(1); //[ast]~ ERROR cannot assign
- //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+ //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+ drop(r);
}
}
x.clone(); // just to prevent liveness warnings
Foo::A(x) => x //[ast]~ ERROR [E0503]
//[mir]~^ ERROR [E0503]
};
+ drop(p);
}
fn main() {
let mut x = 1;
- let _x = &mut x;
+ let r = &mut x;
let _ = match x { //[mir]~ ERROR [E0503]
x => x + 1, //[ast]~ ERROR [E0503]
//[mir]~^ ERROR [E0503]
y => y + 2, //[ast]~ ERROR [E0503]
//[mir]~^ ERROR [E0503]
};
+ drop(r);
}
fn main() {
let mut x = 1;
- let mut addr;
+ let mut addr = vec![];
loop {
match 1 {
- 1 => { addr = &mut x; } //[ast]~ ERROR [E0499]
+ 1 => { addr.push(&mut x); } //[ast]~ ERROR [E0499]
//[mir]~^ ERROR [E0499]
- 2 => { addr = &mut x; } //[ast]~ ERROR [E0499]
+ 2 => { addr.push(&mut x); } //[ast]~ ERROR [E0499]
//[mir]~^ ERROR [E0499]
- _ => { addr = &mut x; } //[ast]~ ERROR [E0499]
+ _ => { addr.push(&mut x); } //[ast]~ ERROR [E0499]
//[mir]~^ ERROR [E0499]
}
}
s[2] = 20;
//[ast]~^ ERROR cannot assign to immutable indexed content
//[mir]~^^ ERROR cannot assign to immutable item
+ drop(rs);
}
Some(ref i) => {
// But on this branch, `i` is an outstanding borrow
x = Some(*i+1); //[ast]~ ERROR cannot assign to `x`
- //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+ //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+ drop(i);
}
}
x.clone(); // just to prevent liveness warnings
{
let ra = &u.a;
let ra2 = &u.a; // OK
+ drop(ra);
}
{
let ra = &u.a;
let a = u.a; // OK
+ drop(ra);
}
{
let ra = &u.a;
let rma = &mut u.a; //[ast]~ ERROR cannot borrow `u.a` as mutable because it is also borrowed as immutable
//[mir]~^ ERROR cannot borrow `u.a` as mutable because it is also borrowed as immutable
+ drop(ra);
}
{
let ra = &u.a;
u.a = 1; //[ast]~ ERROR cannot assign to `u.a` because it is borrowed
//[mir]~^ ERROR cannot assign to `u.a` because it is borrowed
+ drop(ra);
}
// Imm borrow, other field
{
let ra = &u.a;
let rb = &u.b; // OK
+ drop(ra);
}
{
let ra = &u.a;
let b = u.b; // OK
+ drop(ra);
}
{
let ra = &u.a;
let rmb = &mut u.b; //[ast]~ ERROR cannot borrow `u` (via `u.b`) as mutable because `u` is also borrowed as immutable (via `u.a`)
//[mir]~^ ERROR cannot borrow `u.b` as mutable because it is also borrowed as immutable
+ drop(ra);
}
{
let ra = &u.a;
u.b = 1; //[ast]~ ERROR cannot assign to `u.b` because it is borrowed
//[mir]~^ ERROR cannot assign to `u.b` because it is borrowed
+ drop(ra);
}
// Mut borrow, same field
{
let rma = &mut u.a;
let ra = &u.a; //[ast]~ ERROR cannot borrow `u.a` as immutable because it is also borrowed as mutable
//[mir]~^ ERROR cannot borrow `u.a` as immutable because it is also borrowed as mutable
+ drop(rma);
}
{
let ra = &mut u.a;
let a = u.a; //[ast]~ ERROR cannot use `u.a` because it was mutably borrowed
//[mir]~^ ERROR cannot use `u.a` because it was mutably borrowed
+ drop(ra);
}
{
let rma = &mut u.a;
let rma2 = &mut u.a; //[ast]~ ERROR cannot borrow `u.a` as mutable more than once at a time
//[mir]~^ ERROR cannot borrow `u.a` as mutable more than once at a time
+ drop(rma);
}
{
let rma = &mut u.a;
u.a = 1; //[ast]~ ERROR cannot assign to `u.a` because it is borrowed
//[mir]~^ ERROR cannot assign to `u.a` because it is borrowed
+ drop(rma);
}
// Mut borrow, other field
{
let rma = &mut u.a;
let rb = &u.b; //[ast]~ ERROR cannot borrow `u` (via `u.b`) as immutable because `u` is also borrowed as mutable (via `u.a`)
//[mir]~^ ERROR cannot borrow `u.b` as immutable because it is also borrowed as mutable
+ drop(rma);
}
{
let ra = &mut u.a;
let b = u.b; //[ast]~ ERROR cannot use `u.b` because it was mutably borrowed
//[mir]~^ ERROR cannot use `u.b` because it was mutably borrowed
+ drop(ra);
}
{
let rma = &mut u.a;
let rmb2 = &mut u.b; //[ast]~ ERROR cannot borrow `u` (via `u.b`) as mutable more than once at a time
//[mir]~^ ERROR cannot borrow `u.b` as mutable more than once at a time
+ drop(rma);
}
{
let rma = &mut u.a;
u.b = 1; //[ast]~ ERROR cannot assign to `u.b` because it is borrowed
//[mir]~^ ERROR cannot assign to `u.b` because it is borrowed
+ drop(rma);
}
}
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that a borrow which starts as a 2-phase borrow and gets
+// carried around a loop winds up conflicting with itself.
+
+#![feature(nll)]
+
+struct Foo { x: String }
+
+impl Foo {
+ fn get_string(&mut self) -> &str {
+ &self.x
+ }
+}
+
+fn main() {
+ let mut foo = Foo { x: format!("Hello, world") };
+ let mut strings = vec![];
+
+ loop {
+ strings.push(foo.get_string()); //~ ERROR cannot borrow `foo` as mutable
+ if strings.len() > 2 { break; }
+ }
+
+ println!("{:?}", strings);
+}
// revisions: nll_target
// The following revisions are disabled due to missing support from two-phase beyond autorefs
-//[lxl_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
-//[nll_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref -Z nll
+//[nll_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
-//[nll_target] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+//[nll_target] compile-flags: -Z borrowck=mir -Z two-phase-borrows
// This is an important corner case pointed out by Niko: one is
// allowed to initiate a shared borrow during a reservation, but it
// revisions: nll_target
// The following revisions are disabled due to missing support for two_phase_beyond_autoref
-//[lxl_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two_phase_beyond_autoref
-//[nll_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two_phase_beyond_autoref -Z nll
+//[nll_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two_phase_beyond_autoref
-
-//[nll_target] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+//[nll_target] compile-flags: -Z borrowck=mir -Z two-phase-borrows
// This is the second counter-example from Niko's blog post
// smallcultfollowing.com/babysteps/blog/2017/03/01/nested-method-calls-via-two-phase-borrowing/
/*3*/ *p += 1; // (mutable borrow of `i` starts here, since `p` is used)
- /*4*/ let k = i; //[lxl_beyond]~ ERROR cannot use `i` because it was mutably borrowed [E0503]
- //[nll_beyond]~^ ERROR cannot use `i` because it was mutably borrowed [E0503]
- //[nll_target]~^^ ERROR cannot use `i` because it was mutably borrowed [E0503]
+ /*4*/ let k = i; //[nll_beyond]~ ERROR cannot use `i` because it was mutably borrowed [E0503]
+ //[nll_target]~^ ERROR cannot use `i` because it was mutably borrowed [E0503]
/*5*/ *p += 1;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
// This is the third counter-example from Niko's blog post
// smallcultfollowing.com/babysteps/blog/2017/03/01/nested-method-calls-via-two-phase-borrowing/
vec.get({
vec.push(2);
- //[lxl]~^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
- //[nll]~^^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
+ //~^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
0
});
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// revisions: ast lxl nll
+// revisions: ast nll
//[ast]compile-flags:
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[g2p]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll -Z two-phase-beyond-autoref
+//[g2p]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
// the above revision is disabled until two-phase-beyond-autoref support is better
// This is a test checking that when we limit two-phase borrows to
fn twice_ten_sm<F: FnMut(i32) -> i32>(f: &mut F) {
f(f(10));
- //[lxl]~^ ERROR cannot borrow `*f` as mutable more than once at a time
- //[nll]~^^ ERROR cannot borrow `*f` as mutable more than once at a time
- //[g2p]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
- //[ast]~^^^^ ERROR cannot borrow `*f` as mutable more than once at a time
+ //[nll]~^ ERROR cannot borrow `*f` as mutable more than once at a time
+ //[g2p]~^^ ERROR cannot borrow `*f` as mutable more than once at a time
+ //[ast]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
}
fn twice_ten_si<F: Fn(i32) -> i32>(f: &mut F) {
f(f(10));
}
fn twice_ten_so<F: FnOnce(i32) -> i32>(f: Box<F>) {
f(f(10));
- //[lxl]~^ ERROR use of moved value: `*f`
- //[nll]~^^ ERROR use of moved value: `*f`
- //[g2p]~^^^ ERROR use of moved value: `*f`
- //[ast]~^^^^ ERROR use of moved value: `*f`
+ //[nll]~^ ERROR use of moved value: `*f`
+ //[g2p]~^^ ERROR use of moved value: `*f`
+ //[ast]~^^^ ERROR use of moved value: `*f`
}
fn twice_ten_om(f: &mut FnMut(i32) -> i32) {
f(f(10));
- //[lxl]~^ ERROR cannot borrow `*f` as mutable more than once at a time
- //[nll]~^^ ERROR cannot borrow `*f` as mutable more than once at a time
- //[g2p]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
- //[ast]~^^^^ ERROR cannot borrow `*f` as mutable more than once at a time
+ //[nll]~^ ERROR cannot borrow `*f` as mutable more than once at a time
+ //[g2p]~^^ ERROR cannot borrow `*f` as mutable more than once at a time
+ //[ast]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
}
fn twice_ten_oi(f: &mut Fn(i32) -> i32) {
f(f(10));
}
fn twice_ten_oo(f: Box<FnOnce(i32) -> i32>) {
f(f(10));
- //[lxl]~^ ERROR cannot move a value of type
- //[lxl]~^^ ERROR cannot move a value of type
- //[lxl]~^^^ ERROR use of moved value: `*f`
- //[nll]~^^^^ ERROR cannot move a value of type
- //[nll]~^^^^^ ERROR cannot move a value of type
- //[nll]~^^^^^^ ERROR use of moved value: `*f`
- //[g2p]~^^^^^^^ ERROR cannot move a value of type
- //[g2p]~^^^^^^^^ ERROR cannot move a value of type
- //[g2p]~^^^^^^^^^ ERROR use of moved value: `*f`
- //[ast]~^^^^^^^^^^ ERROR use of moved value: `*f`
+ //[nll]~^ ERROR cannot move a value of type
+ //[nll]~^^ ERROR cannot move a value of type
+ //[nll]~^^^ ERROR use of moved value: `*f`
+ //[g2p]~^^^^ ERROR cannot move a value of type
+ //[g2p]~^^^^^ ERROR cannot move a value of type
+ //[g2p]~^^^^^^ ERROR use of moved value: `*f`
+ //[ast]~^^^^^^^ ERROR use of moved value: `*f`
}
twice_ten_sm(&mut |x| x + 1);
// This is not okay.
double_access(&mut a, &a);
- //[lxl]~^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
- //[nll]~^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
- //[g2p]~^^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
- //[ast]~^^^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
+ //[nll]~^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
+ //[g2p]~^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
+ //[ast]~^^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
// But this is okay.
a.m(a.i(10));
fn coerce_index_op() {
let mut i = I(10);
i[i[3]] = 4;
- //[lxl]~^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
- //[nll]~^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
- //[ast]~^^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
+ //[nll]~^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
+ //[ast]~^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
i[3] = i[4];
i[i[3]] = i[4];
- //[lxl]~^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
- //[nll]~^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
- //[ast]~^^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
+ //[nll]~^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
+ //[ast]~^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
}
fn main() {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
// This is similar to two-phase-reservation-sharing-interference.rs
// in that it shows a reservation that overlaps with a shared borrow.
#![feature(rustc_attrs)]
#[rustc_error]
-fn main() { //[nll]~ ERROR compilation successful
+fn main() { //~ ERROR compilation successful
let mut v = vec![0, 1, 2];
let shared = &v;
v.push(shared.len());
- //[lxl]~^ ERROR cannot borrow `v` as mutable because it is also borrowed as immutable [E0502]
assert_eq!(v, [0, 1, 2, 3]);
}
// ignore-tidy-linelength
-// revisions: lxl_beyond nll_beyond nll_target
+// revisions: nll_beyond nll_target
// The following revisions are disabled due to missing support from two-phase beyond autorefs
-//[lxl_beyond]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
-//[lxl_beyond] should-fail
-//[nll_beyond]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref -Z nll
+//[nll_beyond]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
//[nll_beyond] should-fail
-//[nll_target]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+//[nll_target]compile-flags: -Z borrowck=mir -Z two-phase-borrows
// This is a corner case that the current implementation is (probably)
// treating more conservatively than is necessary. But it also does
// with the shared borrow. But in the current implementation,
// its an error.
delay = &mut vec;
- //[lxl_beyond]~^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
- //[nll_beyond]~^^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
- //[nll_target]~^^^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
+ //[nll_beyond]~^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
+ //[nll_target]~^^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
shared[0];
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
// This is the first counter-example from Niko's blog post
// smallcultfollowing.com/babysteps/blog/2017/03/01/nested-method-calls-via-two-phase-borrowing/
v[0].push_str({
v.push(format!("foo"));
- //[lxl]~^ ERROR cannot borrow `v` as mutable more than once at a time [E0499]
- //[nll]~^^ ERROR cannot borrow `v` as mutable more than once at a time [E0499]
+ //~^ ERROR cannot borrow `v` as mutable more than once at a time [E0499]
"World!"
});
let z = borrow_mut(x);
//[ast]~^ ERROR cannot borrow `*x` as mutable more than once at a time
//[mir]~^^ ERROR cannot borrow `*x` as mutable more than once at a time
+ drop((y, z));
}
fn double_imm_borrow(x: &mut Box<i32>) {
**x += 1;
//[ast]~^ ERROR cannot assign to `**x` because it is borrowed
//[mir]~^^ ERROR cannot assign to `**x` because it is borrowed
+ drop((y, z));
}
fn double_mut_borrow2<T>(x: &mut Box<T>) {
#![feature(specialization)]
trait Trait<T> { type Assoc; }
-//~^ cyclic dependency detected [E0391]
+//~^ cycle detected
impl<T> Trait<T> for Vec<T> {
type Assoc = ();
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern: cyclic dependency detected
+// error-pattern: cycle detected
#![feature(const_fn)]
struct A<T>
where T : Trait,
T : Add<T::Item>
- //~^ ERROR cyclic dependency detected
+ //~^ ERROR cycle detected
//~| ERROR associated type `Item` not found for `T`
{
data: T
// again references the trait.
trait Foo<X = Box<Foo>> {
- //~^ ERROR cyclic dependency detected
+ //~^ ERROR cycle detected
}
fn main() { }
// Test a supertrait cycle where a trait extends itself.
trait Chromosome: Chromosome {
- //~^ ERROR cyclic dependency detected
+ //~^ ERROR cycle detected
}
fn main() { }
f.call(&x);
f.call(&x);
x = 5;
+ drop(y);
}
fn main() {
// except according to those terms.
type x = Vec<x>;
-//~^ ERROR cyclic dependency detected
+//~^ ERROR cycle detected
fn main() { let b: x = Vec::new(); }
// except according to those terms.
trait T : Iterator<Item=Self::Item>
-//~^ ERROR cyclic dependency detected
+//~^ ERROR cycle detected
//~| ERROR associated type `Item` not found for `Self`
{}
}
pub trait Processor: Subscriber<Input = Self::Input> {
- //~^ ERROR cyclic dependency detected [E0391]
+ //~^ ERROR cycle detected
type Input;
}
}
fn foo<T: Trait<A = T::B>>() { }
-//~^ ERROR cyclic dependency detected
+//~^ ERROR cycle detected
//~| ERROR associated type `B` not found for `T`
fn main() { }
// except according to those terms.
trait Expr : PartialEq<Self::Item> {
- //~^ ERROR: cyclic dependency detected
+ //~^ ERROR: cycle detected
type Item;
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern: cyclic dependency detected
-// note-pattern: the cycle begins when computing layout of
-// note-pattern: ...which then requires computing layout of
-// note-pattern: ...which then again requires computing layout of
-
+// error-pattern: cycle detected when computing layout of
+// note-pattern: ...which requires computing layout of
+// note-pattern: ...which again requires computing layout of
trait Mirror { type It: ?Sized; }
impl<T: ?Sized> Mirror for T { type It = Self; }
fn foo(_: T) {}
}
-pub struct Foo<T = Box<Trait<DefaultFoo>>>;
-type DefaultFoo = Foo; //~ ERROR cyclic dependency detected
+pub struct Foo<T = Box<Trait<DefaultFoo>>>; //~ ERROR cycle detected
+type DefaultFoo = Foo;
fn main() {
}
// revisions: ast mir
//[mir]compile-flags: -Z borrowck=mir
+// FIXME(#49821) -- No tip about using a let binding
+
use std::cell::RefCell;
fn main() {
//[ast]~| NOTE temporary value does not live long enough
//[ast]~| NOTE consider using a `let` binding to increase its lifetime
//[mir]~^^^^^ ERROR borrowed value does not live long enough [E0597]
- //[mir]~| NOTE temporary value dropped here while still borrowed
//[mir]~| NOTE temporary value does not live long enough
- //[mir]~| NOTE consider using a `let` binding to increase its lifetime
+ //[mir]~| NOTE temporary value only lives until here
println!("{}", val);
+ //[mir]~^ borrow later used here
}
//[ast]~^ NOTE temporary value needs to live until here
-//[mir]~^^ NOTE temporary value needs to live until here
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// error-pattern: cycle detected when computing layout of
+
#![feature(const_fn)]
#![feature(core_intrinsics)]
struct Foo {
bytes: [u8; unsafe { intrinsics::size_of::<Foo>() }],
- //~^ ERROR cyclic dependency detected
x: usize,
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Z borrowck=mir
#![allow(dead_code)]
let g: fn(_, _) -> _ = |_x, y| y;
//~^ ERROR free region `'b` does not outlive free region `'a`
g
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
}
fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Zborrowck=mir
#![allow(dead_code)]
// The MIR type checker must therefore relate `'?0` to `'?1` and `'?2`
// as part of checking the `ReifyFnPointer`.
let f: fn(_) -> _ = foo;
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR free region `'a` does not outlive free region `'static`
f(x)
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Zborrowck=mir
#![allow(dead_code)]
// Here the NLL checker must relate the types in `f` to the types
// in `g`. These are related via the `UnsafeFnPointer` cast.
let g: unsafe fn(_) -> _ = f;
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR free region `'a` does not outlive free region `'static`
unsafe { g(input) }
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Z borrowck=mir
#![allow(dead_code)]
#![feature(dyn_trait)]
fn bar<'a>(x: &'a u32) -> &'static dyn Debug {
//~^ ERROR free region `'a` does not outlive free region `'static`
x
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
}
fn main() {}
// check borrowing is detected successfully
let &mut ref x = foo;
*foo += 1; //[ast]~ ERROR cannot assign to `*foo` because it is borrowed
- //[mir]~^ ERROR cannot assign to `*foo` because it is borrowed
+ //[mir]~^ ERROR cannot assign to `*foo` because it is borrowed
+ drop(x);
}
// except according to those terms.
-// compile-flags:-Zborrowck=compare -Znll
+// compile-flags:-Zborrowck=compare
#![allow(warnings)]
#![feature(rustc_attrs)]
// except according to those terms.
-// compile-flags:-Zborrowck=compare -Znll
+// compile-flags:-Zborrowck=compare
#![allow(warnings)]
#![feature(rustc_attrs)]
// except according to those terms.
//revisions: ast mir
-//[mir] compile-flags: -Z borrowck=mir -Z nll
+//[mir] compile-flags: -Z borrowck=mir
#![allow(unused_assignments)]
// in the type of `p` includes the points after `&v[0]` up to (but not
// including) the call to `use_x`. The `else` branch is not included.
-// compile-flags:-Zborrowck=compare -Znll
+// compile-flags:-Zborrowck=compare
#![allow(warnings)]
#![feature(rustc_attrs)]
// in the type of `p` includes the points after `&v[0]` up to (but not
// including) the call to `use_x`. The `else` branch is not included.
-// compile-flags:-Zborrowck=compare -Znll
+// compile-flags:-Zborrowck=compare
#![allow(warnings)]
#![feature(rustc_attrs)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Zborrowck=mir
#![allow(dead_code)]
fn bar<'a, 'b>(x: &'a u32, y: &'b u32) -> (&'a u32, &'b u32) {
foo(x, y)
//~^ ERROR lifetime mismatch [E0623]
- //~| WARNING not reporting region error due to -Znll
+ //~| WARNING not reporting region error due to nll
}
fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Z borrowck=mir
#![allow(dead_code)]
fn bar<'a, 'b>(x: Cell<&'a u32>, y: Cell<&'b u32>) {
Foo { x, y };
//~^ ERROR lifetime mismatch [E0623]
- //~| WARNING not reporting region error due to -Znll
+ //~| WARNING not reporting region error due to nll
}
fn main() {}
match (&a1,) {
(&ref b0,) => {
a1 = &f; //[ast]~ ERROR cannot assign
- //[mir]~^ ERROR cannot assign to `a1` because it is borrowed
+ //[mir]~^ ERROR cannot assign to `a1` because it is borrowed
+ drop(b0);
}
}
}
// except according to those terms.
// revisions: ll nll
-//[nll] compile-flags: -Znll -Zborrowck=mir
+//[nll] compile-flags:-Zborrowck=mir
fn static_id<'a,'b>(t: &'a ()) -> &'static ()
where 'a: 'static { t }
where 'a: 'b, 'b: 'static { t }
fn static_id_wrong_way<'a>(t: &'a ()) -> &'static () where 'static: 'a {
t //[ll]~ ERROR E0312
- //[nll]~^ WARNING not reporting region error due to -Znll
+ //[nll]~^ WARNING not reporting region error due to nll
//[nll]~| ERROR free region `'a` does not outlive free region `'static`
}
fn error(u: &(), v: &()) {
static_id(&u); //[ll]~ ERROR explicit lifetime required in the type of `u` [E0621]
- //[nll]~^ WARNING not reporting region error due to -Znll
+ //[nll]~^ WARNING not reporting region error due to nll
//[nll]~| ERROR explicit lifetime required in the type of `u` [E0621]
static_id_indirect(&v); //[ll]~ ERROR explicit lifetime required in the type of `v` [E0621]
- //[nll]~^ WARNING not reporting region error due to -Znll
+ //[nll]~^ WARNING not reporting region error due to nll
//[nll]~| ERROR explicit lifetime required in the type of `v` [E0621]
}
impl Tr for S where S<Self>: Copy {} // OK
impl Tr for S where Self::A: Copy {} // OK
-impl Tr for Self {} //~ ERROR cyclic dependency detected
-impl Tr for S<Self> {} //~ ERROR cyclic dependency detected
-impl Self {} //~ ERROR cyclic dependency detected
-impl S<Self> {} //~ ERROR cyclic dependency detected
-impl Tr<Self::A> for S {} //~ ERROR cyclic dependency detected
+impl Tr for Self {} //~ ERROR cycle detected
+impl Tr for S<Self> {} //~ ERROR cycle detected
+impl Self {} //~ ERROR cycle detected
+impl S<Self> {} //~ ERROR cycle detected
+impl Tr<Self::A> for S {} //~ ERROR cycle detected
fn main() {}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions:rpass1 rpass2
+
+#![cfg_attr(rpass2, warn(dead_code))]
+
+pub static mut BAA: *const i8 = unsafe { &BOO as *const _ as *const i8 };
+
+pub static mut BOO: *const i8 = unsafe { &BAA as *const _ as *const i8 };
+
+fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll
+// compile-flags:-Zborrowck=mir
fn can_panic() -> Box<usize> {
Box::new(44)
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll
+// compile-flags:-Zborrowck=mir
#![allow(warnings)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll
+// compile-flags:-Zborrowck=mir
fn cond() -> bool { false }
// suitable variables and that we setup the outlives relationship
// between R0 and R1 properly.
-// compile-flags:-Znll -Zverbose
-// ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+// ^^^^^^^^^ force compiler to dump more region information
// ignore-tidy-linelength
#![allow(warnings)]
// in the type of `r_a` must outlive the region (`R7`) that appears in
// the type of `r_b`
-// compile-flags:-Znll -Zverbose
-// ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+// ^^^^^^^^^ force compiler to dump more region information
#![allow(warnings)]
// in the type of `p` includes the points after `&v[0]` up to (but not
// including) the call to `use_x`. The `else` branch is not included.
-// compile-flags:-Znll -Zverbose
-// ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+// ^^^^^^^^^ force compiler to dump more region information
#![allow(warnings)]
// but only at a particular point, and hence they wind up including
// distinct regions.
-// compile-flags:-Znll -Zverbose
-// ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+// ^^^^^^^^^ force compiler to dump more region information
#![allow(warnings)]
// in the type of `p` includes the points after `&v[0]` up to (but not
// including) the call to `use_x`. The `else` branch is not included.
-// compile-flags:-Znll -Zverbose
-// ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+// ^^^^^^^^^ force compiler to dump more region information
#![allow(warnings)]
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -Z parse-only
-
-// Test that we generate obsolete syntax errors around usages of `proc`.
-
-fn foo(p: proc()) { } //~ ERROR expected type, found reserved keyword `proc`
-
-fn bar() { proc() 1; } //~ ERROR expected expression, found reserved keyword `proc`
-
-fn main() { }
--- /dev/null
+-include ../tools.mk
+
+all: extern_absolute_paths.rs extern_in_paths.rs krate2
+ $(RUSTC) extern_absolute_paths.rs -Zsave-analysis
+ cat $(TMPDIR)/save-analysis/extern_absolute_paths.json | "$(PYTHON)" validate_json.py
+ $(RUSTC) extern_in_paths.rs -Zsave-analysis
+ cat $(TMPDIR)/save-analysis/extern_in_paths.json | "$(PYTHON)" validate_json.py
+
+krate2: krate2.rs
+ $(RUSTC) $<
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(extern_absolute_paths)]
+
+use krate2::hello;
+
+fn main() {
+ hello();
+ ::krate2::hello();
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(extern_in_paths)]
+
+use extern::krate2;
+
+fn main() {
+ extern::krate2::hello();
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "krate2"]
+#![crate_type = "lib"]
+
+pub fn hello() {
+}
--- /dev/null
+#!/usr/bin/env python
+
+# Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+import sys
+import json
+
+crates = json.loads(sys.stdin.readline().strip())["prelude"]["external_crates"]
+assert any(map(lambda c: c["id"]["name"] == "krate2", crates))
--- /dev/null
+-include ../../run-make-fulldeps/tools.mk
+
+ifeq ($(TARGET),wasm32-unknown-unknown)
+all:
+ $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg a
+ wc -c < $(TMPDIR)/foo.wasm
+ [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "1024" ]
+ $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg b
+ wc -c < $(TMPDIR)/foo.wasm
+ [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
+ $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg c
+ wc -c < $(TMPDIR)/foo.wasm
+ [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
+else
+all:
+endif
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "cdylib"]
+
+#[no_mangle]
+#[cfg(a)]
+pub fn foo() {
+ panic!("test");
+}
+
+#[no_mangle]
+#[cfg(b)]
+pub fn foo() {
+ panic!("{}", 1);
+}
+
+#[no_mangle]
+#[cfg(c)]
+pub fn foo() {
+ panic!("{}", "a");
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
// This is the "goto example" for why we want two phase borrows.
// except according to those terms.
// revisions: normal nll
-//[nll] compile-flags: -Znll -Zborrowck=mir
+//[nll] compile-flags:-Zborrowck=mir
#![feature(fn_traits,
step_trait,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//`#[cfg]` on struct field permits empty unusable struct
+// `#[cfg]` on struct field permits empty unusable struct
struct S {
#[cfg(untrue)]
} }
}
-macro_rules! baz {
- ($($a:ident),? ; $num:expr) => { { // comma separator is meaningless for `?`
- let mut x = 0;
-
- $(
- x += $a;
- )?
-
- assert_eq!(x, $num);
- } }
-}
-
macro_rules! barplus {
($($a:ident)?+ ; $num:expr) => { {
let mut x = 0;
$(
x += $a;
- )+
+ )?
assert_eq!(x, $num);
} }
$(
x += $a;
- )*
+ )?
assert_eq!(x, $num);
} }
// accept 0 or 1 repetitions
foo!( ; 0);
foo!(a ; 1);
- baz!( ; 0);
- baz!(a ; 1);
// Make sure using ? as a separator works as before
- barplus!(a ; 1);
- barplus!(a?a ; 2);
- barplus!(a?a?a ; 3);
- barstar!( ; 0);
- barstar!(a ; 1);
- barstar!(a?a ; 2);
- barstar!(a?a?a ; 3);
+ barplus!(+ ; 0);
+ barplus!(a + ; 1);
+ barstar!(* ; 0);
+ barstar!(a * ; 1);
}
// ignore-emscripten
#![feature(repr_simd, target_feature, cfg_target_feature)]
+#![feature(avx512_target_feature)]
use std::process::{Command, ExitStatus};
use std::env;
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+const QUERY = 'str,u8';
+
+const EXPECTED = {
+ 'others': [
+ { 'path': 'std', 'name': 'str' },
+ { 'path': 'std', 'name': 'u8' },
+ { 'path': 'std::ffi', 'name': 'CStr' },
+ { 'path': 'std::simd', 'name': 'u8x2' },
+ ],
+};
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+//! Test with [Foo::baz], [Bar::foo], [Uniooon::X]
+
+pub struct Foo {
+ pub bar: usize,
+}
--- /dev/null
+warning: [Foo::baz] cannot be resolved, ignoring it...
+
+warning: [Bar::foo] cannot be resolved, ignoring it...
+
+warning: [Uniooon::X] cannot be resolved, ignoring it...
+
pub const CONST: u32 = 0;
pub static STATIC: &str = "baguette";
pub fn function() {}
+
+mod private_module {
+ pub struct ReexportedStruct;
+}
+
+// @has foo/all.html '//a[@href="struct.ReexportedStruct.html"]' 'ReexportedStruct'
+// @!has foo/all.html 'private_module'
+pub use private_module::ReexportedStruct;
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// only-x86_64
+// compile-flags:--test -C target-feature=+avx
+// should-fail
+
+/// (written on a spider's web) Some Struct
+///
+/// ```
+/// panic!("oh no");
+/// ```
+#[doc(cfg(target_feature = "avx"))]
+pub struct SomeStruct;
let c1 = to_fn_mut(|| x = 4);
let c2 = to_fn_mut(|| x = 5); //~ ERROR cannot borrow `x` as mutable more than once
//~| ERROR cannot borrow `x` as mutable more than once
+ drop((c1, c2));
}
fn set(x: &mut isize) {
let c1 = to_fn_mut(|| set(&mut x));
let c2 = to_fn_mut(|| set(&mut x)); //~ ERROR cannot borrow `x` as mutable more than once
//~| ERROR cannot borrow `x` as mutable more than once
+ drop((c1, c2));
}
fn c() {
let c1 = to_fn_mut(|| x = 5);
let c2 = to_fn_mut(|| set(&mut x)); //~ ERROR cannot borrow `x` as mutable more than once
//~| ERROR cannot borrow `x` as mutable more than once
+ drop((c1, c2));
}
fn d() {
let c2 = to_fn_mut(|| { let _y = to_fn_mut(|| set(&mut x)); }); // (nested closure)
//~^ ERROR cannot borrow `x` as mutable more than once
//~| ERROR cannot borrow `x` as mutable more than once
+ drop((c1, c2));
}
fn g() {
let c2 = to_fn_mut(|| set(&mut *x.f));
//~^ ERROR cannot borrow `x` as mutable more than once
//~| ERROR cannot borrow `x` as mutable more than once
+ drop((c1, c2));
}
fn main() {
| ^^ - borrow occurs due to use of `x` in closure
| |
| second mutable borrow occurs here
-LL | //~| ERROR cannot borrow `x` as mutable more than once
+...
LL | }
| - first borrow ends here
error[E0499]: cannot borrow `x` as mutable more than once at a time (Ast)
- --> $DIR/borrowck-closures-two-mut.rs:35:24
+ --> $DIR/borrowck-closures-two-mut.rs:36:24
|
LL | let c1 = to_fn_mut(|| set(&mut x));
| -- - previous borrow occurs due to use of `x` in closure
| ^^ - borrow occurs due to use of `x` in closure
| |
| second mutable borrow occurs here
-LL | //~| ERROR cannot borrow `x` as mutable more than once
+...
LL | }
| - first borrow ends here
error[E0499]: cannot borrow `x` as mutable more than once at a time (Ast)
- --> $DIR/borrowck-closures-two-mut.rs:42:24
+ --> $DIR/borrowck-closures-two-mut.rs:44:24
|
LL | let c1 = to_fn_mut(|| x = 5);
| -- - previous borrow occurs due to use of `x` in closure
| ^^ - borrow occurs due to use of `x` in closure
| |
| second mutable borrow occurs here
-LL | //~| ERROR cannot borrow `x` as mutable more than once
+...
LL | }
| - first borrow ends here
error[E0499]: cannot borrow `x` as mutable more than once at a time (Ast)
- --> $DIR/borrowck-closures-two-mut.rs:49:24
+ --> $DIR/borrowck-closures-two-mut.rs:52:24
|
LL | let c1 = to_fn_mut(|| x = 5);
| -- - previous borrow occurs due to use of `x` in closure
| - first borrow ends here
error[E0499]: cannot borrow `x` as mutable more than once at a time (Ast)
- --> $DIR/borrowck-closures-two-mut.rs:61:24
+ --> $DIR/borrowck-closures-two-mut.rs:65:24
|
LL | let c1 = to_fn_mut(|| set(&mut *x.f));
| -- - previous borrow occurs due to use of `x` in closure
| |
| second mutable borrow occurs here
LL | //~| ERROR cannot borrow `x` as mutable more than once
-LL | }
- | - first borrow ends here
+LL | drop((c1, c2));
+ | -- borrow later used here
error[E0499]: cannot borrow `x` as mutable more than once at a time (Mir)
- --> $DIR/borrowck-closures-two-mut.rs:35:24
+ --> $DIR/borrowck-closures-two-mut.rs:36:24
|
LL | let c1 = to_fn_mut(|| set(&mut x));
| -- - previous borrow occurs due to use of `x` in closure
| |
| second mutable borrow occurs here
LL | //~| ERROR cannot borrow `x` as mutable more than once
-LL | }
- | - first borrow ends here
+LL | drop((c1, c2));
+ | -- borrow later used here
error[E0499]: cannot borrow `x` as mutable more than once at a time (Mir)
- --> $DIR/borrowck-closures-two-mut.rs:42:24
+ --> $DIR/borrowck-closures-two-mut.rs:44:24
|
LL | let c1 = to_fn_mut(|| x = 5);
| -- - previous borrow occurs due to use of `x` in closure
| |
| second mutable borrow occurs here
LL | //~| ERROR cannot borrow `x` as mutable more than once
-LL | }
- | - first borrow ends here
+LL | drop((c1, c2));
+ | -- borrow later used here
error[E0499]: cannot borrow `x` as mutable more than once at a time (Mir)
- --> $DIR/borrowck-closures-two-mut.rs:49:24
+ --> $DIR/borrowck-closures-two-mut.rs:52:24
|
LL | let c1 = to_fn_mut(|| x = 5);
| -- - previous borrow occurs due to use of `x` in closure
| |
| second mutable borrow occurs here
...
-LL | }
- | - first borrow ends here
+LL | drop((c1, c2));
+ | -- borrow later used here
error[E0499]: cannot borrow `x` as mutable more than once at a time (Mir)
- --> $DIR/borrowck-closures-two-mut.rs:61:24
+ --> $DIR/borrowck-closures-two-mut.rs:65:24
|
LL | let c1 = to_fn_mut(|| set(&mut *x.f));
| -- - previous borrow occurs due to use of `x` in closure
| |
| second mutable borrow occurs here
...
-LL | }
- | - first borrow ends here
+LL | drop((c1, c2));
+ | -- borrow later used here
error: aborting due to 10 previous errors
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
// run-pass
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
// run-pass
#[rustc_dump_program_clauses] //~ ERROR Implemented(T: Foo) :-
impl<T: 'static> Foo for T where T: Iterator<Item = i32> { }
+trait Bar {
+ type Assoc;
+}
+
+impl<T> Bar for T where T: Iterator<Item = i32> {
+ #[rustc_dump_program_clauses] //~ ERROR Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :-
+ type Assoc = Vec<T>;
+}
+
fn main() {
println!("hello");
}
LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(T: Foo) :-
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to previous error
+error: Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :- Implemented(T: Bar).
+ --> $DIR/lower_impl.rs:23:5
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
}
trait B: C {
+ //~^ ERROR cycle detected
}
trait C: B { }
- //~^ ERROR cyclic dependency detected
- //~| cyclic reference
fn main() { }
-error[E0391]: cyclic dependency detected
- --> $DIR/cycle-trait-supertrait-indirect.rs:20:1
+error[E0391]: cycle detected when computing the supertraits of `B`
+ --> $DIR/cycle-trait-supertrait-indirect.rs:17:1
|
-LL | trait C: B { }
- | ^^^^^^^^^^ cyclic reference
+LL | trait B: C {
+ | ^^^^^^^^^^
|
-note: the cycle begins when computing the supertraits of `B`...
- --> $DIR/cycle-trait-supertrait-indirect.rs:14:1
+note: ...which requires computing the supertraits of `C`...
+ --> $DIR/cycle-trait-supertrait-indirect.rs:21:1
|
-LL | trait A: B {
+LL | trait C: B { }
| ^^^^^^^^^^
-note: ...which then requires computing the supertraits of `C`...
- --> $DIR/cycle-trait-supertrait-indirect.rs:17:1
+ = note: ...which again requires computing the supertraits of `B`, completing the cycle
+note: cycle used when computing the supertraits of `A`
+ --> $DIR/cycle-trait-supertrait-indirect.rs:14:1
|
-LL | trait B: C {
+LL | trait A: B {
| ^^^^^^^^^^
- = note: ...which then again requires computing the supertraits of `B`, completing the cycle.
error: aborting due to previous error
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[cfg(target_feature = "x")] //~ ERROR `cfg(target_feature)` is experimental
-#[cfg_attr(target_feature = "x", x)] //~ ERROR `cfg(target_feature)` is experimental
-struct Foo(u64, u64);
-
-#[cfg(not(any(all(target_feature = "x"))))] //~ ERROR `cfg(target_feature)` is experimental
-fn foo() {}
-
-fn main() {
- cfg!(target_feature = "x");
- //~^ ERROR `cfg(target_feature)` is experimental and subject to change
-}
+++ /dev/null
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
- --> $DIR/feature-gate-cfg-target-feature.rs:12:12
- |
-LL | #[cfg_attr(target_feature = "x", x)] //~ ERROR `cfg(target_feature)` is experimental
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
- --> $DIR/feature-gate-cfg-target-feature.rs:11:7
- |
-LL | #[cfg(target_feature = "x")] //~ ERROR `cfg(target_feature)` is experimental
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
- --> $DIR/feature-gate-cfg-target-feature.rs:15:19
- |
-LL | #[cfg(not(any(all(target_feature = "x"))))] //~ ERROR `cfg(target_feature)` is experimental
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
- --> $DIR/feature-gate-cfg-target-feature.rs:19:10
- |
-LL | cfg!(target_feature = "x");
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error: aborting due to 4 previous errors
-
-For more information about this error, try `rustc --explain E0658`.
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[target_feature = "+sse2"]
-//~^ the `#[target_feature]` attribute is an experimental feature
-fn foo() {}
+++ /dev/null
-error[E0658]: the `#[target_feature]` attribute is an experimental feature
- --> $DIR/feature-gate-target_feature.rs:11:1
- |
-LL | #[target_feature = "+sse2"]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(target_feature)] to the crate attributes to enable
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0658`.
// independently resolved and only require the concrete
// return type, which can't depend on the obligation.
fn cycle1() -> impl Clone {
- //~^ ERROR cyclic dependency detected
- //~| cyclic reference
+ //~^ ERROR cycle detected
send(cycle2().clone());
Rc::new(Cell::new(5))
LL | fn send<T: Send>(_: T) {}
| ^^^^^^^^^^^^^^^^^^^^^^
-error[E0391]: cyclic dependency detected
- --> $DIR/auto-trait-leak.rs:42:1
- |
-LL | fn cycle1() -> impl Clone {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^ cyclic reference
- |
-note: the cycle begins when processing `cycle1`...
+error[E0391]: cycle detected when processing `cycle1`
--> $DIR/auto-trait-leak.rs:42:1
|
LL | fn cycle1() -> impl Clone {
| ^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...which then requires processing `cycle2::{{impl-Trait}}`...
- --> $DIR/auto-trait-leak.rs:50:16
+ |
+note: ...which requires processing `cycle2::{{impl-Trait}}`...
+ --> $DIR/auto-trait-leak.rs:49:16
|
LL | fn cycle2() -> impl Clone {
| ^^^^^^^^^^
-note: ...which then requires processing `cycle2`...
- --> $DIR/auto-trait-leak.rs:50:1
+note: ...which requires processing `cycle2`...
+ --> $DIR/auto-trait-leak.rs:49:1
|
LL | fn cycle2() -> impl Clone {
| ^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...which then requires processing `cycle1::{{impl-Trait}}`...
+note: ...which requires processing `cycle1::{{impl-Trait}}`...
--> $DIR/auto-trait-leak.rs:42:16
|
LL | fn cycle1() -> impl Clone {
| ^^^^^^^^^^
- = note: ...which then again requires processing `cycle1`, completing the cycle.
+ = note: ...which again requires processing `cycle1`, completing the cycle
+note: cycle used when type-checking all item bodies
error: aborting due to 3 previous errors
// except according to those terms.
trait t1 : t2 {
+//~^ ERROR cycle detected
}
trait t2 : t1 {
-//~^ ERROR cyclic dependency detected
-//~| cyclic reference
}
fn main() { }
-error[E0391]: cyclic dependency detected
- --> $DIR/issue-12511.rs:14:1
- |
-LL | trait t2 : t1 {
- | ^^^^^^^^^^^^^ cyclic reference
- |
-note: the cycle begins when computing the supertraits of `t1`...
+error[E0391]: cycle detected when computing the supertraits of `t1`
--> $DIR/issue-12511.rs:11:1
|
LL | trait t1 : t2 {
| ^^^^^^^^^^^^^
-note: ...which then requires computing the supertraits of `t2`...
- --> $DIR/issue-12511.rs:11:1
|
-LL | trait t1 : t2 {
+note: ...which requires computing the supertraits of `t2`...
+ --> $DIR/issue-12511.rs:15:1
+ |
+LL | trait t2 : t1 {
| ^^^^^^^^^^^^^
- = note: ...which then again requires computing the supertraits of `t1`, completing the cycle.
+ = note: ...which again requires computing the supertraits of `t1`, completing the cycle
error: aborting due to previous error
-error[E0391]: cyclic dependency detected
- --> $DIR/issue-23302-1.rs:14:9
- |
-LL | A = X::A as isize, //~ ERROR E0391
- | ^^^^^^^^^^^^^ cyclic reference
- |
-note: the cycle begins when const-evaluating `X::A::{{initializer}}`...
- --> $DIR/issue-23302-1.rs:14:9
- |
-LL | A = X::A as isize, //~ ERROR E0391
- | ^^^^^^^^^^^^^
-note: ...which then requires computing layout of `X`...
+error[E0391]: cycle detected when const-evaluating `X::A::{{initializer}}`
--> $DIR/issue-23302-1.rs:14:9
|
LL | A = X::A as isize, //~ ERROR E0391
| ^^^^
- = note: ...which then again requires const-evaluating `X::A::{{initializer}}`, completing the cycle.
+ |
+note: ...which requires computing layout of `X`...
+ = note: ...which again requires const-evaluating `X::A::{{initializer}}`, completing the cycle
error: aborting due to previous error
-error[E0391]: cyclic dependency detected
- --> $DIR/issue-23302-2.rs:14:9
- |
-LL | A = Y::B as isize, //~ ERROR E0391
- | ^^^^^^^^^^^^^ cyclic reference
- |
-note: the cycle begins when const-evaluating `Y::A::{{initializer}}`...
- --> $DIR/issue-23302-2.rs:14:9
- |
-LL | A = Y::B as isize, //~ ERROR E0391
- | ^^^^^^^^^^^^^
-note: ...which then requires computing layout of `Y`...
+error[E0391]: cycle detected when const-evaluating `Y::A::{{initializer}}`
--> $DIR/issue-23302-2.rs:14:9
|
LL | A = Y::B as isize, //~ ERROR E0391
| ^^^^
- = note: ...which then again requires const-evaluating `Y::A::{{initializer}}`, completing the cycle.
+ |
+note: ...which requires computing layout of `Y`...
+ = note: ...which again requires const-evaluating `Y::A::{{initializer}}`, completing the cycle
error: aborting due to previous error
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-const A: i32 = B;
+const A: i32 = B; //~ ERROR cycle detected
-const B: i32 = A; //~ ERROR cyclic dependency detected
+const B: i32 = A;
fn main() { }
-error[E0391]: cyclic dependency detected
- --> $DIR/issue-23302-3.rs:13:16
- |
-LL | const B: i32 = A; //~ ERROR cyclic dependency detected
- | ^ cyclic reference
- |
-note: the cycle begins when const checking if rvalue is promotable to static `A`...
+error[E0391]: cycle detected when const checking if rvalue is promotable to static `A`
--> $DIR/issue-23302-3.rs:11:1
|
-LL | const A: i32 = B;
+LL | const A: i32 = B; //~ ERROR cycle detected
| ^^^^^^^^^^^^^^^^^
-note: ...which then requires checking which parts of `A` are promotable to static...
- --> $DIR/issue-23302-3.rs:11:1
|
-LL | const A: i32 = B;
- | ^^^^^^^^^^^^^^^^^
-note: ...which then requires const checking if rvalue is promotable to static `B`...
+note: ...which requires checking which parts of `A` are promotable to static...
--> $DIR/issue-23302-3.rs:11:16
|
-LL | const A: i32 = B;
+LL | const A: i32 = B; //~ ERROR cycle detected
| ^
-note: ...which then requires checking which parts of `B` are promotable to static...
+note: ...which requires const checking if rvalue is promotable to static `B`...
--> $DIR/issue-23302-3.rs:13:1
|
-LL | const B: i32 = A; //~ ERROR cyclic dependency detected
+LL | const B: i32 = A;
| ^^^^^^^^^^^^^^^^^
- = note: ...which then again requires const checking if rvalue is promotable to static `A`, completing the cycle.
+note: ...which requires checking which parts of `B` are promotable to static...
+ --> $DIR/issue-23302-3.rs:13:16
+ |
+LL | const B: i32 = A;
+ | ^
+ = note: ...which again requires const checking if rvalue is promotable to static `A`, completing the cycle
error: aborting due to previous error
-error[E0391]: cyclic dependency detected
- --> $DIR/issue-36163.rs:14:9
- |
-LL | B = A, //~ ERROR E0391
- | ^ cyclic reference
- |
-note: the cycle begins when const-evaluating `Foo::B::{{initializer}}`...
+error[E0391]: cycle detected when const-evaluating `Foo::B::{{initializer}}`
--> $DIR/issue-36163.rs:14:9
|
LL | B = A, //~ ERROR E0391
| ^
-note: ...which then requires processing `Foo::B::{{initializer}}`...
+ |
+note: ...which requires processing `Foo::B::{{initializer}}`...
--> $DIR/issue-36163.rs:14:9
|
LL | B = A, //~ ERROR E0391
| ^
-note: ...which then requires const-evaluating `A`...
- --> $DIR/issue-36163.rs:11:1
- |
-LL | const A: isize = Foo::B as isize;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...which then requires computing layout of `Foo`...
+note: ...which requires const-evaluating `A`...
--> $DIR/issue-36163.rs:11:18
|
LL | const A: isize = Foo::B as isize;
| ^^^^^^
- = note: ...which then again requires const-evaluating `Foo::B::{{initializer}}`, completing the cycle.
+note: ...which requires computing layout of `Foo`...
+ = note: ...which again requires const-evaluating `Foo::B::{{initializer}}`, completing the cycle
error: aborting due to previous error
*y.pointer += 1;
//~^ ERROR cannot assign to `*y.pointer` because it is borrowed (Ast) [E0506]
//~| ERROR cannot use `*y.pointer` because it was mutably borrowed (Mir) [E0503]
+ //~| ERROR cannot assign to `*y.pointer` because it is borrowed (Mir) [E0506]
*z.pointer += 1;
}
}
| ------ borrow of `y` occurs here
LL | *y.pointer += 1;
| ^^^^^^^^^^^^^^^ use of borrowed `y`
+...
+LL | *z.pointer += 1;
+ | --------------- borrow later used here
-error: aborting due to 2 previous errors
+error[E0506]: cannot assign to `*y.pointer` because it is borrowed (Mir)
+ --> $DIR/issue-45697-1.rs:30:9
+ |
+LL | let z = copy_borrowed_ptr(&mut y);
+ | ------ borrow of `*y.pointer` occurs here
+LL | *y.pointer += 1;
+ | ^^^^^^^^^^^^^^^ assignment to borrowed `*y.pointer` occurs here
+...
+LL | *z.pointer += 1;
+ | --------------- borrow later used here
+
+error: aborting due to 3 previous errors
Some errors occurred: E0503, E0506.
For more information about an error, try `rustc --explain E0503`.
*y.pointer += 1;
//~^ ERROR cannot assign to `*y.pointer` because it is borrowed (Ast) [E0506]
//~| ERROR cannot use `*y.pointer` because it was mutably borrowed (Mir) [E0503]
+ //~| ERROR cannot assign to `*y.pointer` because it is borrowed (Mir) [E0506]
*z.pointer += 1;
}
}
| ------ borrow of `y` occurs here
LL | *y.pointer += 1;
| ^^^^^^^^^^^^^^^ use of borrowed `y`
+...
+LL | *z.pointer += 1;
+ | --------------- borrow later used here
-error: aborting due to 2 previous errors
+error[E0506]: cannot assign to `*y.pointer` because it is borrowed (Mir)
+ --> $DIR/issue-45697.rs:30:9
+ |
+LL | let z = copy_borrowed_ptr(&mut y);
+ | ------ borrow of `*y.pointer` occurs here
+LL | *y.pointer += 1;
+ | ^^^^^^^^^^^^^^^ assignment to borrowed `*y.pointer` occurs here
+...
+LL | *z.pointer += 1;
+ | --------------- borrow later used here
+
+error: aborting due to 3 previous errors
Some errors occurred: E0503, E0506.
For more information about an error, try `rustc --explain E0503`.
error[E0597]: `z` does not live long enough (Mir)
--> $DIR/issue-46471-1.rs:16:9
|
-LL | &mut z
- | ^^^^^^ borrowed value does not live long enough
-LL | };
- | - `z` dropped here while still borrowed
-...
-LL | }
- | - borrowed value needs to live until here
+LL | let y = {
+ | _____________-
+LL | | let mut z = 0;
+LL | | &mut z
+ | | ^^^^^^ borrowed value does not live long enough
+LL | | };
+ | | -
+ | | |
+ | |_____borrowed value only lives until here
+ | borrow later used here
error: aborting due to 2 previous errors
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(nll)]
+
+fn flatten<'a, 'b, T>(x: &'a &'b T) -> &'a T {
+ x
+}
+
+fn main() {
+ let mut x = "original";
+ let y = &x;
+ let z = &y;
+ let w = flatten(z);
+ x = "modified";
+ //~^ ERROR cannot assign to `x` because it is borrowed [E0506]
+ println!("{}", w); // prints "modified"
+}
--- /dev/null
+error[E0506]: cannot assign to `x` because it is borrowed
+ --> $DIR/issue-48803.rs:22:5
+ |
+LL | let y = &x;
+ | -- borrow of `x` occurs here
+...
+LL | x = "modified";
+ | ^^^^^^^^^^^^^^ assignment to borrowed `x` occurs here
+LL | //~^ ERROR cannot assign to `x` because it is borrowed [E0506]
+LL | println!("{}", w); // prints "modified"
+ | - borrow later used here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0506`.
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// The logic for parsing Kleene operators in macros has a special case to disambiguate `?`.
-// Specifically, `$(pat)?` is the ZeroOrOne operator whereas `$(pat)?+` or `$(pat)?*` are the
-// ZeroOrMore and OneOrMore operators using `?` as a separator. These tests are intended to
-// exercise that logic in the macro parser.
-//
-// Moreover, we also throw in some tests for using a separator with `?`, which is meaningless but
-// included for consistency with `+` and `*`.
-//
-// This test focuses on error cases.
+// Tests the behavior of various Kleene operators in macros with respect to `?` terminals. In
+// particular, `?` in the position of a separator and of a Kleene operator is tested.
#![feature(macro_at_most_once_rep)]
+// should match `` and `a`
macro_rules! foo {
($(a)?) => {}
}
macro_rules! baz {
- ($(a),?) => {} // comma separator is meaningless for `?`
+ ($(a),?) => {} //~ ERROR `?` macro repetition does not allow a separator
}
+// should match `+` and `a+`
macro_rules! barplus {
($(a)?+) => {}
}
+// should match `*` and `a*`
macro_rules! barstar {
($(a)?*) => {}
}
foo!(a?a?a); //~ ERROR no rules expected the token `?`
foo!(a?a); //~ ERROR no rules expected the token `?`
foo!(a?); //~ ERROR no rules expected the token `?`
- baz!(a?a?a); //~ ERROR no rules expected the token `?`
- baz!(a?a); //~ ERROR no rules expected the token `?`
- baz!(a?); //~ ERROR no rules expected the token `?`
- baz!(a,); //~ ERROR unexpected end of macro invocation
- baz!(a?a?a,); //~ ERROR no rules expected the token `?`
- baz!(a?a,); //~ ERROR no rules expected the token `?`
- baz!(a?,); //~ ERROR no rules expected the token `?`
barplus!(); //~ ERROR unexpected end of macro invocation
- barplus!(a?); //~ ERROR unexpected end of macro invocation
- barstar!(a?); //~ ERROR unexpected end of macro invocation
+ barstar!(); //~ ERROR unexpected end of macro invocation
+ barplus!(a?); //~ ERROR no rules expected the token `?`
+ barplus!(a); //~ ERROR unexpected end of macro invocation
+ barstar!(a?); //~ ERROR no rules expected the token `?`
+ barstar!(a); //~ ERROR unexpected end of macro invocation
+ barplus!(+); // ok
+ barstar!(*); // ok
+ barplus!(a+); // ok
+ barstar!(a*); // ok
}
+error: `?` macro repetition does not allow a separator
+ --> $DIR/macro-at-most-once-rep-ambig.rs:22:10
+ |
+LL | ($(a),?) => {} //~ ERROR `?` macro repetition does not allow a separator
+ | ^
+
error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:40:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:36:11
|
LL | foo!(a?a?a); //~ ERROR no rules expected the token `?`
| ^
error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:41:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:37:11
|
LL | foo!(a?a); //~ ERROR no rules expected the token `?`
| ^
error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:42:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:38:11
|
LL | foo!(a?); //~ ERROR no rules expected the token `?`
| ^
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:43:11
- |
-LL | baz!(a?a?a); //~ ERROR no rules expected the token `?`
- | ^
-
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:44:11
- |
-LL | baz!(a?a); //~ ERROR no rules expected the token `?`
- | ^
-
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:45:11
- |
-LL | baz!(a?); //~ ERROR no rules expected the token `?`
- | ^
-
error: unexpected end of macro invocation
- --> $DIR/macro-at-most-once-rep-ambig.rs:46:11
- |
-LL | baz!(a,); //~ ERROR unexpected end of macro invocation
- | ^
-
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:47:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:39:5
|
-LL | baz!(a?a?a,); //~ ERROR no rules expected the token `?`
- | ^
+LL | barplus!(); //~ ERROR unexpected end of macro invocation
+ | ^^^^^^^^^^^
-error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:48:11
+error: unexpected end of macro invocation
+ --> $DIR/macro-at-most-once-rep-ambig.rs:40:5
|
-LL | baz!(a?a,); //~ ERROR no rules expected the token `?`
- | ^
+LL | barstar!(); //~ ERROR unexpected end of macro invocation
+ | ^^^^^^^^^^^
error: no rules expected the token `?`
- --> $DIR/macro-at-most-once-rep-ambig.rs:49:11
+ --> $DIR/macro-at-most-once-rep-ambig.rs:41:15
|
-LL | baz!(a?,); //~ ERROR no rules expected the token `?`
- | ^
+LL | barplus!(a?); //~ ERROR no rules expected the token `?`
+ | ^
error: unexpected end of macro invocation
- --> $DIR/macro-at-most-once-rep-ambig.rs:50:5
+ --> $DIR/macro-at-most-once-rep-ambig.rs:42:14
|
-LL | barplus!(); //~ ERROR unexpected end of macro invocation
- | ^^^^^^^^^^^
+LL | barplus!(a); //~ ERROR unexpected end of macro invocation
+ | ^
-error: unexpected end of macro invocation
- --> $DIR/macro-at-most-once-rep-ambig.rs:51:15
+error: no rules expected the token `?`
+ --> $DIR/macro-at-most-once-rep-ambig.rs:43:15
|
-LL | barplus!(a?); //~ ERROR unexpected end of macro invocation
+LL | barstar!(a?); //~ ERROR no rules expected the token `?`
| ^
error: unexpected end of macro invocation
- --> $DIR/macro-at-most-once-rep-ambig.rs:52:15
+ --> $DIR/macro-at-most-once-rep-ambig.rs:44:14
|
-LL | barstar!(a?); //~ ERROR unexpected end of macro invocation
- | ^
+LL | barstar!(a); //~ ERROR unexpected end of macro invocation
+ | ^
-error: aborting due to 13 previous errors
+error: aborting due to 10 previous errors
// that appear free in its type (hence, we see it before the closure's
// "external requirements" report).
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
let y = 22;
let mut closure = expect_sig(|p, y| *p = y);
//~^ ERROR does not outlive free region
- //~| WARNING not reporting region error due to -Znll
+ //~| WARNING not reporting region error due to nll
closure(&mut p, &y);
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/escape-argument-callee.rs:36:50
|
LL | let mut closure = expect_sig(|p, y| *p = y);
// basically checking that the MIR type checker correctly enforces the
// closure signature.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
//
// except that the closure does so via a second closure.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
// `'b`. This relationship is propagated to the closure creator,
// which reports an error.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
// Test where we fail to approximate due to demanding a postdom
// relationship between our upper bounds.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
|_outlives1, _outlives2, _outlives3, x, y| {
// Only works if 'x: 'y:
let p = x.get();
- //~^ WARN not reporting region error due to -Znll
+ //~^ WARN not reporting region error due to nll
//~| ERROR does not outlive free region
demand_y(x, y, p)
},
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-approximated-fail-no-postdom.rs:55:21
|
LL | let p = x.get();
LL | / |_outlives1, _outlives2, _outlives3, x, y| {
LL | | // Only works if 'x: 'y:
LL | | let p = x.get();
-LL | | //~^ WARN not reporting region error due to -Znll
+LL | | //~^ WARN not reporting region error due to nll
LL | | //~| ERROR does not outlive free region
LL | | demand_y(x, y, p)
LL | | },
// Note: the use of `Cell` here is to introduce invariance. One less
// variable.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
//~^ ERROR lifetime mismatch
// Only works if 'x: 'y:
- demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+ demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
});
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-approximated-ref.rs:57:9
|
-LL | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
| ^^^^^^^^^^^^^^^^^^^^^^^
note: External requirements
LL | | //~^ ERROR lifetime mismatch
LL | |
LL | | // Only works if 'x: 'y:
-LL | | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
LL | | });
| |_____^
|
// where `'x` is bound in closure type but `'a` is free. This forces
// us to approximate `'x` one way or the other.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
let a = 0;
let cell = Cell::new(&a);
foo(cell, |cell_a, cell_x| {
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
cell_a.set(cell_x.get()); // forces 'x: 'a, error in closure
//~^ ERROR does not outlive free region
})
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:31:5
|
LL | foo(cell, |cell_a, cell_x| {
|
LL | foo(cell, |cell_a, cell_x| {
| _______________^
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | cell_a.set(cell_x.get()); // forces 'x: 'a, error in closure
LL | | //~^ ERROR does not outlive free region
LL | | })
// FIXME(#45827) Because of shortcomings in the MIR type checker,
// these errors are not (yet) reported.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
//~^ ERROR does not outlive free region
// Only works if 'x: 'y:
- demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+ demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
});
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:49:9
|
-LL | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
| ^^^^^^^^^^^^^^^^^^^^^^^
note: External requirements
LL | | //~^ ERROR does not outlive free region
LL | |
LL | | // Only works if 'x: 'y:
-LL | | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
LL | | });
| |_____^
|
LL | | //~^ ERROR does not outlive free region
LL | |
LL | | // Only works if 'x: 'y:
-LL | | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
LL | | });
| |_____^
// FIXME(#45827) Because of shortcomings in the MIR type checker,
// these errors are not (yet) reported.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
//~^ ERROR does not outlive free region
// Only works if 'x: 'y:
demand_y(x, y, x.get())
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
});
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:51:9
|
LL | demand_y(x, y, x.get())
LL | | //~^ ERROR does not outlive free region
LL | | // Only works if 'x: 'y:
LL | | demand_y(x, y, x.get())
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | });
| |_____^
|
LL | | //~^ ERROR does not outlive free region
LL | | // Only works if 'x: 'y:
LL | | demand_y(x, y, x.get())
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | });
| |_____^
// relationships. In the 'main' variant, there are a number of
// anonymous regions as well.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
//~^ ERROR lifetime mismatch
// Only works if 'x: 'y:
- demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to -Znll
+ demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to nll
});
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-approximated-val.rs:50:9
|
-LL | demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to nll
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
note: External requirements
LL | | //~^ ERROR lifetime mismatch
LL | |
LL | | // Only works if 'x: 'y:
-LL | | demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | | demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to nll
LL | | });
| |_____^
|
// need to propagate; but in fact we do because identity of free
// regions is erased.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
// compile-pass
#![feature(rustc_attrs)]
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-despite-same-free-region.rs:54:21
|
LL | let p = x.get();
// as it knows of no relationships between `'x` and any
// non-higher-ranked regions.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
// Only works if 'x: 'y:
demand_y(x, y, x.get())
- //~^ WARN not reporting region error due to -Znll
+ //~^ WARN not reporting region error due to nll
//~| ERROR does not outlive free region
});
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:47:9
|
LL | demand_y(x, y, x.get())
| _______________________________________________^
LL | | // Only works if 'x: 'y:
LL | | demand_y(x, y, x.get())
-LL | | //~^ WARN not reporting region error due to -Znll
+LL | | //~^ WARN not reporting region error due to nll
LL | | //~| ERROR does not outlive free region
LL | | });
| |_____^
// as it only knows of regions that `'x` is outlived by, and none that
// `'x` outlives.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
// Only works if 'x: 'y:
demand_y(x, y, x.get())
- //~^ WARN not reporting region error due to -Znll
+ //~^ WARN not reporting region error due to nll
//~| ERROR does not outlive free region
});
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:51:9
|
LL | demand_y(x, y, x.get())
| _______________________________________________^
LL | | // Only works if 'x: 'y:
LL | | demand_y(x, y, x.get())
-LL | | //~^ WARN not reporting region error due to -Znll
+LL | | //~^ WARN not reporting region error due to nll
LL | | //~| ERROR does not outlive free region
LL | | });
| |_____^
// the same `'a` for which it implements `Trait`, which can only be the `'a`
// from the function definition.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
#![allow(dead_code)]
// The latter does not hold.
require(value);
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
});
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/propagate-from-trait-match.rs:55:9
|
LL | require(value);
LL | |
LL | | // This function call requires that
... |
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | });
| |_____^
|
LL | |
LL | | // This function call requires that
... |
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | });
| |_____^
|
// a variety of errors from the older, AST-based machinery (notably
// borrowck), and then we get the NLL error at the end.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
fn foo(x: &u32) -> &'static u32 {
&*x
- //~^ WARN not reporting region error due to -Znll
+ //~^ WARN not reporting region error due to nll
//~| ERROR explicit lifetime required in the type of `x`
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/region-lbr-anon-does-not-outlive-static.rs:19:5
|
LL | &*x
// a variety of errors from the older, AST-based machinery (notably
// borrowck), and then we get the NLL error at the end.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
fn foo<'a>(x: &'a u32) -> &'static u32 {
&*x
- //~^ WARN not reporting region error due to -Znll
+ //~^ WARN not reporting region error due to nll
//~| ERROR does not outlive free region
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/region-lbr-named-does-not-outlive-static.rs:19:5
|
LL | &*x
// a variety of errors from the older, AST-based machinery (notably
// borrowck), and then we get the NLL error at the end.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
fn foo<'a, 'b>(x: &'a u32, y: &'b u32) -> &'b u32 {
&*x
- //~^ WARN not reporting region error due to -Znll
+ //~^ WARN not reporting region error due to nll
//~| ERROR lifetime mismatch
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/region-lbr1-does-not-outlive-ebr2.rs:19:5
|
LL | &*x
// Basic test for free regions in the NLL code. This test does not
// report an error because of the (implied) bound that `'b: 'a`.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
// compile-pass
#![allow(warnings)]
// the first, but actually returns the second. This should fail within
// the closure.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![feature(rustc_attrs)]
#[rustc_regions]
fn test() {
expect_sig(|a, b| b); // ought to return `a`
- //~^ WARN not reporting region error due to -Znll
+ //~^ WARN not reporting region error due to nll
//~| ERROR does not outlive free region
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/return-wrong-bound-region.rs:21:23
|
LL | expect_sig(|a, b| b); // ought to return `a`
|
LL | / fn test() {
LL | | expect_sig(|a, b| b); // ought to return `a`
-LL | | //~^ WARN not reporting region error due to -Znll
+LL | | //~^ WARN not reporting region error due to nll
LL | | //~| ERROR does not outlive free region
LL | | }
| |_^
// Test that MIR borrowck and NLL analysis can handle constants of
// arbitrary types without ICEs.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
// compile-pass
const HI: &str = "hi";
// in the type of `p` includes the points after `&v[0]` up to (but not
// including) the call to `use_x`. The `else` branch is not included.
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
// compile-pass
#![allow(warnings)]
// because of destructor. (Note that the stderr also identifies this
// destructor in the error message.)
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
#![allow(warnings)]
#![feature(dropck_eyepatch)]
// a variety of errors from the older, AST-based machinery (notably
// borrowck), and then we get the NLL error at the end.
-// compile-flags:-Znll -Zborrowck=compare
+// compile-flags:-Zborrowck=compare
struct Map {
}
--> $DIR/issue-47388.rs:18:5
|
LL | let fancy_ref = &(&mut fancy);
- | ------------- help: consider changing this to be a mutable reference: `&mut`
+ | ------------- help: consider changing this to be a mutable reference: `&mut (&mut fancy)`
LL | fancy_ref.num = 6; //~ ERROR E0594
| ^^^^^^^^^^^^^^^^^ `fancy_ref` is a `&` reference, so the data it refers to cannot be written
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Znll
+//compile-flags: -Z emit-end-regions -Zborrowck=mir
#![allow(warnings)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Z nll
+// compile-flags: -Z emit-end-regions -Zborrowck=mir
// compile-pass
#![allow(warnings)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Znll
+//compile-flags: -Z emit-end-regions -Zborrowck=mir
#![allow(warnings)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Znll
+//compile-flags: -Z emit-end-regions -Zborrowck=mir
#![allow(warnings)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Znll
+//compile-flags: -Z emit-end-regions -Zborrowck=mir
#![allow(warnings)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
// compile-pass
#![feature(rustc_attrs)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
fn foo<'a, T>(x: &T) -> impl Foo<'a> {
x
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR explicit lifetime required in the type of `x` [E0621]
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/impl-trait-captures.rs:21:5
|
LL | x
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
use std::fmt::Debug;
fn no_region<'a, T>(x: Box<T>) -> impl Debug + 'a
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
where
T: Debug,
{
}
fn wrong_region<'a, 'b, T>(x: Box<T>) -> impl Debug + 'a
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
where
T: 'b + Debug,
{
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/impl-trait-outlives.rs:17:35
|
LL | fn no_region<'a, T>(x: Box<T>) -> impl Debug + 'a
| ^^^^^^^^^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/impl-trait-outlives.rs:33:42
|
LL | fn wrong_region<'a, 'b, T>(x: Box<T>) -> impl Debug + 'a
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
// Test that we can deduce when projections like `T::Item` outlive the
// function body. Test that this does not imply that `T: 'a` holds.
#[rustc_errors]
fn generic2<T: Iterator>(value: T) {
twice(value, |value_ref, item| invoke2(value_ref, item));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the parameter type `T` may not live long enough
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-implied-bounds.rs:45:36
|
LL | twice(value, |value_ref, item| invoke2(value_ref, item));
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
// Tests closures that propagate an outlives relationship to their
// creator where the subject is a projection with no regions (`<T as
T: Iterator,
{
with_signature(x, |mut y| Box::new(y.next()))
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the associated type `<T as std::iter::Iterator>::Item` may not live long enough
}
T: 'b + Iterator,
{
with_signature(x, |mut y| Box::new(y.next()))
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the associated type `<T as std::iter::Iterator>::Item` may not live long enough
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-no-regions-closure.rs:36:31
|
LL | with_signature(x, |mut y| Box::new(y.next()))
| ^^^^^^^^^^^^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-no-regions-closure.rs:54:31
|
LL | with_signature(x, |mut y| Box::new(y.next()))
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
#![feature(dyn_trait)]
T: Iterator,
{
Box::new(x.next())
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| the associated type `<T as std::iter::Iterator>::Item` may not live long enough
}
T: 'b + Iterator,
{
Box::new(x.next())
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| the associated type `<T as std::iter::Iterator>::Item` may not live long enough
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-no-regions-fn.rs:24:5
|
LL | Box::new(x.next())
| ^^^^^^^^^^^^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-no-regions-fn.rs:40:5
|
LL | Box::new(x.next())
//
// Ensuring that both `T: 'a` and `'b: 'a` holds does work (`elements_outlive`).
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
#![feature(dyn_trait)]
T: Anything<'b>,
{
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the parameter type `T` may not live long enough
//~| ERROR does not outlive free region
}
'a: 'a,
{
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the parameter type `T` may not live long enough
//~| ERROR does not outlive free region
}
// can do better here with a more involved verification step.
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the parameter type `T` may not live long enough
//~| ERROR free region `ReEarlyBound(1, 'b)` does not outlive free region `ReEarlyBound(0, 'a)`
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-one-region-closure.rs:56:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
| ^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-one-region-closure.rs:68:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
| ^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-one-region-closure.rs:90:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
// case, the best way to satisfy the trait bound is to show that `'b:
// 'a`, which can be done in various ways.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
#![feature(dyn_trait)]
T: Anything<'b>,
{
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR does not outlive free region
}
'a: 'a,
{
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR does not outlive free region
}
// can do better here with a more involved verification step.
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR does not outlive free region
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-one-region-trait-bound-closure.rs:48:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
| ^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-one-region-trait-bound-closure.rs:59:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
| ^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-one-region-trait-bound-closure.rs:80:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
// outlive `'static`. In this case, we don't get any errors, and in fact
// we don't even propagate constraints from the closures to the callers.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
// compile-pass
#![allow(warnings)]
// the trait bound, and hence we propagate it to the caller as a type
// test.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
#![feature(dyn_trait)]
T: Anything<'b, 'c>,
{
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR associated type `<T as Anything<'_#5r, '_#6r>>::AssocType` may not live long enough
}
'a: 'a,
{
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR associated type `<T as Anything<'_#6r, '_#7r>>::AssocType` may not live long enough
}
// can do better here with a more involved verification step.
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR associated type `<T as Anything<'_#6r, '_#7r>>::AssocType` may not live long enough
}
T: Anything<'b, 'b>,
{
with_signature(cell, t, |cell, t| require(cell, t));
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR does not outlive free region
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-two-region-trait-bound-closure.rs:49:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
| ^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-two-region-trait-bound-closure.rs:60:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
| ^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-two-region-trait-bound-closure.rs:81:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
| ^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/projection-two-region-trait-bound-closure.rs:109:39
|
LL | with_signature(cell, t, |cell, t| require(cell, t));
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
#![feature(dyn_trait)]
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-closure-approximate-lower-bound.rs:35:31
|
LL | twice(cell, value, |a, b| invoke(a, b));
| ^^^^^^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-closure-approximate-lower-bound.rs:43:31
|
LL | twice(cell, value, |a, b| invoke(a, b));
| ^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-closure-approximate-lower-bound.rs:43:31
|
LL | twice(cell, value, |a, b| invoke(a, b));
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
#![feature(dyn_trait)]
// `'a` (and subsequently reports an error).
with_signature(x, |y| y)
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the parameter type `T` may not live long enough
}
T: 'b + Debug,
{
x
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the parameter type `T` may not live long enough
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-closure-outlives-from-return-type.rs:37:27
|
LL | with_signature(x, |y| y)
| ^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-closure-outlives-from-return-type.rs:53:5
|
LL | x
// `correct_region` for an explanation of how this test is setup; it's
// somewhat intricate.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
#![allow(warnings)]
#![feature(dyn_trait)]
// function, there is no where clause *anywhere*, and hence we
// get an error (but reported by the closure creator).
require(&x, &y)
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
})
}
//~^ ERROR the parameter type `T` may not live long enough
// See `correct_region`
require(&x, &y)
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
})
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-closure-outlives-from-where-clause.rs:45:9
|
LL | require(&x, &y)
| ^^^^^^^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-closure-outlives-from-where-clause.rs:79:9
|
LL | require(&x, &y)
LL | | //
LL | | // See `correct_region`, which explains the point of this
... |
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | })
| |_____^
|
LL | | //
LL | | // See `correct_region`, which explains the point of this
... |
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | })
| |_____^
|
LL | | //~^ ERROR the parameter type `T` may not live long enough
LL | | // See `correct_region`
LL | | require(&x, &y)
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | })
| |_____^
|
LL | | //~^ ERROR the parameter type `T` may not live long enough
LL | | // See `correct_region`
LL | | require(&x, &y)
-LL | | //~^ WARNING not reporting region error due to -Znll
+LL | | //~^ WARNING not reporting region error due to nll
LL | | })
| |_____^
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
// Test that we assume that universal types like `T` outlive the
// function body.
// Error here, because T: 'a is not satisfied.
fn region_static<'a, T>(cell: Cell<&'a usize>, t: T) {
outlives(cell, t)
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| ERROR the parameter type `T` may not live long enough
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-fn-body.rs:30:5
|
LL | outlives(cell, t)
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
#![allow(warnings)]
#![feature(dyn_trait)]
T: Debug,
{
x
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| the parameter type `T` may not live long enough
}
T: 'b + Debug,
{
x
- //~^ WARNING not reporting region error due to -Znll
+ //~^ WARNING not reporting region error due to nll
//~| the parameter type `T` may not live long enough
}
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-fn.rs:22:5
|
LL | x
| ^
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
--> $DIR/ty-param-fn.rs:38:5
|
LL | x
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
// compile-pass
// Test that we assume that universal types like `T` outlive the
}
impl ToNbt<Self> {}
-//~^ ERROR cyclic dependency detected
+//~^ ERROR cycle detected
fn main() {}
-error[E0391]: cyclic dependency detected
+error[E0391]: cycle detected when processing `<impl at $DIR/issue-23305.rs:15:1: 15:20>`
--> $DIR/issue-23305.rs:15:12
|
LL | impl ToNbt<Self> {}
- | ^^^^ cyclic reference
+ | ^^^^
|
-note: the cycle begins when processing `<impl at $DIR/issue-23305.rs:15:1: 15:20>`...
- --> $DIR/issue-23305.rs:15:1
- |
-LL | impl ToNbt<Self> {}
- | ^^^^^^^^^^^^^^^^
- = note: ...which then again requires processing `<impl at $DIR/issue-23305.rs:15:1: 15:20>`, completing the cycle.
+ = note: ...which again requires processing `<impl at $DIR/issue-23305.rs:15:1: 15:20>`, completing the cycle
error: aborting due to previous error
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-arm
+// ignore-aarch64
+// ignore-wasm
+// ignore-emscripten
+// gate-test-sse4a_target_feature
+// gate-test-powerpc_target_feature
+// gate-test-avx512_target_feature
+// gate-test-tbm_target_feature
+// gate-test-arm_target_feature
+// gate-test-aarch64_target_feature
+// gate-test-hexagon_target_feature
+// gate-test-mips_target_feature
+// gate-test-mmx_target_feature
+// min-llvm-version 6.0
+
+#[target_feature(enable = "avx512bw")]
+//~^ ERROR: currently unstable
+unsafe fn foo() {
+}
+
+fn main() {}
--- /dev/null
+error[E0658]: the target feature `avx512bw` is currently unstable
+ --> $DIR/target-feature-gate.rs:26:18
+ |
+LL | #[target_feature(enable = "avx512bw")]
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: add #![feature(avx512_target_feature)] to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
#![feature(target_feature)]
#[target_feature = "+sse2"]
-//~^ WARN: deprecated
+//~^ ERROR: must be of the form
#[target_feature(enable = "foo")]
//~^ ERROR: not valid for this target
#[target_feature(bar)]
-warning: #[target_feature = ".."] is deprecated and will eventually be removed, use #[target_feature(enable = "..")] instead
+error: #[target_feature] attribute must be of the form #[target_feature(..)]
--> $DIR/target-feature-wrong.rs:21:1
|
LL | #[target_feature = "+sse2"]
LL | #[inline(always)]
| ^^^^^^^^^^^^^^^^^
-error: aborting due to 6 previous errors
+error: aborting due to 7 previous errors
),
};
+ let src_base = opt_path(matches, "src-base");
+ let run_ignored = matches.opt_present("ignored");
Config {
compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
valgrind_path: matches.opt_str("valgrind-path"),
force_valgrind: matches.opt_present("force-valgrind"),
llvm_filecheck: matches.opt_str("llvm-filecheck").map(|s| PathBuf::from(&s)),
- src_base: opt_path(matches, "src-base"),
+ src_base,
build_base: opt_path(matches, "build-base"),
stage_id: matches.opt_str("stage-id").unwrap(),
mode: matches
.unwrap()
.parse()
.expect("invalid mode"),
- run_ignored: matches.opt_present("ignored"),
+ run_ignored,
filter: matches.free.first().cloned(),
filter_exact: matches.opt_present("exact"),
logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)),
// want to actually assert warnings about all this code. Instead
// let's just ignore unused code warnings by defaults and tests
// can turn it back on if needed.
- rustc.args(&["-A", "unused"]);
+ if !self.config.src_base.ends_with("rustdoc-ui") {
+ rustc.args(&["-A", "unused"]);
+ }
}
_ => {}
}
}
fn make_compile_args(&self, input_file: &Path, output_file: TargetLocation) -> Command {
- let mut rustc = Command::new(&self.config.rustc_path);
+ let is_rustdoc = self.config.src_base.ends_with("rustdoc-ui");
+ let mut rustc = if !is_rustdoc {
+ Command::new(&self.config.rustc_path)
+ } else {
+ Command::new(&self.config.rustdoc_path.clone().expect("no rustdoc built yet"))
+ };
rustc.arg(input_file).arg("-L").arg(&self.config.build_base);
// Optionally prevent default --target if specified in test compile-flags.
rustc.args(&["--cfg", revision]);
}
- if let Some(ref incremental_dir) = self.props.incremental_dir {
- rustc.args(&[
- "-C",
- &format!("incremental={}", incremental_dir.display()),
- ]);
- rustc.args(&["-Z", "incremental-verify-ich"]);
- rustc.args(&["-Z", "incremental-queries"]);
- }
+ if !is_rustdoc {
+ if let Some(ref incremental_dir) = self.props.incremental_dir {
+ rustc.args(&[
+ "-C",
+ &format!("incremental={}", incremental_dir.display()),
+ ]);
+ rustc.args(&["-Z", "incremental-verify-ich"]);
+ rustc.args(&["-Z", "incremental-queries"]);
+ }
- if self.config.mode == CodegenUnits {
- rustc.args(&["-Z", "human_readable_cgu_names"]);
+ if self.config.mode == CodegenUnits {
+ rustc.args(&["-Z", "human_readable_cgu_names"]);
+ }
}
match self.config.mode {
}
}
-
- if self.config.target == "wasm32-unknown-unknown" {
- // rustc.arg("-g"); // get any backtrace at all on errors
- } else if !self.props.no_prefer_dynamic {
- rustc.args(&["-C", "prefer-dynamic"]);
+ if !is_rustdoc {
+ if self.config.target == "wasm32-unknown-unknown" {
+ // rustc.arg("-g"); // get any backtrace at all on errors
+ } else if !self.props.no_prefer_dynamic {
+ rustc.args(&["-C", "prefer-dynamic"]);
+ }
}
match output_file {
match self.config.compare_mode {
Some(CompareMode::Nll) => {
- rustc.args(&["-Znll", "-Zborrowck=mir", "-Ztwo-phase-borrows"]);
+ rustc.args(&["-Zborrowck=mir", "-Ztwo-phase-borrows"]);
},
None => {},
}
} else {
rustc.args(self.split_maybe_args(&self.config.target_rustcflags));
}
- if let Some(ref linker) = self.config.linker {
- rustc.arg(format!("-Clinker={}", linker));
+ if !is_rustdoc {
+ if let Some(ref linker) = self.config.linker {
+ rustc.arg(format!("-Clinker={}", linker));
+ }
}
rustc.args(&self.props.compile_flags);
.compile_flags
.iter()
.any(|s| s.contains("--error-format"));
-
let proc_res = self.compile_test();
self.check_if_test_should_compile(&proc_res);
// execQuery first parameter is built in getQuery (which takes in the search input).
// execQuery last parameter is built in buildIndex.
// buildIndex requires the hashmap from search-index.
- var functionsToLoad = ["levenshtein", "validateResult", "getQuery", "buildIndex", "execQuery"];
+ var functionsToLoad = ["levenshtein", "validateResult", "getQuery", "buildIndex", "execQuery",
+ "execSearch"];
finalJS += 'window = { "currentCrate": "std" };\n';
finalJS += loadThings(arraysToLoad, 'array', extractArrayVariable, mainJs);
'exports.QUERY = QUERY;exports.EXPECTED = EXPECTED;');
const expected = loadedFile.EXPECTED;
const query = loadedFile.QUERY;
- var results = loaded.execQuery(loaded.getQuery(query), index);
+ var results = loaded.execSearch(loaded.getQuery(query), index);
process.stdout.write('Checking "' + file + '" ... ');
var error_text = [];
for (var key in expected) {
-Subproject commit 1415a4dc23f28644cb197b6bb69c311245c216e2
+Subproject commit dd807e24656c91b4ad22d3cac146edd86315e633