"crypto-hash 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"curl 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "chrono"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "clap"
version = "2.29.0"
"regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "env_logger"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "atty 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "termcolor 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "error-chain"
version = "0.8.1"
name = "tidy"
version = "0.1.0"
+[[package]]
+name = "time"
+version = "0.1.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "toml"
version = "0.2.1"
"checksum cargo_metadata 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "20d6fb2b5574726329c85cdba0df0347fddfec3cf9c8b588f9931708280f5643"
"checksum cc 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "deaf9ec656256bb25b404c51ef50097207b9cbb29c933d31f92cae5a8a0ffee0"
"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
+"checksum chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7c20ebe0b2b08b0aeddba49c609fe7957ba2e33449882cb186a180bc60682fa9"
"checksum clap 2.29.0 (registry+https://github.com/rust-lang/crates.io-index)" = "110d43e343eb29f4f51c1db31beb879d546db27998577e5715270a54bcf41d3f"
"checksum cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "56d741ea7a69e577f6d06b36b7dff4738f680593dc27a701ffa8506b73ce28bb"
"checksum coco 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c06169f5beb7e31c7c67ebf5540b8b472d23e3eade3b2ec7d1f5b504a85f91bd"
"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b"
+"checksum env_logger 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f15f0b172cb4f52ed5dbf47f774a387cd2315d1bf7894ab5af9b083ae27efa5a"
"checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3"
"checksum error-chain 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6930e04918388a9a2e41d518c25cf679ccafe26733fb4127dbf21993f2575d46"
"checksum failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "934799b6c1de475a012a02dab0ace1ace43789ee4b99bcfbf1a2e3e8ced5de82"
"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
"checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963"
+"checksum time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "a15375f1df02096fb3317256ce2cee6a1f42fc84ea5ad5fc8c421cfe40c73098"
"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
"checksum toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a7540f4ffc193e0d3c94121edb19b055670d369f77d5804db11ae053a45b6e7e"
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
cmd.arg(format!("-Clinker={}", target_linker));
}
- // Pass down incremental directory, if any.
- if let Ok(dir) = env::var("RUSTC_INCREMENTAL") {
- cmd.arg(format!("-Zincremental={}", dir));
- }
-
let crate_name = args.windows(2)
.find(|a| &*a[0] == "--crate-name")
.unwrap();
env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
(os.pathsep + env["LIBRARY_PATH"]) \
if "LIBRARY_PATH" in env else ""
+ env["RUSTFLAGS"] = "-Cdebuginfo=2"
env["PATH"] = os.path.join(self.bin_root(), "bin") + \
os.pathsep + env["PATH"]
if not os.path.isfile(self.cargo()):
use {Build, Mode};
use cache::{INTERNER, Interned, Cache};
use check;
+use test;
use flags::Subcommand;
use doc;
use tool;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Kind {
Build,
+ Check,
Test,
Bench,
Dist,
tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient,
tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy,
native::Llvm, tool::Rustfmt, tool::Miri),
- Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest,
- check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Rustdoc,
- check::Linkcheck, check::Cargotest, check::Cargo, check::Rls, check::Docs,
- check::ErrorIndex, check::Distcheck, check::Rustfmt, check::Miri, check::Clippy,
- check::RustdocJS),
-
- Kind::Bench => describe!(check::Crate, check::CrateLibrustc),
+ Kind::Check => describe!(check::Std, check::Test, check::Rustc),
+ Kind::Test => describe!(test::Tidy, test::Bootstrap, test::DefaultCompiletest,
+ test::HostCompiletest, test::Crate, test::CrateLibrustc, test::Rustdoc,
+ test::Linkcheck, test::Cargotest, test::Cargo, test::Rls, test::Docs,
+ test::ErrorIndex, test::Distcheck, test::Rustfmt, test::Miri, test::Clippy,
+ test::RustdocJS),
+ Kind::Bench => describe!(test::Crate, test::CrateLibrustc),
Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon,
doc::Reference, doc::Rustdoc, doc::RustByExample, doc::CargoBook),
pub fn run(build: &Build) {
let (kind, paths) = match build.config.cmd {
Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
+ Subcommand::Check { ref paths } => (Kind::Check, &paths[..]),
Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]),
Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
cargo.env("RUSTC_CODEGEN_UNITS", n.to_string());
}
+
if let Some(host_linker) = self.build.linker(compiler.host) {
cargo.env("RUSTC_HOST_LINKER", host_linker);
}
if let Some(target_linker) = self.build.linker(target) {
cargo.env("RUSTC_TARGET_LINKER", target_linker);
}
- if cmd != "build" {
+ if cmd != "build" && cmd != "check" {
cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build)));
}
// not guaranteeing correctness across builds if the compiler
// is changing under your feet.`
if self.config.incremental && compiler.stage == 0 {
- let incr_dir = self.incremental_dir(compiler);
- cargo.env("RUSTC_INCREMENTAL", incr_dir);
+ cargo.env("CARGO_INCREMENTAL", "1");
}
if let Some(ref on_fail) = self.config.on_fail {
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Implementation of the test-related targets of the build system.
-//!
-//! This file implements the various regression test suites that we execute on
-//! our CI.
+//! Implementation of compiling the compiler and standard library, in "check" mode.
-use std::collections::HashSet;
-use std::env;
-use std::ffi::OsString;
-use std::iter;
-use std::fmt;
-use std::fs::{self, File};
-use std::path::{PathBuf, Path};
-use std::process::Command;
-use std::io::Read;
-
-use build_helper::{self, output};
-
-use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step};
-use cache::{INTERNER, Interned};
-use compile;
-use dist;
-use native;
-use tool::{self, Tool};
-use util::{self, dylib_path, dylib_path_var};
-use {Build, Mode};
-use toolstate::ToolState;
-
-const ADB_TEST_DIR: &str = "/data/tmp/work";
-
-/// The two modes of the test runner; tests or benchmarks.
-#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
-pub enum TestKind {
- /// Run `cargo test`
- Test,
- /// Run `cargo bench`
- Bench,
-}
-
-impl TestKind {
- // Return the cargo subcommand for this test kind
- fn subcommand(self) -> &'static str {
- match self {
- TestKind::Test => "test",
- TestKind::Bench => "bench",
- }
- }
-}
-
-impl fmt::Display for TestKind {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.write_str(match *self {
- TestKind::Test => "Testing",
- TestKind::Bench => "Benchmarking",
- })
- }
-}
-
-fn try_run(build: &Build, cmd: &mut Command) -> bool {
- if !build.fail_fast {
- if !build.try_run(cmd) {
- let mut failures = build.delayed_failures.borrow_mut();
- failures.push(format!("{:?}", cmd));
- return false;
- }
- } else {
- build.run(cmd);
- }
- true
-}
-
-fn try_run_quiet(build: &Build, cmd: &mut Command) {
- if !build.fail_fast {
- if !build.try_run_quiet(cmd) {
- let mut failures = build.delayed_failures.borrow_mut();
- failures.push(format!("{:?}", cmd));
- }
- } else {
- build.run_quiet(cmd);
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Linkcheck {
- host: Interned<String>,
-}
-
-impl Step for Linkcheck {
- type Output = ();
- const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = true;
-
- /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
- ///
- /// This tool in `src/tools` will verify the validity of all our links in the
- /// documentation to ensure we don't have a bunch of dead ones.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let host = self.host;
-
- println!("Linkcheck ({})", host);
-
- builder.default_doc(None);
-
- let _time = util::timeit();
- try_run(build, builder.tool_cmd(Tool::Linkchecker)
- .arg(build.out.join(host).join("doc")));
- }
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- let builder = run.builder;
- run.path("src/tools/linkchecker").default_condition(builder.build.config.docs)
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Linkcheck { host: run.target });
- }
-}
+use compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, add_to_sysroot};
+use builder::{RunConfig, Builder, ShouldRun, Step};
+use {Build, Compiler, Mode};
+use cache::Interned;
+use std::path::PathBuf;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Cargotest {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Cargotest {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/cargotest")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Cargotest {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
- ///
- /// This tool in `src/tools` will check out a few Rust projects and run `cargo
- /// test` to ensure that we don't regress the test suites there.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = builder.compiler(self.stage, self.host);
- builder.ensure(compile::Rustc { compiler, target: compiler.host });
-
- // Note that this is a short, cryptic, and not scoped directory name. This
- // is currently to minimize the length of path on Windows where we otherwise
- // quickly run into path name limit constraints.
- let out_dir = build.out.join("ct");
- t!(fs::create_dir_all(&out_dir));
-
- let _time = util::timeit();
- let mut cmd = builder.tool_cmd(Tool::CargoTest);
- try_run(build, cmd.arg(&build.initial_cargo)
- .arg(&out_dir)
- .env("RUSTC", builder.rustc(compiler))
- .env("RUSTDOC", builder.rustdoc(compiler.host)));
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Cargo {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Cargo {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/cargo")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Cargo {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for `cargo` packaged with Rust.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = builder.compiler(self.stage, self.host);
-
- builder.ensure(tool::Cargo { compiler, target: self.host });
- let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
- if !build.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
- // Don't run cross-compile tests, we may not have cross-compiled libstd libs
- // available.
- cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
-
- try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler)));
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Rls {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Rls {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/rls")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Rls {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for the rls.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- builder.ensure(tool::Rls { compiler, target: self.host });
- let mut cargo = tool::prepare_tool_cargo(builder,
- compiler,
- host,
- "test",
- "src/tools/rls");
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
- builder.add_rustc_lib_path(compiler, &mut cargo);
-
- if try_run(build, &mut cargo) {
- build.save_toolstate("rls", ToolState::TestPass);
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Rustfmt {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Rustfmt {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/rustfmt")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Rustfmt {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for rustfmt.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- builder.ensure(tool::Rustfmt { compiler, target: self.host });
- let mut cargo = tool::prepare_tool_cargo(builder,
- compiler,
- host,
- "test",
- "src/tools/rustfmt");
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
- builder.add_rustc_lib_path(compiler, &mut cargo);
-
- if try_run(build, &mut cargo) {
- build.save_toolstate("rustfmt", ToolState::TestPass);
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Miri {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Miri {
- type Output = ();
- const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- let test_miri = run.builder.build.config.test_miri;
- run.path("src/tools/miri").default_condition(test_miri)
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Miri {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for miri.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- if let Some(miri) = builder.ensure(tool::Miri { compiler, target: self.host }) {
- let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml"));
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
- // miri tests need to know about the stage sysroot
- cargo.env("MIRI_SYSROOT", builder.sysroot(compiler));
- cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
- cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
- cargo.env("MIRI_PATH", miri);
-
- builder.add_rustc_lib_path(compiler, &mut cargo);
-
- if try_run(build, &mut cargo) {
- build.save_toolstate("miri", ToolState::TestPass);
- }
- } else {
- eprintln!("failed to test miri: could not build");
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Clippy {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Clippy {
- type Output = ();
- const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = false;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/clippy")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Clippy {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for clippy.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- if let Some(clippy) = builder.ensure(tool::Clippy { compiler, target: self.host }) {
- let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml"));
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
- // clippy tests need to know about the stage sysroot
- cargo.env("SYSROOT", builder.sysroot(compiler));
- cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
- cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
- let host_libs = builder.stage_out(compiler, Mode::Tool).join(builder.cargo_dir());
- cargo.env("HOST_LIBS", host_libs);
- // clippy tests need to find the driver
- cargo.env("CLIPPY_DRIVER_PATH", clippy);
-
- builder.add_rustc_lib_path(compiler, &mut cargo);
-
- if try_run(build, &mut cargo) {
- build.save_toolstate("clippy-driver", ToolState::TestPass);
- }
- } else {
- eprintln!("failed to test clippy: could not build");
- }
- }
-}
-
-fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString {
- // Configure PATH to find the right rustc. NB. we have to use PATH
- // and not RUSTC because the Cargo test suite has tests that will
- // fail if rustc is not spelled `rustc`.
- let path = builder.sysroot(compiler).join("bin");
- let old_path = env::var_os("PATH").unwrap_or_default();
- env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("")
-}
-
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
-pub struct RustdocJS {
- pub host: Interned<String>,
+pub struct Std {
pub target: Interned<String>,
}
-impl Step for RustdocJS {
+impl Step for Std {
type Output = ();
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/test/rustdoc-js")
+ run.path("src/libstd").krate("std")
}
fn make_run(run: RunConfig) {
- run.builder.ensure(RustdocJS {
- host: run.host,
+ run.builder.ensure(Std {
target: run.target,
});
}
- fn run(self, builder: &Builder) {
- if let Some(ref nodejs) = builder.config.nodejs {
- let mut command = Command::new(nodejs);
- command.args(&["src/tools/rustdoc-js/tester.js", &*self.host]);
- builder.ensure(::doc::Std {
- target: self.target,
- stage: builder.top_stage,
- });
- builder.run(&mut command);
- } else {
- println!("No nodejs found, skipping \"src/test/rustdoc-js\" tests");
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Tidy {
- host: Interned<String>,
-}
-
-impl Step for Tidy {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
- const ONLY_BUILD: bool = true;
-
- /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
- ///
- /// This tool in `src/tools` checks up on various bits and pieces of style and
- /// otherwise just implements a few lint-like checks that are specific to the
- /// compiler itself.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let host = self.host;
-
- let _folder = build.fold_output(|| "tidy");
- println!("tidy check ({})", host);
- let mut cmd = builder.tool_cmd(Tool::Tidy);
- cmd.arg(build.src.join("src"));
- if !build.config.vendor {
- cmd.arg("--no-vendor");
- }
- if build.config.quiet_tests {
- cmd.arg("--quiet");
- }
- try_run(build, &mut cmd);
- }
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/tidy")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Tidy {
- host: run.builder.build.build,
- });
- }
-}
-
-fn testdir(build: &Build, host: Interned<String>) -> PathBuf {
- build.out.join(host).join("test")
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct Test {
- path: &'static str,
- mode: &'static str,
- suite: &'static str,
-}
-
-static DEFAULT_COMPILETESTS: &[Test] = &[
- Test { path: "src/test/ui", mode: "ui", suite: "ui" },
- Test { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" },
- Test { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" },
- Test { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" },
- Test { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" },
- Test {
- path: "src/test/run-pass-valgrind",
- mode: "run-pass-valgrind",
- suite: "run-pass-valgrind"
- },
- Test { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" },
- Test { path: "src/test/codegen", mode: "codegen", suite: "codegen" },
- Test { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" },
- Test { path: "src/test/incremental", mode: "incremental", suite: "incremental" },
-
- // What this runs varies depending on the native platform being apple
- Test { path: "src/test/debuginfo", mode: "debuginfo-XXX", suite: "debuginfo" },
-];
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct DefaultCompiletest {
- compiler: Compiler,
- target: Interned<String>,
- mode: &'static str,
- suite: &'static str,
-}
-
-impl Step for DefaultCompiletest {
- type Output = ();
- const DEFAULT: bool = true;
-
- fn should_run(mut run: ShouldRun) -> ShouldRun {
- for test in DEFAULT_COMPILETESTS {
- run = run.path(test.path);
- }
- run
- }
-
- fn make_run(run: RunConfig) {
- let compiler = run.builder.compiler(run.builder.top_stage, run.host);
-
- let test = run.path.map(|path| {
- DEFAULT_COMPILETESTS.iter().find(|&&test| {
- path.ends_with(test.path)
- }).unwrap_or_else(|| {
- panic!("make_run in compile test to receive test path, received {:?}", path);
- })
- });
-
- if let Some(test) = test {
- run.builder.ensure(DefaultCompiletest {
- compiler,
- target: run.target,
- mode: test.mode,
- suite: test.suite,
- });
- } else {
- for test in DEFAULT_COMPILETESTS {
- run.builder.ensure(DefaultCompiletest {
- compiler,
- target: run.target,
- mode: test.mode,
- suite: test.suite
- });
- }
- }
- }
-
- fn run(self, builder: &Builder) {
- builder.ensure(Compiletest {
- compiler: self.compiler,
- target: self.target,
- mode: self.mode,
- suite: self.suite,
- })
- }
-}
-
-// Also default, but host-only.
-static HOST_COMPILETESTS: &[Test] = &[
- Test { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" },
- Test { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" },
- Test { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" },
- Test {
- path: "src/test/compile-fail-fulldeps",
- mode: "compile-fail",
- suite: "compile-fail-fulldeps",
- },
- Test {
- path: "src/test/incremental-fulldeps",
- mode: "incremental",
- suite: "incremental-fulldeps",
- },
- Test { path: "src/test/run-make", mode: "run-make", suite: "run-make" },
- Test { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" },
-
- Test { path: "src/test/pretty", mode: "pretty", suite: "pretty" },
- Test { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass" },
- Test { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail" },
- Test { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind" },
- Test { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps" },
- Test { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps" },
-];
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct HostCompiletest {
- compiler: Compiler,
- target: Interned<String>,
- mode: &'static str,
- suite: &'static str,
-}
-
-impl Step for HostCompiletest {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
-
- fn should_run(mut run: ShouldRun) -> ShouldRun {
- for test in HOST_COMPILETESTS {
- run = run.path(test.path);
- }
- run
- }
-
- fn make_run(run: RunConfig) {
- let compiler = run.builder.compiler(run.builder.top_stage, run.host);
-
- let test = run.path.map(|path| {
- HOST_COMPILETESTS.iter().find(|&&test| {
- path.ends_with(test.path)
- }).unwrap_or_else(|| {
- panic!("make_run in compile test to receive test path, received {:?}", path);
- })
- });
-
- if let Some(test) = test {
- run.builder.ensure(HostCompiletest {
- compiler,
- target: run.target,
- mode: test.mode,
- suite: test.suite,
- });
- } else {
- for test in HOST_COMPILETESTS {
- if test.mode == "pretty" {
- continue;
- }
- run.builder.ensure(HostCompiletest {
- compiler,
- target: run.target,
- mode: test.mode,
- suite: test.suite
- });
- }
- }
- }
-
- fn run(self, builder: &Builder) {
- builder.ensure(Compiletest {
- compiler: self.compiler,
- target: self.target,
- mode: self.mode,
- suite: self.suite,
- })
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct Compiletest {
- compiler: Compiler,
- target: Interned<String>,
- mode: &'static str,
- suite: &'static str,
-}
-
-impl Step for Compiletest {
- type Output = ();
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.never()
- }
-
- /// Executes the `compiletest` tool to run a suite of tests.
- ///
- /// Compiles all tests with `compiler` for `target` with the specified
- /// compiletest `mode` and `suite` arguments. For example `mode` can be
- /// "run-pass" or `suite` can be something like `debuginfo`.
fn run(self, builder: &Builder) {
let build = builder.build;
- let compiler = self.compiler;
let target = self.target;
- let mode = self.mode;
- let suite = self.suite;
-
- // Skip codegen tests if they aren't enabled in configuration.
- if !build.config.codegen_tests && suite == "codegen" {
- return;
- }
-
- if suite == "debuginfo" {
- // Skip debuginfo tests on MSVC
- if build.build.contains("msvc") {
- return;
- }
-
- if mode == "debuginfo-XXX" {
- return if build.build.contains("apple") {
- builder.ensure(Compiletest {
- mode: "debuginfo-lldb",
- ..self
- });
- } else {
- builder.ensure(Compiletest {
- mode: "debuginfo-gdb",
- ..self
- });
- };
- }
-
- builder.ensure(dist::DebuggerScripts {
- sysroot: builder.sysroot(compiler),
- host: target
- });
- }
-
- if suite.ends_with("fulldeps") ||
- // FIXME: Does pretty need librustc compiled? Note that there are
- // fulldeps test suites with mode = pretty as well.
- mode == "pretty" ||
- mode == "rustdoc" ||
- mode == "run-make" {
- builder.ensure(compile::Rustc { compiler, target });
- }
-
- builder.ensure(compile::Test { compiler, target });
- builder.ensure(native::TestHelpers { target });
- builder.ensure(RemoteCopyLibs { compiler, target });
-
- let _folder = build.fold_output(|| format!("test_{}", suite));
- println!("Check compiletest suite={} mode={} ({} -> {})",
- suite, mode, &compiler.host, target);
- let mut cmd = builder.tool_cmd(Tool::Compiletest);
-
- // compiletest currently has... a lot of arguments, so let's just pass all
- // of them!
-
- cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler));
- cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
- cmd.arg("--rustc-path").arg(builder.rustc(compiler));
-
- // Avoid depending on rustdoc when we don't need it.
- if mode == "rustdoc" || mode == "run-make" {
- cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host));
- }
-
- cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
- cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
- cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
- cmd.arg("--mode").arg(mode);
- cmd.arg("--target").arg(target);
- cmd.arg("--host").arg(&*compiler.host);
- cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build));
-
- if let Some(ref nodejs) = build.config.nodejs {
- cmd.arg("--nodejs").arg(nodejs);
- }
-
- let mut flags = vec!["-Crpath".to_string()];
- if build.config.rust_optimize_tests {
- flags.push("-O".to_string());
- }
- if build.config.rust_debuginfo_tests {
- flags.push("-g".to_string());
- }
- flags.push("-Zmiri -Zunstable-options".to_string());
- flags.push(build.config.cmd.rustc_args().join(" "));
-
- if let Some(linker) = build.linker(target) {
- cmd.arg("--linker").arg(linker);
- }
-
- let hostflags = flags.clone();
- cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
-
- let mut targetflags = flags.clone();
- targetflags.push(format!("-Lnative={}",
- build.test_helpers_out(target).display()));
- cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
-
- cmd.arg("--docck-python").arg(build.python());
-
- if build.build.ends_with("apple-darwin") {
- // Force /usr/bin/python on macOS for LLDB tests because we're loading the
- // LLDB plugin's compiled module which only works with the system python
- // (namely not Homebrew-installed python)
- cmd.arg("--lldb-python").arg("/usr/bin/python");
- } else {
- cmd.arg("--lldb-python").arg(build.python());
- }
-
- if let Some(ref gdb) = build.config.gdb {
- cmd.arg("--gdb").arg(gdb);
- }
- if let Some(ref vers) = build.lldb_version {
- cmd.arg("--lldb-version").arg(vers);
- }
- if let Some(ref dir) = build.lldb_python_dir {
- cmd.arg("--lldb-python-dir").arg(dir);
- }
-
- cmd.args(&build.config.cmd.test_args());
-
- if build.is_verbose() {
- cmd.arg("--verbose");
- }
-
- if build.config.quiet_tests {
- cmd.arg("--quiet");
- }
-
- if build.config.llvm_enabled {
- let llvm_config = build.llvm_config(target);
- let llvm_version = output(Command::new(&llvm_config).arg("--version"));
- cmd.arg("--llvm-version").arg(llvm_version);
- if !build.is_rust_llvm(target) {
- cmd.arg("--system-llvm");
- }
-
- // Only pass correct values for these flags for the `run-make` suite as it
- // requires that a C++ compiler was configured which isn't always the case.
- if suite == "run-make" {
- let llvm_components = output(Command::new(&llvm_config).arg("--components"));
- let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
- cmd.arg("--cc").arg(build.cc(target))
- .arg("--cxx").arg(build.cxx(target).unwrap())
- .arg("--cflags").arg(build.cflags(target).join(" "))
- .arg("--llvm-components").arg(llvm_components.trim())
- .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
- if let Some(ar) = build.ar(target) {
- cmd.arg("--ar").arg(ar);
- }
- }
- }
- if suite == "run-make" && !build.config.llvm_enabled {
- println!("Ignoring run-make test suite as they generally dont work without LLVM");
- return;
- }
+ let compiler = builder.compiler(0, build.build);
- if suite != "run-make" {
- cmd.arg("--cc").arg("")
- .arg("--cxx").arg("")
- .arg("--cflags").arg("")
- .arg("--llvm-components").arg("")
- .arg("--llvm-cxxflags").arg("");
- }
+ let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
+ println!("Checking std artifacts ({} -> {})", &compiler.host, target);
- if build.remote_tested(target) {
- cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient));
- }
-
- // Running a C compiler on MSVC requires a few env vars to be set, to be
- // sure to set them here.
- //
- // Note that if we encounter `PATH` we make sure to append to our own `PATH`
- // rather than stomp over it.
- if target.contains("msvc") {
- for &(ref k, ref v) in build.cc[&target].env() {
- if k != "PATH" {
- cmd.env(k, v);
- }
- }
- }
- cmd.env("RUSTC_BOOTSTRAP", "1");
- build.add_rust_test_threads(&mut cmd);
-
- if build.config.sanitizers {
- cmd.env("SANITIZER_SUPPORT", "1");
- }
-
- if build.config.profiler {
- cmd.env("PROFILER_SUPPORT", "1");
- }
-
- cmd.arg("--adb-path").arg("adb");
- cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
- if target.contains("android") {
- // Assume that cc for this target comes from the android sysroot
- cmd.arg("--android-cross-path")
- .arg(build.cc(target).parent().unwrap().parent().unwrap());
- } else {
- cmd.arg("--android-cross-path").arg("");
- }
-
- build.ci_env.force_coloring_in_ci(&mut cmd);
-
- let _time = util::timeit();
- try_run(build, &mut cmd);
+ let out_dir = build.stage_out(compiler, Mode::Libstd);
+ build.clear_if_dirty(&out_dir, &builder.rustc(compiler));
+ let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "check");
+ std_cargo(build, &compiler, target, &mut cargo);
+ run_cargo(build,
+ &mut cargo,
+ &libstd_stamp(build, compiler, target),
+ true);
+ let libdir = builder.sysroot_libdir(compiler, target);
+ add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Docs {
- compiler: Compiler,
+pub struct Rustc {
+ pub target: Interned<String>,
}
-impl Step for Docs {
+impl Step for Rustc {
type Output = ();
- const DEFAULT: bool = true;
const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/doc")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Docs {
- compiler: run.builder.compiler(run.builder.top_stage, run.host),
- });
- }
-
- /// Run `rustdoc --test` for all documentation in `src/doc`.
- ///
- /// This will run all tests in our markdown documentation (e.g. the book)
- /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
- /// `compiler`.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = self.compiler;
-
- builder.ensure(compile::Test { compiler, target: compiler.host });
-
- // Do a breadth-first traversal of the `src/doc` directory and just run
- // tests for all files that end in `*.md`
- let mut stack = vec![build.src.join("src/doc")];
- let _time = util::timeit();
- let _folder = build.fold_output(|| "test_docs");
-
- while let Some(p) = stack.pop() {
- if p.is_dir() {
- stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
- continue
- }
-
- if p.extension().and_then(|s| s.to_str()) != Some("md") {
- continue;
- }
-
- // The nostarch directory in the book is for no starch, and so isn't
- // guaranteed to build. We don't care if it doesn't build, so skip it.
- if p.to_str().map_or(false, |p| p.contains("nostarch")) {
- continue;
- }
-
- markdown_test(builder, compiler, &p);
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct ErrorIndex {
- compiler: Compiler,
-}
-
-impl Step for ErrorIndex {
- type Output = ();
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/error_index_generator")
+ run.path("src/librustc").krate("rustc-main")
}
fn make_run(run: RunConfig) {
- run.builder.ensure(ErrorIndex {
- compiler: run.builder.compiler(run.builder.top_stage, run.host),
- });
- }
-
- /// Run the error index generator tool to execute the tests located in the error
- /// index.
- ///
- /// The `error_index_generator` tool lives in `src/tools` and is used to
- /// generate a markdown file from the error indexes of the code base which is
- /// then passed to `rustdoc --test`.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = self.compiler;
-
- builder.ensure(compile::Std { compiler, target: compiler.host });
-
- let _folder = build.fold_output(|| "test_error_index");
- println!("Testing error-index stage{}", compiler.stage);
-
- let dir = testdir(build, compiler.host);
- t!(fs::create_dir_all(&dir));
- let output = dir.join("error-index.md");
-
- let _time = util::timeit();
- build.run(builder.tool_cmd(Tool::ErrorIndex)
- .arg("markdown")
- .arg(&output)
- .env("CFG_BUILD", &build.build)
- .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir()));
-
- markdown_test(builder, compiler, &output);
- }
-}
-
-fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) {
- let build = builder.build;
- let mut file = t!(File::open(markdown));
- let mut contents = String::new();
- t!(file.read_to_string(&mut contents));
- if !contents.contains("```") {
- return;
- }
-
- println!("doc tests for: {}", markdown.display());
- let mut cmd = builder.rustdoc_cmd(compiler.host);
- build.add_rust_test_threads(&mut cmd);
- cmd.arg("--test");
- cmd.arg(markdown);
- cmd.env("RUSTC_BOOTSTRAP", "1");
-
- let test_args = build.config.cmd.test_args().join(" ");
- cmd.arg("--test-args").arg(test_args);
-
- if build.config.quiet_tests {
- try_run_quiet(build, &mut cmd);
- } else {
- try_run(build, &mut cmd);
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct CrateLibrustc {
- compiler: Compiler,
- target: Interned<String>,
- test_kind: TestKind,
- krate: Option<Interned<String>>,
-}
-
-impl Step for CrateLibrustc {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.krate("rustc-main")
- }
-
- fn make_run(run: RunConfig) {
- let builder = run.builder;
- let compiler = builder.compiler(builder.top_stage, run.host);
-
- let make = |name: Option<Interned<String>>| {
- let test_kind = if builder.kind == Kind::Test {
- TestKind::Test
- } else if builder.kind == Kind::Bench {
- TestKind::Bench
- } else {
- panic!("unexpected builder.kind in crate: {:?}", builder.kind);
- };
-
- builder.ensure(CrateLibrustc {
- compiler,
- target: run.target,
- test_kind,
- krate: name,
- });
- };
-
- if let Some(path) = run.path {
- for (name, krate_path) in builder.crates("rustc-main") {
- if path.ends_with(krate_path) {
- make(Some(name));
- }
- }
- } else {
- make(None);
- }
- }
-
-
- fn run(self, builder: &Builder) {
- builder.ensure(Crate {
- compiler: self.compiler,
- target: self.target,
- mode: Mode::Librustc,
- test_kind: self.test_kind,
- krate: self.krate,
+ run.builder.ensure(Rustc {
+ target: run.target,
});
}
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Crate {
- compiler: Compiler,
- target: Interned<String>,
- mode: Mode,
- test_kind: TestKind,
- krate: Option<Interned<String>>,
-}
-
-impl Step for Crate {
- type Output = ();
- const DEFAULT: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.krate("std").krate("test")
- }
- fn make_run(run: RunConfig) {
- let builder = run.builder;
- let compiler = builder.compiler(builder.top_stage, run.host);
-
- let make = |mode: Mode, name: Option<Interned<String>>| {
- let test_kind = if builder.kind == Kind::Test {
- TestKind::Test
- } else if builder.kind == Kind::Bench {
- TestKind::Bench
- } else {
- panic!("unexpected builder.kind in crate: {:?}", builder.kind);
- };
-
- builder.ensure(Crate {
- compiler,
- target: run.target,
- mode,
- test_kind,
- krate: name,
- });
- };
-
- if let Some(path) = run.path {
- for (name, krate_path) in builder.crates("std") {
- if path.ends_with(krate_path) {
- make(Mode::Libstd, Some(name));
- }
- }
- for (name, krate_path) in builder.crates("test") {
- if path.ends_with(krate_path) {
- make(Mode::Libtest, Some(name));
- }
- }
- } else {
- make(Mode::Libstd, None);
- make(Mode::Libtest, None);
- }
- }
-
- /// Run all unit tests plus documentation tests for an entire crate DAG defined
- /// by a `Cargo.toml`
- ///
- /// This is what runs tests for crates like the standard library, compiler, etc.
- /// It essentially is the driver for running `cargo test`.
+    /// Check the compiler.
     ///
-    /// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
-    /// arguments, and those arguments are discovered from `cargo metadata`.
+    /// This will check the compiler for a particular stage of the build using
+    /// the `compiler` targeting the `target` architecture. The artifacts
+    /// created will also be linked into the sysroot directory.
fn run(self, builder: &Builder) {
let build = builder.build;
- let compiler = self.compiler;
+ let compiler = builder.compiler(0, build.build);
let target = self.target;
- let mode = self.mode;
- let test_kind = self.test_kind;
- let krate = self.krate;
-
- builder.ensure(compile::Test { compiler, target });
- builder.ensure(RemoteCopyLibs { compiler, target });
-
- // If we're not doing a full bootstrap but we're testing a stage2 version of
- // libstd, then what we're actually testing is the libstd produced in
- // stage1. Reflect that here by updating the compiler that we're working
- // with automatically.
- let compiler = if build.force_use_stage1(compiler, target) {
- builder.compiler(1, compiler.host)
- } else {
- compiler.clone()
- };
-
- let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand());
- let (name, root) = match mode {
- Mode::Libstd => {
- compile::std_cargo(build, &compiler, target, &mut cargo);
- ("libstd", "std")
- }
- Mode::Libtest => {
- compile::test_cargo(build, &compiler, target, &mut cargo);
- ("libtest", "test")
- }
- Mode::Librustc => {
- builder.ensure(compile::Rustc { compiler, target });
- compile::rustc_cargo(build, target, &mut cargo);
- ("librustc", "rustc-main")
- }
- _ => panic!("can only test libraries"),
- };
- let root = INTERNER.intern_string(String::from(root));
- let _folder = build.fold_output(|| {
- format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
- });
- println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
- &compiler.host, target);
-
- // Build up the base `cargo test` command.
- //
- // Pass in some standard flags then iterate over the graph we've discovered
- // in `cargo metadata` with the maps above and figure out what `-p`
- // arguments need to get passed.
- if test_kind.subcommand() == "test" && !build.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
- match krate {
- Some(krate) => {
- cargo.arg("-p").arg(krate);
- }
- None => {
- let mut visited = HashSet::new();
- let mut next = vec![root];
- while let Some(name) = next.pop() {
- // Right now jemalloc and the sanitizer crates are
- // target-specific crate in the sense that it's not present
- // on all platforms. Custom skip it here for now, but if we
- // add more this probably wants to get more generalized.
- //
- // Also skip `build_helper` as it's not compiled normally
- // for target during the bootstrap and it's just meant to be
- // a helper crate, not tested. If it leaks through then it
- // ends up messing with various mtime calculations and such.
- if !name.contains("jemalloc") &&
- *name != *"build_helper" &&
- !(name.starts_with("rustc_") && name.ends_with("san")) &&
- name != "dlmalloc" {
- cargo.arg("-p").arg(&format!("{}:0.0.0", name));
- }
- for dep in build.crates[&name].deps.iter() {
- if visited.insert(dep) {
- next.push(*dep);
- }
- }
- }
- }
- }
-
- // The tests are going to run with the *target* libraries, so we need to
- // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
- //
- // Note that to run the compiler we need to run with the *host* libraries,
- // but our wrapper scripts arrange for that to be the case anyway.
- let mut dylib_path = dylib_path();
- dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target)));
- cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
- cargo.arg("--");
- cargo.args(&build.config.cmd.test_args());
+ let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
+ println!("Checking compiler artifacts ({} -> {})", &compiler.host, target);
- if build.config.quiet_tests {
- cargo.arg("--quiet");
- }
+ let stage_out = builder.stage_out(compiler, Mode::Librustc);
+ build.clear_if_dirty(&stage_out, &libstd_stamp(build, compiler, target));
+ build.clear_if_dirty(&stage_out, &libtest_stamp(build, compiler, target));
- let _time = util::timeit();
-
- if target.contains("emscripten") {
- cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
- build.config.nodejs.as_ref().expect("nodejs not configured"));
- } else if target.starts_with("wasm32") {
- // On the wasm32-unknown-unknown target we're using LTO which is
- // incompatible with `-C prefer-dynamic`, so disable that here
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
- let node = build.config.nodejs.as_ref()
- .expect("nodejs not configured");
- let runner = format!("{} {}/src/etc/wasm32-shim.js",
- node.display(),
- build.src.display());
- cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner);
- } else if build.remote_tested(target) {
- cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
- format!("{} run",
- builder.tool_exe(Tool::RemoteTestClient).display()));
- }
- try_run(build, &mut cargo);
+ let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "check");
+ rustc_cargo(build, target, &mut cargo);
+ run_cargo(build,
+ &mut cargo,
+ &librustc_stamp(build, compiler, target),
+ true);
+ let libdir = builder.sysroot_libdir(compiler, target);
+ add_to_sysroot(&libdir, &librustc_stamp(build, compiler, target));
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Rustdoc {
- host: Interned<String>,
- test_kind: TestKind,
+pub struct Test {
+ pub target: Interned<String>,
}
-impl Step for Rustdoc {
+impl Step for Test {
type Output = ();
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/librustdoc").path("src/tools/rustdoc")
+ run.path("src/libtest").krate("test")
}
fn make_run(run: RunConfig) {
- let builder = run.builder;
-
- let test_kind = if builder.kind == Kind::Test {
- TestKind::Test
- } else if builder.kind == Kind::Bench {
- TestKind::Bench
- } else {
- panic!("unexpected builder.kind in crate: {:?}", builder.kind);
- };
-
- builder.ensure(Rustdoc {
- host: run.host,
- test_kind,
+ run.builder.ensure(Test {
+ target: run.target,
});
}
fn run(self, builder: &Builder) {
let build = builder.build;
- let test_kind = self.test_kind;
-
- let compiler = builder.compiler(builder.top_stage, self.host);
- let target = compiler.host;
-
- let mut cargo = tool::prepare_tool_cargo(builder,
- compiler,
- target,
- test_kind.subcommand(),
- "src/tools/rustdoc");
- let _folder = build.fold_output(|| {
- format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage)
- });
- println!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage,
- &compiler.host, target);
-
- if test_kind.subcommand() == "test" && !build.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
- cargo.arg("-p").arg("rustdoc:0.0.0");
-
- cargo.arg("--");
- cargo.args(&build.config.cmd.test_args());
-
- if build.config.quiet_tests {
- cargo.arg("--quiet");
- }
-
- let _time = util::timeit();
+ let target = self.target;
+ let compiler = builder.compiler(0, build.build);
- try_run(build, &mut cargo);
+ let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
+ println!("Checking test artifacts ({} -> {})", &compiler.host, target);
+ let out_dir = build.stage_out(compiler, Mode::Libtest);
+ build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
+ let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "check");
+ test_cargo(build, &compiler, target, &mut cargo);
+ run_cargo(build,
+ &mut cargo,
+ &libtest_stamp(build, compiler, target),
+ true);
+ let libdir = builder.sysroot_libdir(compiler, target);
+ add_to_sysroot(&libdir, &libtest_stamp(build, compiler, target));
}
}
-fn envify(s: &str) -> String {
- s.chars().map(|c| {
- match c {
- '-' => '_',
- c => c,
- }
- }).flat_map(|c| c.to_uppercase()).collect()
+/// Cargo's output path for the standard library in a given stage, compiled
+/// by a particular compiler for the specified target.
+pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ build.cargo_out(compiler, Mode::Libstd, target).join(".libstd-check.stamp")
}
-/// Some test suites are run inside emulators or on remote devices, and most
-/// of our test binaries are linked dynamically which means we need to ship
-/// the standard library and such to the emulator ahead of time. This step
-/// represents this and is a dependency of all test suites.
-///
-/// Most of the time this is a noop. For some steps such as shipping data to
-/// QEMU we have to build our own tools so we've got conditional dependencies
-/// on those programs as well. Note that the remote test client is built for
-/// the build target (us) and the server is built for the target.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct RemoteCopyLibs {
- compiler: Compiler,
- target: Interned<String>,
+/// Cargo's output path for libtest in a given stage, compiled by a particular
+/// compiler for the specified target.
+pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ build.cargo_out(compiler, Mode::Libtest, target).join(".libtest-check.stamp")
}
-impl Step for RemoteCopyLibs {
- type Output = ();
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.never()
- }
-
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = self.compiler;
- let target = self.target;
- if !build.remote_tested(target) {
- return
- }
-
- builder.ensure(compile::Test { compiler, target });
-
- println!("REMOTE copy libs to emulator ({})", target);
- t!(fs::create_dir_all(build.out.join("tmp")));
-
- let server = builder.ensure(tool::RemoteTestServer { compiler, target });
-
- // Spawn the emulator and wait for it to come online
- let tool = builder.tool_exe(Tool::RemoteTestClient);
- let mut cmd = Command::new(&tool);
- cmd.arg("spawn-emulator")
- .arg(target)
- .arg(&server)
- .arg(build.out.join("tmp"));
- if let Some(rootfs) = build.qemu_rootfs(target) {
- cmd.arg(rootfs);
- }
- build.run(&mut cmd);
-
- // Push all our dylibs to the emulator
- for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) {
- let f = t!(f);
- let name = f.file_name().into_string().unwrap();
- if util::is_dylib(&name) {
- build.run(Command::new(&tool)
- .arg("push")
- .arg(f.path()));
- }
- }
- }
+/// Cargo's output path for librustc in a given stage, compiled by a particular
+/// compiler for the specified target.
+pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ build.cargo_out(compiler, Mode::Librustc, target).join(".librustc-check.stamp")
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Distcheck;
-
-impl Step for Distcheck {
- type Output = ();
- const ONLY_BUILD: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("distcheck")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Distcheck);
- }
-
- /// Run "distcheck", a 'make check' from a tarball
- fn run(self, builder: &Builder) {
- let build = builder.build;
-
- println!("Distcheck");
- let dir = build.out.join("tmp").join("distcheck");
- let _ = fs::remove_dir_all(&dir);
- t!(fs::create_dir_all(&dir));
-
- // Guarantee that these are built before we begin running.
- builder.ensure(dist::PlainSourceTarball);
- builder.ensure(dist::Src);
-
- let mut cmd = Command::new("tar");
- cmd.arg("-xzf")
- .arg(builder.ensure(dist::PlainSourceTarball))
- .arg("--strip-components=1")
- .current_dir(&dir);
- build.run(&mut cmd);
- build.run(Command::new("./configure")
- .args(&build.config.configure_args)
- .arg("--enable-vendor")
- .current_dir(&dir));
- build.run(Command::new(build_helper::make(&build.build))
- .arg("check")
- .current_dir(&dir));
-
- // Now make sure that rust-src has all of libstd's dependencies
- println!("Distcheck rust-src");
- let dir = build.out.join("tmp").join("distcheck-src");
- let _ = fs::remove_dir_all(&dir);
- t!(fs::create_dir_all(&dir));
-
- let mut cmd = Command::new("tar");
- cmd.arg("-xzf")
- .arg(builder.ensure(dist::Src))
- .arg("--strip-components=1")
- .current_dir(&dir);
- build.run(&mut cmd);
-
- let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
- build.run(Command::new(&build.initial_cargo)
- .arg("generate-lockfile")
- .arg("--manifest-path")
- .arg(&toml)
- .current_dir(&dir));
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Bootstrap;
-
-impl Step for Bootstrap {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
- const ONLY_BUILD: bool = true;
-
- /// Test the build system itself
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let mut cmd = Command::new(&build.initial_cargo);
- cmd.arg("test")
- .current_dir(build.src.join("src/bootstrap"))
- .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
- .env("RUSTC_BOOTSTRAP", "1")
- .env("RUSTC", &build.initial_rustc);
- if !build.fail_fast {
- cmd.arg("--no-fail-fast");
- }
- cmd.arg("--").args(&build.config.cmd.test_args());
- try_run(build, &mut cmd);
- }
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/bootstrap")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Bootstrap);
- }
-}
std_cargo(build, &compiler, target, &mut cargo);
run_cargo(build,
&mut cargo,
- &libstd_stamp(build, compiler, target));
+ &libstd_stamp(build, compiler, target),
+ false);
builder.ensure(StdLink {
compiler: builder.compiler(compiler.stage, build.build),
test_cargo(build, &compiler, target, &mut cargo);
run_cargo(build,
&mut cargo,
- &libtest_stamp(build, compiler, target));
+ &libtest_stamp(build, compiler, target),
+ false);
builder.ensure(TestLink {
compiler: builder.compiler(compiler.stage, build.build),
rustc_cargo(build, target, &mut cargo);
run_cargo(build,
&mut cargo,
- &librustc_stamp(build, compiler, target));
+ &librustc_stamp(build, compiler, target),
+ false);
builder.ensure(RustcLink {
compiler: builder.compiler(compiler.stage, build.build),
///
/// For a particular stage this will link the file listed in `stamp` into the
/// `sysroot_dst` provided.
-fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
+pub fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
t!(fs::create_dir_all(&sysroot_dst));
for path in read_stamp_file(stamp) {
copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
}
}
-fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) {
+pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: bool) {
// Instruct Cargo to give us json messages on stdout, critically leaving
// stderr as piped so we can get those pretty colors.
cargo.arg("--message-format").arg("json")
// Skip files like executables
if !filename.ends_with(".rlib") &&
!filename.ends_with(".lib") &&
- !is_dylib(&filename) {
+ !is_dylib(&filename) &&
+ !(is_check && filename.ends_with(".rmeta")) {
continue
}
Build {
paths: Vec<PathBuf>,
},
+ Check {
+ paths: Vec<PathBuf>,
+ },
Doc {
paths: Vec<PathBuf>,
},
Subcommands:
build Compile either the compiler or libraries
+ check Compile either the compiler or libraries, using cargo check
test Build and run some test suites
bench Build and run some benchmarks
doc Build documentation
// there on out.
let subcommand = args.iter().find(|&s|
(s == "build")
+ || (s == "check")
|| (s == "test")
|| (s == "bench")
|| (s == "doc")
arguments would), and then use the compiler built in stage 0 to build
src/libtest and its dependencies.
Once this is done, build/$ARCH/stage1 contains a usable compiler.");
+ }
+ "check" => {
+ subcommand_help.push_str("\n
+Arguments:
+ This subcommand accepts a number of paths to directories to the crates
+ and/or artifacts to compile. For example:
+
+ ./x.py check src/libcore
+ ./x.py check src/libcore src/libproc_macro
+
+ If no arguments are passed then the complete artifacts are compiled: std, test, and rustc. Note
+ also that since we use `cargo check`, by default this will automatically enable incremental
+ compilation, so there's no need to pass it separately, though it won't hurt. We also completely
+ ignore the stage passed, as there's no way to compile in non-stage 0 without actually building
+ the compiler.");
}
"test" => {
subcommand_help.push_str("\n
"build" => {
Subcommand::Build { paths: paths }
}
+ "check" => {
+ Subcommand::Check { paths: paths }
+ }
"test" => {
Subcommand::Test {
paths,
mod cc_detect;
mod channel;
mod check;
+mod test;
mod clean;
mod compile;
mod metadata;
out
}
- /// Get the directory for incremental by-products when using the
- /// given compiler.
- fn incremental_dir(&self, compiler: Compiler) -> PathBuf {
- self.out.join(&*compiler.host).join(format!("stage{}-incremental", compiler.stage))
- }
-
/// Returns the root directory for all output generated in a particular
/// stage when running with a particular host compiler.
///
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of the test-related targets of the build system.
+//!
+//! This file implements the various regression test suites that we execute on
+//! our CI.
+
+use std::collections::HashSet;
+use std::env;
+use std::ffi::OsString;
+use std::iter;
+use std::fmt;
+use std::fs::{self, File};
+use std::path::{PathBuf, Path};
+use std::process::Command;
+use std::io::Read;
+
+use build_helper::{self, output};
+
+use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step};
+use cache::{INTERNER, Interned};
+use compile;
+use dist;
+use native;
+use tool::{self, Tool};
+use util::{self, dylib_path, dylib_path_var};
+use {Build, Mode};
+use toolstate::ToolState;
+
+const ADB_TEST_DIR: &str = "/data/tmp/work";
+
+/// The two modes of the test runner; tests or benchmarks.
+#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
+pub enum TestKind {
+ /// Run `cargo test`
+ Test,
+ /// Run `cargo bench`
+ Bench,
+}
+
+impl TestKind {
+ // Return the cargo subcommand for this test kind
+ fn subcommand(self) -> &'static str {
+ match self {
+ TestKind::Test => "test",
+ TestKind::Bench => "bench",
+ }
+ }
+}
+
+impl fmt::Display for TestKind {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(match *self {
+ TestKind::Test => "Testing",
+ TestKind::Bench => "Benchmarking",
+ })
+ }
+}
+
+fn try_run(build: &Build, cmd: &mut Command) -> bool {
+ if !build.fail_fast {
+ if !build.try_run(cmd) {
+ let mut failures = build.delayed_failures.borrow_mut();
+ failures.push(format!("{:?}", cmd));
+ return false;
+ }
+ } else {
+ build.run(cmd);
+ }
+ true
+}
+
+fn try_run_quiet(build: &Build, cmd: &mut Command) {
+ if !build.fail_fast {
+ if !build.try_run_quiet(cmd) {
+ let mut failures = build.delayed_failures.borrow_mut();
+ failures.push(format!("{:?}", cmd));
+ }
+ } else {
+ build.run_quiet(cmd);
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Linkcheck {
+ host: Interned<String>,
+}
+
+impl Step for Linkcheck {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = true;
+
+ /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
+ ///
+ /// This tool in `src/tools` will verify the validity of all our links in the
+ /// documentation to ensure we don't have a bunch of dead ones.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let host = self.host;
+
+ println!("Linkcheck ({})", host);
+
+ builder.default_doc(None);
+
+ let _time = util::timeit();
+ try_run(build, builder.tool_cmd(Tool::Linkchecker)
+ .arg(build.out.join(host).join("doc")));
+ }
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ let builder = run.builder;
+ run.path("src/tools/linkchecker").default_condition(builder.build.config.docs)
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Linkcheck { host: run.target });
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Cargotest {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Cargotest {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/cargotest")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Cargotest {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
+ ///
+ /// This tool in `src/tools` will check out a few Rust projects and run `cargo
+ /// test` to ensure that we don't regress the test suites there.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = builder.compiler(self.stage, self.host);
+ builder.ensure(compile::Rustc { compiler, target: compiler.host });
+
+ // Note that this is a short, cryptic, and not scoped directory name. This
+ // is currently to minimize the length of path on Windows where we otherwise
+ // quickly run into path name limit constraints.
+ let out_dir = build.out.join("ct");
+ t!(fs::create_dir_all(&out_dir));
+
+ let _time = util::timeit();
+ let mut cmd = builder.tool_cmd(Tool::CargoTest);
+ try_run(build, cmd.arg(&build.initial_cargo)
+ .arg(&out_dir)
+ .env("RUSTC", builder.rustc(compiler))
+ .env("RUSTDOC", builder.rustdoc(compiler.host)));
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Cargo {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Cargo {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/cargo")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Cargo {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for `cargo` packaged with Rust.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = builder.compiler(self.stage, self.host);
+
+ builder.ensure(tool::Cargo { compiler, target: self.host });
+ let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test");
+ cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
+ if !build.fail_fast {
+ cargo.arg("--no-fail-fast");
+ }
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+ // Don't run cross-compile tests, we may not have cross-compiled libstd libs
+ // available.
+ cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
+
+ try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler)));
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rls {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Rls {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/rls")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Rls {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for the rls.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let stage = self.stage;
+ let host = self.host;
+ let compiler = builder.compiler(stage, host);
+
+ builder.ensure(tool::Rls { compiler, target: self.host });
+ let mut cargo = tool::prepare_tool_cargo(builder,
+ compiler,
+ host,
+ "test",
+ "src/tools/rls");
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+ builder.add_rustc_lib_path(compiler, &mut cargo);
+
+ if try_run(build, &mut cargo) {
+ build.save_toolstate("rls", ToolState::TestPass);
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rustfmt {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Rustfmt {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/rustfmt")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Rustfmt {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for rustfmt.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let stage = self.stage;
+ let host = self.host;
+ let compiler = builder.compiler(stage, host);
+
+ builder.ensure(tool::Rustfmt { compiler, target: self.host });
+ let mut cargo = tool::prepare_tool_cargo(builder,
+ compiler,
+ host,
+ "test",
+ "src/tools/rustfmt");
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+ builder.add_rustc_lib_path(compiler, &mut cargo);
+
+ if try_run(build, &mut cargo) {
+ build.save_toolstate("rustfmt", ToolState::TestPass);
+ }
+ }
+}
+
+/// Step that runs miri's test suite. Enabled by default only when
+/// `test_miri` is set in the build configuration.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Miri {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Miri {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ let test_miri = run.builder.build.config.test_miri;
+ run.path("src/tools/miri").default_condition(test_miri)
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Miri {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for miri.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let stage = self.stage;
+ let host = self.host;
+ let compiler = builder.compiler(stage, host);
+
+ // tool::Miri returns an Option; None means the tool failed to build.
+ if let Some(miri) = builder.ensure(tool::Miri { compiler, target: self.host }) {
+ let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
+ cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml"));
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+ // miri tests need to know about the stage sysroot
+ cargo.env("MIRI_SYSROOT", builder.sysroot(compiler));
+ cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
+ cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+ cargo.env("MIRI_PATH", miri);
+
+ builder.add_rustc_lib_path(compiler, &mut cargo);
+
+ if try_run(build, &mut cargo) {
+ build.save_toolstate("miri", ToolState::TestPass);
+ }
+ } else {
+ // Build failure is reported but does not abort the whole test run.
+ eprintln!("failed to test miri: could not build");
+ }
+ }
+}
+
+/// Step that runs clippy's test suite. Not run by default
+/// (`DEFAULT = false`); must be requested via its path.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Clippy {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Clippy {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = false;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/clippy")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Clippy {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for clippy.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let stage = self.stage;
+ let host = self.host;
+ let compiler = builder.compiler(stage, host);
+
+ // tool::Clippy returns an Option; None means the tool failed to build.
+ if let Some(clippy) = builder.ensure(tool::Clippy { compiler, target: self.host }) {
+ let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
+ cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml"));
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+ // clippy tests need to know about the stage sysroot
+ cargo.env("SYSROOT", builder.sysroot(compiler));
+ cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
+ cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+ let host_libs = builder.stage_out(compiler, Mode::Tool).join(builder.cargo_dir());
+ cargo.env("HOST_LIBS", host_libs);
+ // clippy tests need to find the driver
+ cargo.env("CLIPPY_DRIVER_PATH", clippy);
+
+ builder.add_rustc_lib_path(compiler, &mut cargo);
+
+ if try_run(build, &mut cargo) {
+ build.save_toolstate("clippy-driver", ToolState::TestPass);
+ }
+ } else {
+ eprintln!("failed to test clippy: could not build");
+ }
+ }
+}
+
+/// Builds a PATH value with the given compiler's sysroot `bin` directory
+/// prepended, for running the Cargo test suite against that rustc.
+fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString {
+ // Configure PATH to find the right rustc. NB. we have to use PATH
+ // and not RUSTC because the Cargo test suite has tests that will
+ // fail if rustc is not spelled `rustc`.
+ let path = builder.sysroot(compiler).join("bin");
+ let old_path = env::var_os("PATH").unwrap_or_default();
+ env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("")
+}
+
+/// Step that runs the rustdoc JavaScript search tests
+/// (src/test/rustdoc-js) via node, when nodejs is configured.
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct RustdocJS {
+ pub host: Interned<String>,
+ pub target: Interned<String>,
+}
+
+impl Step for RustdocJS {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/test/rustdoc-js")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(RustdocJS {
+ host: run.host,
+ target: run.target,
+ });
+ }
+
+ fn run(self, builder: &Builder) {
+ if let Some(ref nodejs) = builder.config.nodejs {
+ let mut command = Command::new(nodejs);
+ command.args(&["src/tools/rustdoc-js/tester.js", &*self.host]);
+ // The tester reads the generated std docs, so build them first.
+ builder.ensure(::doc::Std {
+ target: self.target,
+ stage: builder.top_stage,
+ });
+ builder.run(&mut command);
+ } else {
+ // Missing nodejs skips the suite instead of failing the build.
+ println!("No nodejs found, skipping \"src/test/rustdoc-js\" tests");
+ }
+ }
+}
+
+/// Step that runs the `tidy` style/lint checker over the `src` tree.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Tidy {
+ host: Interned<String>,
+}
+
+impl Step for Tidy {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+ const ONLY_BUILD: bool = true;
+
+ /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
+ ///
+ /// This tool in `src/tools` checks up on various bits and pieces of style and
+ /// otherwise just implements a few lint-like checks that are specific to the
+ /// compiler itself.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let host = self.host;
+
+ let _folder = build.fold_output(|| "tidy");
+ println!("tidy check ({})", host);
+ let mut cmd = builder.tool_cmd(Tool::Tidy);
+ cmd.arg(build.src.join("src"));
+ // Vendored sources are only checked when vendoring is enabled.
+ if !build.config.vendor {
+ cmd.arg("--no-vendor");
+ }
+ if build.config.quiet_tests {
+ cmd.arg("--quiet");
+ }
+ try_run(build, &mut cmd);
+ }
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/tidy")
+ }
+
+ fn make_run(run: RunConfig) {
+ // Tidy always runs on the build host itself.
+ run.builder.ensure(Tidy {
+ host: run.builder.build.build,
+ });
+ }
+}
+
+/// Returns the output directory used for test artifacts for `host`.
+fn testdir(build: &Build, host: Interned<String>) -> PathBuf {
+ build.out.join(host).join("test")
+}
+
+/// A single compiletest suite entry: the source path that selects it,
+/// the compiletest `--mode`, and the suite name under `src/test`.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct Test {
+ path: &'static str,
+ mode: &'static str,
+ suite: &'static str,
+}
+
+/// Compiletest suites that run by default for every target.
+static DEFAULT_COMPILETESTS: &[Test] = &[
+ Test { path: "src/test/ui", mode: "ui", suite: "ui" },
+ Test { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" },
+ Test { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" },
+ Test { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" },
+ Test { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" },
+ Test {
+ path: "src/test/run-pass-valgrind",
+ mode: "run-pass-valgrind",
+ suite: "run-pass-valgrind"
+ },
+ Test { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" },
+ Test { path: "src/test/codegen", mode: "codegen", suite: "codegen" },
+ Test { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" },
+ Test { path: "src/test/incremental", mode: "incremental", suite: "incremental" },
+
+ // What this runs varies depending on the native platform being apple
+ // (the placeholder mode is resolved to debuginfo-lldb/gdb in Compiletest::run).
+ Test { path: "src/test/debuginfo", mode: "debuginfo-XXX", suite: "debuginfo" },
+];
+
+/// Dispatcher step for the DEFAULT_COMPILETESTS suites; forwards each
+/// selected suite to the shared `Compiletest` step.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct DefaultCompiletest {
+ compiler: Compiler,
+ target: Interned<String>,
+ mode: &'static str,
+ suite: &'static str,
+}
+
+impl Step for DefaultCompiletest {
+ type Output = ();
+ const DEFAULT: bool = true;
+
+ fn should_run(mut run: ShouldRun) -> ShouldRun {
+ // Register every default suite's path so any of them selects this step.
+ for test in DEFAULT_COMPILETESTS {
+ run = run.path(test.path);
+ }
+ run
+ }
+
+ fn make_run(run: RunConfig) {
+ let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+
+ // When a specific path was given, map it back to its suite entry;
+ // an unknown path is a bug in path registration, hence the panic.
+ let test = run.path.map(|path| {
+ DEFAULT_COMPILETESTS.iter().find(|&&test| {
+ path.ends_with(test.path)
+ }).unwrap_or_else(|| {
+ panic!("make_run in compile test to receive test path, received {:?}", path);
+ })
+ });
+
+ if let Some(test) = test {
+ run.builder.ensure(DefaultCompiletest {
+ compiler,
+ target: run.target,
+ mode: test.mode,
+ suite: test.suite,
+ });
+ } else {
+ // No path: run every default suite.
+ for test in DEFAULT_COMPILETESTS {
+ run.builder.ensure(DefaultCompiletest {
+ compiler,
+ target: run.target,
+ mode: test.mode,
+ suite: test.suite
+ });
+ }
+ }
+ }
+
+ fn run(self, builder: &Builder) {
+ builder.ensure(Compiletest {
+ compiler: self.compiler,
+ target: self.target,
+ mode: self.mode,
+ suite: self.suite,
+ })
+ }
+}
+
+// Also default, but host-only.
+static HOST_COMPILETESTS: &[Test] = &[
+ Test { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" },
+ Test { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" },
+ Test { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" },
+ Test {
+ path: "src/test/compile-fail-fulldeps",
+ mode: "compile-fail",
+ suite: "compile-fail-fulldeps",
+ },
+ Test {
+ path: "src/test/incremental-fulldeps",
+ mode: "incremental",
+ suite: "incremental-fulldeps",
+ },
+ Test { path: "src/test/run-make", mode: "run-make", suite: "run-make" },
+ Test { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" },
+
+ // Pretty-printing runs the same suites again in "pretty" mode.
+ Test { path: "src/test/pretty", mode: "pretty", suite: "pretty" },
+ Test { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass" },
+ Test { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail" },
+ Test { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind" },
+ Test { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps" },
+ Test { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps" },
+];
+
+/// Dispatcher step for the HOST_COMPILETESTS suites; mirrors
+/// `DefaultCompiletest` but is host-only and skips "pretty" suites
+/// when run without an explicit path.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct HostCompiletest {
+ compiler: Compiler,
+ target: Interned<String>,
+ mode: &'static str,
+ suite: &'static str,
+}
+
+impl Step for HostCompiletest {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(mut run: ShouldRun) -> ShouldRun {
+ for test in HOST_COMPILETESTS {
+ run = run.path(test.path);
+ }
+ run
+ }
+
+ fn make_run(run: RunConfig) {
+ let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+
+ let test = run.path.map(|path| {
+ HOST_COMPILETESTS.iter().find(|&&test| {
+ path.ends_with(test.path)
+ }).unwrap_or_else(|| {
+ panic!("make_run in compile test to receive test path, received {:?}", path);
+ })
+ });
+
+ if let Some(test) = test {
+ run.builder.ensure(HostCompiletest {
+ compiler,
+ target: run.target,
+ mode: test.mode,
+ suite: test.suite,
+ });
+ } else {
+ for test in HOST_COMPILETESTS {
+ // Pretty suites only run when explicitly requested by path.
+ if test.mode == "pretty" {
+ continue;
+ }
+ run.builder.ensure(HostCompiletest {
+ compiler,
+ target: run.target,
+ mode: test.mode,
+ suite: test.suite
+ });
+ }
+ }
+ }
+
+ fn run(self, builder: &Builder) {
+ builder.ensure(Compiletest {
+ compiler: self.compiler,
+ target: self.target,
+ mode: self.mode,
+ suite: self.suite,
+ })
+ }
+}
+
+/// The shared worker step that actually invokes the `compiletest` tool
+/// for a given (compiler, target, mode, suite). Never selected directly
+/// by path (`should_run` is `never()`); reached via the dispatcher steps.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct Compiletest {
+ compiler: Compiler,
+ target: Interned<String>,
+ mode: &'static str,
+ suite: &'static str,
+}
+
+impl Step for Compiletest {
+ type Output = ();
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.never()
+ }
+
+ /// Executes the `compiletest` tool to run a suite of tests.
+ ///
+ /// Compiles all tests with `compiler` for `target` with the specified
+ /// compiletest `mode` and `suite` arguments. For example `mode` can be
+ /// "run-pass" or `suite` can be something like `debuginfo`.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = self.compiler;
+ let target = self.target;
+ let mode = self.mode;
+ let suite = self.suite;
+
+ // Skip codegen tests if they aren't enabled in configuration.
+ if !build.config.codegen_tests && suite == "codegen" {
+ return;
+ }
+
+ if suite == "debuginfo" {
+ // Skip debuginfo tests on MSVC
+ if build.build.contains("msvc") {
+ return;
+ }
+
+ // Resolve the placeholder mode from DEFAULT_COMPILETESTS:
+ // lldb on apple hosts, gdb everywhere else.
+ if mode == "debuginfo-XXX" {
+ return if build.build.contains("apple") {
+ builder.ensure(Compiletest {
+ mode: "debuginfo-lldb",
+ ..self
+ });
+ } else {
+ builder.ensure(Compiletest {
+ mode: "debuginfo-gdb",
+ ..self
+ });
+ };
+ }
+
+ builder.ensure(dist::DebuggerScripts {
+ sysroot: builder.sysroot(compiler),
+ host: target
+ });
+ }
+
+ if suite.ends_with("fulldeps") ||
+ // FIXME: Does pretty need librustc compiled? Note that there are
+ // fulldeps test suites with mode = pretty as well.
+ mode == "pretty" ||
+ mode == "rustdoc" ||
+ mode == "run-make" {
+ builder.ensure(compile::Rustc { compiler, target });
+ }
+
+ builder.ensure(compile::Test { compiler, target });
+ builder.ensure(native::TestHelpers { target });
+ builder.ensure(RemoteCopyLibs { compiler, target });
+
+ let _folder = build.fold_output(|| format!("test_{}", suite));
+ println!("Check compiletest suite={} mode={} ({} -> {})",
+ suite, mode, &compiler.host, target);
+ let mut cmd = builder.tool_cmd(Tool::Compiletest);
+
+ // compiletest currently has... a lot of arguments, so let's just pass all
+ // of them!
+
+ cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler));
+ cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
+ cmd.arg("--rustc-path").arg(builder.rustc(compiler));
+
+ // Avoid depending on rustdoc when we don't need it.
+ if mode == "rustdoc" || mode == "run-make" {
+ cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host));
+ }
+
+ cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
+ cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+ cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+ cmd.arg("--mode").arg(mode);
+ cmd.arg("--target").arg(target);
+ cmd.arg("--host").arg(&*compiler.host);
+ cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build));
+
+ if let Some(ref nodejs) = build.config.nodejs {
+ cmd.arg("--nodejs").arg(nodejs);
+ }
+
+ // rustc flags applied to both host and target test compilations.
+ let mut flags = vec!["-Crpath".to_string()];
+ if build.config.rust_optimize_tests {
+ flags.push("-O".to_string());
+ }
+ if build.config.rust_debuginfo_tests {
+ flags.push("-g".to_string());
+ }
+ flags.push("-Zmiri -Zunstable-options".to_string());
+ flags.push(build.config.cmd.rustc_args().join(" "));
+
+ if let Some(linker) = build.linker(target) {
+ cmd.arg("--linker").arg(linker);
+ }
+
+ let hostflags = flags.clone();
+ cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+
+ // Target compilations additionally link against the native test helpers.
+ let mut targetflags = flags.clone();
+ targetflags.push(format!("-Lnative={}",
+ build.test_helpers_out(target).display()));
+ cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+
+ cmd.arg("--docck-python").arg(build.python());
+
+ if build.build.ends_with("apple-darwin") {
+ // Force /usr/bin/python on macOS for LLDB tests because we're loading the
+ // LLDB plugin's compiled module which only works with the system python
+ // (namely not Homebrew-installed python)
+ cmd.arg("--lldb-python").arg("/usr/bin/python");
+ } else {
+ cmd.arg("--lldb-python").arg(build.python());
+ }
+
+ if let Some(ref gdb) = build.config.gdb {
+ cmd.arg("--gdb").arg(gdb);
+ }
+ if let Some(ref vers) = build.lldb_version {
+ cmd.arg("--lldb-version").arg(vers);
+ }
+ if let Some(ref dir) = build.lldb_python_dir {
+ cmd.arg("--lldb-python-dir").arg(dir);
+ }
+
+ cmd.args(&build.config.cmd.test_args());
+
+ if build.is_verbose() {
+ cmd.arg("--verbose");
+ }
+
+ if build.config.quiet_tests {
+ cmd.arg("--quiet");
+ }
+
+ if build.config.llvm_enabled {
+ let llvm_config = build.llvm_config(target);
+ let llvm_version = output(Command::new(&llvm_config).arg("--version"));
+ cmd.arg("--llvm-version").arg(llvm_version);
+ if !build.is_rust_llvm(target) {
+ cmd.arg("--system-llvm");
+ }
+
+ // Only pass correct values for these flags for the `run-make` suite as it
+ // requires that a C++ compiler was configured which isn't always the case.
+ if suite == "run-make" {
+ let llvm_components = output(Command::new(&llvm_config).arg("--components"));
+ let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
+ cmd.arg("--cc").arg(build.cc(target))
+ .arg("--cxx").arg(build.cxx(target).unwrap())
+ .arg("--cflags").arg(build.cflags(target).join(" "))
+ .arg("--llvm-components").arg(llvm_components.trim())
+ .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
+ if let Some(ar) = build.ar(target) {
+ cmd.arg("--ar").arg(ar);
+ }
+ }
+ }
+ if suite == "run-make" && !build.config.llvm_enabled {
+ println!("Ignoring run-make test suite as they generally dont work without LLVM");
+ return;
+ }
+
+ // Non-run-make suites still receive the flags, just with empty values.
+ if suite != "run-make" {
+ cmd.arg("--cc").arg("")
+ .arg("--cxx").arg("")
+ .arg("--cflags").arg("")
+ .arg("--llvm-components").arg("")
+ .arg("--llvm-cxxflags").arg("");
+ }
+
+ if build.remote_tested(target) {
+ cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient));
+ }
+
+ // Running a C compiler on MSVC requires a few env vars to be set, to be
+ // sure to set them here.
+ //
+ // Note that if we encounter `PATH` we make sure to append to our own `PATH`
+ // rather than stomp over it.
+ if target.contains("msvc") {
+ for &(ref k, ref v) in build.cc[&target].env() {
+ if k != "PATH" {
+ cmd.env(k, v);
+ }
+ }
+ }
+ cmd.env("RUSTC_BOOTSTRAP", "1");
+ build.add_rust_test_threads(&mut cmd);
+
+ if build.config.sanitizers {
+ cmd.env("SANITIZER_SUPPORT", "1");
+ }
+
+ if build.config.profiler {
+ cmd.env("PROFILER_SUPPORT", "1");
+ }
+
+ cmd.arg("--adb-path").arg("adb");
+ cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
+ if target.contains("android") {
+ // Assume that cc for this target comes from the android sysroot
+ cmd.arg("--android-cross-path")
+ .arg(build.cc(target).parent().unwrap().parent().unwrap());
+ } else {
+ cmd.arg("--android-cross-path").arg("");
+ }
+
+ build.ci_env.force_coloring_in_ci(&mut cmd);
+
+ let _time = util::timeit();
+ try_run(build, &mut cmd);
+ }
+}
+
+/// Step that doctests all markdown documentation under `src/doc`.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Docs {
+ compiler: Compiler,
+}
+
+impl Step for Docs {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/doc")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Docs {
+ compiler: run.builder.compiler(run.builder.top_stage, run.host),
+ });
+ }
+
+ /// Run `rustdoc --test` for all documentation in `src/doc`.
+ ///
+ /// This will run all tests in our markdown documentation (e.g. the book)
+ /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
+ /// `compiler`.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = self.compiler;
+
+ builder.ensure(compile::Test { compiler, target: compiler.host });
+
+ // Do a breadth-first traversal of the `src/doc` directory and just run
+ // tests for all files that end in `*.md`
+ let mut stack = vec![build.src.join("src/doc")];
+ let _time = util::timeit();
+ let _folder = build.fold_output(|| "test_docs");
+
+ while let Some(p) = stack.pop() {
+ if p.is_dir() {
+ stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
+ continue
+ }
+
+ if p.extension().and_then(|s| s.to_str()) != Some("md") {
+ continue;
+ }
+
+ // The nostarch directory in the book is for no starch, and so isn't
+ // guaranteed to build. We don't care if it doesn't build, so skip it.
+ if p.to_str().map_or(false, |p| p.contains("nostarch")) {
+ continue;
+ }
+
+ markdown_test(builder, compiler, &p);
+ }
+ }
+}
+
+/// Step that generates the error index as markdown and doctests it.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct ErrorIndex {
+ compiler: Compiler,
+}
+
+impl Step for ErrorIndex {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/error_index_generator")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(ErrorIndex {
+ compiler: run.builder.compiler(run.builder.top_stage, run.host),
+ });
+ }
+
+ /// Run the error index generator tool to execute the tests located in the error
+ /// index.
+ ///
+ /// The `error_index_generator` tool lives in `src/tools` and is used to
+ /// generate a markdown file from the error indexes of the code base which is
+ /// then passed to `rustdoc --test`.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = self.compiler;
+
+ builder.ensure(compile::Std { compiler, target: compiler.host });
+
+ let _folder = build.fold_output(|| "test_error_index");
+ println!("Testing error-index stage{}", compiler.stage);
+
+ let dir = testdir(build, compiler.host);
+ t!(fs::create_dir_all(&dir));
+ let output = dir.join("error-index.md");
+
+ let _time = util::timeit();
+ // Generate the markdown index, then doctest it like any other doc.
+ build.run(builder.tool_cmd(Tool::ErrorIndex)
+ .arg("markdown")
+ .arg(&output)
+ .env("CFG_BUILD", &build.build)
+ .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir()));
+
+ markdown_test(builder, compiler, &output);
+ }
+}
+
+/// Runs `rustdoc --test` on a single markdown file, skipping files that
+/// contain no fenced code blocks.
+fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) {
+ let build = builder.build;
+ let mut file = t!(File::open(markdown));
+ let mut contents = String::new();
+ t!(file.read_to_string(&mut contents));
+ // No code fences means nothing to doctest; avoid spawning rustdoc.
+ if !contents.contains("```") {
+ return;
+ }
+
+ println!("doc tests for: {}", markdown.display());
+ let mut cmd = builder.rustdoc_cmd(compiler.host);
+ build.add_rust_test_threads(&mut cmd);
+ cmd.arg("--test");
+ cmd.arg(markdown);
+ cmd.env("RUSTC_BOOTSTRAP", "1");
+
+ let test_args = build.config.cmd.test_args().join(" ");
+ cmd.arg("--test-args").arg(test_args);
+
+ if build.config.quiet_tests {
+ try_run_quiet(build, &mut cmd);
+ } else {
+ try_run(build, &mut cmd);
+ }
+}
+
+/// Step that runs unit tests/benches for compiler crates (the
+/// `rustc-main` DAG); delegates to the generic `Crate` step with
+/// `Mode::Librustc`. `krate: None` means "all crates in the DAG".
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct CrateLibrustc {
+ compiler: Compiler,
+ target: Interned<String>,
+ test_kind: TestKind,
+ krate: Option<Interned<String>>,
+}
+
+impl Step for CrateLibrustc {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.krate("rustc-main")
+ }
+
+ fn make_run(run: RunConfig) {
+ let builder = run.builder;
+ let compiler = builder.compiler(builder.top_stage, run.host);
+
+ let make = |name: Option<Interned<String>>| {
+ // This step only makes sense under `test` or `bench`.
+ let test_kind = if builder.kind == Kind::Test {
+ TestKind::Test
+ } else if builder.kind == Kind::Bench {
+ TestKind::Bench
+ } else {
+ panic!("unexpected builder.kind in crate: {:?}", builder.kind);
+ };
+
+ builder.ensure(CrateLibrustc {
+ compiler,
+ target: run.target,
+ test_kind,
+ krate: name,
+ });
+ };
+
+ if let Some(path) = run.path {
+ // A path selects the single crate it belongs to.
+ for (name, krate_path) in builder.crates("rustc-main") {
+ if path.ends_with(krate_path) {
+ make(Some(name));
+ }
+ }
+ } else {
+ make(None);
+ }
+ }
+
+
+ fn run(self, builder: &Builder) {
+ builder.ensure(Crate {
+ compiler: self.compiler,
+ target: self.target,
+ mode: Mode::Librustc,
+ test_kind: self.test_kind,
+ krate: self.krate,
+ });
+ }
+}
+
+/// Generic step that runs `cargo test`/`cargo bench` for a crate DAG
+/// (libstd, libtest, or librustc). `krate: None` tests the whole DAG.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Crate {
+ compiler: Compiler,
+ target: Interned<String>,
+ mode: Mode,
+ test_kind: TestKind,
+ krate: Option<Interned<String>>,
+}
+
+impl Step for Crate {
+ type Output = ();
+ const DEFAULT: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.krate("std").krate("test")
+ }
+
+ fn make_run(run: RunConfig) {
+ let builder = run.builder;
+ let compiler = builder.compiler(builder.top_stage, run.host);
+
+ let make = |mode: Mode, name: Option<Interned<String>>| {
+ let test_kind = if builder.kind == Kind::Test {
+ TestKind::Test
+ } else if builder.kind == Kind::Bench {
+ TestKind::Bench
+ } else {
+ panic!("unexpected builder.kind in crate: {:?}", builder.kind);
+ };
+
+ builder.ensure(Crate {
+ compiler,
+ target: run.target,
+ mode,
+ test_kind,
+ krate: name,
+ });
+ };
+
+ if let Some(path) = run.path {
+ // Map the path to a crate in either the std or test DAG.
+ for (name, krate_path) in builder.crates("std") {
+ if path.ends_with(krate_path) {
+ make(Mode::Libstd, Some(name));
+ }
+ }
+ for (name, krate_path) in builder.crates("test") {
+ if path.ends_with(krate_path) {
+ make(Mode::Libtest, Some(name));
+ }
+ }
+ } else {
+ make(Mode::Libstd, None);
+ make(Mode::Libtest, None);
+ }
+ }
+
+ /// Run all unit tests plus documentation tests for an entire crate DAG defined
+ /// by a `Cargo.toml`
+ ///
+ /// This is what runs tests for crates like the standard library, compiler, etc.
+ /// It essentially is the driver for running `cargo test`.
+ ///
+ /// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
+ /// arguments, and those arguments are discovered from `cargo metadata`.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = self.compiler;
+ let target = self.target;
+ let mode = self.mode;
+ let test_kind = self.test_kind;
+ let krate = self.krate;
+
+ builder.ensure(compile::Test { compiler, target });
+ builder.ensure(RemoteCopyLibs { compiler, target });
+
+ // If we're not doing a full bootstrap but we're testing a stage2 version of
+ // libstd, then what we're actually testing is the libstd produced in
+ // stage1. Reflect that here by updating the compiler that we're working
+ // with automatically.
+ let compiler = if build.force_use_stage1(compiler, target) {
+ builder.compiler(1, compiler.host)
+ } else {
+ compiler.clone()
+ };
+
+ let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand());
+ // Configure cargo for the selected DAG and pick its display/root names.
+ let (name, root) = match mode {
+ Mode::Libstd => {
+ compile::std_cargo(build, &compiler, target, &mut cargo);
+ ("libstd", "std")
+ }
+ Mode::Libtest => {
+ compile::test_cargo(build, &compiler, target, &mut cargo);
+ ("libtest", "test")
+ }
+ Mode::Librustc => {
+ builder.ensure(compile::Rustc { compiler, target });
+ compile::rustc_cargo(build, target, &mut cargo);
+ ("librustc", "rustc-main")
+ }
+ _ => panic!("can only test libraries"),
+ };
+ let root = INTERNER.intern_string(String::from(root));
+ let _folder = build.fold_output(|| {
+ format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
+ });
+ println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
+ &compiler.host, target);
+
+ // Build up the base `cargo test` command.
+ //
+ // Pass in some standard flags then iterate over the graph we've discovered
+ // in `cargo metadata` with the maps above and figure out what `-p`
+ // arguments need to get passed.
+ if test_kind.subcommand() == "test" && !build.fail_fast {
+ cargo.arg("--no-fail-fast");
+ }
+
+ match krate {
+ Some(krate) => {
+ cargo.arg("-p").arg(krate);
+ }
+ None => {
+ // Walk the dependency graph from the root, adding `-p` for each
+ // crate (except the skipped target-specific/helper crates below).
+ let mut visited = HashSet::new();
+ let mut next = vec![root];
+ while let Some(name) = next.pop() {
+ // Right now jemalloc and the sanitizer crates are
+ // target-specific crate in the sense that it's not present
+ // on all platforms. Custom skip it here for now, but if we
+ // add more this probably wants to get more generalized.
+ //
+ // Also skip `build_helper` as it's not compiled normally
+ // for target during the bootstrap and it's just meant to be
+ // a helper crate, not tested. If it leaks through then it
+ // ends up messing with various mtime calculations and such.
+ if !name.contains("jemalloc") &&
+ *name != *"build_helper" &&
+ !(name.starts_with("rustc_") && name.ends_with("san")) &&
+ name != "dlmalloc" {
+ cargo.arg("-p").arg(&format!("{}:0.0.0", name));
+ }
+ for dep in build.crates[&name].deps.iter() {
+ if visited.insert(dep) {
+ next.push(*dep);
+ }
+ }
+ }
+ }
+ }
+
+ // The tests are going to run with the *target* libraries, so we need to
+ // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
+ //
+ // Note that to run the compiler we need to run with the *host* libraries,
+ // but our wrapper scripts arrange for that to be the case anyway.
+ let mut dylib_path = dylib_path();
+ dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target)));
+ cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
+
+ cargo.arg("--");
+ cargo.args(&build.config.cmd.test_args());
+
+ if build.config.quiet_tests {
+ cargo.arg("--quiet");
+ }
+
+ let _time = util::timeit();
+
+ // Cross/emulated targets need a runner configured via Cargo's
+ // CARGO_TARGET_<triple>_RUNNER environment variable.
+ if target.contains("emscripten") {
+ cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
+ build.config.nodejs.as_ref().expect("nodejs not configured"));
+ } else if target.starts_with("wasm32") {
+ // On the wasm32-unknown-unknown target we're using LTO which is
+ // incompatible with `-C prefer-dynamic`, so disable that here
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+ let node = build.config.nodejs.as_ref()
+ .expect("nodejs not configured");
+ let runner = format!("{} {}/src/etc/wasm32-shim.js",
+ node.display(),
+ build.src.display());
+ cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner);
+ } else if build.remote_tested(target) {
+ cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
+ format!("{} run",
+ builder.tool_exe(Tool::RemoteTestClient).display()));
+ }
+ try_run(build, &mut cargo);
+ }
+}
+
+/// Step that runs unit tests/benches for the rustdoc tool crate itself.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rustdoc {
+ host: Interned<String>,
+ test_kind: TestKind,
+}
+
+impl Step for Rustdoc {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/librustdoc").path("src/tools/rustdoc")
+ }
+
+ fn make_run(run: RunConfig) {
+ let builder = run.builder;
+
+ let test_kind = if builder.kind == Kind::Test {
+ TestKind::Test
+ } else if builder.kind == Kind::Bench {
+ TestKind::Bench
+ } else {
+ panic!("unexpected builder.kind in crate: {:?}", builder.kind);
+ };
+
+ builder.ensure(Rustdoc {
+ host: run.host,
+ test_kind,
+ });
+ }
+
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let test_kind = self.test_kind;
+
+ let compiler = builder.compiler(builder.top_stage, self.host);
+ let target = compiler.host;
+
+ let mut cargo = tool::prepare_tool_cargo(builder,
+ compiler,
+ target,
+ test_kind.subcommand(),
+ "src/tools/rustdoc");
+ let _folder = build.fold_output(|| {
+ format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage)
+ });
+ println!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage,
+ &compiler.host, target);
+
+ if test_kind.subcommand() == "test" && !build.fail_fast {
+ cargo.arg("--no-fail-fast");
+ }
+
+ // Only test the rustdoc package itself, not its dependency tree.
+ cargo.arg("-p").arg("rustdoc:0.0.0");
+
+ cargo.arg("--");
+ cargo.args(&build.config.cmd.test_args());
+
+ if build.config.quiet_tests {
+ cargo.arg("--quiet");
+ }
+
+ let _time = util::timeit();
+
+ try_run(build, &mut cargo);
+ }
+}
+
+/// Converts a target triple into the form Cargo uses in env var names:
+/// hyphens become underscores and everything is uppercased
+/// (e.g. for `CARGO_TARGET_<triple>_RUNNER`).
+fn envify(s: &str) -> String {
+ s.chars().map(|c| {
+ match c {
+ '-' => '_',
+ c => c,
+ }
+ }).flat_map(|c| c.to_uppercase()).collect()
+}
+
+/// Some test suites are run inside emulators or on remote devices, and most
+/// of our test binaries are linked dynamically which means we need to ship
+/// the standard library and such to the emulator ahead of time. This step
+/// represents this and is a dependency of all test suites.
+///
+/// Most of the time this is a noop. For some steps such as shipping data to
+/// QEMU we have to build our own tools so we've got conditional dependencies
+/// on those programs as well. Note that the remote test client is built for
+/// the build target (us) and the server is built for the target.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct RemoteCopyLibs {
+ compiler: Compiler,
+ target: Interned<String>,
+}
+
+impl Step for RemoteCopyLibs {
+ type Output = ();
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.never()
+ }
+
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = self.compiler;
+ let target = self.target;
+ // Fast path: nothing to do for locally-tested targets.
+ if !build.remote_tested(target) {
+ return
+ }
+
+ builder.ensure(compile::Test { compiler, target });
+
+ println!("REMOTE copy libs to emulator ({})", target);
+ t!(fs::create_dir_all(build.out.join("tmp")));
+
+ let server = builder.ensure(tool::RemoteTestServer { compiler, target });
+
+ // Spawn the emulator and wait for it to come online
+ let tool = builder.tool_exe(Tool::RemoteTestClient);
+ let mut cmd = Command::new(&tool);
+ cmd.arg("spawn-emulator")
+ .arg(target)
+ .arg(&server)
+ .arg(build.out.join("tmp"));
+ if let Some(rootfs) = build.qemu_rootfs(target) {
+ cmd.arg(rootfs);
+ }
+ build.run(&mut cmd);
+
+ // Push all our dylibs to the emulator
+ for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) {
+ let f = t!(f);
+ let name = f.file_name().into_string().unwrap();
+ if util::is_dylib(&name) {
+ build.run(Command::new(&tool)
+ .arg("push")
+ .arg(f.path()));
+ }
+ }
+ }
+}
+
+/// Step that validates the distribution tarballs: unpacks the plain
+/// source tarball and runs `./configure && make check` from it, then
+/// checks that the rust-src tarball's libstd can generate a lockfile.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Distcheck;
+
+impl Step for Distcheck {
+ type Output = ();
+ const ONLY_BUILD: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("distcheck")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Distcheck);
+ }
+
+ /// Run "distcheck", a 'make check' from a tarball
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+
+ println!("Distcheck");
+ let dir = build.out.join("tmp").join("distcheck");
+ // Start from a clean unpack directory each run.
+ let _ = fs::remove_dir_all(&dir);
+ t!(fs::create_dir_all(&dir));
+
+ // Guarantee that these are built before we begin running.
+ builder.ensure(dist::PlainSourceTarball);
+ builder.ensure(dist::Src);
+
+ let mut cmd = Command::new("tar");
+ cmd.arg("-xzf")
+ .arg(builder.ensure(dist::PlainSourceTarball))
+ .arg("--strip-components=1")
+ .current_dir(&dir);
+ build.run(&mut cmd);
+ build.run(Command::new("./configure")
+ .args(&build.config.configure_args)
+ .arg("--enable-vendor")
+ .current_dir(&dir));
+ build.run(Command::new(build_helper::make(&build.build))
+ .arg("check")
+ .current_dir(&dir));
+
+ // Now make sure that rust-src has all of libstd's dependencies
+ println!("Distcheck rust-src");
+ let dir = build.out.join("tmp").join("distcheck-src");
+ let _ = fs::remove_dir_all(&dir);
+ t!(fs::create_dir_all(&dir));
+
+ let mut cmd = Command::new("tar");
+ cmd.arg("-xzf")
+ .arg(builder.ensure(dist::Src))
+ .arg("--strip-components=1")
+ .current_dir(&dir);
+ build.run(&mut cmd);
+
+ // generate-lockfile fails if any libstd dependency is missing from
+ // the rust-src tarball, which is exactly what we want to detect.
+ let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
+ build.run(Command::new(&build.initial_cargo)
+ .arg("generate-lockfile")
+ .arg("--manifest-path")
+ .arg(&toml)
+ .current_dir(&dir));
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Bootstrap;
+
+impl Step for Bootstrap {
+ type Output = ();
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+ const ONLY_BUILD: bool = true;
+
+ /// Test the build system itself
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let mut cmd = Command::new(&build.initial_cargo);
+ cmd.arg("test")
+ .current_dir(build.src.join("src/bootstrap"))
+ .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
+ .env("RUSTC_BOOTSTRAP", "1")
+ .env("RUSTC", &build.initial_rustc);
+ if !build.fail_fast {
+ cmd.arg("--no-fail-fast");
+ }
+ cmd.arg("--").args(&build.config.cmd.test_args());
+ try_run(build, &mut cmd);
+ }
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/bootstrap")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Bootstrap);
+ }
+}
-FROM ubuntu:16.04
+FROM ubuntu:18.04
RUN apt-get update && apt-get install -y --no-install-recommends \
clang \
CC_x86_64_sun_solaris=x86_64-sun-solaris2.10-gcc \
CXX_x86_64_sun_solaris=x86_64-sun-solaris2.10-g++
-# FIXME(EdSchouten): Remove this once cc ≥1.0.4 has been merged. It can
-# automatically pick the right compiler path.
-ENV \
- AR_x86_64_unknown_cloudabi=x86_64-unknown-cloudabi-ar \
- CC_x86_64_unknown_cloudabi=x86_64-unknown-cloudabi-clang \
- CXX_x86_64_unknown_cloudabi=x86_64-unknown-cloudabi-clang++
-
ENV TARGETS=x86_64-unknown-fuchsia
ENV TARGETS=$TARGETS,aarch64-unknown-fuchsia
ENV TARGETS=$TARGETS,sparcv9-sun-solaris
ln -s ../lib/llvm-5.0/bin/lld /usr/bin/${target}-ld
ln -s ../../${target} /usr/lib/llvm-5.0/${target}
-# FIXME(EdSchouten): Remove this once cc ≥1.0.4 has been merged. It
-# can make use of ${target}-cc and ${target}-c++, without incorrectly
-# assuming it's MSVC.
-ln -s ../lib/llvm-5.0/bin/clang /usr/bin/${target}-clang
-ln -s ../lib/llvm-5.0/bin/clang /usr/bin/${target}-clang++
-
# Install the C++ runtime libraries from CloudABI Ports.
echo deb https://nuxi.nl/distfiles/cloudabi-ports/debian/ cloudabi cloudabi > \
/etc/apt/sources.list.d/cloudabi.list
-FROM ubuntu:16.04
+FROM ubuntu:18.04
RUN apt-get update && apt-get install -y --no-install-recommends \
clang \
An overview of how to use the `rustdoc` command is available [in the docs][1].
Further details are available from the command line by with `rustdoc --help`.
-[1]: https://github.com/rust-lang/rust/blob/master/src/doc/book/documentation.md
+[1]: https://github.com/rust-lang/rust/blob/master/src/doc/rustdoc/src/what-is-rustdoc.md
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for BinaryHeapPlace<'a, T>
+unsafe impl<'a, T> Place<T> for BinaryHeapPlace<'a, T>
where T: Clone + Ord {
fn pointer(&mut self) -> *mut T {
self.place.pointer()
#[unstable(feature = "placement_in",
reason = "placement box design is still being worked out.",
issue = "27779")]
-impl<T> Place<T> for IntermediateBox<T> {
+unsafe impl<T> Place<T> for IntermediateBox<T> {
fn pointer(&mut self) -> *mut T {
self.ptr as *mut T
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for FrontPlace<'a, T> {
+unsafe impl<'a, T> Place<T> for FrontPlace<'a, T> {
fn pointer(&mut self) -> *mut T {
unsafe { &mut (*self.node.pointer()).element }
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for BackPlace<'a, T> {
+unsafe impl<'a, T> Place<T> for BackPlace<'a, T> {
fn pointer(&mut self) -> *mut T {
unsafe { &mut (*self.node.pointer()).element }
}
#![feature(pattern)]
#![feature(placement_in_syntax)]
#![feature(rand)]
-#![feature(repr_align)]
#![feature(slice_rotate)]
#![feature(splice)]
#![feature(str_escape)]
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for PlaceBack<'a, T> {
+unsafe impl<'a, T> Place<T> for PlaceBack<'a, T> {
fn pointer(&mut self) -> *mut T {
unsafe { self.vec.as_mut_ptr().offset(self.vec.len as isize) }
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for PlaceBack<'a, T> {
+unsafe impl<'a, T> Place<T> for PlaceBack<'a, T> {
fn pointer(&mut self) -> *mut T {
unsafe { self.vec_deque.ptr().offset(self.vec_deque.head as isize) }
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for PlaceFront<'a, T> {
+unsafe impl<'a, T> Place<T> for PlaceFront<'a, T> {
fn pointer(&mut self) -> *mut T {
let tail = self.vec_deque.wrap_sub(self.vec_deque.tail, 1);
unsafe { self.vec_deque.ptr().offset(tail as isize) }
//! Many functions in this module only handle normal numbers. The dec2flt routines conservatively
//! take the universally-correct slow path (Algorithm M) for very small and very large numbers.
//! That algorithm needs only next_float() which does handle subnormals and zeros.
-use u32;
use cmp::Ordering::{Less, Equal, Greater};
-use ops::{Mul, Div, Neg};
+use convert::{TryFrom, TryInto};
+use ops::{Add, Mul, Div, Neg};
use fmt::{Debug, LowerExp};
-use mem::transmute;
use num::diy_float::Fp;
use num::FpCategory::{Infinite, Zero, Subnormal, Normal, Nan};
use num::Float;
///
/// Should **never ever** be implemented for other types or be used outside the dec2flt module.
/// Inherits from `Float` because there is some overlap, but all the reused methods are trivial.
-pub trait RawFloat : Float + Copy + Debug + LowerExp
- + Mul<Output=Self> + Div<Output=Self> + Neg<Output=Self>
+pub trait RawFloat
+ : Float
+ + Copy
+ + Debug
+ + LowerExp
+ + Mul<Output=Self>
+ + Div<Output=Self>
+ + Neg<Output=Self>
+where
+ Self: Float<Bits = <Self as RawFloat>::RawBits>
{
const INFINITY: Self;
const NAN: Self;
const ZERO: Self;
+ /// Same as `Float::Bits` with extra traits.
+ type RawBits: Add<Output = Self::RawBits> + From<u8> + TryFrom<u64>;
+
/// Returns the mantissa, exponent and sign as integers.
fn integer_decode(self) -> (u64, i16, i8);
- /// Get the raw binary representation of the float.
- fn transmute(self) -> u64;
-
- /// Transmute the raw binary representation into a float.
- fn from_bits(bits: u64) -> Self;
-
/// Decode the float.
fn unpack(self) -> Unpacked;
}
impl RawFloat for f32 {
+ type RawBits = u32;
+
const SIG_BITS: u8 = 24;
const EXP_BITS: u8 = 8;
const CEIL_LOG5_OF_MAX_SIG: i16 = 11;
/// Returns the mantissa, exponent and sign as integers.
fn integer_decode(self) -> (u64, i16, i8) {
- let bits: u32 = unsafe { transmute(self) };
+ let bits = self.to_bits();
let sign: i8 = if bits >> 31 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 23) & 0xff) as i16;
let mantissa = if exponent == 0 {
(mantissa as u64, exponent, sign)
}
- fn transmute(self) -> u64 {
- let bits: u32 = unsafe { transmute(self) };
- bits as u64
- }
-
- fn from_bits(bits: u64) -> f32 {
- assert!(bits < u32::MAX as u64, "f32::from_bits: too many bits");
- unsafe { transmute(bits as u32) }
- }
-
fn unpack(self) -> Unpacked {
let (sig, exp, _sig) = self.integer_decode();
Unpacked::new(sig, exp)
impl RawFloat for f64 {
+ type RawBits = u64;
+
const SIG_BITS: u8 = 53;
const EXP_BITS: u8 = 11;
const CEIL_LOG5_OF_MAX_SIG: i16 = 23;
/// Returns the mantissa, exponent and sign as integers.
fn integer_decode(self) -> (u64, i16, i8) {
- let bits: u64 = unsafe { transmute(self) };
+ let bits = self.to_bits();
let sign: i8 = if bits >> 63 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 52) & 0x7ff) as i16;
let mantissa = if exponent == 0 {
(mantissa, exponent, sign)
}
- fn transmute(self) -> u64 {
- let bits: u64 = unsafe { transmute(self) };
- bits
- }
-
- fn from_bits(bits: u64) -> f64 {
- unsafe { transmute(bits) }
- }
-
fn unpack(self) -> Unpacked {
let (sig, exp, _sig) = self.integer_decode();
Unpacked::new(sig, exp)
"encode_normal: exponent out of range");
// Leave sign bit at 0 ("+"), our numbers are all positive
let bits = (k_enc as u64) << T::EXPLICIT_SIG_BITS | sig_enc;
- T::from_bits(bits)
+ T::from_bits(bits.try_into().unwrap_or_else(|_| unreachable!()))
}
/// Construct a subnormal. A mantissa of 0 is allowed and constructs zero.
pub fn encode_subnormal<T: RawFloat>(significand: u64) -> T {
assert!(significand < T::MIN_SIG, "encode_subnormal: not actually subnormal");
// Encoded exponent is 0, the sign bit is 0, so we just have to reinterpret the bits.
- T::from_bits(significand)
+ T::from_bits(significand.try_into().unwrap_or_else(|_| unreachable!()))
}
/// Approximate a bignum with an Fp. Rounds within 0.5 ULP with half-to-even.
// too is exactly what we want!
// Finally, f64::MAX + 1 = 7eff...f + 1 = 7ff0...0 = f64::INFINITY.
Zero | Subnormal | Normal => {
- let bits: u64 = x.transmute();
- T::from_bits(bits + 1)
+ T::from_bits(x.to_bits() + T::Bits::from(1u8))
}
}
}
reason = "stable interface is via `impl f{32,64}` in later crates",
issue = "32110")]
impl Float for f32 {
+ type Bits = u32;
+
/// Returns `true` if the number is NaN.
#[inline]
fn is_nan(self) -> bool {
const EXP_MASK: u32 = 0x7f800000;
const MAN_MASK: u32 = 0x007fffff;
- let bits: u32 = unsafe { mem::transmute(self) };
+ let bits = self.to_bits();
match (bits & MAN_MASK, bits & EXP_MASK) {
(0, 0) => Fp::Zero,
(_, 0) => Fp::Subnormal,
fn is_sign_negative(self) -> bool {
// IEEE754 says: isSignMinus(x) is true if and only if x has negative sign. isSignMinus
// applies to zeros and NaNs as well.
- #[repr(C)]
- union F32Bytes {
- f: f32,
- b: u32
- }
- unsafe { F32Bytes { f: self }.b & 0x8000_0000 != 0 }
+ self.to_bits() & 0x8000_0000 != 0
}
/// Returns the reciprocal (multiplicative inverse) of the number.
// multiplying by 1.0. Should switch to the `canonicalize` when it works.
(if other.is_nan() || self < other { self } else { other }) * 1.0
}
+
+ /// Raw transmutation to `u32`.
+ #[inline]
+ fn to_bits(self) -> u32 {
+ unsafe { mem::transmute(self) }
+ }
+
+ /// Raw transmutation from `u32`.
+ #[inline]
+ fn from_bits(v: u32) -> Self {
+ // It turns out the safety issues with sNaN were overblown! Hooray!
+ unsafe { mem::transmute(v) }
+ }
}
reason = "stable interface is via `impl f{32,64}` in later crates",
issue = "32110")]
impl Float for f64 {
+ type Bits = u64;
+
/// Returns `true` if the number is NaN.
#[inline]
fn is_nan(self) -> bool {
const EXP_MASK: u64 = 0x7ff0000000000000;
const MAN_MASK: u64 = 0x000fffffffffffff;
- let bits: u64 = unsafe { mem::transmute(self) };
+ let bits = self.to_bits();
match (bits & MAN_MASK, bits & EXP_MASK) {
(0, 0) => Fp::Zero,
(_, 0) => Fp::Subnormal,
/// negative sign bit and negative infinity.
#[inline]
fn is_sign_negative(self) -> bool {
- #[repr(C)]
- union F64Bytes {
- f: f64,
- b: u64
- }
- unsafe { F64Bytes { f: self }.b & 0x8000_0000_0000_0000 != 0 }
+ self.to_bits() & 0x8000_0000_0000_0000 != 0
}
/// Returns the reciprocal (multiplicative inverse) of the number.
// multiplying by 1.0. Should switch to the `canonicalize` when it works.
(if other.is_nan() || self < other { self } else { other }) * 1.0
}
+
+ /// Raw transmutation to `u64`.
+ #[inline]
+ fn to_bits(self) -> u64 {
+ unsafe { mem::transmute(self) }
+ }
+
+ /// Raw transmutation from `u64`.
+ #[inline]
+ fn from_bits(v: u64) -> Self {
+ // It turns out the safety issues with sNaN were overblown! Hooray!
+ unsafe { mem::transmute(v) }
+ }
}
reason = "stable interface is via `impl f{32,64}` in later crates",
issue = "32110")]
pub trait Float: Sized {
+ /// Type used by `to_bits` and `from_bits`.
+ #[stable(feature = "core_float_bits", since = "1.24.0")]
+ type Bits;
+
/// Returns `true` if this value is NaN and false otherwise.
#[stable(feature = "core", since = "1.6.0")]
fn is_nan(self) -> bool;
/// Returns the minimum of the two numbers.
#[stable(feature = "core_float_min_max", since="1.20.0")]
fn min(self, other: Self) -> Self;
+
+ /// Raw transmutation to integer.
+ #[stable(feature = "core_float_bits", since="1.24.0")]
+ fn to_bits(self) -> Self::Bits;
+ /// Raw transmutation from integer.
+ #[stable(feature = "core_float_bits", since="1.24.0")]
+ fn from_bits(v: Self::Bits) -> Self;
}
macro_rules! from_str_radix_int_impl {
/// implementation of Place to clean up any intermediate state
/// (e.g. deallocate box storage, pop a stack, etc).
#[unstable(feature = "placement_new_protocol", issue = "27779")]
-pub trait Place<Data: ?Sized> {
+pub unsafe trait Place<Data: ?Sized> {
/// Returns the address where the input value will be written.
/// Note that the data at this address is generally uninitialized,
/// and thus one should use `ptr::write` for initializing it.
+ ///
+ /// This function must return a pointer through which a value
+ /// of type `Data` can be written.
fn pointer(&mut self) -> *mut Data;
}
[input] TargetFeaturesWhitelist,
[] TargetFeaturesEnabled(DefId),
+ [] InstanceDefSizeEstimate { instance_def: InstanceDef<'tcx> },
);
trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
use syntax::abi::Abi;
use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute};
-use syntax::codemap::Spanned;
use syntax_pos::Span;
use hir::*;
use hir::def::Def;
fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) {
walk_variant(self, v, g, item_id)
}
+ fn visit_label(&mut self, label: &'v Label) {
+ walk_label(self, label)
+ }
fn visit_lifetime(&mut self, lifetime: &'v Lifetime) {
walk_lifetime(self, lifetime)
}
}
}
-pub fn walk_opt_name<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_name: Option<Name>) {
- if let Some(name) = opt_name {
- visitor.visit_name(span, name);
- }
-}
-
-pub fn walk_opt_sp_name<'v, V: Visitor<'v>>(visitor: &mut V, opt_sp_name: &Option<Spanned<Name>>) {
- if let Some(ref sp_name) = *opt_sp_name {
- visitor.visit_name(sp_name.span, sp_name.node);
- }
-}
-
/// Walks the contents of a crate. See also `Crate::visit_all_items`.
pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) {
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
walk_list!(visitor, visit_ty, &local.ty);
}
+pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
+ visitor.visit_name(label.span, label.name);
+}
+
pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
visitor.visit_id(lifetime.id);
match lifetime.name {
match item.node {
ItemExternCrate(opt_name) => {
visitor.visit_id(item.id);
- walk_opt_name(visitor, item.span, opt_name)
+ if let Some(name) = opt_name {
+ visitor.visit_name(item.span, name);
+ }
}
ItemUse(ref path, _) => {
visitor.visit_id(item.id);
visitor.visit_expr(if_block);
walk_list!(visitor, visit_expr, optional_else);
}
- ExprWhile(ref subexpression, ref block, ref opt_sp_name) => {
+ ExprWhile(ref subexpression, ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
- walk_opt_sp_name(visitor, opt_sp_name);
}
- ExprLoop(ref block, ref opt_sp_name, _) => {
+ ExprLoop(ref block, ref opt_label, _) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_block(block);
- walk_opt_sp_name(visitor, opt_sp_name);
}
ExprMatch(ref subexpression, ref arms, _) => {
visitor.visit_expr(subexpression);
ExprPath(ref qpath) => {
visitor.visit_qpath(qpath, expression.id, expression.span);
}
- ExprBreak(label, ref opt_expr) => {
- label.ident.map(|ident| {
- match label.target_id {
+ ExprBreak(ref destination, ref opt_expr) => {
+ if let Some(ref label) = destination.label {
+ visitor.visit_label(label);
+ match destination.target_id {
ScopeTarget::Block(node_id) |
ScopeTarget::Loop(LoopIdResult::Ok(node_id)) =>
visitor.visit_def_mention(Def::Label(node_id)),
ScopeTarget::Loop(LoopIdResult::Err(_)) => {},
};
- visitor.visit_name(ident.span, ident.node.name);
- });
+ }
walk_list!(visitor, visit_expr, opt_expr);
}
- ExprAgain(label) => {
- label.ident.map(|ident| {
- match label.target_id {
+ ExprAgain(ref destination) => {
+ if let Some(ref label) = destination.label {
+ visitor.visit_label(label);
+ match destination.target_id {
ScopeTarget::Block(_) => bug!("can't `continue` to a non-loop block"),
ScopeTarget::Loop(LoopIdResult::Ok(node_id)) =>
visitor.visit_def_mention(Def::Label(node_id)),
ScopeTarget::Loop(LoopIdResult::Err(_)) => {},
};
- visitor.visit_name(ident.span, ident.node.name);
- });
+ }
}
ExprRet(ref optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
*self.name_map.entry(ident).or_insert_with(|| Symbol::from_ident(ident))
}
- fn lower_opt_sp_ident(&mut self, o_id: Option<Spanned<Ident>>) -> Option<Spanned<Name>> {
- o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name))
+ fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> {
+ label.map(|label| hir::Label { name: label.ident.name, span: label.span })
}
- fn lower_loop_destination(&mut self, destination: Option<(NodeId, Spanned<Ident>)>)
+ fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>)
-> hir::Destination
{
match destination {
- Some((id, label_ident)) => {
+ Some((id, label)) => {
let target = if let Def::Label(loop_id) = self.expect_full_def(id) {
hir::LoopIdResult::Ok(self.lower_node_id(loop_id).node_id)
} else {
hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel)
};
hir::Destination {
- ident: Some(label_ident),
+ label: self.lower_label(Some(label)),
target_id: hir::ScopeTarget::Loop(target),
}
},
.map(|innermost_loop_id| *innermost_loop_id);
hir::Destination {
- ident: None,
+ label: None,
target_id: hir::ScopeTarget::Loop(
loop_id.map(|id| Ok(self.lower_node_id(id).node_id))
.unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
hir::ExprIf(P(self.lower_expr(cond)), P(then_expr), else_opt)
}
- ExprKind::While(ref cond, ref body, opt_ident) => {
+ ExprKind::While(ref cond, ref body, opt_label) => {
self.with_loop_scope(e.id, |this|
hir::ExprWhile(
this.with_loop_condition_scope(|this| P(this.lower_expr(cond))),
this.lower_block(body, false),
- this.lower_opt_sp_ident(opt_ident)))
+ this.lower_label(opt_label)))
}
- ExprKind::Loop(ref body, opt_ident) => {
+ ExprKind::Loop(ref body, opt_label) => {
self.with_loop_scope(e.id, |this|
hir::ExprLoop(this.lower_block(body, false),
- this.lower_opt_sp_ident(opt_ident),
+ this.lower_label(opt_label),
hir::LoopSource::Loop))
}
ExprKind::Catch(ref body) => {
hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional,
ImplTraitContext::Disallowed))
}
- ExprKind::Break(opt_ident, ref opt_expr) => {
- let label_result = if self.is_in_loop_condition && opt_ident.is_none() {
+ ExprKind::Break(opt_label, ref opt_expr) => {
+ let destination = if self.is_in_loop_condition && opt_label.is_none() {
hir::Destination {
- ident: opt_ident,
+ label: None,
target_id: hir::ScopeTarget::Loop(
Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
}
} else {
- self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident)))
+ self.lower_loop_destination(opt_label.map(|label| (e.id, label)))
};
hir::ExprBreak(
- label_result,
+ destination,
opt_expr.as_ref().map(|x| P(self.lower_expr(x))))
}
- ExprKind::Continue(opt_ident) =>
+ ExprKind::Continue(opt_label) =>
hir::ExprAgain(
- if self.is_in_loop_condition && opt_ident.is_none() {
+ if self.is_in_loop_condition && opt_label.is_none() {
hir::Destination {
- ident: opt_ident,
+ label: None,
target_id: hir::ScopeTarget::Loop(Err(
hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
}
} else {
- self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident)))
+ self.lower_loop_destination(opt_label.map(|label| (e.id, label)))
}),
ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))),
ExprKind::InlineAsm(ref asm) => {
// Desugar ExprWhileLet
// From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
- ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => {
+ ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_label) => {
// to:
//
// [opt_ident]: loop {
// `[opt_ident]: loop { ... }`
let loop_block = P(self.block_expr(P(match_expr)));
- let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
+ let loop_expr = hir::ExprLoop(loop_block, self.lower_label(opt_label),
hir::LoopSource::WhileLet);
// add attributes to the outer returned expr node
loop_expr
// Desugar ExprForLoop
// From: `[opt_ident]: for <pat> in <head> <body>`
- ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => {
+ ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
// to:
//
// {
None));
// `[opt_ident]: loop { ... }`
- let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
+ let loop_expr = hir::ExprLoop(loop_block, self.lower_label(opt_label),
hir::LoopSource::ForLoop);
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
let loop_expr = P(hir::Expr {
e.span,
hir::ExprBreak(
hir::Destination {
- ident: None,
+ label: None,
target_id: hir::ScopeTarget::Block(catch_node),
},
Some(from_err_expr)
use syntax_pos::{Span, DUMMY_SP};
use syntax::codemap::{self, Spanned};
use syntax::abi::Abi;
-use syntax::ast::{self, Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
+use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
use syntax::ext::hygiene::SyntaxContext;
use syntax::ptr::P;
pub const DUMMY_ITEM_LOCAL_ID: ItemLocalId = ItemLocalId(!0);
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
+pub struct Label {
+ pub name: Name,
+ pub span: Span,
+}
+
+impl fmt::Debug for Label {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "label({:?})", self.name)
+ }
+}
+
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
pub id: NodeId,
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
- ExprWhile(P<Expr>, P<Block>, Option<Spanned<Name>>),
+ ExprWhile(P<Expr>, P<Block>, Option<Label>),
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
- ExprLoop(P<Block>, Option<Spanned<Name>>, LoopSource),
+ ExprLoop(P<Block>, Option<Label>, LoopSource),
/// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind.
ExprMatch(P<Expr>, HirVec<Arm>, MatchSource),
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct Destination {
// This is `Some(_)` iff there is an explicit user-specified `label
- pub ident: Option<Spanned<Ident>>,
+ pub label: Option<Label>,
// These errors are caught and then reported during the diagnostics pass in
// librustc_passes/loops.rs
hir::ExprIf(ref test, ref blk, ref elseopt) => {
self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
}
- hir::ExprWhile(ref test, ref blk, opt_sp_name) => {
- if let Some(sp_name) = opt_sp_name {
- self.print_name(sp_name.node)?;
+ hir::ExprWhile(ref test, ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_name(label.name)?;
self.word_space(":")?;
}
self.head("while")?;
self.s.space()?;
self.print_block(&blk)?;
}
- hir::ExprLoop(ref blk, opt_sp_name, _) => {
- if let Some(sp_name) = opt_sp_name {
- self.print_name(sp_name.node)?;
+ hir::ExprLoop(ref blk, opt_label, _) => {
+ if let Some(label) = opt_label {
+ self.print_name(label.name)?;
self.word_space(":")?;
}
self.head("loop")?;
hir::ExprPath(ref qpath) => {
self.print_qpath(qpath, true)?
}
- hir::ExprBreak(label, ref opt_expr) => {
+ hir::ExprBreak(destination, ref opt_expr) => {
self.s.word("break")?;
self.s.space()?;
- if let Some(label_ident) = label.ident {
- self.print_name(label_ident.node.name)?;
+ if let Some(label) = destination.label {
+ self.print_name(label.name)?;
self.s.space()?;
}
if let Some(ref expr) = *opt_expr {
self.s.space()?;
}
}
- hir::ExprAgain(label) => {
+ hir::ExprAgain(destination) => {
self.s.word("continue")?;
self.s.space()?;
- if let Some(label_ident) = label.ident {
- self.print_name(label_ident.node.name)?;
+ if let Some(label) = destination.label {
+ self.print_name(label.name)?;
self.s.space()?
}
}
Name(name)
});
+impl_stable_hash_for!(struct hir::Label {
+ span,
+ name
+});
+
impl_stable_hash_for!(struct hir::Lifetime {
id,
span,
impl_stable_hash_for_spanned!(usize);
impl_stable_hash_for!(struct hir::Destination {
- ident,
+ label,
target_id
});
/// Describes constraints between the region variables and other
/// regions, as well as other conditions that must be verified, or
/// assumptions that can be made.
-#[derive(Default)]
+#[derive(Debug, Default)]
pub struct RegionConstraintData<'tcx> {
/// Constraints of the form `A <= B`, where either `A` or `B` can
/// be a region variable (or neither, as it happens).
use syntax::ast;
use syntax::symbol::Symbol;
+use syntax_pos::Span;
use hir::itemlikevisit::ItemLikeVisitor;
use hir;
impl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
- if let Some(value) = extract(&item.attrs) {
+ if let Some((value, span)) = extract(&item.attrs) {
let item_index = self.item_refs.get(&*value.as_str()).cloned();
if let Some(item_index) = item_index {
let def_id = self.tcx.hir.local_def_id(item.id);
self.collect_item(item_index, def_id);
} else {
- let span = self.tcx.hir.span(item.id);
- span_err!(self.tcx.sess, span, E0522,
- "definition of an unknown language item: `{}`.",
- value);
+ let mut err = struct_span_err!(self.tcx.sess, span, E0522,
+ "definition of an unknown language item: `{}`",
+ value);
+ err.span_label(span, format!("definition of unknown language item `{}`", value));
+ err.emit();
}
}
}
}
}
-pub fn extract(attrs: &[ast::Attribute]) -> Option<Symbol> {
+pub fn extract(attrs: &[ast::Attribute]) -> Option<(Symbol, Span)> {
for attribute in attrs {
if attribute.check_name("lang") {
if let Some(value) = attribute.value_str() {
- return Some(value)
+ return Some((value, attribute.span));
}
}
}
fn expression_label(ex: &hir::Expr) -> Option<(ast::Name, Span)> {
match ex.node {
hir::ExprWhile(.., Some(label)) | hir::ExprLoop(_, Some(label), _) => {
- Some((label.node, label.span))
+ Some((label.name, label.span))
}
_ => None,
}
}
pub fn link_name(attrs: &[ast::Attribute]) -> Option<Symbol> {
- lang_items::extract(attrs).and_then(|name| {
+ lang_items::extract(attrs).and_then(|(name, _)| {
$(if name == stringify!($name) {
Some(Symbol::intern(stringify!($sym)))
} else)* {
}
fn visit_foreign_item(&mut self, i: &hir::ForeignItem) {
- if let Some(lang_item) = lang_items::extract(&i.attrs) {
+ if let Some((lang_item, _)) = lang_items::extract(&i.attrs) {
self.register(&lang_item.as_str(), i.span);
}
intravisit::walk_foreign_item(self, i)
use syntax::ast::NodeId;
use syntax::symbol::InternedString;
-use ty::Instance;
+use ty::{Instance, TyCtxt};
use util::nodemap::FxHashMap;
use rustc_data_structures::base_n;
use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult,
GlobalAsm(NodeId),
}
+impl<'tcx> MonoItem<'tcx> {
+ pub fn size_estimate<'a>(&self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) -> usize {
+ match *self {
+ MonoItem::Fn(instance) => {
+ // Estimate the size of a function based on how many statements
+ // it contains.
+ tcx.instance_def_size_estimate(instance.def)
+ },
+ // Conservatively estimate the size of a static declaration
+ // or assembly to be 1.
+ MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1,
+ }
+ }
+}
+
impl<'tcx> HashStable<StableHashingContext<'tcx>> for MonoItem<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'tcx>,
/// as well as the crate name and disambiguator.
name: InternedString,
items: FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)>,
+ size_estimate: Option<usize>,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
CodegenUnit {
name: name,
items: FxHashMap(),
+ size_estimate: None,
}
}
let hash = hash & ((1u128 << 80) - 1);
base_n::encode(hash, base_n::CASE_INSENSITIVE)
}
+
+ pub fn estimate_size<'a>(&mut self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) {
+ // Estimate the size of a codegen unit as (approximately) the number of MIR
+ // statements it corresponds to.
+ self.size_estimate = Some(self.items.keys().map(|mi| mi.size_estimate(tcx)).sum());
+ }
+
+ pub fn size_estimate(&self) -> usize {
+ // Should only be called if `estimate_size` has previously been called.
+ self.size_estimate.expect("estimate_size must be called before getting a size_estimate")
+ }
+
+ pub fn modify_size_estimate(&mut self, delta: usize) {
+ assert!(self.size_estimate.is_some());
+ if let Some(size_estimate) = self.size_estimate {
+ self.size_estimate = Some(size_estimate + delta);
+ }
+ }
}
impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> {
let CodegenUnit {
ref items,
name,
+ // The size estimate is not relevant to the hash
+ size_estimate: _,
} = *self;
name.hash_stable(hcx, hasher);
pub err: ty::error::TypeError<'tcx>
}
-#[derive(PartialEq, Eq, Debug)]
+#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
enum ProjectionTyCandidate<'tcx> {
// from a where-clause in the env or object type
ParamEnv(ty::PolyProjectionPredicate<'tcx>),
Reveal::UserFacing => ty,
Reveal::All => {
+ let recursion_limit = self.tcx().sess.recursion_limit.get();
+ if self.depth >= recursion_limit {
+ let obligation = Obligation::with_depth(
+ self.cause.clone(),
+ recursion_limit,
+ self.param_env,
+ ty,
+ );
+ self.selcx.infcx().report_overflow_error(&obligation, true);
+ }
+
let generic_ty = self.tcx().type_of(def_id);
let concrete_ty = generic_ty.subst(self.tcx(), substs);
- self.fold_ty(concrete_ty)
+ self.depth += 1;
+ let folded_ty = self.fold_ty(concrete_ty);
+ self.depth -= 1;
+ folded_ty
}
}
}
// Drop duplicates.
//
// Note: `candidates.vec` seems to be on the critical path of the
- // compiler. Replacing it with an hash set was also tried, which would
- // render the following dedup unnecessary. It led to cleaner code but
- // prolonged compiling time of `librustc` from 5m30s to 6m in one test, or
- // ~9% performance lost.
- if candidates.vec.len() > 1 {
- let mut i = 0;
- while i < candidates.vec.len() {
- let has_dup = (0..i).any(|j| candidates.vec[i] == candidates.vec[j]);
- if has_dup {
- candidates.vec.swap_remove(i);
- } else {
- i += 1;
- }
- }
- }
+ // compiler. Replacing it with a HashSet was also tried, which would
+ // render the following dedup unnecessary. The original comment indicated
+ // that it was 9% slower, but that data is now obsolete and a new
+ // benchmark should be performed.
+ candidates.vec.sort_unstable();
+ candidates.vec.dedup();
// Prefer where-clauses. As in select, if there are multiple
// candidates, we prefer where-clause candidates over impls. This
}))
},
TyArray(ty, len) => {
- if len.val.to_const_int().and_then(|i| i.to_u64()) == Some(0) {
- DefIdForest::empty()
- } else {
- ty.uninhabited_from(visited, tcx)
+ match len.val.to_const_int().and_then(|i| i.to_u64()) {
+ // If the array is definitely non-empty, it's uninhabited if
+ // the type of its elements is uninhabited.
+ Some(n) if n != 0 => ty.uninhabited_from(visited, tcx),
+ _ => DefIdForest::empty()
}
}
TyRef(_, ref tm) => {
}
}
+impl<'tcx> QueryDescription<'tcx> for queries::instance_def_size_estimate<'tcx> {
+    // Human-readable description of this query, used in cycle errors and
+    // profiling/diagnostic output.
+    fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String {
+        format!("estimating size for `{}`", tcx.item_path_str(def.def_id()))
+    }
+}
+
macro_rules! impl_disk_cacheable_query(
($query_name:ident, |$key:tt| $cond:expr) => {
impl<'tcx> QueryDescription<'tcx> for queries::$query_name<'tcx> {
target_features_whitelist_node(CrateNum) -> Rc<FxHashSet<String>>,
[] fn target_features_enabled: TargetFeaturesEnabled(DefId) -> Rc<Vec<String>>,
+ // Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.
+ [] fn instance_def_size_estimate: instance_def_size_estimate_dep_node(ty::InstanceDef<'tcx>)
+ -> usize,
}
//////////////////////////////////////////////////////////////////////
fn target_features_whitelist_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::TargetFeaturesWhitelist
}
+
+/// Builds the `DepConstructor` that registers a dependency-graph node for
+/// the `instance_def_size_estimate` query on the given `InstanceDef`.
+fn instance_def_size_estimate_dep_node<'tcx>(instance_def: ty::InstanceDef<'tcx>)
+                                             -> DepConstructor<'tcx> {
+    DepConstructor::InstanceDefSizeEstimate {
+        instance_def
+    }
+}
DepKind::EraseRegionsTy |
DepKind::NormalizeTy |
DepKind::SubstituteNormalizeAndTestPredicates |
+ DepKind::InstanceDefSizeEstimate |
// This one should never occur in this context
DepKind::Null => {
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::cmp;
+use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
}
}
+impl<'tcx> Ord for TyS<'tcx> {
+    #[inline]
+    fn cmp(&self, other: &TyS<'tcx>) -> Ordering {
+        // Order types by their interned-pointer address. NOTE(review): this
+        // order is only stable within a single compiler invocation, since
+        // allocation addresses vary between runs -- confirm that nothing
+        // requiring cross-run determinism relies on this ordering.
+        // (self as *const _).cmp(other as *const _)
+        (self as *const TyS<'tcx>).cmp(&(other as *const TyS<'tcx>))
+    }
+}
+impl<'tcx> PartialOrd for TyS<'tcx> {
+    #[inline]
+    fn partial_cmp(&self, other: &TyS<'tcx>) -> Option<Ordering> {
+        // Defer to the total order defined by `Ord` above.
+        Some(self.cmp(other))
+    }
+}
+
impl<'tcx> TyS<'tcx> {
pub fn is_primitive_ty(&self) -> bool {
match self.sty {
}
impl<T> Eq for Slice<T> {}
+impl<T> Ord for Slice<T> {
+    #[inline]
+    fn cmp(&self, other: &Slice<T>) -> Ordering {
+        // Compare by the address of the underlying storage. This presumes
+        // `Slice` values are interned (pointer identity == value identity),
+        // which is consistent with the `Hash` impl below hashing only
+        // `(as_ptr(), len())`.
+        (&self.0 as *const [T]).cmp(&(&other.0 as *const [T]))
+    }
+}
+impl<T> PartialOrd for Slice<T> {
+    #[inline]
+    fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> {
+        // Defer to the total order defined by `Ord` above.
+        Some(self.cmp(other))
+    }
+}
+
impl<T> Hash for Slice<T> {
fn hash<H: Hasher>(&self, s: &mut H) {
(self.as_ptr(), self.len()).hash(s)
/// equality between arbitrary types. Processing an instance of
/// Form #2 eventually yields one of these `ProjectionPredicate`
/// instances to normalize the LHS.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct ProjectionPredicate<'tcx> {
pub projection_ty: ProjectionTy<'tcx>,
pub ty: Ty<'tcx>,
tcx.hir.crate_hash
}
+/// Provider for the `instance_def_size_estimate` query: a rough size
+/// estimate for an `InstanceDef`, used to balance codegen-unit partitioning.
+/// For items and drop glue, the estimate is the total number of MIR
+/// statements across all basic blocks.
+fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                        instance_def: InstanceDef<'tcx>)
+                                        -> usize {
+    match instance_def {
+        InstanceDef::Item(..) |
+        InstanceDef::DropGlue(..) => {
+            let mir = tcx.instance_mir(instance_def);
+            mir.basic_blocks().iter().map(|bb| bb.statements.len()).sum()
+        },
+        // Estimate the size of other compiler-generated shims to be 1.
+        _ => 1
+    }
+}
+
pub fn provide(providers: &mut ty::maps::Providers) {
context::provide(providers);
erase_regions::provide(providers);
original_crate_name,
crate_hash,
trait_impls_of: trait_def::trait_impls_of_provider,
+ instance_def_size_estimate,
..*providers
};
}
/// erase, or otherwise "discharge" these bound regions, we change the
/// type from `Binder<T>` to just `T` (see
/// e.g. `liberate_late_bound_regions`).
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Binder<T>(pub T);
impl<T> Binder<T> {
/// Represents the projection of an associated type. In explicit UFCS
/// form this would be written `<T as Trait<..>>::N`.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ProjectionTy<'tcx> {
/// The parameters of the associated item.
pub substs: &'tcx Substs<'tcx>,
/// To reduce memory usage, a `Kind` is a interned pointer,
/// with the lowest 2 bits being reserved for a tag to
/// indicate the type (`Ty` or `Region`) it points to.
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Kind<'tcx> {
ptr: NonZero<usize>,
marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>)>
/// Relocation model to use in object file. Corresponds to `llc
/// -relocation-model=$relocation_model`. Defaults to "pic".
pub relocation_model: String,
- /// Code model to use. Corresponds to `llc -code-model=$code_model`. Defaults to "default".
- pub code_model: String,
+ /// Code model to use. Corresponds to `llc -code-model=$code_model`.
+ pub code_model: Option<String>,
/// TLS model to use. Options are "global-dynamic" (default), "local-dynamic", "initial-exec"
/// and "local-exec". This is similar to the -ftls-model option in GCC/Clang.
pub tls_model: String,
only_cdylib: false,
executables: false,
relocation_model: "pic".to_string(),
- code_model: "default".to_string(),
+ code_model: None,
tls_model: "global-dynamic".to_string(),
disable_redzone: false,
eliminate_frame_pointer: true,
key!(only_cdylib, bool);
key!(executables, bool);
key!(relocation_model);
- key!(code_model);
+ key!(code_model, optional);
key!(tls_model);
key!(disable_redzone, bool);
key!(eliminate_frame_pointer, bool);
end: RangeEnd,
},
- /// matches against a slice, checking the length and extracting elements
+ /// Matches against a slice, checking the length and extracting elements.
+ /// The pattern is irrefutable when there is a slice pattern and both
+ /// `prefix` and `suffix` are empty, e.g. `&[ref xs..]`.
Slice {
prefix: Vec<Pattern<'tcx>>,
slice: Option<Pattern<'tcx>>,
¬e,
errors::Level::Note);
}
- if match env::var_os("RUST_BACKTRACE") {
- Some(val) => &val != "0",
- None => false,
- } {
- handler.emit(&MultiSpan::new(),
- "run with `RUST_BACKTRACE=1` for a backtrace",
- errors::Level::Note);
- }
eprintln!("{}", str::from_utf8(&data.lock().unwrap()).unwrap());
}
#[repr(C)]
pub enum CodeModel {
Other,
- Default,
- JITDefault,
Small,
Kernel,
Medium,
Large,
+ None,
}
/// LLVMRustDiagnosticKind
pub enum ArchiveKind {
Other,
K_GNU,
- K_MIPS64,
K_BSD,
K_COFF,
}
const FlagStaticMember = (1 << 12);
const FlagLValueReference = (1 << 13);
const FlagRValueReference = (1 << 14);
+ const FlagExternalTypeRef = (1 << 15);
+ const FlagIntroducedVirtual = (1 << 18);
+ const FlagBitField = (1 << 19);
+ const FlagNoReturn = (1 << 20);
const FlagMainSubprogram = (1 << 21);
}
}
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"gnu" => Ok(ArchiveKind::K_GNU),
- "mips64" => Ok(ArchiveKind::K_MIPS64),
"bsd" => Ok(ArchiveKind::K_BSD),
"coff" => Ok(ArchiveKind::K_COFF),
_ => Err(()),
let data = self.infcx.take_and_reset_region_constraints();
if !data.is_empty() {
+ debug!("fully_perform_op: constraints generated at {:?} are {:#?}",
+ locations, data);
self.constraints
.outlives_sets
.push(OutlivesSet { locations, data });
where
T: fmt::Debug + TypeFoldable<'tcx>,
{
+ debug!("normalize(value={:?}, location={:?})", value, location);
self.fully_perform_op(location.at_self(), |this| {
let mut selcx = traits::SelectionContext::new(this.infcx);
let cause = this.misc(this.last_span);
Err(match_pair)
}
- PatternKind::Range { .. } |
- PatternKind::Slice { .. } => {
+ PatternKind::Range { .. } => {
Err(match_pair)
}
+ PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
+ if prefix.is_empty() && slice.is_some() && suffix.is_empty() {
+ // irrefutable
+ self.prefix_slice_suffix(&mut candidate.match_pairs,
+ &match_pair.place,
+ prefix,
+ slice.as_ref(),
+ suffix);
+ Ok(())
+ } else {
+ Err(match_pair)
+ }
+ }
+
PatternKind::Variant { adt_def, substs, variant_index, ref subpatterns } => {
let irrefutable = adt_def.variants.iter().enumerate().all(|(i, v)| {
i == variant_index || {
use syntax::symbol::{Symbol, InternedString};
use rustc::mir::mono::MonoItem;
use monomorphize::item::{MonoItemExt, InstantiationMode};
+use core::usize;
pub use rustc::mir::mono::CodegenUnit;
let mut initial_partitioning = place_root_translation_items(tcx,
trans_items);
+ initial_partitioning.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(&tcx));
+
debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter());
// If the partitioning should produce a fixed count of codegen units, merge
let mut post_inlining = place_inlined_translation_items(initial_partitioning,
inlining_map);
+ post_inlining.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(&tcx));
+
debug_dump(tcx, "POST INLINING:", post_inlining.codegen_units.iter());
// Next we try to make as many symbols "internal" as possible, so LLVM has
codegen_units.sort_by_key(|cgu| cgu.name().clone());
// Merge the two smallest codegen units until the target size is reached.
- // Note that "size" is estimated here rather inaccurately as the number of
- // translation items in a given unit. This could be improved on.
while codegen_units.len() > target_cgu_count {
// Sort small cgus to the back
- codegen_units.sort_by_key(|cgu| -(cgu.items().len() as i64));
+ codegen_units.sort_by_key(|cgu| usize::MAX - cgu.size_estimate());
let mut smallest = codegen_units.pop().unwrap();
let second_smallest = codegen_units.last_mut().unwrap();
+ second_smallest.modify_size_estimate(smallest.size_estimate());
for (k, v) in smallest.items_mut().drain() {
second_smallest.items_mut().insert(k, v);
}
impl<'a> Visitor<'a> for AstValidator<'a> {
fn visit_expr(&mut self, expr: &'a Expr) {
match expr.node {
- ExprKind::While(.., Some(ident)) |
- ExprKind::Loop(_, Some(ident)) |
- ExprKind::WhileLet(.., Some(ident)) |
- ExprKind::ForLoop(.., Some(ident)) |
- ExprKind::Break(Some(ident), _) |
- ExprKind::Continue(Some(ident)) => {
- self.check_label(ident.node, ident.span);
- }
ExprKind::InlineAsm(..) if !self.session.target.target.options.allow_asm => {
span_err!(self.session, expr.span, E0472, "asm! is unsupported on this target");
}
visit::walk_use_tree(self, use_tree, id);
}
+ fn visit_label(&mut self, label: &'a Label) {
+ self.check_label(label.ident, label.span);
+ visit::walk_label(self, label);
+ }
+
fn visit_lifetime(&mut self, lifetime: &'a Lifetime) {
self.check_lifetime(lifetime);
visit::walk_lifetime(self, lifetime);
}
if let ast::UseTreeKind::Nested(ref items) = use_tree.kind {
+ // If it's the parent group, cover the entire use item
+ let span = if nested {
+ use_tree.span
+ } else {
+ self.item_span
+ };
+
if items.len() == 0 {
self.unused_imports
.entry(self.base_id)
.or_insert_with(NodeMap)
- .insert(id, self.item_span);
+ .insert(id, span);
}
} else {
let base_id = self.base_id;
use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind};
use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, GenericParam, Generics};
use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind};
-use syntax::ast::{Local, Mutability, Pat, PatKind, Path};
+use syntax::ast::{Label, Local, Mutability, Pat, PatKind, Path};
use syntax::ast::{QSelf, TraitItemKind, TraitRef, Ty, TyKind};
use syntax::feature_gate::{feature_err, emit_feature_err, GateIssue};
use syntax::parse::token;
segments: vec![],
span: use_tree.span,
};
- self.resolve_use_tree(item, use_tree, &path);
+ self.resolve_use_tree(item.id, use_tree, &path);
}
ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) | ItemKind::GlobalAsm(_) => {
}
}
- fn resolve_use_tree(&mut self, item: &Item, use_tree: &ast::UseTree, prefix: &Path) {
+ fn resolve_use_tree(&mut self, id: NodeId, use_tree: &ast::UseTree, prefix: &Path) {
match use_tree.kind {
ast::UseTreeKind::Nested(ref items) => {
let path = Path {
if items.len() == 0 {
// Resolve prefix of an import with empty braces (issue #28388).
- self.smart_resolve_path(item.id, None, &path, PathSource::ImportPrefix);
+ self.smart_resolve_path(id, None, &path, PathSource::ImportPrefix);
} else {
- for &(ref tree, _) in items {
- self.resolve_use_tree(item, tree, &path);
+ for &(ref tree, nested_id) in items {
+ self.resolve_use_tree(nested_id, tree, &path);
}
}
}
}
}
- fn with_resolved_label<F>(&mut self, label: Option<SpannedIdent>, id: NodeId, f: F)
+ fn with_resolved_label<F>(&mut self, label: Option<Label>, id: NodeId, f: F)
where F: FnOnce(&mut Resolver)
{
if let Some(label) = label {
let def = Def::Label(id);
self.with_label_rib(|this| {
- this.label_ribs.last_mut().unwrap().bindings.insert(label.node, def);
+ this.label_ribs.last_mut().unwrap().bindings.insert(label.ident, def);
f(this);
});
} else {
}
}
- fn resolve_labeled_block(&mut self, label: Option<SpannedIdent>, id: NodeId, block: &Block) {
+ fn resolve_labeled_block(&mut self, label: Option<Label>, id: NodeId, block: &Block) {
self.with_resolved_label(label, id, |this| this.visit_block(block));
}
}
ExprKind::Break(Some(label), _) | ExprKind::Continue(Some(label)) => {
- match self.search_label(label.node, |rib, id| rib.bindings.get(&id).cloned()) {
+ match self.search_label(label.ident, |rib, id| rib.bindings.get(&id).cloned()) {
None => {
// Search again for close matches...
// Picks the first label that is "close enough", which is not necessarily
// the closest match
- let close_match = self.search_label(label.node, |rib, ident| {
+ let close_match = self.search_label(label.ident, |rib, ident| {
let names = rib.bindings.iter().map(|(id, _)| &id.name);
find_best_match_for_name(names, &*ident.name.as_str(), None)
});
self.record_def(expr.id, err_path_resolution());
resolve_error(self,
label.span,
- ResolutionError::UndeclaredLabel(&label.node.name.as_str(),
+ ResolutionError::UndeclaredLabel(&label.ident.name.as_str(),
close_match));
}
Some(def @ Def::Label(_)) => {
use back::bytecode::{self, RLIB_BYTECODE_EXTENSION};
use back::lto::{self, ModuleBuffer, ThinBuffer};
use back::link::{self, get_linker, remove};
+use back::command::Command;
use back::linker::LinkerInfo;
use back::symbol_export::ExportedSymbols;
use base;
use rustc_incremental::{save_trans_partition, in_incr_comp_dir};
use rustc::dep_graph::{DepGraph, WorkProductFileKind};
use rustc::middle::cstore::{LinkMeta, EncodedMetadata};
-use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses,
+use rustc::session::config::{self, OutputFilenames, OutputType, Passes, SomePasses,
AllPasses, Sanitizer};
use rustc::session::Session;
use rustc::util::nodemap::FxHashMap;
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc::ty::TyCtxt;
use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry};
-use rustc::util::fs::{link_or_copy, rename_or_copy_remove};
+use rustc::util::fs::{link_or_copy};
use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
use errors::emitter::{Emitter};
use syntax::attr;
("ropi-rwpi", llvm::RelocMode::ROPI_RWPI),
];
-pub const CODE_GEN_MODEL_ARGS : [(&'static str, llvm::CodeModel); 5] = [
- ("default", llvm::CodeModel::Default),
+pub const CODE_GEN_MODEL_ARGS: &[(&str, llvm::CodeModel)] = &[
("small", llvm::CodeModel::Small),
("kernel", llvm::CodeModel::Kernel),
("medium", llvm::CodeModel::Medium),
let ffunction_sections = sess.target.target.options.function_sections;
let fdata_sections = ffunction_sections;
- let code_model_arg = match sess.opts.cg.code_model {
- Some(ref s) => &s,
- None => &sess.target.target.options.code_model,
- };
-
- let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
- |&&arg| arg.0 == code_model_arg) {
- Some(x) => x.1,
- _ => {
- sess.err(&format!("{:?} is not a valid code model",
- code_model_arg));
- sess.abort_if_errors();
- bug!();
+ let code_model_arg = sess.opts.cg.code_model.as_ref().or(
+ sess.target.target.options.code_model.as_ref(),
+ );
+
+ let code_model = match code_model_arg {
+ Some(s) => {
+ match CODE_GEN_MODEL_ARGS.iter().find(|arg| arg.0 == s) {
+ Some(x) => x.1,
+ _ => {
+ sess.err(&format!("{:?} is not a valid code model",
+ code_model_arg));
+ sess.abort_if_errors();
+ bug!();
+ }
+ }
}
+ None => llvm::CodeModel::None,
};
let singlethread = sess.target.target.options.singlethread;
// make the object file bitcode. Provides easy compatibility with
// emscripten's ecc compiler, when used as the linker.
obj_is_bitcode: bool,
+ no_integrated_as: bool,
}
impl ModuleConfig {
emit_asm: false,
emit_obj: false,
obj_is_bitcode: false,
+ no_integrated_as: false,
no_verify: false,
no_prepopulate_passes: false,
}
}
+/// Assembler name and command used by codegen when no_integrated_as is enabled
+struct AssemblerCommand {
+    // Path to the assembler executable, used when reporting failures.
+    name: PathBuf,
+    // Pre-built command (program plus target-specific asm args) that is
+    // cloned and extended with input/output paths for each invocation.
+    cmd: Command,
+}
+
/// Additional resources used by optimize_and_codegen (not module specific)
#[derive(Clone)]
pub struct CodegenContext {
// A reference to the TimeGraph so we can register timings. None means that
// measuring is disabled.
time_graph: Option<TimeGraph>,
+ // The assembler command if no_integrated_as option is enabled, None otherwise
+ assembler_cmd: Option<Arc<AssemblerCommand>>,
}
impl CodegenContext {
!cgcx.crate_types.contains(&config::CrateTypeRlib) &&
mtrans.kind == ModuleKind::Regular;
+ // If we don't have the integrated assembler, then we need to emit asm
+ // from LLVM and use `gcc` to create the object file.
+ let asm_to_obj = config.emit_obj && config.no_integrated_as;
+
// Change what we write and cleanup based on whether obj files are
// just llvm bitcode. In that case write bitcode, and possibly
// delete the bitcode if it wasn't requested. Don't generate the
// machine code, instead copy the .o file from the .bc
let write_bc = config.emit_bc || (config.obj_is_bitcode && !asm2wasm);
let rm_bc = !config.emit_bc && config.obj_is_bitcode && !asm2wasm;
- let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm;
+ let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm && !asm_to_obj;
let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode && !asm2wasm;
let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
timeline.record("ir");
}
- if config.emit_asm || (asm2wasm && config.emit_obj) {
+ if config.emit_asm || (asm2wasm && config.emit_obj) || asm_to_obj {
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
// We can't use the same module for asm and binary output, because that triggers
// various errors like invalid IR or broken binaries, so we might have to clone the
// module to produce the asm output
- let llmod = if config.emit_obj {
+ let llmod = if config.emit_obj && !asm2wasm {
llvm::LLVMCloneModule(llmod)
} else {
llmod
write_output_file(diag_handler, tm, cpm, llmod, &path,
llvm::FileType::AssemblyFile)
})?;
- if config.emit_obj {
+ if config.emit_obj && !asm2wasm {
llvm::LLVMDisposeModule(llmod);
}
timeline.record("asm");
llvm::FileType::ObjectFile)
})?;
timeline.record("obj");
+ } else if asm_to_obj {
+ let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
+ run_assembler(cgcx, diag_handler, &assembly, &obj_out);
+ timeline.record("asm_to_obj");
+
+ if !config.emit_asm && !cgcx.save_temps {
+ drop(fs::remove_file(&assembly));
+ }
}
Ok(())
total_cgus: usize)
-> OngoingCrateTranslation {
let sess = tcx.sess;
- let crate_output = tcx.output_filenames(LOCAL_CRATE);
let crate_name = tcx.crate_name(LOCAL_CRATE);
let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins");
let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs,
subsystem.to_string()
});
- let no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
- (tcx.sess.target.target.options.no_integrated_as &&
- (crate_output.outputs.contains_key(&OutputType::Object) ||
- crate_output.outputs.contains_key(&OutputType::Exe)));
let linker_info = LinkerInfo::new(tcx);
let crate_info = CrateInfo::new(tcx);
- let output_types_override = if no_integrated_as {
- OutputTypes::new(&[(OutputType::Assembly, None)])
- } else {
- sess.opts.output_types.clone()
- };
-
// Figure out what we actually need to build.
let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone());
let mut metadata_config = ModuleConfig::new(vec![]);
allocator_config.emit_bc_compressed = true;
}
- for output_type in output_types_override.keys() {
+ modules_config.no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
+ tcx.sess.target.target.options.no_integrated_as;
+
+ for output_type in sess.opts.output_types.keys() {
match *output_type {
OutputType::Bitcode => { modules_config.emit_bc = true; }
OutputType::LlvmAssembly => { modules_config.emit_ir = true; }
metadata,
windows_subsystem,
linker_info,
- no_integrated_as,
crate_info,
time_graph,
let wasm_import_memory =
attr::contains_name(&tcx.hir.krate().attrs, "wasm_import_memory");
+ let assembler_cmd = if modules_config.no_integrated_as {
+ // HACK: currently we use the linker (gcc) as our assembler
+ let (name, mut cmd, _) = get_linker(sess);
+ cmd.args(&sess.target.target.options.asm_args);
+ Some(Arc::new(AssemblerCommand {
+ name,
+ cmd,
+ }))
+ } else {
+ None
+ };
+
let cgcx = CodegenContext {
crate_types: sess.crate_types.borrow().clone(),
each_linked_rlib_for_lto,
binaryen_linker: tcx.sess.linker_flavor() == LinkerFlavor::Binaryen,
debuginfo: tcx.sess.opts.debuginfo,
wasm_import_memory: wasm_import_memory,
+ assembler_cmd,
};
// This is the "main loop" of parallel work happening for parallel codegen.
});
}
-pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
- let (pname, mut cmd, _) = get_linker(sess);
-
- for arg in &sess.target.target.options.asm_args {
- cmd.arg(arg);
- }
+pub fn run_assembler(cgcx: &CodegenContext, handler: &Handler, assembly: &Path, object: &Path) {
+ let assembler = cgcx.assembler_cmd
+ .as_ref()
+ .expect("cgcx.assembler_cmd is missing?");
- cmd.arg("-c").arg("-o").arg(&outputs.path(OutputType::Object))
- .arg(&outputs.temp_path(OutputType::Assembly, None));
+ let pname = &assembler.name;
+ let mut cmd = assembler.cmd.clone();
+ cmd.arg("-c").arg("-o").arg(object).arg(assembly);
debug!("{:?}", cmd);
match cmd.output() {
let mut note = prog.stderr.clone();
note.extend_from_slice(&prog.stdout);
- sess.struct_err(&format!("linking with `{}` failed: {}",
- pname.display(),
- prog.status))
+ handler.struct_err(&format!("linking with `{}` failed: {}",
+ pname.display(),
+ prog.status))
.note(&format!("{:?}", &cmd))
.note(str::from_utf8(¬e[..]).unwrap())
.emit();
- sess.abort_if_errors();
+ handler.abort_if_errors();
}
},
Err(e) => {
- sess.err(&format!("could not exec the linker `{}`: {}", pname.display(), e));
- sess.abort_if_errors();
+ handler.err(&format!("could not exec the linker `{}`: {}", pname.display(), e));
+ handler.abort_if_errors();
}
}
}
metadata: EncodedMetadata,
windows_subsystem: Option<String>,
linker_info: LinkerInfo,
- no_integrated_as: bool,
crate_info: CrateInfo,
time_graph: Option<TimeGraph>,
coordinator_send: Sender<Box<Any + Send>>,
metadata_module: compiled_modules.metadata_module,
};
- if self.no_integrated_as {
- run_assembler(sess, &self.output_filenames);
-
- // HACK the linker expects the object file to be named foo.0.o but
- // `run_assembler` produces an object named just foo.o. Rename it if we
- // are going to build an executable
- if sess.opts.output_types.contains_key(&OutputType::Exe) {
- let f = self.output_filenames.path(OutputType::Object);
- rename_or_copy_remove(&f,
- f.with_file_name(format!("{}.0.o",
- f.file_stem().unwrap().to_string_lossy()))).unwrap();
- }
-
- // Remove assembly source, unless --save-temps was specified
- if !sess.opts.cg.save_temps {
- fs::remove_file(&self.output_filenames
- .temp_path(OutputType::Assembly, None)).unwrap();
- }
- }
-
trans
}
use std::str;
use std::sync::Arc;
use std::time::{Instant, Duration};
-use std::i32;
+use std::{i32, usize};
use std::iter;
use std::sync::mpsc;
use syntax_pos::Span;
ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module);
// We sort the codegen units by size. This way we can schedule work for LLVM
- // a bit more efficiently. Note that "size" is defined rather crudely at the
- // moment as it is just the number of TransItems in the CGU, not taking into
- // account the size of each TransItem.
+ // a bit more efficiently.
let codegen_units = {
let mut codegen_units = codegen_units;
- codegen_units.sort_by_key(|cgu| -(cgu.items().len() as isize));
+ codegen_units.sort_by_key(|cgu| usize::MAX - cgu.size_estimate());
codegen_units
};
use rustc::ty::layout::{self, TyLayout, Size};
-#[derive(Clone, Copy, PartialEq, Debug)]
+/// Classification of "eightbyte" components.
+// NB: the order of the variants is from general to specific,
+// such that `unify(a, b)` is the "smaller" of `a` and `b`.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class {
- None,
Int,
Sse,
SseUp
const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;
fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>)
- -> Result<[Class; MAX_EIGHTBYTES], Memory> {
- fn unify(cls: &mut [Class],
- off: Size,
- c: Class) {
- let i = (off.bytes() / 8) as usize;
- let to_write = match (cls[i], c) {
- (Class::None, _) => c,
- (_, Class::None) => return,
-
- (Class::Int, _) |
- (_, Class::Int) => Class::Int,
-
- (Class::Sse, _) |
- (_, Class::Sse) => Class::Sse,
-
- (Class::SseUp, Class::SseUp) => Class::SseUp
- };
- cls[i] = to_write;
- }
-
+ -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory> {
fn classify<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
layout: TyLayout<'tcx>,
- cls: &mut [Class],
+ cls: &mut [Option<Class>],
off: Size)
-> Result<(), Memory> {
if !off.is_abi_aligned(layout.align) {
return Ok(());
}
- match layout.abi {
- layout::Abi::Uninhabited => {}
+ let mut c = match layout.abi {
+ layout::Abi::Uninhabited => return Ok(()),
layout::Abi::Scalar(ref scalar) => {
- let reg = match scalar.value {
+ match scalar.value {
layout::Int(..) |
layout::Pointer => Class::Int,
layout::F32 |
layout::F64 => Class::Sse
- };
- unify(cls, off, reg);
- }
-
- layout::Abi::Vector { ref element, count } => {
- unify(cls, off, Class::Sse);
-
- // everything after the first one is the upper
- // half of a register.
- let stride = element.value.size(cx);
- for i in 1..count {
- let field_off = off + stride * i;
- unify(cls, field_off, Class::SseUp);
}
}
+ layout::Abi::Vector { .. } => Class::Sse,
+
layout::Abi::ScalarPair(..) |
layout::Abi::Aggregate { .. } => {
match layout.variants {
let field_off = off + layout.fields.offset(i);
classify(cx, layout.field(cx, i), cls, field_off)?;
}
+ return Ok(());
}
layout::Variants::Tagged { .. } |
layout::Variants::NicheFilling { .. } => return Err(Memory),
}
}
+ };
+
+ // Fill in `cls` for scalars (Int/Sse) and vectors (Sse).
+ let first = (off.bytes() / 8) as usize;
+ let last = ((off.bytes() + layout.size.bytes() - 1) / 8) as usize;
+ for cls in &mut cls[first..=last] {
+ *cls = Some(cls.map_or(c, |old| old.min(c)));
+
+ // Everything after the first Sse "eightbyte"
+ // component is the upper half of a register.
+ if c == Class::Sse {
+ c = Class::SseUp;
+ }
}
Ok(())
return Err(Memory);
}
- let mut cls = [Class::None; MAX_EIGHTBYTES];
+ let mut cls = [None; MAX_EIGHTBYTES];
classify(cx, arg.layout, &mut cls, Size::from_bytes(0))?;
if n > 2 {
- if cls[0] != Class::Sse {
+ if cls[0] != Some(Class::Sse) {
return Err(Memory);
}
- if cls[1..n].iter().any(|&c| c != Class::SseUp) {
+ if cls[1..n].iter().any(|&c| c != Some(Class::SseUp)) {
return Err(Memory);
}
} else {
let mut i = 0;
while i < n {
- if cls[i] == Class::SseUp {
- cls[i] = Class::Sse;
- } else if cls[i] == Class::Sse {
+ if cls[i] == Some(Class::SseUp) {
+ cls[i] = Some(Class::Sse);
+ } else if cls[i] == Some(Class::Sse) {
i += 1;
- while i != n && cls[i] == Class::SseUp { i += 1; }
+ while i != n && cls[i] == Some(Class::SseUp) { i += 1; }
} else {
i += 1;
}
Ok(cls)
}
-fn reg_component(cls: &[Class], i: &mut usize, size: Size) -> Option<Reg> {
+fn reg_component(cls: &[Option<Class>], i: &mut usize, size: Size) -> Option<Reg> {
if *i >= cls.len() {
return None;
}
match cls[*i] {
- Class::None => None,
- Class::Int => {
+ None => None,
+ Some(Class::Int) => {
*i += 1;
Some(match size.bytes() {
1 => Reg::i8(),
_ => Reg::i64()
})
}
- Class::Sse => {
- let vec_len = 1 + cls[*i+1..].iter().take_while(|&&c| c == Class::SseUp).count();
+ Some(Class::Sse) => {
+ let vec_len = 1 + cls[*i+1..].iter()
+ .take_while(|&&c| c == Some(Class::SseUp))
+ .count();
*i += vec_len;
Some(if vec_len == 1 {
match size.bytes() {
}
})
}
- c => bug!("reg_component: unhandled class {:?}", c)
+ Some(c) => bug!("reg_component: unhandled class {:?}", c)
}
}
-fn cast_target(cls: &[Class], size: Size) -> CastTarget {
+fn cast_target(cls: &[Option<Class>], size: Size) -> CastTarget {
let mut i = 0;
let lo = reg_component(cls, &mut i, size).unwrap();
let offset = Size::from_bytes(8) * (i as u64);
- let target = if size <= offset {
- CastTarget::from(lo)
- } else {
- let hi = reg_component(cls, &mut i, size - offset).unwrap();
- CastTarget::Pair(lo, hi)
- };
+ let mut target = CastTarget::from(lo);
+ if size > offset {
+ if let Some(hi) = reg_component(cls, &mut i, size - offset) {
+ target = CastTarget::Pair(lo, hi);
+ }
+ }
assert_eq!(reg_component(cls, &mut i, Size::from_bytes(0)), None);
target
}
let mut sse_regs = 8; // XMM0-7
let mut x86_64_ty = |arg: &mut ArgType<'tcx>, is_arg: bool| {
- let cls = classify_arg(cx, arg);
+ let mut cls_or_mem = classify_arg(cx, arg);
let mut needed_int = 0;
let mut needed_sse = 0;
- let in_mem = match cls {
- Err(Memory) => true,
- Ok(ref cls) if is_arg => {
- for &c in cls {
+ if is_arg {
+ if let Ok(cls) = cls_or_mem {
+ for &c in &cls {
match c {
- Class::Int => needed_int += 1,
- Class::Sse => needed_sse += 1,
+ Some(Class::Int) => needed_int += 1,
+ Some(Class::Sse) => needed_sse += 1,
_ => {}
}
}
- arg.layout.is_aggregate() &&
- (int_regs < needed_int || sse_regs < needed_sse)
+ if arg.layout.is_aggregate() {
+ if int_regs < needed_int || sse_regs < needed_sse {
+ cls_or_mem = Err(Memory);
+ }
+ }
}
- Ok(_) => false
- };
+ }
- if in_mem {
- if is_arg {
- arg.make_indirect_byval();
- } else {
- // `sret` parameter thus one less integer register available
- arg.make_indirect();
- int_regs -= 1;
+ match cls_or_mem {
+ Err(Memory) => {
+ if is_arg {
+ arg.make_indirect_byval();
+ } else {
+ // `sret` parameter thus one less integer register available
+ arg.make_indirect();
+ int_regs -= 1;
+ }
}
- } else {
- // split into sized chunks passed individually
- int_regs -= needed_int;
- sse_regs -= needed_sse;
-
- if arg.layout.is_aggregate() {
- let size = arg.layout.size;
- arg.cast_to(cast_target(cls.as_ref().unwrap(), size))
- } else {
- arg.extend_integer_width_to(32);
+ Ok(ref cls) => {
+ // split into sized chunks passed individually
+ int_regs -= needed_int;
+ sse_regs -= needed_sse;
+
+ if arg.layout.is_aggregate() {
+ let size = arg.layout.size;
+ arg.cast_to(cast_target(cls, size))
+ } else {
+ arg.extend_integer_width_to(32);
+ }
}
}
};
}
None => {}
};
+ if sig.output().is_never() {
+ flags = flags | DIFlags::FlagNoReturn;
+ }
let fn_metadata = unsafe {
llvm::LLVMRustDIBuilderCreateFunction(
let output = bare_fn_ty.output();
let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output);
for br in late_bound_in_ret.difference(&late_bound_in_args) {
- let br_name = match *br {
- ty::BrNamed(_, name) => name,
- _ => {
- span_bug!(
- decl.output.span(),
- "anonymous bound region {:?} in return but not args",
- br);
- }
+ let lifetime_name = match *br {
+ ty::BrNamed(_, name) => format!("lifetime `{}`,", name),
+ ty::BrAnon(_) | ty::BrFresh(_) | ty::BrEnv => format!("an anonymous lifetime"),
};
- struct_span_err!(tcx.sess,
- decl.output.span(),
- E0581,
- "return type references lifetime `{}`, \
- which does not appear in the fn input types",
- br_name)
- .emit();
+ let mut err = struct_span_err!(tcx.sess,
+ decl.output.span(),
+ E0581,
+ "return type references {} \
+ which is not constrained by the fn input types",
+ lifetime_name);
+ if let ty::BrAnon(_) = *br {
+ // The only way for an anonymous lifetime to wind up
+ // in the return type but **also** be unconstrained is
+ // if it only appears in "associated types" in the
+ // input. See #47511 for an example. In this case,
+ // though we can easily give a hint that ought to be
+ // relevant.
+ err.note("lifetimes appearing in an associated type \
+ are not considered constrained");
+ }
+ err.emit();
}
bare_fn_ty
use rustc::traits::ObligationCause;
use syntax::ast;
-use syntax::util::parser::AssocOp;
+use syntax::util::parser::PREC_POSTFIX;
use syntax_pos::{self, Span};
use rustc::hir;
use rustc::hir::print;
// For now, don't suggest casting with `as`.
let can_cast = false;
- let needs_paren = expr.precedence().order() < (AssocOp::As.precedence() as i8);
+ let needs_paren = expr.precedence().order() < (PREC_POSTFIX as i8);
if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
let msg = format!("you can cast an `{}` to `{}`", checked_ty, expected_ty);
// this isn't perfect (that is, there are cases when
// implementing a trait would be legal but is rejected
// here).
- (type_is_local || info.def_id.is_local())
- && self.associated_item(info.def_id, item_name, Namespace::Value).is_some()
+ (type_is_local || info.def_id.is_local()) &&
+ self.associated_item(info.def_id, item_name, Namespace::Value)
+ .filter(|item| {
+ // We only want to suggest public or local traits (#45781).
+ item.vis == ty::Visibility::Public || info.def_id.is_local()
+ })
+ .is_some()
})
.collect::<Vec<_>>();
#![feature(advanced_slice_patterns)]
#![feature(box_patterns)]
#![feature(box_syntax)]
-#![feature(crate_visibility_modifier)]
#![feature(conservative_impl_trait)]
#![feature(copy_closures, clone_closures)]
+#![feature(crate_visibility_modifier)]
#![feature(from_ref)]
#![feature(match_default_bindings)]
#![feature(never_type)]
+#![feature(option_filter)]
#![feature(quote)]
#![feature(refcell_replace_swap)]
#![feature(rustc_diagnostic_macros)]
//! We walk the set of items and, for each member, generate new constraints.
use hir::def_id::DefId;
-use rustc::dep_graph::{DepGraphSafe, DepKind, DepNodeColor};
-use rustc::ich::StableHashingContext;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use syntax::ast;
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use rustc_data_structures::stable_hasher::StableHashingContextProvider;
-
use super::terms::*;
use super::terms::VarianceTerm::*;
}
}
-impl<'a, 'tcx> StableHashingContextProvider for ConstraintContext<'a, 'tcx> {
- type ContextType = StableHashingContext<'tcx>;
-
- fn create_stable_hashing_context(&self) -> Self::ContextType {
- self.terms_cx.tcx.create_stable_hashing_context()
- }
-}
-
-impl<'a, 'tcx> DepGraphSafe for ConstraintContext<'a, 'tcx> {}
-
impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
fn visit_node_helper(&mut self, id: ast::NodeId) {
let tcx = self.terms_cx.tcx;
let def_id = tcx.hir.local_def_id(id);
-
- // Encapsulate constructing the constraints into a task we can
- // reference later. This can go away once the red-green
- // algorithm is in place.
- //
- // See README.md for a detailed discussion
- // on dep-graph management.
- let dep_node = def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints);
-
- if let Some(DepNodeColor::Green(_)) = tcx.dep_graph.node_color(&dep_node) {
- // If the corresponding node has already been marked as green, the
- // appropriate portion of the DepGraph has already been loaded from
- // the previous graph, so we don't do any dep-tracking. Since we
- // don't cache any values though, we still have to re-run the
- // computation.
- tcx.dep_graph.with_ignore(|| {
- self.build_constraints_for_item(def_id);
- });
- } else {
- tcx.dep_graph.with_task(dep_node,
- self,
- def_id,
- visit_item_task);
- }
-
- fn visit_item_task<'a, 'tcx>(ccx: &mut ConstraintContext<'a, 'tcx>,
- def_id: DefId)
- {
- ccx.build_constraints_for_item(def_id);
- }
+ self.build_constraints_for_item(def_id);
}
fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
//! parameters. See README.md for details.
use arena;
-use rustc::dep_graph::DepKind;
use rustc::hir;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::ty::{self, CrateVariancesMap, TyCtxt};
// Everything else must be inferred.
let crate_map = tcx.crate_variances(LOCAL_CRATE);
- let dep_node = item_def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints);
- tcx.dep_graph.read(dep_node);
-
crate_map.variances.get(&item_def_id)
.unwrap_or(&crate_map.empty_variance)
.clone()
pub other_attrs: Vec<ast::Attribute>,
pub cfg: Option<Rc<Cfg>>,
pub span: Option<syntax_pos::Span>,
- pub links: Vec<(String, DefId)>,
+ /// map from Rust paths to resolved defs and potential URL fragments
+ pub links: Vec<(String, DefId, Option<String>)>,
}
impl Attributes {
/// Cache must be populated before call
pub fn links(&self) -> Vec<(String, String)> {
use html::format::href;
- self.links.iter().filter_map(|&(ref s, did)| {
- if let Some((href, ..)) = href(did) {
+ self.links.iter().filter_map(|&(ref s, did, ref fragment)| {
+ if let Some((mut href, ..)) = href(did) {
+ if let Some(ref fragment) = *fragment {
+ href.push_str("#");
+ href.push_str(fragment);
+ }
Some((s.clone(), href))
} else {
None
/// they exist in both namespaces (structs and modules)
fn value_ns_kind(def: Def, path_str: &str) -> Option<(&'static str, String)> {
match def {
- // structs and mods exist in both namespaces. skip them
- Def::StructCtor(..) | Def::Mod(..) => None,
- Def::Variant(..) | Def::VariantCtor(..)
- => Some(("variant", format!("{}()", path_str))),
+ // structs, variants, and mods exist in both namespaces. skip them
+ Def::StructCtor(..) | Def::Mod(..) | Def::Variant(..) | Def::VariantCtor(..) => None,
Def::Fn(..)
=> Some(("function", format!("{}()", path_str))),
Def::Method(..)
let sp = attrs.doc_strings.first()
.map_or(DUMMY_SP, |a| a.span());
cx.sess()
- .struct_span_err(sp,
- &format!("`{}` is both {} {} and {} {}",
- path_str, article1, kind1,
- article2, kind2))
+ .struct_span_warn(sp,
+ &format!("`{}` is both {} {} and {} {}",
+ path_str, article1, kind1,
+ article2, kind2))
.help(&format!("try `{}` if you want to select the {}, \
or `{}` if you want to \
select the {}",
.emit();
}
+/// Given an enum variant's def, return the def of its enum and the associated fragment
+// NOTE(review): the "{}.v" fragment presumably matches rustdoc's page anchor
+// for enum variants, so links to a variant land on the parent enum's page —
+// confirm against the anchor names emitted by html::render.
+fn handle_variant(cx: &DocContext, def: Def) -> Result<(Def, Option<String>), ()> {
+    use rustc::ty::DefIdTree;
+
+    // A variant's parent def-id is its enum; without a parent there is no
+    // page to link to, so resolution fails.
+    let parent = if let Some(parent) = cx.tcx.parent(def.def_id()) {
+        parent
+    } else {
+        return Err(())
+    };
+    let parent_def = Def::Enum(parent);
+    let variant = cx.tcx.expect_variant_def(def);
+    Ok((parent_def, Some(format!("{}.v", variant.name))))
+}
+
/// Resolve a given string as a path, along with whether or not it is
-/// in the value namespace
-fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<hir::Path, ()> {
+/// in the value namespace. Also returns an optional URL fragment in the case
+/// of variants and methods
+// Returns `Err(())` when the path resolves to nothing linkable in the
+// requested namespace; callers treat that as "leave the link alone".
+fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<(Def, Option<String>), ()> {
    // In case we're in a module, try to resolve the relative
    // path
    if let Some(id) = cx.mod_ids.borrow().last() {
-        cx.resolver.borrow_mut()
-                   .with_scope(*id, |resolver| {
-                       resolver.resolve_str_path_error(DUMMY_SP,
-                                                       &path_str, is_val)
-                   })
+        let result = cx.resolver.borrow_mut()
+                                .with_scope(*id,
+            |resolver| {
+                resolver.resolve_str_path_error(DUMMY_SP,
+                                                &path_str, is_val)
+        });
+
+        if let Ok(result) = result {
+            // In case this is a trait item, skip the
+            // early return and try looking for the trait
+            let value = match result.def {
+                Def::Method(_) | Def::AssociatedConst(_) => true,
+                Def::AssociatedTy(_) => false,
+                Def::Variant(_) => return handle_variant(cx, result.def),
+                // not a trait item, just return what we found
+                _ => return Ok((result.def, None))
+            };
+
+            // The resolver found the item in the wrong namespace; bail
+            // instead of silently linking to the wrong thing.
+            if value != is_val {
+                return Err(())
+            }
+        } else {
+            // If resolution failed, it may still be a method
+            // because methods are not handled by the resolver
+            // If so, bail when we're not looking for a value
+            if !is_val {
+                return Err(())
+            }
+        }
+
+        // Try looking for methods and associated items
+        // Split `Type::item` into the trailing item name and the leading
+        // type path; a path without `::` cannot name an associated item.
+        let mut split = path_str.rsplitn(2, "::");
+        let mut item_name = if let Some(first) = split.next() {
+            first
+        } else {
+            return Err(())
+        };
+
+        let mut path = if let Some(second) = split.next() {
+            second
+        } else {
+            return Err(())
+        };
+
+        // Resolve the leading path in the type namespace, then look the
+        // item up among the type's (or trait's) associated items.
+        let ty = cx.resolver.borrow_mut()
+                            .with_scope(*id,
+            |resolver| {
+                resolver.resolve_str_path_error(DUMMY_SP,
+                                                &path, false)
+        })?;
+        match ty.def {
+            Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
+                let item = cx.tcx.inherent_impls(did).iter()
+                                 .flat_map(|imp| cx.tcx.associated_items(*imp))
+                                 .find(|item| item.name == item_name);
+                if let Some(item) = item {
+                    if item.kind == ty::AssociatedKind::Method && is_val {
+                        Ok((ty.def, Some(format!("method.{}", item_name))))
+                    } else {
+                        Err(())
+                    }
+                } else {
+                    Err(())
+                }
+            }
+            Def::Trait(did) => {
+                let item = cx.tcx.associated_item_def_ids(did).iter()
+                             .map(|item| cx.tcx.associated_item(*item))
+                             .find(|item| item.name == item_name);
+                if let Some(item) = item {
+                    // Fragment prefixes mirror rustdoc's anchors for trait
+                    // items; each kind must also match the namespace asked for.
+                    let kind = match item.kind {
+                        ty::AssociatedKind::Const if is_val => "associatedconstant",
+                        ty::AssociatedKind::Type if !is_val => "associatedtype",
+                        ty::AssociatedKind::Method if is_val => "tymethod",
+                        _ => return Err(())
+                    };
+
+                    Ok((ty.def, Some(format!("{}.{}", kind, item_name))))
+                } else {
+                    Err(())
+                }
+            }
+            _ => Err(())
+        }
+
    } else {
-        // FIXME(Manishearth) this branch doesn't seem to ever be hit, really
-        cx.resolver.borrow_mut()
-                   .resolve_str_path_error(DUMMY_SP, &path_str, is_val)
+        Err(())
    }
}
if UnstableFeatures::from_environment().is_nightly_build() {
let dox = attrs.collapsed_doc_value().unwrap_or_else(String::new);
for link in markdown_links(&dox, cx.render_type) {
- let def = {
+ let (def, fragment) = {
let mut kind = PathKind::Unknown;
let path_str = if let Some(prefix) =
["struct@", "enum@", "type@",
link.trim_left_matches(prefix)
} else if let Some(prefix) =
["const@", "static@",
- "value@", "function@", "mod@", "fn@", "module@"]
+ "value@", "function@", "mod@",
+ "fn@", "module@", "method@"]
.iter().find(|p| link.starts_with(**p)) {
kind = PathKind::Value;
link.trim_left_matches(prefix)
match kind {
PathKind::Value => {
- if let Ok(path) = resolve(cx, path_str, true) {
- path.def
+ if let Ok(def) = resolve(cx, path_str, true) {
+ def
} else {
// this could just be a normal link or a broken link
// we could potentially check if something is
}
}
PathKind::Type => {
- if let Ok(path) = resolve(cx, path_str, false) {
- path.def
+ if let Ok(def) = resolve(cx, path_str, false) {
+ def
} else {
// this could just be a normal link
continue;
PathKind::Unknown => {
// try everything!
if let Some(macro_def) = macro_resolve(cx, path_str) {
- if let Ok(type_path) = resolve(cx, path_str, false) {
+ if let Ok(type_def) = resolve(cx, path_str, false) {
let (type_kind, article, type_disambig)
- = type_ns_kind(type_path.def, path_str);
+ = type_ns_kind(type_def.0, path_str);
ambiguity_error(cx, &attrs, path_str,
article, type_kind, &type_disambig,
"a", "macro", &format!("macro@{}", path_str));
continue;
- } else if let Ok(value_path) = resolve(cx, path_str, true) {
+ } else if let Ok(value_def) = resolve(cx, path_str, true) {
let (value_kind, value_disambig)
- = value_ns_kind(value_path.def, path_str)
+ = value_ns_kind(value_def.0, path_str)
.expect("struct and mod cases should have been \
caught in previous branch");
ambiguity_error(cx, &attrs, path_str,
"a", value_kind, &value_disambig,
"a", "macro", &format!("macro@{}", path_str));
}
- macro_def
- } else if let Ok(type_path) = resolve(cx, path_str, false) {
+ (macro_def, None)
+ } else if let Ok(type_def) = resolve(cx, path_str, false) {
// It is imperative we search for not-a-value first
// Otherwise we will find struct ctors for when we are looking
// for structs, and the link won't work.
// if there is something in both namespaces
- if let Ok(value_path) = resolve(cx, path_str, true) {
- let kind = value_ns_kind(value_path.def, path_str);
+ if let Ok(value_def) = resolve(cx, path_str, true) {
+ let kind = value_ns_kind(value_def.0, path_str);
if let Some((value_kind, value_disambig)) = kind {
let (type_kind, article, type_disambig)
- = type_ns_kind(type_path.def, path_str);
+ = type_ns_kind(type_def.0, path_str);
ambiguity_error(cx, &attrs, path_str,
article, type_kind, &type_disambig,
"a", value_kind, &value_disambig);
continue;
}
}
- type_path.def
- } else if let Ok(value_path) = resolve(cx, path_str, true) {
- value_path.def
+ type_def
+ } else if let Ok(value_def) = resolve(cx, path_str, true) {
+ value_def
} else {
// this could just be a normal link
continue;
}
PathKind::Macro => {
if let Some(def) = macro_resolve(cx, path_str) {
- def
+ (def, None)
} else {
continue
}
let id = register_def(cx, def);
- attrs.links.push((link, id));
+ attrs.links.push((link, id, fragment));
}
cx.sess().abort_if_errors();
{sidebar}
</nav>
- <button id="theme-picker">
- <img src="{root_path}brush.svg" width="18" alt="Pick another theme!">
+ <div class="theme-picker">
+ <button id="theme-picker" aria-label="Pick another theme!">
+ <img src="{root_path}brush.svg" width="18" alt="Pick another theme!">
+ </button>
<div id="theme-choices"></div>
- </button>
+ </div>
<script src="{root_path}theme.js"></script>
<nav class="sub">
<form class="search-form js-only">
}}
}};
[{}].forEach(function(item) {{
- var div = document.createElement('div');
- div.innerHTML = item;
- div.onclick = function(el) {{
+ var but = document.createElement('button');
+ but.innerHTML = item;
+ but.onclick = function(el) {{
switchTheme(currentTheme, mainTheme, item);
}};
- themes.appendChild(div);
+ themes.appendChild(but);
}});
"#, themes.iter()
.map(|s| format!("\"{}\"", s))
border: solid 1px;
border-radius: 3px;
box-shadow: inset 0 -1px 0;
+ cursor: default;
}
-#theme-picker {
+.theme-picker {
position: absolute;
left: 211px;
- top: 17px;
+ top: 19px;
+}
+
+#theme-picker {
padding: 4px;
+ width: 27px;
+ height: 29px;
border: 1px solid;
border-radius: 3px;
cursor: pointer;
#theme-choices {
display: none;
position: absolute;
- left: -1px;
- top: 30px;
+ left: 0;
+ top: 28px;
border: 1px solid;
border-radius: 3px;
z-index: 1;
+ cursor: pointer;
}
-#theme-choices > div {
- border-top: 1px solid;
+#theme-choices > button {
+ border: none;
+ width: 100%;
padding: 4px;
text-align: center;
+ background: rgba(0,0,0,0);
+}
+
+#theme-choices > button:not(:first-child) {
+ border-top: 1px solid;
}
@media (max-width: 700px) {
- #theme-picker {
+ .theme-picker {
left: 109px;
top: 7px;
z-index: 1;
#help dt {
border-color: #bfbfbf;
- background: #fff;
+ background: rgba(0,0,0,0);
color: black;
}
}
kbd {
- color: #444d56;
+ color: #000;
background-color: #fafbfc;
border-color: #d1d5da;
border-bottom-color: #c6cbd1;
background: #f0f0f0;
}
+#theme-picker:hover, #theme-picker:focus {
+ border-color: #ffb900;
+}
+
#theme-choices {
border-color: #e0e0e0;
background-color: #353535;
}
-#theme-choices > div {
- border-top: #e0e0e0;
+#theme-choices > button:not(:first-child) {
+ border-top-color: #e0e0e0;
}
-#theme-choices > div:hover {
+#theme-choices > button:hover, #theme-choices > button:focus {
background-color: #444;
}
.stab.deprecated { background: #F3DFFF; border-color: #7F0087; }
.stab.portability { background: #C4ECFF; border-color: #7BA5DB; }
+.module-item .stab {
+ color: #000;
+}
+
#help > div {
background: #e9e9e9;
border-color: #bfbfbf;
}
kbd {
- color: #444d56;
+ color: #000;
background-color: #fafbfc;
border-color: #d1d5da;
border-bottom-color: #c6cbd1;
#theme-picker {
border-color: #e0e0e0;
+ background-color: #fff;
+}
+
+#theme-picker:hover, #theme-picker:focus {
+ border-color: #717171;
}
#theme-choices {
background-color: #fff;
}
-#theme-choices > div {
- border-top: #e0e0e0;
+#theme-choices > button:not(:first-child) {
+ border-top-color: #e0e0e0;
}
-#theme-choices > div:hover {
+#theme-choices > button:hover, #theme-choices > button:focus {
background-color: #eee;
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, K, V> Place<V> for EntryPlace<'a, K, V> {
+unsafe impl<'a, K, V> Place<V> for EntryPlace<'a, K, V> {
fn pointer(&mut self) -> *mut V {
self.bucket.read_mut().1
}
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn to_bits(self) -> u32 {
- unsafe { ::mem::transmute(self) }
+ num::Float::to_bits(self)
}
/// Raw transmutation from `u32`.
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn from_bits(v: u32) -> Self {
- // It turns out the safety issues with sNaN were overblown! Hooray!
- unsafe { ::mem::transmute(v) }
+ num::Float::from_bits(v)
}
}
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn to_bits(self) -> u64 {
- unsafe { ::mem::transmute(self) }
+ num::Float::to_bits(self)
}
/// Raw transmutation from `u64`.
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn from_bits(v: u64) -> Self {
- // It turns out the safety issues with sNaN were overblown! Hooray!
- unsafe { ::mem::transmute(v) }
+ num::Float::from_bits(v)
}
}
#![feature(ptr_internals)]
#![feature(rand)]
#![feature(raw)]
-#![feature(repr_align)]
#![feature(rustc_attrs)]
#![feature(sip_hash_13)]
#![feature(slice_bytes)]
#![feature(doc_spotlight)]
#![cfg_attr(test, feature(update_panic_count))]
#![cfg_attr(windows, feature(used))]
+#![cfg_attr(stage0, feature(repr_align))]
#![default_lib_allocator]
/// ```
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
- pub fn from_secs(secs: u64) -> Duration {
+ pub const fn from_secs(secs: u64) -> Duration {
Duration { secs: secs, nanos: 0 }
}
/// ```
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
- pub fn from_millis(millis: u64) -> Duration {
- let secs = millis / MILLIS_PER_SEC;
- let nanos = ((millis % MILLIS_PER_SEC) as u32) * NANOS_PER_MILLI;
- Duration { secs: secs, nanos: nanos }
+ pub const fn from_millis(millis: u64) -> Duration {
+ Duration {
+ secs: millis / MILLIS_PER_SEC,
+ nanos: ((millis % MILLIS_PER_SEC) as u32) * NANOS_PER_MILLI,
+ }
}
/// Creates a new `Duration` from the specified number of microseconds.
/// ```
#[unstable(feature = "duration_from_micros", issue = "44400")]
#[inline]
- pub fn from_micros(micros: u64) -> Duration {
- let secs = micros / MICROS_PER_SEC;
- let nanos = ((micros % MICROS_PER_SEC) as u32) * NANOS_PER_MICRO;
- Duration { secs: secs, nanos: nanos }
+ pub const fn from_micros(micros: u64) -> Duration {
+ Duration {
+ secs: micros / MICROS_PER_SEC,
+ nanos: ((micros % MICROS_PER_SEC) as u32) * NANOS_PER_MICRO,
+ }
}
/// Creates a new `Duration` from the specified number of nanoseconds.
/// ```
#[unstable(feature = "duration_extras", issue = "46507")]
#[inline]
- pub fn from_nanos(nanos: u64) -> Duration {
- let secs = nanos / (NANOS_PER_SEC as u64);
- let nanos = (nanos % (NANOS_PER_SEC as u64)) as u32;
- Duration { secs: secs, nanos: nanos }
+ pub const fn from_nanos(nanos: u64) -> Duration {
+ Duration {
+ secs: nanos / (NANOS_PER_SEC as u64),
+ nanos: (nanos % (NANOS_PER_SEC as u64)) as u32,
+ }
}
/// Returns the number of _whole_ seconds contained by this `Duration`.
use std::rc::Rc;
use std::u32;
+/// A label on a loop or block expression — e.g. the `'outer` in
+/// `'outer: loop { ... }` — carrying the identifier and its source span.
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
+pub struct Label {
+    // The label's identifier.
+    pub ident: Ident,
+    // Location of the label in the source, for diagnostics.
+    pub span: Span,
+}
+
+impl fmt::Debug for Label {
+    // Compact debug form `label(<ident>)`; the span is intentionally omitted.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "label({:?})", self.ident)
+    }
+}
+
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
pub id: NodeId,
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
- While(P<Expr>, P<Block>, Option<SpannedIdent>),
+ While(P<Expr>, P<Block>, Option<Label>),
/// A while-let loop, with an optional label
///
/// `'label: while let pat = expr { block }`
///
/// This is desugared to a combination of `loop` and `match` expressions.
- WhileLet(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>),
+ WhileLet(P<Pat>, P<Expr>, P<Block>, Option<Label>),
/// A for loop, with an optional label
///
/// `'label: for pat in expr { block }`
///
/// This is desugared to a combination of `loop` and `match` expressions.
- ForLoop(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>),
+ ForLoop(P<Pat>, P<Expr>, P<Block>, Option<Label>),
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
- Loop(P<Block>, Option<SpannedIdent>),
+ Loop(P<Block>, Option<Label>),
/// A `match` block.
Match(P<Expr>, Vec<Arm>),
/// A closure (for example, `move |a, b, c| a + b + c`)
/// A referencing operation (`&a` or `&mut a`)
AddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break, and an optional expression
- Break(Option<SpannedIdent>, Option<P<Expr>>),
+ Break(Option<Label>, Option<P<Expr>>),
/// A `continue`, with an optional label
- Continue(Option<SpannedIdent>),
+ Continue(Option<Label>),
/// A `return`, with an optional value to be returned
Ret(Option<P<Expr>>),
// Allows the `catch {...}` expression
(active, catch_expr, "1.17.0", Some(31436)),
- // Allows `repr(align(u16))` struct attribute (RFC 1358)
- (active, repr_align, "1.17.0", Some(33626)),
-
// Used to preserve symbols (see llvm.used)
(active, used, "1.18.0", Some(40289)),
// Allows the sysV64 ABI to be specified on all platforms
// instead of just the platforms on which it is the C ABI
(accepted, abi_sysv64, "1.24.0", Some(36167)),
+ // Allows `repr(align(16))` struct attribute (RFC 1358)
+ (accepted, repr_align, "1.24.0", Some(33626)),
);
// If you change this, please modify src/doc/unstable-book as well. You must
}
}
+ // allow attr_literals in #[repr(align(x))]
+ let mut is_repr_align = false;
+ if attr.path == "repr" {
+ if let Some(content) = attr.meta_item_list() {
+ is_repr_align = content.iter().any(|c| c.check_name("align"));
+ }
+ }
+
if self.context.features.proc_macro && attr::is_known(attr) {
return
}
- let meta = panictry!(attr.parse_meta(self.context.parse_sess));
- if contains_novel_literal(&meta) {
- gate_feature_post!(&self, attr_literals, attr.span,
- "non-string literals in attributes, or string \
- literals in top-level positions, are experimental");
+ if !is_repr_align {
+ let meta = panictry!(attr.parse_meta(self.context.parse_sess));
+ if contains_novel_literal(&meta) {
+ gate_feature_post!(&self, attr_literals, attr.span,
+ "non-string literals in attributes, or string \
+ literals in top-level positions, are experimental");
+ }
}
}
gate_feature_post!(&self, repr_simd, attr.span,
"SIMD types are experimental and possibly buggy");
}
- if item.check_name("align") {
- gate_feature_post!(&self, repr_align, attr.span,
- "the struct `#[repr(align(u16))]` attribute \
- is experimental");
- }
if item.check_name("transparent") {
gate_feature_post!(&self, repr_transparent, attr.span,
"the `#[repr(transparent)]` attribute \
noop_fold_macro_def(def, self)
}
+    // Default implementation delegates to `noop_fold_label`, which folds the
+    // label's ident and remaps its span; override to customize label handling.
+    fn fold_label(&mut self, label: Label) -> Label {
+        noop_fold_label(label, self)
+    }
+
fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime {
noop_fold_lifetime(l, self)
}
params.move_map(|p| fld.fold_generic_param(p))
}
+/// Folds a `Label` by folding its identifier and remapping its span.
+/// This is the default behavior used by `Folder::fold_label`.
+pub fn noop_fold_label<T: Folder>(label: Label, fld: &mut T) -> Label {
+    Label {
+        ident: fld.fold_ident(label.ident),
+        span: fld.new_span(label.span),
+    }
+}
+
pub fn noop_fold_lifetime<T: Folder>(l: Lifetime, fld: &mut T) -> Lifetime {
Lifetime {
id: fld.new_id(l.id),
folder.fold_block(tr),
fl.map(|x| folder.fold_expr(x)))
}
- ExprKind::While(cond, body, opt_ident) => {
+ ExprKind::While(cond, body, opt_label) => {
ExprKind::While(folder.fold_expr(cond),
folder.fold_block(body),
- opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))))
+ opt_label.map(|label| folder.fold_label(label)))
}
- ExprKind::WhileLet(pat, expr, body, opt_ident) => {
+ ExprKind::WhileLet(pat, expr, body, opt_label) => {
ExprKind::WhileLet(folder.fold_pat(pat),
folder.fold_expr(expr),
folder.fold_block(body),
- opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))))
+ opt_label.map(|label| folder.fold_label(label)))
}
- ExprKind::ForLoop(pat, iter, body, opt_ident) => {
+ ExprKind::ForLoop(pat, iter, body, opt_label) => {
ExprKind::ForLoop(folder.fold_pat(pat),
folder.fold_expr(iter),
folder.fold_block(body),
- opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))))
+ opt_label.map(|label| folder.fold_label(label)))
}
- ExprKind::Loop(body, opt_ident) => {
+ ExprKind::Loop(body, opt_label) => {
ExprKind::Loop(folder.fold_block(body),
- opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))))
+ opt_label.map(|label| folder.fold_label(label)))
}
ExprKind::Match(expr, arms) => {
ExprKind::Match(folder.fold_expr(expr),
});
ExprKind::Path(qself, folder.fold_path(path))
}
- ExprKind::Break(opt_ident, opt_expr) => {
- ExprKind::Break(opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))),
+ ExprKind::Break(opt_label, opt_expr) => {
+ ExprKind::Break(opt_label.map(|label| folder.fold_label(label)),
opt_expr.map(|e| folder.fold_expr(e)))
}
- ExprKind::Continue(opt_ident) => ExprKind::Continue(opt_ident.map(|label|
- respan(folder.new_span(label.span),
- folder.fold_ident(label.node)))
- ),
+ ExprKind::Continue(opt_label) => {
+ ExprKind::Continue(opt_label.map(|label| folder.fold_label(label)))
+ }
ExprKind::Ret(e) => ExprKind::Ret(e.map(|x| folder.fold_expr(x))),
ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm.map(|asm| {
InlineAsm {
use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use ast::GenericParam;
use ast::{Ident, ImplItem, IsAuto, Item, ItemKind};
-use ast::{Lifetime, LifetimeDef, Lit, LitKind, UintTy};
+use ast::{Label, Lifetime, LifetimeDef, Lit, LitKind, UintTy};
use ast::Local;
use ast::MacStmtStyle;
use ast::Mac_;
self.check_keyword(keywords::Extern)
}
-    fn get_label(&mut self) -> ast::Ident {
-        match self.token {
+    // If the current token is a lifetime (possibly an interpolated one),
+    // consume it and return it as a `Label`; otherwise leave the token
+    // stream untouched and return `None`.
+    fn eat_label(&mut self) -> Option<Label> {
+        let ident = match self.token {
            token::Lifetime(ref ident) => *ident,
            token::Interpolated(ref nt) => match nt.0 {
                token::NtLifetime(lifetime) => lifetime.ident,
-                _ => self.bug("not a lifetime"),
+                _ => return None,
            },
-            _ => self.bug("not a lifetime"),
-        }
+            _ => return None,
+        };
+        // Advance only once we know it is a label; `prev_span` then covers
+        // the lifetime token just consumed.
+        self.bump();
+        Some(Label { ident, span: self.prev_span })
    }
/// parse a TyKind::BareFn type:
let lo = self.prev_span;
return self.parse_while_expr(None, lo, attrs);
}
- if self.token.is_lifetime() {
- let label = Spanned { node: self.get_label(),
- span: self.span };
- let lo = self.span;
- self.bump();
+ if let Some(label) = self.eat_label() {
+ let lo = label.span;
self.expect(&token::Colon)?;
if self.eat_keyword(keywords::While) {
return self.parse_while_expr(Some(label), lo, attrs)
return self.parse_loop_expr(None, lo, attrs);
}
if self.eat_keyword(keywords::Continue) {
- let ex = if self.token.is_lifetime() {
- let ex = ExprKind::Continue(Some(Spanned{
- node: self.get_label(),
- span: self.span
- }));
- self.bump();
- ex
- } else {
- ExprKind::Continue(None)
- };
+ let label = self.eat_label();
+ let ex = ExprKind::Continue(label);
let hi = self.prev_span;
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
ex = ExprKind::Ret(None);
}
} else if self.eat_keyword(keywords::Break) {
- let lt = if self.token.is_lifetime() {
- let spanned_lt = Spanned {
- node: self.get_label(),
- span: self.span
- };
- self.bump();
- Some(spanned_lt)
- } else {
- None
- };
+ let label = self.eat_label();
let e = if self.token.can_begin_expr()
&& !(self.token == token::OpenDelim(token::Brace)
&& self.restrictions.contains(
} else {
None
};
- ex = ExprKind::Break(lt, e);
+ ex = ExprKind::Break(label, e);
hi = self.prev_span;
} else if self.eat_keyword(keywords::Yield) {
if self.token.can_begin_expr() {
}
/// Parse a 'for' .. 'in' expression ('for' token already eaten)
- pub fn parse_for_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+ pub fn parse_for_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
attrs.extend(iattrs);
let hi = self.prev_span;
- Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_ident), attrs))
+ Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
}
    /// Parse a 'while' or 'while let' expression ('while' token already eaten)
-    pub fn parse_while_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+    pub fn parse_while_expr(&mut self, opt_label: Option<Label>,
                            span_lo: Span,
                            mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+        // `while let` has its own parser; dispatch before consuming the
+        // condition expression.
        if self.token.is_keyword(keywords::Let) {
-            return self.parse_while_let_expr(opt_ident, span_lo, attrs);
+            return self.parse_while_let_expr(opt_label, span_lo, attrs);
        }
+        // Struct literals are restricted in the condition so the loop body's
+        // opening brace is not parsed as part of the condition.
        let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);
        let span = span_lo.to(body.span);
-        return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_ident), attrs));
+        return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
    }
/// Parse a 'while let' expression ('while' token already eaten)
- pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+ pub fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
self.expect_keyword(keywords::Let)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
- return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs));
+ return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_label), attrs));
}
    // parse `loop {...}`, `loop` token already eaten
+    // `opt_label` is the optional `'label:` prefix parsed by the caller;
+    // the resulting expression spans from `span_lo` to the body's close.
    pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
-    pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+    pub fn parse_loop_expr(&mut self, opt_label: Option<Label>,
                           span_lo: Span,
                           mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);
        let span = span_lo.to(body.span);
-        Ok(self.mk_expr(span, ExprKind::Loop(body, opt_ident), attrs))
+        Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
    }
/// Parse a `do catch {...}` expression (`do catch` token already eaten)
ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
}
- ast::ExprKind::While(ref test, ref blk, opt_ident) => {
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ ast::ExprKind::While(ref test, ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("while")?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
- ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("while let")?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
- ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("for")?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
- ast::ExprKind::Loop(ref blk, opt_ident) => {
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ ast::ExprKind::Loop(ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("loop")?;
ast::ExprKind::Path(Some(ref qself), ref path) => {
self.print_qpath(path, qself, true)?
}
- ast::ExprKind::Break(opt_ident, ref opt_expr) => {
+ ast::ExprKind::Break(opt_label, ref opt_expr) => {
self.s.word("break")?;
self.s.space()?;
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.s.space()?;
}
if let Some(ref expr) = *opt_expr {
self.s.space()?;
}
}
- ast::ExprKind::Continue(opt_ident) => {
+ ast::ExprKind::Continue(opt_label) => {
self.s.word("continue")?;
self.s.space()?;
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.s.space()?
}
}
fn visit_variant(&mut self, v: &'ast Variant, g: &'ast Generics, item_id: NodeId) {
walk_variant(self, v, g, item_id)
}
+ fn visit_label(&mut self, label: &'ast Label) {
+ walk_label(self, label)
+ }
fn visit_lifetime(&mut self, lifetime: &'ast Lifetime) {
walk_lifetime(self, lifetime)
}
}
}
-pub fn walk_opt_name<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, opt_name: Option<Name>) {
- if let Some(name) = opt_name {
- visitor.visit_name(span, name);
- }
-}
-
-pub fn walk_opt_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, opt_ident: Option<Ident>) {
- if let Some(ident) = opt_ident {
- visitor.visit_ident(span, ident);
- }
-}
-
-pub fn walk_opt_sp_ident<'a, V: Visitor<'a>>(visitor: &mut V,
- opt_sp_ident: &Option<Spanned<Ident>>) {
- if let Some(ref sp_ident) = *opt_sp_ident {
- visitor.visit_ident(sp_ident.span, sp_ident.node);
- }
-}
-
pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, ident: Ident) {
visitor.visit_name(span, ident.name);
}
walk_list!(visitor, visit_expr, &local.init);
}
+pub fn walk_label<'a, V: Visitor<'a>>(visitor: &mut V, label: &'a Label) {
+ visitor.visit_ident(label.span, label.ident);
+}
+
pub fn walk_lifetime<'a, V: Visitor<'a>>(visitor: &mut V, lifetime: &'a Lifetime) {
visitor.visit_ident(lifetime.span, lifetime.ident);
}
visitor.visit_ident(item.span, item.ident);
match item.node {
ItemKind::ExternCrate(opt_name) => {
- walk_opt_name(visitor, item.span, opt_name)
+ if let Some(name) = opt_name {
+ visitor.visit_name(item.span, name);
+ }
}
ItemKind::Use(ref use_tree) => {
visitor.visit_use_tree(use_tree, item.id, false)
pub fn walk_struct_field<'a, V: Visitor<'a>>(visitor: &mut V, struct_field: &'a StructField) {
visitor.visit_vis(&struct_field.vis);
- walk_opt_ident(visitor, struct_field.span, struct_field.ident);
+ if let Some(ident) = struct_field.ident {
+ visitor.visit_ident(struct_field.span, ident);
+ }
visitor.visit_ty(&struct_field.ty);
walk_list!(visitor, visit_attribute, &struct_field.attrs);
}
visitor.visit_block(if_block);
walk_list!(visitor, visit_expr, optional_else);
}
- ExprKind::While(ref subexpression, ref block, ref opt_sp_ident) => {
+ ExprKind::While(ref subexpression, ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
- walk_opt_sp_ident(visitor, opt_sp_ident);
}
ExprKind::IfLet(ref pattern, ref subexpression, ref if_block, ref optional_else) => {
visitor.visit_pat(pattern);
visitor.visit_block(if_block);
walk_list!(visitor, visit_expr, optional_else);
}
- ExprKind::WhileLet(ref pattern, ref subexpression, ref block, ref opt_sp_ident) => {
+ ExprKind::WhileLet(ref pattern, ref subexpression, ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_pat(pattern);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
- walk_opt_sp_ident(visitor, opt_sp_ident);
}
- ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_sp_ident) => {
+ ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_pat(pattern);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
- walk_opt_sp_ident(visitor, opt_sp_ident);
}
- ExprKind::Loop(ref block, ref opt_sp_ident) => {
+ ExprKind::Loop(ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_block(block);
- walk_opt_sp_ident(visitor, opt_sp_ident);
}
ExprKind::Match(ref subexpression, ref arms) => {
visitor.visit_expr(subexpression);
}
visitor.visit_path(path, expression.id)
}
- ExprKind::Break(ref opt_sp_ident, ref opt_expr) => {
- walk_opt_sp_ident(visitor, opt_sp_ident);
+ ExprKind::Break(ref opt_label, ref opt_expr) => {
+ walk_list!(visitor, visit_label, opt_label);
walk_list!(visitor, visit_expr, opt_expr);
}
- ExprKind::Continue(ref opt_sp_ident) => {
- walk_opt_sp_ident(visitor, opt_sp_ident);
+ ExprKind::Continue(ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
}
ExprKind::Ret(ref optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
--- /dev/null
+Subproject commit 2717444753318e461e0c3b30dacd03ffbac96903
enum class LLVMRustArchiveKind {
Other,
GNU,
- MIPS64,
BSD,
COFF,
};
switch (Kind) {
case LLVMRustArchiveKind::GNU:
return Archive::K_GNU;
- case LLVMRustArchiveKind::MIPS64:
- return Archive::K_MIPS64;
case LLVMRustArchiveKind::BSD:
return Archive::K_BSD;
case LLVMRustArchiveKind::COFF:
Members.push_back(std::move(*MOrErr));
}
}
- auto Pair = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false);
- if (!Pair.second)
+ auto Result = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false);
+#if LLVM_VERSION_GE(6, 0)
+ if (!Result)
return LLVMRustResult::Success;
- LLVMRustSetLastError(Pair.second.message().c_str());
+ LLVMRustSetLastError(toString(std::move(Result)).c_str());
+#else
+ if (!Result.second)
+ return LLVMRustResult::Success;
+ LLVMRustSetLastError(Result.second.message().c_str());
+#endif
+
return LLVMRustResult::Failure;
}
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/Host.h"
#include "llvm/Target/TargetMachine.h"
-#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Transforms/IPO/PassManagerBuilder.h"
+#if LLVM_VERSION_GE(6, 0)
+#include "llvm/CodeGen/TargetSubtargetInfo.h"
+#include "llvm/IR/IntrinsicInst.h"
+#else
+#include "llvm/Target/TargetSubtargetInfo.h"
+#endif
+
#if LLVM_VERSION_GE(4, 0)
#include "llvm/Transforms/IPO/AlwaysInliner.h"
#include "llvm/Transforms/IPO/FunctionImport.h"
enum class LLVMRustCodeModel {
Other,
- Default,
- JITDefault,
Small,
Kernel,
Medium,
Large,
+ None,
};
static CodeModel::Model fromRust(LLVMRustCodeModel Model) {
switch (Model) {
- case LLVMRustCodeModel::Default:
- return CodeModel::Default;
- case LLVMRustCodeModel::JITDefault:
- return CodeModel::JITDefault;
case LLVMRustCodeModel::Small:
return CodeModel::Small;
case LLVMRustCodeModel::Kernel:
bool TrapUnreachable,
bool Singlethread) {
- auto CM = fromRust(RustCM);
auto OptLevel = fromRust(RustOptLevel);
auto RM = fromRust(RustReloc);
Options.ThreadModel = ThreadModel::Single;
}
+#if LLVM_VERSION_GE(6, 0)
+ Optional<CodeModel::Model> CM;
+#else
+ CodeModel::Model CM = CodeModel::Model::Default;
+#endif
+ if (RustCM != LLVMRustCodeModel::None)
+ CM = fromRust(RustCM);
TargetMachine *TM = TheTarget->createTargetMachine(
Trip.getTriple(), RealCPU, Feature, Options, RM, CM, OptLevel);
return wrap(TM);
// enable fpmath flag UnsafeAlgebra
extern "C" void LLVMRustSetHasUnsafeAlgebra(LLVMValueRef V) {
if (auto I = dyn_cast<Instruction>(unwrap<Value>(V))) {
+#if LLVM_VERSION_GE(6, 0)
+ I->setFast(true);
+#else
I->setHasUnsafeAlgebra(true);
+#endif
}
}
FlagStaticMember = (1 << 12),
FlagLValueReference = (1 << 13),
FlagRValueReference = (1 << 14),
- FlagMainSubprogram = (1 << 21),
+ FlagExternalTypeRef = (1 << 15),
+ FlagIntroducedVirtual = (1 << 18),
+ FlagBitField = (1 << 19),
+ FlagNoReturn = (1 << 20),
+ FlagMainSubprogram = (1 << 21),
// Do not add values that are not supported by the minimum LLVM
- // version we support!
+ // version we support! see llvm/include/llvm/IR/DebugInfoFlags.def
};
inline LLVMRustDIFlags operator&(LLVMRustDIFlags A, LLVMRustDIFlags B) {
if (isSet(Flags & LLVMRustDIFlags::FlagRValueReference)) {
Result |= DINode::DIFlags::FlagRValueReference;
}
+ if (isSet(Flags & LLVMRustDIFlags::FlagExternalTypeRef)) {
+ Result |= DINode::DIFlags::FlagExternalTypeRef;
+ }
+ if (isSet(Flags & LLVMRustDIFlags::FlagIntroducedVirtual)) {
+ Result |= DINode::DIFlags::FlagIntroducedVirtual;
+ }
+ if (isSet(Flags & LLVMRustDIFlags::FlagBitField)) {
+ Result |= DINode::DIFlags::FlagBitField;
+ }
#if LLVM_RUSTLLVM || LLVM_VERSION_GE(4, 0)
+ if (isSet(Flags & LLVMRustDIFlags::FlagNoReturn)) {
+ Result |= DINode::DIFlags::FlagNoReturn;
+ }
if (isSet(Flags & LLVMRustDIFlags::FlagMainSubprogram)) {
Result |= DINode::DIFlags::FlagMainSubprogram;
}
#![crate_type = "lib"]
-#![feature(attr_literals)]
-#![feature(repr_align)]
-
#[repr(align(64))]
pub struct Align64(i32);
// CHECK: %Align64 = type { [0 x i32], i32, [15 x i32] }
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// min-llvm-version 4.0
+
+// compile-flags: -g -C no-prepopulate-passes
+
+// CHECK: {{.*}}DISubprogram{{.*}}name: "foo"{{.*}}DIFlagNoReturn
+
+fn foo() -> ! {
+ loop {}
+}
+
+pub fn main() {
+ foo();
+}
#![feature(lang_items)]
#[lang = "cookie"]
-fn cookie() -> ! { //~ E0522
+fn cookie() -> ! {
+//~^^ ERROR definition of an unknown language item: `cookie` [E0522]
loop {}
}
// except according to those terms.
#![allow(dead_code)]
-#![feature(attr_literals)]
-#![feature(repr_align)]
#[repr(C)]
enum A { A }
f: T
}
-#[rustc_if_this_changed]
+#[rustc_if_this_changed(Krate)]
type TypeAlias<T> = Foo<T>;
#[rustc_then_this_would_need(ItemVariances)] //~ ERROR OK
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(never_type)]
+
+enum Helper<T, U> {
+ T(T, [!; 0]),
+ #[allow(dead_code)]
+ U(U),
+}
+
+fn transmute<T, U>(t: T) -> U {
+ let Helper::U(u) = Helper::T(t, []);
+ //~^ ERROR refutable pattern in local binding: `T(_, _)` not covered
+ u
+}
+
+fn main() {
+ println!("{:?}", transmute::<&str, (*const u8, u64)>("type safety"));
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that attempts to construct infinite types via impl trait fail
+// in a graceful way.
+//
+// Regression test for #38064.
+
+// error-pattern:overflow evaluating the requirement `impl Quux`
+
+#![feature(conservative_impl_trait)]
+
+trait Quux {}
+
+fn foo() -> impl Quux {
+ struct Foo<T>(T);
+ impl<T> Quux for Foo<T> {}
+ Foo(bar())
+}
+
+fn bar() -> impl Quux {
+ struct Bar<T>(T);
+ impl<T> Quux for Bar<T> {}
+ Bar(foo())
+}
+
+// effectively:
+// struct Foo(Bar);
+// struct Bar(Foo);
+// should produce an error about infinite size
+
+fn main() { foo(); }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
-#![feature(attr_literals)]
-#![feature(repr_align)]
#[repr(align(16.0))] //~ ERROR: invalid `repr(align)` attribute: not an unsuffixed integer
struct A(i32);
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(attr_literals)]
-#![feature(repr_align)]
#![feature(untagged_unions)]
#![allow(dead_code)]
--- /dev/null
+-include ../tools.mk
+
+all:
+ifeq ($(TARGET),x86_64-unknown-linux-gnu)
+ $(RUSTC) hello.rs -C no_integrated_as
+ $(call RUN,hello)
+endif
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ println!("Hello, world!");
+}
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(attr_literals)]
-#![feature(repr_align)]
#![feature(box_syntax)]
use std::mem;
#![feature(attr_literals)]
#[repr(align(16))]
-pub struct A {
- y: i64,
-}
+pub struct A(i64);
pub extern "C" fn foo(x: A) {}
-fn main() {}
+fn main() {
+ foo(A(0));
+}
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -Z borrowck=mir -Z nll
-
-// This example comes from the NLL RFC.
-
-struct List<T> {
- value: T,
- next: Option<Box<List<T>>>,
-}
-
-fn to_refs<T>(list: &mut List<T>) -> Vec<&mut T> {
- let mut list = list;
- let mut result = vec![];
- loop {
- result.push(&mut list.value);
- if let Some(n) = list.next.as_mut() {
- list = n;
- } else {
- return result;
- }
- }
-}
-
-fn main() {
-}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// #47096
+
+#![feature(slice_patterns)]
+
+fn foo(s: &[i32]) -> &[i32] {
+ let &[ref xs..] = s;
+ xs
+}
+
+fn main() {
+ let x = [1, 2, 3];
+ let y = foo(&x);
+ assert_eq!(x, y);
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(i128_type)]
+
+#[repr(C)]
+pub struct Foo(i128);
+
+#[no_mangle]
+pub extern "C" fn foo(x: Foo) -> Foo { x }
+
+fn main() {
+ foo(Foo(1));
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(use_nested_groups)]
+#![allow(unused_imports)]
+
+use {{}, {}};
+
+fn main() {}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(nll)]
+
+struct List<T> {
+ value: T,
+ next: Option<Box<List<T>>>,
+}
+
+fn to_refs<T>(mut list: &mut List<T>) -> Vec<&mut T> {
+ let mut result = vec![];
+ loop {
+ result.push(&mut list.value);
+ if let Some(n) = list.next.as_mut() {
+ list = n;
+ } else {
+ return result;
+ }
+ }
+}
+
+fn main() {
+ let mut list = List { value: 1, next: None };
+ let vec = to_refs(&mut list);
+ assert_eq!(vec![&mut 1], vec);
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(attr_literals)]
-#![feature(repr_align)]
#![feature(untagged_unions)]
use std::mem::{size_of, size_of_val, align_of, align_of_val};
// @has intra_links/index.html
// @has - '//a/@href' '../intra_links/struct.ThisType.html'
+// @has - '//a/@href' '../intra_links/struct.ThisType.html#method.this_method'
// @has - '//a/@href' '../intra_links/enum.ThisEnum.html'
+// @has - '//a/@href' '../intra_links/enum.ThisEnum.html#ThisVariant.v'
+// @has - '//a/@href' '../intra_links/trait.ThisTrait.html'
+// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#tymethod.this_associated_method'
+// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#associatedtype.ThisAssociatedType'
+// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#associatedconstant.THIS_ASSOCIATED_CONST'
// @has - '//a/@href' '../intra_links/trait.ThisTrait.html'
// @has - '//a/@href' '../intra_links/type.ThisAlias.html'
// @has - '//a/@href' '../intra_links/union.ThisUnion.html'
//! In this crate we would like to link to:
//!
//! * [`ThisType`](ThisType)
+//! * [`ThisType::this_method`](ThisType::this_method)
//! * [`ThisEnum`](ThisEnum)
+//! * [`ThisEnum::ThisVariant`](ThisEnum::ThisVariant)
//! * [`ThisTrait`](ThisTrait)
+//! * [`ThisTrait::this_associated_method`](ThisTrait::this_associated_method)
+//! * [`ThisTrait::ThisAssociatedType`](ThisTrait::ThisAssociatedType)
+//! * [`ThisTrait::THIS_ASSOCIATED_CONST`](ThisTrait::THIS_ASSOCIATED_CONST)
//! * [`ThisAlias`](ThisAlias)
//! * [`ThisUnion`](ThisUnion)
//! * [`this_function`](this_function())
}
pub struct ThisType;
+
+impl ThisType {
+ pub fn this_method() {}
+}
pub enum ThisEnum { ThisVariant, }
-pub trait ThisTrait {}
+pub trait ThisTrait {
+ type ThisAssociatedType;
+ const THIS_ASSOCIATED_CONST: u8;
+ fn this_associated_method();
+}
pub type ThisAlias = Result<(), ()>;
pub union ThisUnion { this_field: usize, }
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-#![feature(attr_literals)]
-
-#[repr(align(64))] //~ error: the struct `#[repr(align(u16))]` attribute is experimental
-struct Foo(u64, u64);
-
-fn main() {}
+++ /dev/null
-error[E0658]: the struct `#[repr(align(u16))]` attribute is experimental (see issue #33626)
- --> $DIR/feature-gate-repr_align.rs:12:1
- |
-12 | #[repr(align(64))] //~ error: the struct `#[repr(align(u16))]` attribute is experimental
- | ^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(repr_align)] to the crate attributes to enable
-
-error: aborting due to previous error
-
= note: the following traits define an item `method`, perhaps you need to implement one of them:
candidate #1: `foo::Bar`
candidate #2: `no_method_suggested_traits::foo::PubPub`
- candidate #3: `no_method_suggested_traits::bar::PubPriv`
- candidate #4: `no_method_suggested_traits::qux::PrivPub`
- candidate #5: `no_method_suggested_traits::quz::PrivPriv`
- candidate #6: `no_method_suggested_traits::Reexported`
+ candidate #3: `no_method_suggested_traits::qux::PrivPub`
+ candidate #4: `no_method_suggested_traits::Reexported`
error[E0599]: no method named `method` found for type `std::rc::Rc<&mut std::boxed::Box<&Foo>>` in the current scope
--> $DIR/no-method-suggested-traits.rs:52:43
= note: the following traits define an item `method`, perhaps you need to implement one of them:
candidate #1: `foo::Bar`
candidate #2: `no_method_suggested_traits::foo::PubPub`
- candidate #3: `no_method_suggested_traits::bar::PubPriv`
- candidate #4: `no_method_suggested_traits::qux::PrivPub`
- candidate #5: `no_method_suggested_traits::quz::PrivPriv`
- candidate #6: `no_method_suggested_traits::Reexported`
+ candidate #3: `no_method_suggested_traits::qux::PrivPub`
+ candidate #4: `no_method_suggested_traits::Reexported`
error[E0599]: no method named `method2` found for type `u64` in the current scope
--> $DIR/no-method-suggested-traits.rs:55:10
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #47511: anonymous lifetimes can appear
+// unconstrained in a return type, but only if they appear just once
+// in the input, as the input to a projection.
+
+fn f(_: X) -> X {
+ //~^ ERROR return type references an anonymous lifetime
+ unimplemented!()
+}
+
+fn g<'a>(_: X<'a>) -> X<'a> {
+ //~^ ERROR return type references lifetime `'a`, which is not constrained
+ unimplemented!()
+}
+
+type X<'a> = <&'a () as Trait>::Value;
+
+trait Trait {
+ type Value;
+}
+
+impl<'a> Trait for &'a () {
+ type Value = ();
+}
+
+fn main() {}
--- /dev/null
+error[E0581]: return type references an anonymous lifetime which is not constrained by the fn input types
+ --> $DIR/issue-47511.rs:15:15
+ |
+15 | fn f(_: X) -> X {
+ | ^
+ |
+ = note: lifetimes appearing in an associated type are not considered constrained
+
+error[E0581]: return type references lifetime `'a`, which is not constrained by the fn input types
+ --> $DIR/issue-47511.rs:20:23
+ |
+20 | fn g<'a>(_: X<'a>) -> X<'a> {
+ | ^^^^^
+
+error: aborting due to 2 previous errors
+
`&mut Foo : std::iter::Iterator`
= help: items from traits can only be used if the trait is implemented and in scope
= note: the following traits define an item `take`, perhaps you need to implement one of them:
- candidate #1: `std::collections::hash::Recover`
- candidate #2: `std::io::Read`
- candidate #3: `std::iter::Iterator`
- candidate #4: `alloc::btree::Recover`
+ candidate #1: `std::io::Read`
+ candidate #2: `std::iter::Iterator`
error: aborting due to 4 previous errors
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(use_nested_groups)]
-#![deny(unused_imports)]
-
-mod foo {
- pub enum Bar {}
-}
-
-use foo::{*, *}; //~ ERROR unused import: `*`
-
-fn main() {
- let _: Bar;
-}
+++ /dev/null
-error: unused import: `*`
- --> $DIR/owl-import-generates-unused-import-lint.rs:18:14
- |
-18 | use foo::{*, *}; //~ ERROR unused import: `*`
- | ^
- |
-note: lint level defined here
- --> $DIR/owl-import-generates-unused-import-lint.rs:12:9
- |
-12 | #![deny(unused_imports)]
- | ^^^^^^^^^^^^^^
-
-error: aborting due to previous error
-
// It avoids using u64/i64 because on some targets that is only 4-byte
// aligned (while on most it is 8-byte aligned) and so the resulting
// padding and overall computed sizes can be quite different.
-#![feature(attr_literals)]
-#![feature(repr_align)]
#![feature(start)]
#![allow(dead_code)]
#![feature(attr_literals)]
-#[repr(align(16))] //~ ERROR is experimental
+#[repr(align(16))]
struct Gem {
mohs_hardness: u8,
poofed: bool,
-error[E0658]: the struct `#[repr(align(u16))]` attribute is experimental (see issue #33626)
- --> $DIR/gated-features-attr-spans.rs:13:1
- |
-13 | #[repr(align(16))] //~ ERROR is experimental
- | ^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(repr_align)] to the crate attributes to enable
-
error[E0658]: SIMD types are experimental and possibly buggy (see issue #27731)
--> $DIR/gated-features-attr-spans.rs:20:1
|
|
= help: add #![feature(fn_must_use)] to the crate attributes to enable
-error: aborting due to 2 previous errors
+error: aborting due to previous error
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct T;
+
+fn main() {
+ T::new();
+ //~^ ERROR no function or associated item named `new` found for type `T` in the current scope
+}
--- /dev/null
+error[E0599]: no function or associated item named `new` found for type `T` in the current scope
+ --> $DIR/dont-suggest-private-trait-method.rs:14:5
+ |
+11 | struct T;
+ | --------- function or associated item `new` not found for this
+...
+14 | T::new();
+ | ^^^^^^ function or associated item not found in `T`
+
+error: aborting due to previous error
+
foo::<f32>(x_f64);
//~^ ERROR mismatched types
foo::<f32>(x_f32);
+
+ foo::<u32>(x_u8 as u16);
+ //~^ ERROR mismatched types
+ foo::<i32>(-x_i8);
+ //~^ ERROR mismatched types
}
312 | foo::<f32>(x_f64);
| ^^^^^ expected f32, found f64
-error: aborting due to 132 previous errors
+error[E0308]: mismatched types
+ --> $DIR/numeric-cast.rs:316:16
+ |
+316 | foo::<u32>(x_u8 as u16);
+ | ^^^^^^^^^^^ expected u32, found u16
+help: you can cast an `u16` to `u32`, which will zero-extend the source value
+ |
+316 | foo::<u32>((x_u8 as u16).into());
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error[E0308]: mismatched types
+ --> $DIR/numeric-cast.rs:318:16
+ |
+318 | foo::<i32>(-x_i8);
+ | ^^^^^ expected i32, found i8
+help: you can cast an `i8` to `i32`, which will sign-extend the source value
+ |
+318 | foo::<i32>((-x_i8).into());
+ | ^^^^^^^^^^^^^^
+
+error: aborting due to 134 previous errors
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(unused)]
+#![feature(lang_items)]
+
+#[lang = "foo"]
+fn bar() -> ! {
+//~^^ ERROR definition of an unknown language item: `foo`
+ loop {}
+}
+
+fn main() {}
--- /dev/null
+error[E0522]: definition of an unknown language item: `foo`
+ --> $DIR/unknown-language-item.rs:14:1
+ |
+14 | #[lang = "foo"]
+ | ^^^^^^^^^^^^^^^ definition of unknown language item `foo`
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(use_nested_groups)]
+#![allow(dead_code)]
+#![deny(unused_imports)]
+
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Bar();
+ }
+ pub mod foobar {}
+ }
+
+ pub struct Foo();
+}
+
+use foo::{Foo, bar::{baz::{}, foobar::*}, *};
+ //~^ ERROR unused imports: `*`, `Foo`, `baz::{}`, `foobar::*`
+use foo::bar::baz::{*, *};
+ //~^ ERROR unused import: `*`
+use foo::{};
+ //~^ ERROR unused import: `use foo::{};`
+
+fn main() {
+ let _: Bar;
+}
--- /dev/null
+error: unused imports: `*`, `Foo`, `baz::{}`, `foobar::*`
+ --> $DIR/use-nested-groups-unused-imports.rs:26:11
+ |
+26 | use foo::{Foo, bar::{baz::{}, foobar::*}, *};
+ | ^^^ ^^^^^^^ ^^^^^^^^^ ^
+ |
+note: lint level defined here
+ --> $DIR/use-nested-groups-unused-imports.rs:13:9
+ |
+13 | #![deny(unused_imports)]
+ | ^^^^^^^^^^^^^^
+
+error: unused import: `*`
+ --> $DIR/use-nested-groups-unused-imports.rs:28:24
+ |
+28 | use foo::bar::baz::{*, *};
+ | ^
+
+error: unused import: `use foo::{};`
+ --> $DIR/use-nested-groups-unused-imports.rs:30:1
+ |
+30 | use foo::{};
+ | ^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
-Subproject commit 6a8eb71f6d226f9ac869dbacd5ff6aa76deef1c4
+Subproject commit 91e36aa86c7037de50642f2fec1cf47c3d18af02