"crypto-hash 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"curl 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "chrono"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "clap"
version = "2.29.0"
"regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "env_logger"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "atty 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "termcolor 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "error-chain"
version = "0.8.1"
name = "tidy"
version = "0.1.0"
+[[package]]
+name = "time"
+version = "0.1.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "toml"
version = "0.2.1"
"checksum cargo_metadata 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "20d6fb2b5574726329c85cdba0df0347fddfec3cf9c8b588f9931708280f5643"
"checksum cc 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "deaf9ec656256bb25b404c51ef50097207b9cbb29c933d31f92cae5a8a0ffee0"
"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
+"checksum chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7c20ebe0b2b08b0aeddba49c609fe7957ba2e33449882cb186a180bc60682fa9"
"checksum clap 2.29.0 (registry+https://github.com/rust-lang/crates.io-index)" = "110d43e343eb29f4f51c1db31beb879d546db27998577e5715270a54bcf41d3f"
"checksum cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "56d741ea7a69e577f6d06b36b7dff4738f680593dc27a701ffa8506b73ce28bb"
"checksum coco 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c06169f5beb7e31c7c67ebf5540b8b472d23e3eade3b2ec7d1f5b504a85f91bd"
"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b"
+"checksum env_logger 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f15f0b172cb4f52ed5dbf47f774a387cd2315d1bf7894ab5af9b083ae27efa5a"
"checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3"
"checksum error-chain 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6930e04918388a9a2e41d518c25cf679ccafe26733fb4127dbf21993f2575d46"
"checksum failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "934799b6c1de475a012a02dab0ace1ace43789ee4b99bcfbf1a2e3e8ced5de82"
"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
"checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963"
+"checksum time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "a15375f1df02096fb3317256ce2cee6a1f42fc84ea5ad5fc8c421cfe40c73098"
"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
"checksum toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a7540f4ffc193e0d3c94121edb19b055670d369f77d5804db11ae053a45b6e7e"
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
cmd.arg(format!("-Clinker={}", target_linker));
}
- // Pass down incremental directory, if any.
- if let Ok(dir) = env::var("RUSTC_INCREMENTAL") {
- cmd.arg(format!("-Zincremental={}", dir));
- }
-
let crate_name = args.windows(2)
.find(|a| &*a[0] == "--crate-name")
.unwrap();
env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
(os.pathsep + env["LIBRARY_PATH"]) \
if "LIBRARY_PATH" in env else ""
+ env["RUSTFLAGS"] = "-Cdebuginfo=2"
env["PATH"] = os.path.join(self.bin_root(), "bin") + \
os.pathsep + env["PATH"]
if not os.path.isfile(self.cargo()):
use {Build, Mode};
use cache::{INTERNER, Interned, Cache};
use check;
+use test;
use flags::Subcommand;
use doc;
use tool;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Kind {
Build,
+ Check,
Test,
Bench,
Dist,
tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient,
tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy,
native::Llvm, tool::Rustfmt, tool::Miri),
- Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest,
- check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Rustdoc,
- check::Linkcheck, check::Cargotest, check::Cargo, check::Rls, check::Docs,
- check::ErrorIndex, check::Distcheck, check::Rustfmt, check::Miri, check::Clippy,
- check::RustdocJS),
-
- Kind::Bench => describe!(check::Crate, check::CrateLibrustc),
+ Kind::Check => describe!(check::Std, check::Test, check::Rustc),
+ Kind::Test => describe!(test::Tidy, test::Bootstrap, test::DefaultCompiletest,
+ test::HostCompiletest, test::Crate, test::CrateLibrustc, test::Rustdoc,
+ test::Linkcheck, test::Cargotest, test::Cargo, test::Rls, test::Docs,
+ test::ErrorIndex, test::Distcheck, test::Rustfmt, test::Miri, test::Clippy,
+ test::RustdocJS),
+ Kind::Bench => describe!(test::Crate, test::CrateLibrustc),
Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon,
doc::Reference, doc::Rustdoc, doc::RustByExample, doc::CargoBook),
pub fn run(build: &Build) {
let (kind, paths) = match build.config.cmd {
Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
+ Subcommand::Check { ref paths } => (Kind::Check, &paths[..]),
Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]),
Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
cargo.env("RUSTC_CODEGEN_UNITS", n.to_string());
}
+
if let Some(host_linker) = self.build.linker(compiler.host) {
cargo.env("RUSTC_HOST_LINKER", host_linker);
}
if let Some(target_linker) = self.build.linker(target) {
cargo.env("RUSTC_TARGET_LINKER", target_linker);
}
- if cmd != "build" {
+ if cmd != "build" && cmd != "check" {
cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build)));
}
// not guaranteeing correctness across builds if the compiler
// is changing under your feet.
if self.config.incremental && compiler.stage == 0 {
- let incr_dir = self.incremental_dir(compiler);
- cargo.env("RUSTC_INCREMENTAL", incr_dir);
+ cargo.env("CARGO_INCREMENTAL", "1");
}
if let Some(ref on_fail) = self.config.on_fail {
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Implementation of the test-related targets of the build system.
-//!
-//! This file implements the various regression test suites that we execute on
-//! our CI.
+//! Implementation of compiling the compiler and standard library, in "check" mode.
-use std::collections::HashSet;
-use std::env;
-use std::ffi::OsString;
-use std::iter;
-use std::fmt;
-use std::fs::{self, File};
-use std::path::{PathBuf, Path};
-use std::process::Command;
-use std::io::Read;
-
-use build_helper::{self, output};
-
-use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step};
-use cache::{INTERNER, Interned};
-use compile;
-use dist;
-use native;
-use tool::{self, Tool};
-use util::{self, dylib_path, dylib_path_var};
-use {Build, Mode};
-use toolstate::ToolState;
-
-const ADB_TEST_DIR: &str = "/data/tmp/work";
-
-/// The two modes of the test runner; tests or benchmarks.
-#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
-pub enum TestKind {
- /// Run `cargo test`
- Test,
- /// Run `cargo bench`
- Bench,
-}
-
-impl TestKind {
- // Return the cargo subcommand for this test kind
- fn subcommand(self) -> &'static str {
- match self {
- TestKind::Test => "test",
- TestKind::Bench => "bench",
- }
- }
-}
-
-impl fmt::Display for TestKind {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.write_str(match *self {
- TestKind::Test => "Testing",
- TestKind::Bench => "Benchmarking",
- })
- }
-}
-
-fn try_run(build: &Build, cmd: &mut Command) -> bool {
- if !build.fail_fast {
- if !build.try_run(cmd) {
- let mut failures = build.delayed_failures.borrow_mut();
- failures.push(format!("{:?}", cmd));
- return false;
- }
- } else {
- build.run(cmd);
- }
- true
-}
-
-fn try_run_quiet(build: &Build, cmd: &mut Command) {
- if !build.fail_fast {
- if !build.try_run_quiet(cmd) {
- let mut failures = build.delayed_failures.borrow_mut();
- failures.push(format!("{:?}", cmd));
- }
- } else {
- build.run_quiet(cmd);
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Linkcheck {
- host: Interned<String>,
-}
-
-impl Step for Linkcheck {
- type Output = ();
- const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = true;
-
- /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
- ///
- /// This tool in `src/tools` will verify the validity of all our links in the
- /// documentation to ensure we don't have a bunch of dead ones.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let host = self.host;
-
- println!("Linkcheck ({})", host);
-
- builder.default_doc(None);
-
- let _time = util::timeit();
- try_run(build, builder.tool_cmd(Tool::Linkchecker)
- .arg(build.out.join(host).join("doc")));
- }
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- let builder = run.builder;
- run.path("src/tools/linkchecker").default_condition(builder.build.config.docs)
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Linkcheck { host: run.target });
- }
-}
+use compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, add_to_sysroot};
+use builder::{RunConfig, Builder, ShouldRun, Step};
+use {Build, Compiler, Mode};
+use cache::Interned;
+use std::path::PathBuf;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Cargotest {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Cargotest {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/cargotest")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Cargotest {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
- ///
- /// This tool in `src/tools` will check out a few Rust projects and run `cargo
- /// test` to ensure that we don't regress the test suites there.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = builder.compiler(self.stage, self.host);
- builder.ensure(compile::Rustc { compiler, target: compiler.host });
-
- // Note that this is a short, cryptic, and not scoped directory name. This
- // is currently to minimize the length of path on Windows where we otherwise
- // quickly run into path name limit constraints.
- let out_dir = build.out.join("ct");
- t!(fs::create_dir_all(&out_dir));
-
- let _time = util::timeit();
- let mut cmd = builder.tool_cmd(Tool::CargoTest);
- try_run(build, cmd.arg(&build.initial_cargo)
- .arg(&out_dir)
- .env("RUSTC", builder.rustc(compiler))
- .env("RUSTDOC", builder.rustdoc(compiler.host)));
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Cargo {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Cargo {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/cargo")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Cargo {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for `cargo` packaged with Rust.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = builder.compiler(self.stage, self.host);
-
- builder.ensure(tool::Cargo { compiler, target: self.host });
- let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
- if !build.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
- // Don't run cross-compile tests, we may not have cross-compiled libstd libs
- // available.
- cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
-
- try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler)));
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Rls {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Rls {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/rls")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Rls {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for the rls.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- builder.ensure(tool::Rls { compiler, target: self.host });
- let mut cargo = tool::prepare_tool_cargo(builder,
- compiler,
- host,
- "test",
- "src/tools/rls");
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
- builder.add_rustc_lib_path(compiler, &mut cargo);
-
- if try_run(build, &mut cargo) {
- build.save_toolstate("rls", ToolState::TestPass);
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Rustfmt {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Rustfmt {
- type Output = ();
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/rustfmt")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Rustfmt {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for rustfmt.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- builder.ensure(tool::Rustfmt { compiler, target: self.host });
- let mut cargo = tool::prepare_tool_cargo(builder,
- compiler,
- host,
- "test",
- "src/tools/rustfmt");
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
- builder.add_rustc_lib_path(compiler, &mut cargo);
-
- if try_run(build, &mut cargo) {
- build.save_toolstate("rustfmt", ToolState::TestPass);
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Miri {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Miri {
- type Output = ();
- const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- let test_miri = run.builder.build.config.test_miri;
- run.path("src/tools/miri").default_condition(test_miri)
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Miri {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for miri.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- if let Some(miri) = builder.ensure(tool::Miri { compiler, target: self.host }) {
- let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml"));
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
- // miri tests need to know about the stage sysroot
- cargo.env("MIRI_SYSROOT", builder.sysroot(compiler));
- cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
- cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
- cargo.env("MIRI_PATH", miri);
-
- builder.add_rustc_lib_path(compiler, &mut cargo);
-
- if try_run(build, &mut cargo) {
- build.save_toolstate("miri", ToolState::TestPass);
- }
- } else {
- eprintln!("failed to test miri: could not build");
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Clippy {
- stage: u32,
- host: Interned<String>,
-}
-
-impl Step for Clippy {
- type Output = ();
- const ONLY_HOSTS: bool = true;
- const DEFAULT: bool = false;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/clippy")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Clippy {
- stage: run.builder.top_stage,
- host: run.target,
- });
- }
-
- /// Runs `cargo test` for clippy.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let stage = self.stage;
- let host = self.host;
- let compiler = builder.compiler(stage, host);
-
- if let Some(clippy) = builder.ensure(tool::Clippy { compiler, target: self.host }) {
- let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
- cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml"));
-
- // Don't build tests dynamically, just a pain to work with
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
- // clippy tests need to know about the stage sysroot
- cargo.env("SYSROOT", builder.sysroot(compiler));
- cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
- cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
- let host_libs = builder.stage_out(compiler, Mode::Tool).join(builder.cargo_dir());
- cargo.env("HOST_LIBS", host_libs);
- // clippy tests need to find the driver
- cargo.env("CLIPPY_DRIVER_PATH", clippy);
-
- builder.add_rustc_lib_path(compiler, &mut cargo);
-
- if try_run(build, &mut cargo) {
- build.save_toolstate("clippy-driver", ToolState::TestPass);
- }
- } else {
- eprintln!("failed to test clippy: could not build");
- }
- }
-}
-
-fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString {
- // Configure PATH to find the right rustc. NB. we have to use PATH
- // and not RUSTC because the Cargo test suite has tests that will
- // fail if rustc is not spelled `rustc`.
- let path = builder.sysroot(compiler).join("bin");
- let old_path = env::var_os("PATH").unwrap_or_default();
- env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("")
-}
-
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
-pub struct RustdocJS {
- pub host: Interned<String>,
+pub struct Std {
pub target: Interned<String>,
}
-impl Step for RustdocJS {
+impl Step for Std {
type Output = ();
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/test/rustdoc-js")
+ run.path("src/libstd").krate("std")
}
fn make_run(run: RunConfig) {
- run.builder.ensure(RustdocJS {
- host: run.host,
+ run.builder.ensure(Std {
target: run.target,
});
}
- fn run(self, builder: &Builder) {
- if let Some(ref nodejs) = builder.config.nodejs {
- let mut command = Command::new(nodejs);
- command.args(&["src/tools/rustdoc-js/tester.js", &*self.host]);
- builder.ensure(::doc::Std {
- target: self.target,
- stage: builder.top_stage,
- });
- builder.run(&mut command);
- } else {
- println!("No nodejs found, skipping \"src/test/rustdoc-js\" tests");
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Tidy {
- host: Interned<String>,
-}
-
-impl Step for Tidy {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
- const ONLY_BUILD: bool = true;
-
- /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
- ///
- /// This tool in `src/tools` checks up on various bits and pieces of style and
- /// otherwise just implements a few lint-like checks that are specific to the
- /// compiler itself.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let host = self.host;
-
- let _folder = build.fold_output(|| "tidy");
- println!("tidy check ({})", host);
- let mut cmd = builder.tool_cmd(Tool::Tidy);
- cmd.arg(build.src.join("src"));
- if !build.config.vendor {
- cmd.arg("--no-vendor");
- }
- if build.config.quiet_tests {
- cmd.arg("--quiet");
- }
- try_run(build, &mut cmd);
- }
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/tidy")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Tidy {
- host: run.builder.build.build,
- });
- }
-}
-
-fn testdir(build: &Build, host: Interned<String>) -> PathBuf {
- build.out.join(host).join("test")
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct Test {
- path: &'static str,
- mode: &'static str,
- suite: &'static str,
-}
-
-static DEFAULT_COMPILETESTS: &[Test] = &[
- Test { path: "src/test/ui", mode: "ui", suite: "ui" },
- Test { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" },
- Test { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" },
- Test { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" },
- Test { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" },
- Test {
- path: "src/test/run-pass-valgrind",
- mode: "run-pass-valgrind",
- suite: "run-pass-valgrind"
- },
- Test { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" },
- Test { path: "src/test/codegen", mode: "codegen", suite: "codegen" },
- Test { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" },
- Test { path: "src/test/incremental", mode: "incremental", suite: "incremental" },
-
- // What this runs varies depending on the native platform being apple
- Test { path: "src/test/debuginfo", mode: "debuginfo-XXX", suite: "debuginfo" },
-];
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct DefaultCompiletest {
- compiler: Compiler,
- target: Interned<String>,
- mode: &'static str,
- suite: &'static str,
-}
-
-impl Step for DefaultCompiletest {
- type Output = ();
- const DEFAULT: bool = true;
-
- fn should_run(mut run: ShouldRun) -> ShouldRun {
- for test in DEFAULT_COMPILETESTS {
- run = run.path(test.path);
- }
- run
- }
-
- fn make_run(run: RunConfig) {
- let compiler = run.builder.compiler(run.builder.top_stage, run.host);
-
- let test = run.path.map(|path| {
- DEFAULT_COMPILETESTS.iter().find(|&&test| {
- path.ends_with(test.path)
- }).unwrap_or_else(|| {
- panic!("make_run in compile test to receive test path, received {:?}", path);
- })
- });
-
- if let Some(test) = test {
- run.builder.ensure(DefaultCompiletest {
- compiler,
- target: run.target,
- mode: test.mode,
- suite: test.suite,
- });
- } else {
- for test in DEFAULT_COMPILETESTS {
- run.builder.ensure(DefaultCompiletest {
- compiler,
- target: run.target,
- mode: test.mode,
- suite: test.suite
- });
- }
- }
- }
-
- fn run(self, builder: &Builder) {
- builder.ensure(Compiletest {
- compiler: self.compiler,
- target: self.target,
- mode: self.mode,
- suite: self.suite,
- })
- }
-}
-
-// Also default, but host-only.
-static HOST_COMPILETESTS: &[Test] = &[
- Test { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" },
- Test { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" },
- Test { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" },
- Test {
- path: "src/test/compile-fail-fulldeps",
- mode: "compile-fail",
- suite: "compile-fail-fulldeps",
- },
- Test {
- path: "src/test/incremental-fulldeps",
- mode: "incremental",
- suite: "incremental-fulldeps",
- },
- Test { path: "src/test/run-make", mode: "run-make", suite: "run-make" },
- Test { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" },
-
- Test { path: "src/test/pretty", mode: "pretty", suite: "pretty" },
- Test { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass" },
- Test { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail" },
- Test { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind" },
- Test { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps" },
- Test { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps" },
-];
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct HostCompiletest {
- compiler: Compiler,
- target: Interned<String>,
- mode: &'static str,
- suite: &'static str,
-}
-
-impl Step for HostCompiletest {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
-
- fn should_run(mut run: ShouldRun) -> ShouldRun {
- for test in HOST_COMPILETESTS {
- run = run.path(test.path);
- }
- run
- }
-
- fn make_run(run: RunConfig) {
- let compiler = run.builder.compiler(run.builder.top_stage, run.host);
-
- let test = run.path.map(|path| {
- HOST_COMPILETESTS.iter().find(|&&test| {
- path.ends_with(test.path)
- }).unwrap_or_else(|| {
- panic!("make_run in compile test to receive test path, received {:?}", path);
- })
- });
-
- if let Some(test) = test {
- run.builder.ensure(HostCompiletest {
- compiler,
- target: run.target,
- mode: test.mode,
- suite: test.suite,
- });
- } else {
- for test in HOST_COMPILETESTS {
- if test.mode == "pretty" {
- continue;
- }
- run.builder.ensure(HostCompiletest {
- compiler,
- target: run.target,
- mode: test.mode,
- suite: test.suite
- });
- }
- }
- }
-
- fn run(self, builder: &Builder) {
- builder.ensure(Compiletest {
- compiler: self.compiler,
- target: self.target,
- mode: self.mode,
- suite: self.suite,
- })
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct Compiletest {
- compiler: Compiler,
- target: Interned<String>,
- mode: &'static str,
- suite: &'static str,
-}
-
-impl Step for Compiletest {
- type Output = ();
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.never()
- }
-
- /// Executes the `compiletest` tool to run a suite of tests.
- ///
- /// Compiles all tests with `compiler` for `target` with the specified
- /// compiletest `mode` and `suite` arguments. For example `mode` can be
- /// "run-pass" or `suite` can be something like `debuginfo`.
fn run(self, builder: &Builder) {
let build = builder.build;
- let compiler = self.compiler;
let target = self.target;
- let mode = self.mode;
- let suite = self.suite;
-
- // Skip codegen tests if they aren't enabled in configuration.
- if !build.config.codegen_tests && suite == "codegen" {
- return;
- }
-
- if suite == "debuginfo" {
- // Skip debuginfo tests on MSVC
- if build.build.contains("msvc") {
- return;
- }
-
- if mode == "debuginfo-XXX" {
- return if build.build.contains("apple") {
- builder.ensure(Compiletest {
- mode: "debuginfo-lldb",
- ..self
- });
- } else {
- builder.ensure(Compiletest {
- mode: "debuginfo-gdb",
- ..self
- });
- };
- }
-
- builder.ensure(dist::DebuggerScripts {
- sysroot: builder.sysroot(compiler),
- host: target
- });
- }
-
- if suite.ends_with("fulldeps") ||
- // FIXME: Does pretty need librustc compiled? Note that there are
- // fulldeps test suites with mode = pretty as well.
- mode == "pretty" ||
- mode == "rustdoc" ||
- mode == "run-make" {
- builder.ensure(compile::Rustc { compiler, target });
- }
-
- builder.ensure(compile::Test { compiler, target });
- builder.ensure(native::TestHelpers { target });
- builder.ensure(RemoteCopyLibs { compiler, target });
-
- let _folder = build.fold_output(|| format!("test_{}", suite));
- println!("Check compiletest suite={} mode={} ({} -> {})",
- suite, mode, &compiler.host, target);
- let mut cmd = builder.tool_cmd(Tool::Compiletest);
-
- // compiletest currently has... a lot of arguments, so let's just pass all
- // of them!
-
- cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler));
- cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
- cmd.arg("--rustc-path").arg(builder.rustc(compiler));
-
- // Avoid depending on rustdoc when we don't need it.
- if mode == "rustdoc" || mode == "run-make" {
- cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host));
- }
-
- cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
- cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
- cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
- cmd.arg("--mode").arg(mode);
- cmd.arg("--target").arg(target);
- cmd.arg("--host").arg(&*compiler.host);
- cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build));
-
- if let Some(ref nodejs) = build.config.nodejs {
- cmd.arg("--nodejs").arg(nodejs);
- }
-
- let mut flags = vec!["-Crpath".to_string()];
- if build.config.rust_optimize_tests {
- flags.push("-O".to_string());
- }
- if build.config.rust_debuginfo_tests {
- flags.push("-g".to_string());
- }
- flags.push("-Zmiri -Zunstable-options".to_string());
- flags.push(build.config.cmd.rustc_args().join(" "));
-
- if let Some(linker) = build.linker(target) {
- cmd.arg("--linker").arg(linker);
- }
-
- let hostflags = flags.clone();
- cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
-
- let mut targetflags = flags.clone();
- targetflags.push(format!("-Lnative={}",
- build.test_helpers_out(target).display()));
- cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
-
- cmd.arg("--docck-python").arg(build.python());
-
- if build.build.ends_with("apple-darwin") {
- // Force /usr/bin/python on macOS for LLDB tests because we're loading the
- // LLDB plugin's compiled module which only works with the system python
- // (namely not Homebrew-installed python)
- cmd.arg("--lldb-python").arg("/usr/bin/python");
- } else {
- cmd.arg("--lldb-python").arg(build.python());
- }
-
- if let Some(ref gdb) = build.config.gdb {
- cmd.arg("--gdb").arg(gdb);
- }
- if let Some(ref vers) = build.lldb_version {
- cmd.arg("--lldb-version").arg(vers);
- }
- if let Some(ref dir) = build.lldb_python_dir {
- cmd.arg("--lldb-python-dir").arg(dir);
- }
-
- cmd.args(&build.config.cmd.test_args());
-
- if build.is_verbose() {
- cmd.arg("--verbose");
- }
-
- if build.config.quiet_tests {
- cmd.arg("--quiet");
- }
-
- if build.config.llvm_enabled {
- let llvm_config = build.llvm_config(target);
- let llvm_version = output(Command::new(&llvm_config).arg("--version"));
- cmd.arg("--llvm-version").arg(llvm_version);
- if !build.is_rust_llvm(target) {
- cmd.arg("--system-llvm");
- }
-
- // Only pass correct values for these flags for the `run-make` suite as it
- // requires that a C++ compiler was configured which isn't always the case.
- if suite == "run-make" {
- let llvm_components = output(Command::new(&llvm_config).arg("--components"));
- let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
- cmd.arg("--cc").arg(build.cc(target))
- .arg("--cxx").arg(build.cxx(target).unwrap())
- .arg("--cflags").arg(build.cflags(target).join(" "))
- .arg("--llvm-components").arg(llvm_components.trim())
- .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
- if let Some(ar) = build.ar(target) {
- cmd.arg("--ar").arg(ar);
- }
- }
- }
- if suite == "run-make" && !build.config.llvm_enabled {
- println!("Ignoring run-make test suite as they generally dont work without LLVM");
- return;
- }
+ let compiler = builder.compiler(0, build.build);
- if suite != "run-make" {
- cmd.arg("--cc").arg("")
- .arg("--cxx").arg("")
- .arg("--cflags").arg("")
- .arg("--llvm-components").arg("")
- .arg("--llvm-cxxflags").arg("");
- }
+ let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
+ println!("Checking std artifacts ({} -> {})", &compiler.host, target);
- if build.remote_tested(target) {
- cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient));
- }
-
- // Running a C compiler on MSVC requires a few env vars to be set, to be
- // sure to set them here.
- //
- // Note that if we encounter `PATH` we make sure to append to our own `PATH`
- // rather than stomp over it.
- if target.contains("msvc") {
- for &(ref k, ref v) in build.cc[&target].env() {
- if k != "PATH" {
- cmd.env(k, v);
- }
- }
- }
- cmd.env("RUSTC_BOOTSTRAP", "1");
- build.add_rust_test_threads(&mut cmd);
-
- if build.config.sanitizers {
- cmd.env("SANITIZER_SUPPORT", "1");
- }
-
- if build.config.profiler {
- cmd.env("PROFILER_SUPPORT", "1");
- }
-
- cmd.arg("--adb-path").arg("adb");
- cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
- if target.contains("android") {
- // Assume that cc for this target comes from the android sysroot
- cmd.arg("--android-cross-path")
- .arg(build.cc(target).parent().unwrap().parent().unwrap());
- } else {
- cmd.arg("--android-cross-path").arg("");
- }
-
- build.ci_env.force_coloring_in_ci(&mut cmd);
-
- let _time = util::timeit();
- try_run(build, &mut cmd);
+ let out_dir = build.stage_out(compiler, Mode::Libstd);
+ build.clear_if_dirty(&out_dir, &builder.rustc(compiler));
+ let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "check");
+ std_cargo(build, &compiler, target, &mut cargo);
+ run_cargo(build,
+ &mut cargo,
+ &libstd_stamp(build, compiler, target),
+ true);
+ let libdir = builder.sysroot_libdir(compiler, target);
+ add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Docs {
- compiler: Compiler,
+pub struct Rustc {
+ pub target: Interned<String>,
}
-impl Step for Docs {
+impl Step for Rustc {
type Output = ();
- const DEFAULT: bool = true;
const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/doc")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Docs {
- compiler: run.builder.compiler(run.builder.top_stage, run.host),
- });
- }
-
- /// Run `rustdoc --test` for all documentation in `src/doc`.
- ///
- /// This will run all tests in our markdown documentation (e.g. the book)
- /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
- /// `compiler`.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = self.compiler;
-
- builder.ensure(compile::Test { compiler, target: compiler.host });
-
- // Do a breadth-first traversal of the `src/doc` directory and just run
- // tests for all files that end in `*.md`
- let mut stack = vec![build.src.join("src/doc")];
- let _time = util::timeit();
- let _folder = build.fold_output(|| "test_docs");
-
- while let Some(p) = stack.pop() {
- if p.is_dir() {
- stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
- continue
- }
-
- if p.extension().and_then(|s| s.to_str()) != Some("md") {
- continue;
- }
-
- // The nostarch directory in the book is for no starch, and so isn't
- // guaranteed to build. We don't care if it doesn't build, so skip it.
- if p.to_str().map_or(false, |p| p.contains("nostarch")) {
- continue;
- }
-
- markdown_test(builder, compiler, &p);
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct ErrorIndex {
- compiler: Compiler,
-}
-
-impl Step for ErrorIndex {
- type Output = ();
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/tools/error_index_generator")
+ run.path("src/librustc").krate("rustc-main")
}
fn make_run(run: RunConfig) {
- run.builder.ensure(ErrorIndex {
- compiler: run.builder.compiler(run.builder.top_stage, run.host),
- });
- }
-
- /// Run the error index generator tool to execute the tests located in the error
- /// index.
- ///
- /// The `error_index_generator` tool lives in `src/tools` and is used to
- /// generate a markdown file from the error indexes of the code base which is
- /// then passed to `rustdoc --test`.
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = self.compiler;
-
- builder.ensure(compile::Std { compiler, target: compiler.host });
-
- let _folder = build.fold_output(|| "test_error_index");
- println!("Testing error-index stage{}", compiler.stage);
-
- let dir = testdir(build, compiler.host);
- t!(fs::create_dir_all(&dir));
- let output = dir.join("error-index.md");
-
- let _time = util::timeit();
- build.run(builder.tool_cmd(Tool::ErrorIndex)
- .arg("markdown")
- .arg(&output)
- .env("CFG_BUILD", &build.build)
- .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir()));
-
- markdown_test(builder, compiler, &output);
- }
-}
-
-fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) {
- let build = builder.build;
- let mut file = t!(File::open(markdown));
- let mut contents = String::new();
- t!(file.read_to_string(&mut contents));
- if !contents.contains("```") {
- return;
- }
-
- println!("doc tests for: {}", markdown.display());
- let mut cmd = builder.rustdoc_cmd(compiler.host);
- build.add_rust_test_threads(&mut cmd);
- cmd.arg("--test");
- cmd.arg(markdown);
- cmd.env("RUSTC_BOOTSTRAP", "1");
-
- let test_args = build.config.cmd.test_args().join(" ");
- cmd.arg("--test-args").arg(test_args);
-
- if build.config.quiet_tests {
- try_run_quiet(build, &mut cmd);
- } else {
- try_run(build, &mut cmd);
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct CrateLibrustc {
- compiler: Compiler,
- target: Interned<String>,
- test_kind: TestKind,
- krate: Option<Interned<String>>,
-}
-
-impl Step for CrateLibrustc {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.krate("rustc-main")
- }
-
- fn make_run(run: RunConfig) {
- let builder = run.builder;
- let compiler = builder.compiler(builder.top_stage, run.host);
-
- let make = |name: Option<Interned<String>>| {
- let test_kind = if builder.kind == Kind::Test {
- TestKind::Test
- } else if builder.kind == Kind::Bench {
- TestKind::Bench
- } else {
- panic!("unexpected builder.kind in crate: {:?}", builder.kind);
- };
-
- builder.ensure(CrateLibrustc {
- compiler,
- target: run.target,
- test_kind,
- krate: name,
- });
- };
-
- if let Some(path) = run.path {
- for (name, krate_path) in builder.crates("rustc-main") {
- if path.ends_with(krate_path) {
- make(Some(name));
- }
- }
- } else {
- make(None);
- }
- }
-
-
- fn run(self, builder: &Builder) {
- builder.ensure(Crate {
- compiler: self.compiler,
- target: self.target,
- mode: Mode::Librustc,
- test_kind: self.test_kind,
- krate: self.krate,
+ run.builder.ensure(Rustc {
+ target: run.target,
});
}
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Crate {
- compiler: Compiler,
- target: Interned<String>,
- mode: Mode,
- test_kind: TestKind,
- krate: Option<Interned<String>>,
-}
-
-impl Step for Crate {
- type Output = ();
- const DEFAULT: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.krate("std").krate("test")
- }
- fn make_run(run: RunConfig) {
- let builder = run.builder;
- let compiler = builder.compiler(builder.top_stage, run.host);
-
- let make = |mode: Mode, name: Option<Interned<String>>| {
- let test_kind = if builder.kind == Kind::Test {
- TestKind::Test
- } else if builder.kind == Kind::Bench {
- TestKind::Bench
- } else {
- panic!("unexpected builder.kind in crate: {:?}", builder.kind);
- };
-
- builder.ensure(Crate {
- compiler,
- target: run.target,
- mode,
- test_kind,
- krate: name,
- });
- };
-
- if let Some(path) = run.path {
- for (name, krate_path) in builder.crates("std") {
- if path.ends_with(krate_path) {
- make(Mode::Libstd, Some(name));
- }
- }
- for (name, krate_path) in builder.crates("test") {
- if path.ends_with(krate_path) {
- make(Mode::Libtest, Some(name));
- }
- }
- } else {
- make(Mode::Libstd, None);
- make(Mode::Libtest, None);
- }
- }
-
- /// Run all unit tests plus documentation tests for an entire crate DAG defined
- /// by a `Cargo.toml`
- ///
- /// This is what runs tests for crates like the standard library, compiler, etc.
- /// It essentially is the driver for running `cargo test`.
+ /// Check the compiler.
///
- /// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
- /// arguments, and those arguments are discovered from `cargo metadata`.
+ /// This will check the compiler for a particular stage of the build using
+ /// the `compiler` targeting the `target` architecture. The artifacts
+ /// created will also be linked into the sysroot directory.
fn run(self, builder: &Builder) {
let build = builder.build;
- let compiler = self.compiler;
+ let compiler = builder.compiler(0, build.build);
let target = self.target;
- let mode = self.mode;
- let test_kind = self.test_kind;
- let krate = self.krate;
-
- builder.ensure(compile::Test { compiler, target });
- builder.ensure(RemoteCopyLibs { compiler, target });
-
- // If we're not doing a full bootstrap but we're testing a stage2 version of
- // libstd, then what we're actually testing is the libstd produced in
- // stage1. Reflect that here by updating the compiler that we're working
- // with automatically.
- let compiler = if build.force_use_stage1(compiler, target) {
- builder.compiler(1, compiler.host)
- } else {
- compiler.clone()
- };
-
- let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand());
- let (name, root) = match mode {
- Mode::Libstd => {
- compile::std_cargo(build, &compiler, target, &mut cargo);
- ("libstd", "std")
- }
- Mode::Libtest => {
- compile::test_cargo(build, &compiler, target, &mut cargo);
- ("libtest", "test")
- }
- Mode::Librustc => {
- builder.ensure(compile::Rustc { compiler, target });
- compile::rustc_cargo(build, target, &mut cargo);
- ("librustc", "rustc-main")
- }
- _ => panic!("can only test libraries"),
- };
- let root = INTERNER.intern_string(String::from(root));
- let _folder = build.fold_output(|| {
- format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
- });
- println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
- &compiler.host, target);
-
- // Build up the base `cargo test` command.
- //
- // Pass in some standard flags then iterate over the graph we've discovered
- // in `cargo metadata` with the maps above and figure out what `-p`
- // arguments need to get passed.
- if test_kind.subcommand() == "test" && !build.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
- match krate {
- Some(krate) => {
- cargo.arg("-p").arg(krate);
- }
- None => {
- let mut visited = HashSet::new();
- let mut next = vec![root];
- while let Some(name) = next.pop() {
- // Right now jemalloc and the sanitizer crates are
- // target-specific crate in the sense that it's not present
- // on all platforms. Custom skip it here for now, but if we
- // add more this probably wants to get more generalized.
- //
- // Also skip `build_helper` as it's not compiled normally
- // for target during the bootstrap and it's just meant to be
- // a helper crate, not tested. If it leaks through then it
- // ends up messing with various mtime calculations and such.
- if !name.contains("jemalloc") &&
- *name != *"build_helper" &&
- !(name.starts_with("rustc_") && name.ends_with("san")) &&
- name != "dlmalloc" {
- cargo.arg("-p").arg(&format!("{}:0.0.0", name));
- }
- for dep in build.crates[&name].deps.iter() {
- if visited.insert(dep) {
- next.push(*dep);
- }
- }
- }
- }
- }
-
- // The tests are going to run with the *target* libraries, so we need to
- // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
- //
- // Note that to run the compiler we need to run with the *host* libraries,
- // but our wrapper scripts arrange for that to be the case anyway.
- let mut dylib_path = dylib_path();
- dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target)));
- cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
- cargo.arg("--");
- cargo.args(&build.config.cmd.test_args());
+ let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
+ println!("Checking compiler artifacts ({} -> {})", &compiler.host, target);
- if build.config.quiet_tests {
- cargo.arg("--quiet");
- }
+ let stage_out = builder.stage_out(compiler, Mode::Librustc);
+ build.clear_if_dirty(&stage_out, &libstd_stamp(build, compiler, target));
+ build.clear_if_dirty(&stage_out, &libtest_stamp(build, compiler, target));
- let _time = util::timeit();
-
- if target.contains("emscripten") {
- cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
- build.config.nodejs.as_ref().expect("nodejs not configured"));
- } else if target.starts_with("wasm32") {
- // On the wasm32-unknown-unknown target we're using LTO which is
- // incompatible with `-C prefer-dynamic`, so disable that here
- cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
- let node = build.config.nodejs.as_ref()
- .expect("nodejs not configured");
- let runner = format!("{} {}/src/etc/wasm32-shim.js",
- node.display(),
- build.src.display());
- cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner);
- } else if build.remote_tested(target) {
- cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
- format!("{} run",
- builder.tool_exe(Tool::RemoteTestClient).display()));
- }
- try_run(build, &mut cargo);
+ let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "check");
+ rustc_cargo(build, target, &mut cargo);
+ run_cargo(build,
+ &mut cargo,
+ &librustc_stamp(build, compiler, target),
+ true);
+ let libdir = builder.sysroot_libdir(compiler, target);
+ add_to_sysroot(&libdir, &librustc_stamp(build, compiler, target));
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Rustdoc {
- host: Interned<String>,
- test_kind: TestKind,
+pub struct Test {
+ pub target: Interned<String>,
}
-impl Step for Rustdoc {
+impl Step for Test {
type Output = ();
const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/librustdoc").path("src/tools/rustdoc")
+ run.path("src/libtest").krate("test")
}
fn make_run(run: RunConfig) {
- let builder = run.builder;
-
- let test_kind = if builder.kind == Kind::Test {
- TestKind::Test
- } else if builder.kind == Kind::Bench {
- TestKind::Bench
- } else {
- panic!("unexpected builder.kind in crate: {:?}", builder.kind);
- };
-
- builder.ensure(Rustdoc {
- host: run.host,
- test_kind,
+ run.builder.ensure(Test {
+ target: run.target,
});
}
fn run(self, builder: &Builder) {
let build = builder.build;
- let test_kind = self.test_kind;
-
- let compiler = builder.compiler(builder.top_stage, self.host);
- let target = compiler.host;
-
- let mut cargo = tool::prepare_tool_cargo(builder,
- compiler,
- target,
- test_kind.subcommand(),
- "src/tools/rustdoc");
- let _folder = build.fold_output(|| {
- format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage)
- });
- println!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage,
- &compiler.host, target);
-
- if test_kind.subcommand() == "test" && !build.fail_fast {
- cargo.arg("--no-fail-fast");
- }
-
- cargo.arg("-p").arg("rustdoc:0.0.0");
-
- cargo.arg("--");
- cargo.args(&build.config.cmd.test_args());
-
- if build.config.quiet_tests {
- cargo.arg("--quiet");
- }
-
- let _time = util::timeit();
+ let target = self.target;
+ let compiler = builder.compiler(0, build.build);
- try_run(build, &mut cargo);
+ let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
+ println!("Checking test artifacts ({} -> {})", &compiler.host, target);
+ let out_dir = build.stage_out(compiler, Mode::Libtest);
+ build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
+ let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "check");
+ test_cargo(build, &compiler, target, &mut cargo);
+ run_cargo(build,
+ &mut cargo,
+ &libtest_stamp(build, compiler, target),
+ true);
+ let libdir = builder.sysroot_libdir(compiler, target);
+ add_to_sysroot(&libdir, &libtest_stamp(build, compiler, target));
}
}
-fn envify(s: &str) -> String {
- s.chars().map(|c| {
- match c {
- '-' => '_',
- c => c,
- }
- }).flat_map(|c| c.to_uppercase()).collect()
+/// Cargo's output path for the standard library in a given stage, compiled
+/// by a particular compiler for the specified target.
+pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ build.cargo_out(compiler, Mode::Libstd, target).join(".libstd-check.stamp")
}
-/// Some test suites are run inside emulators or on remote devices, and most
-/// of our test binaries are linked dynamically which means we need to ship
-/// the standard library and such to the emulator ahead of time. This step
-/// represents this and is a dependency of all test suites.
-///
-/// Most of the time this is a noop. For some steps such as shipping data to
-/// QEMU we have to build our own tools so we've got conditional dependencies
-/// on those programs as well. Note that the remote test client is built for
-/// the build target (us) and the server is built for the target.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct RemoteCopyLibs {
- compiler: Compiler,
- target: Interned<String>,
+/// Cargo's output path for libtest in a given stage, compiled by a particular
+/// compiler for the specified target.
+pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ build.cargo_out(compiler, Mode::Libtest, target).join(".libtest-check.stamp")
}
-impl Step for RemoteCopyLibs {
- type Output = ();
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.never()
- }
-
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let compiler = self.compiler;
- let target = self.target;
- if !build.remote_tested(target) {
- return
- }
-
- builder.ensure(compile::Test { compiler, target });
-
- println!("REMOTE copy libs to emulator ({})", target);
- t!(fs::create_dir_all(build.out.join("tmp")));
-
- let server = builder.ensure(tool::RemoteTestServer { compiler, target });
-
- // Spawn the emulator and wait for it to come online
- let tool = builder.tool_exe(Tool::RemoteTestClient);
- let mut cmd = Command::new(&tool);
- cmd.arg("spawn-emulator")
- .arg(target)
- .arg(&server)
- .arg(build.out.join("tmp"));
- if let Some(rootfs) = build.qemu_rootfs(target) {
- cmd.arg(rootfs);
- }
- build.run(&mut cmd);
-
- // Push all our dylibs to the emulator
- for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) {
- let f = t!(f);
- let name = f.file_name().into_string().unwrap();
- if util::is_dylib(&name) {
- build.run(Command::new(&tool)
- .arg("push")
- .arg(f.path()));
- }
- }
- }
+/// Cargo's output path for librustc in a given stage, compiled by a particular
+/// compiler for the specified target.
+pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
+ build.cargo_out(compiler, Mode::Librustc, target).join(".librustc-check.stamp")
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Distcheck;
-
-impl Step for Distcheck {
- type Output = ();
- const ONLY_BUILD: bool = true;
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("distcheck")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Distcheck);
- }
-
- /// Run "distcheck", a 'make check' from a tarball
- fn run(self, builder: &Builder) {
- let build = builder.build;
-
- println!("Distcheck");
- let dir = build.out.join("tmp").join("distcheck");
- let _ = fs::remove_dir_all(&dir);
- t!(fs::create_dir_all(&dir));
-
- // Guarantee that these are built before we begin running.
- builder.ensure(dist::PlainSourceTarball);
- builder.ensure(dist::Src);
-
- let mut cmd = Command::new("tar");
- cmd.arg("-xzf")
- .arg(builder.ensure(dist::PlainSourceTarball))
- .arg("--strip-components=1")
- .current_dir(&dir);
- build.run(&mut cmd);
- build.run(Command::new("./configure")
- .args(&build.config.configure_args)
- .arg("--enable-vendor")
- .current_dir(&dir));
- build.run(Command::new(build_helper::make(&build.build))
- .arg("check")
- .current_dir(&dir));
-
- // Now make sure that rust-src has all of libstd's dependencies
- println!("Distcheck rust-src");
- let dir = build.out.join("tmp").join("distcheck-src");
- let _ = fs::remove_dir_all(&dir);
- t!(fs::create_dir_all(&dir));
-
- let mut cmd = Command::new("tar");
- cmd.arg("-xzf")
- .arg(builder.ensure(dist::Src))
- .arg("--strip-components=1")
- .current_dir(&dir);
- build.run(&mut cmd);
-
- let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
- build.run(Command::new(&build.initial_cargo)
- .arg("generate-lockfile")
- .arg("--manifest-path")
- .arg(&toml)
- .current_dir(&dir));
- }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Bootstrap;
-
-impl Step for Bootstrap {
- type Output = ();
- const DEFAULT: bool = true;
- const ONLY_HOSTS: bool = true;
- const ONLY_BUILD: bool = true;
-
- /// Test the build system itself
- fn run(self, builder: &Builder) {
- let build = builder.build;
- let mut cmd = Command::new(&build.initial_cargo);
- cmd.arg("test")
- .current_dir(build.src.join("src/bootstrap"))
- .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
- .env("RUSTC_BOOTSTRAP", "1")
- .env("RUSTC", &build.initial_rustc);
- if !build.fail_fast {
- cmd.arg("--no-fail-fast");
- }
- cmd.arg("--").args(&build.config.cmd.test_args());
- try_run(build, &mut cmd);
- }
-
- fn should_run(run: ShouldRun) -> ShouldRun {
- run.path("src/bootstrap")
- }
-
- fn make_run(run: RunConfig) {
- run.builder.ensure(Bootstrap);
- }
-}
std_cargo(build, &compiler, target, &mut cargo);
run_cargo(build,
&mut cargo,
- &libstd_stamp(build, compiler, target));
+ &libstd_stamp(build, compiler, target),
+ false);
builder.ensure(StdLink {
compiler: builder.compiler(compiler.stage, build.build),
test_cargo(build, &compiler, target, &mut cargo);
run_cargo(build,
&mut cargo,
- &libtest_stamp(build, compiler, target));
+ &libtest_stamp(build, compiler, target),
+ false);
builder.ensure(TestLink {
compiler: builder.compiler(compiler.stage, build.build),
rustc_cargo(build, target, &mut cargo);
run_cargo(build,
&mut cargo,
- &librustc_stamp(build, compiler, target));
+ &librustc_stamp(build, compiler, target),
+ false);
builder.ensure(RustcLink {
compiler: builder.compiler(compiler.stage, build.build),
///
/// For a particular stage this will link the file listed in `stamp` into the
/// `sysroot_dst` provided.
-fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
+pub fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
t!(fs::create_dir_all(&sysroot_dst));
for path in read_stamp_file(stamp) {
copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
}
}
-fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) {
+pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: bool) {
// Instruct Cargo to give us json messages on stdout, critically leaving
// stderr as piped so we can get those pretty colors.
cargo.arg("--message-format").arg("json")
// Skip files like executables
if !filename.ends_with(".rlib") &&
!filename.ends_with(".lib") &&
- !is_dylib(&filename) {
+ !is_dylib(&filename) &&
+ !(is_check && filename.ends_with(".rmeta")) {
continue
}
Build {
paths: Vec<PathBuf>,
},
+ Check {
+ paths: Vec<PathBuf>,
+ },
Doc {
paths: Vec<PathBuf>,
},
Subcommands:
build Compile either the compiler or libraries
+ check Compile either the compiler or libraries, using cargo check
test Build and run some test suites
bench Build and run some benchmarks
doc Build documentation
// there on out.
let subcommand = args.iter().find(|&s|
(s == "build")
+ || (s == "check")
|| (s == "test")
|| (s == "bench")
|| (s == "doc")
arguments would), and then use the compiler built in stage 0 to build
src/libtest and its dependencies.
Once this is done, build/$ARCH/stage1 contains a usable compiler.");
+ }
+ "check" => {
+ subcommand_help.push_str("\n
+Arguments:
+ This subcommand accepts a number of paths to directories to the crates
+ and/or artifacts to compile. For example:
+
+ ./x.py check src/libcore
+ ./x.py check src/libcore src/libproc_macro
+
+ If no arguments are passed then the complete artifacts are compiled: std, test, and rustc. Note
+ also that since we use `cargo check`, by default this will automatically enable incremental
+ compilation, so there's no need to pass it separately, though it won't hurt. We also completely
+ ignore the stage passed, as there's no way to compile in non-stage 0 without actually building
+ the compiler.");
}
"test" => {
subcommand_help.push_str("\n
"build" => {
Subcommand::Build { paths: paths }
}
+ "check" => {
+ Subcommand::Check { paths: paths }
+ }
"test" => {
Subcommand::Test {
paths,
mod cc_detect;
mod channel;
mod check;
+mod test;
mod clean;
mod compile;
mod metadata;
out
}
- /// Get the directory for incremental by-products when using the
- /// given compiler.
- fn incremental_dir(&self, compiler: Compiler) -> PathBuf {
- self.out.join(&*compiler.host).join(format!("stage{}-incremental", compiler.stage))
- }
-
/// Returns the root directory for all output generated in a particular
/// stage when running with a particular host compiler.
///
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of the test-related targets of the build system.
+//!
+//! This file implements the various regression test suites that we execute on
+//! our CI.
+
+use std::collections::HashSet;
+use std::env;
+use std::ffi::OsString;
+use std::iter;
+use std::fmt;
+use std::fs::{self, File};
+use std::path::{PathBuf, Path};
+use std::process::Command;
+use std::io::Read;
+
+use build_helper::{self, output};
+
+use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step};
+use cache::{INTERNER, Interned};
+use compile;
+use dist;
+use native;
+use tool::{self, Tool};
+use util::{self, dylib_path, dylib_path_var};
+use {Build, Mode};
+use toolstate::ToolState;
+
+const ADB_TEST_DIR: &str = "/data/tmp/work";
+
+/// The two modes of the test runner; tests or benchmarks.
+#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
+pub enum TestKind {
+ /// Run `cargo test`
+ Test,
+ /// Run `cargo bench`
+ Bench,
+}
+
+impl TestKind {
+ // Return the cargo subcommand for this test kind
+ fn subcommand(self) -> &'static str {
+ match self {
+ TestKind::Test => "test",
+ TestKind::Bench => "bench",
+ }
+ }
+}
+
+impl fmt::Display for TestKind {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(match *self {
+ TestKind::Test => "Testing",
+ TestKind::Bench => "Benchmarking",
+ })
+ }
+}
+
+fn try_run(build: &Build, cmd: &mut Command) -> bool {
+ if !build.fail_fast {
+ if !build.try_run(cmd) {
+ let mut failures = build.delayed_failures.borrow_mut();
+ failures.push(format!("{:?}", cmd));
+ return false;
+ }
+ } else {
+ build.run(cmd);
+ }
+ true
+}
+
+fn try_run_quiet(build: &Build, cmd: &mut Command) {
+ if !build.fail_fast {
+ if !build.try_run_quiet(cmd) {
+ let mut failures = build.delayed_failures.borrow_mut();
+ failures.push(format!("{:?}", cmd));
+ }
+ } else {
+ build.run_quiet(cmd);
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Linkcheck {
+ host: Interned<String>,
+}
+
+impl Step for Linkcheck {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = true;
+
+ /// Runs the `linkchecker` tool compiled by the `host` compiler.
+ ///
+ /// This tool in `src/tools` will verify the validity of all our links in the
+ /// documentation to ensure we don't have a bunch of dead ones.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let host = self.host;
+
+ println!("Linkcheck ({})", host);
+
+ builder.default_doc(None);
+
+ let _time = util::timeit();
+ try_run(build, builder.tool_cmd(Tool::Linkchecker)
+ .arg(build.out.join(host).join("doc")));
+ }
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ let builder = run.builder;
+ run.path("src/tools/linkchecker").default_condition(builder.build.config.docs)
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Linkcheck { host: run.target });
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Cargotest {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Cargotest {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/cargotest")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Cargotest {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
+ ///
+ /// This tool in `src/tools` will check out a few Rust projects and run `cargo
+ /// test` to ensure that we don't regress the test suites there.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = builder.compiler(self.stage, self.host);
+ builder.ensure(compile::Rustc { compiler, target: compiler.host });
+
+ // Note that this is a short, cryptic, and not scoped directory name. This
+ // is currently to minimize the length of path on Windows where we otherwise
+ // quickly run into path name limit constraints.
+ let out_dir = build.out.join("ct");
+ t!(fs::create_dir_all(&out_dir));
+
+ let _time = util::timeit();
+ let mut cmd = builder.tool_cmd(Tool::CargoTest);
+ try_run(build, cmd.arg(&build.initial_cargo)
+ .arg(&out_dir)
+ .env("RUSTC", builder.rustc(compiler))
+ .env("RUSTDOC", builder.rustdoc(compiler.host)));
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Cargo {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Cargo {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/cargo")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Cargo {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for `cargo` packaged with Rust.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let compiler = builder.compiler(self.stage, self.host);
+
+ builder.ensure(tool::Cargo { compiler, target: self.host });
+ let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test");
+ cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
+ if !build.fail_fast {
+ cargo.arg("--no-fail-fast");
+ }
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+ // Don't run cross-compile tests, we may not have cross-compiled libstd libs
+ // available.
+ cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
+
+ try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler)));
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rls {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Rls {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/rls")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Rls {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for the rls.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let stage = self.stage;
+ let host = self.host;
+ let compiler = builder.compiler(stage, host);
+
+ builder.ensure(tool::Rls { compiler, target: self.host });
+ let mut cargo = tool::prepare_tool_cargo(builder,
+ compiler,
+ host,
+ "test",
+ "src/tools/rls");
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+ builder.add_rustc_lib_path(compiler, &mut cargo);
+
+ if try_run(build, &mut cargo) {
+ build.save_toolstate("rls", ToolState::TestPass);
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rustfmt {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Rustfmt {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/rustfmt")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Rustfmt {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for rustfmt.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let stage = self.stage;
+ let host = self.host;
+ let compiler = builder.compiler(stage, host);
+
+ builder.ensure(tool::Rustfmt { compiler, target: self.host });
+ let mut cargo = tool::prepare_tool_cargo(builder,
+ compiler,
+ host,
+ "test",
+ "src/tools/rustfmt");
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+ builder.add_rustc_lib_path(compiler, &mut cargo);
+
+ if try_run(build, &mut cargo) {
+ build.save_toolstate("rustfmt", ToolState::TestPass);
+ }
+ }
+}
+
+/// Test step for miri: runs `cargo test` against `src/tools/miri`. Enabled by
+/// default only when `test_miri` is set in the build configuration.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Miri {
+    stage: u32,
+    host: Interned<String>,
+}
+
+impl Step for Miri {
+    type Output = ();
+    const ONLY_HOSTS: bool = true;
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let test_miri = run.builder.build.config.test_miri;
+        run.path("src/tools/miri").default_condition(test_miri)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Miri {
+            stage: run.builder.top_stage,
+            host: run.target,
+        });
+    }
+
+    /// Runs `cargo test` for miri.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+        let compiler = builder.compiler(stage, host);
+
+        // `tool::Miri` yields None when the tool failed to build; report the
+        // failure instead of aborting the whole test run.
+        if let Some(miri) = builder.ensure(tool::Miri { compiler, target: self.host }) {
+            let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
+            cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml"));
+
+            // Don't build tests dynamically, just a pain to work with
+            cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+            // miri tests need to know about the stage sysroot
+            cargo.env("MIRI_SYSROOT", builder.sysroot(compiler));
+            cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
+            cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+            cargo.env("MIRI_PATH", miri);
+
+            builder.add_rustc_lib_path(compiler, &mut cargo);
+
+            if try_run(build, &mut cargo) {
+                build.save_toolstate("miri", ToolState::TestPass);
+            }
+        } else {
+            eprintln!("failed to test miri: could not build");
+        }
+    }
+}
+
+/// Test step for clippy: runs `cargo test` against `src/tools/clippy`.
+/// Not run by default; invoked only when its path is requested explicitly.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Clippy {
+    stage: u32,
+    host: Interned<String>,
+}
+
+impl Step for Clippy {
+    type Output = ();
+    const ONLY_HOSTS: bool = true;
+    const DEFAULT: bool = false;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/clippy")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Clippy {
+            stage: run.builder.top_stage,
+            host: run.target,
+        });
+    }
+
+    /// Runs `cargo test` for clippy.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+        let compiler = builder.compiler(stage, host);
+
+        // `tool::Clippy` yields None when the tool failed to build; report the
+        // failure instead of aborting the whole test run.
+        if let Some(clippy) = builder.ensure(tool::Clippy { compiler, target: self.host }) {
+            let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
+            cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml"));
+
+            // Don't build tests dynamically, just a pain to work with
+            cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+            // clippy tests need to know about the stage sysroot
+            cargo.env("SYSROOT", builder.sysroot(compiler));
+            cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
+            cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+            let host_libs = builder.stage_out(compiler, Mode::Tool).join(builder.cargo_dir());
+            cargo.env("HOST_LIBS", host_libs);
+            // clippy tests need to find the driver
+            cargo.env("CLIPPY_DRIVER_PATH", clippy);
+
+            builder.add_rustc_lib_path(compiler, &mut cargo);
+
+            if try_run(build, &mut cargo) {
+                build.save_toolstate("clippy-driver", ToolState::TestPass);
+            }
+        } else {
+            eprintln!("failed to test clippy: could not build");
+        }
+    }
+}
+
+/// Returns a PATH-style value with `compiler`'s sysroot `bin` directory
+/// prepended to the current `PATH`, so the freshly built `rustc` is found
+/// first by Cargo's test suite.
+fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString {
+    // Configure PATH to find the right rustc. NB. we have to use PATH
+    // and not RUSTC because the Cargo test suite has tests that will
+    // fail if rustc is not spelled `rustc`.
+    let path = builder.sysroot(compiler).join("bin");
+    let old_path = env::var_os("PATH").unwrap_or_default();
+    env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("")
+}
+
+/// Runs the rustdoc-js test suite (`src/test/rustdoc-js`) under node.js.
+/// Skipped (with a message) when no nodejs binary is configured.
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct RustdocJS {
+    pub host: Interned<String>,
+    pub target: Interned<String>,
+}
+
+impl Step for RustdocJS {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/test/rustdoc-js")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(RustdocJS {
+            host: run.host,
+            target: run.target,
+        });
+    }
+
+    fn run(self, builder: &Builder) {
+        if let Some(ref nodejs) = builder.config.nodejs {
+            let mut command = Command::new(nodejs);
+            command.args(&["src/tools/rustdoc-js/tester.js", &*self.host]);
+            // The tester script reads generated std docs, so build them first.
+            builder.ensure(::doc::Std {
+                target: self.target,
+                stage: builder.top_stage,
+            });
+            builder.run(&mut command);
+        } else {
+            println!("No nodejs found, skipping \"src/test/rustdoc-js\" tests");
+        }
+    }
+}
+
+/// Step that runs the in-tree `tidy` style/lint checker over `src`.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Tidy {
+    host: Interned<String>,
+}
+
+impl Step for Tidy {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+    const ONLY_BUILD: bool = true;
+
+    /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
+    ///
+    /// This tool in `src/tools` checks up on various bits and pieces of style and
+    /// otherwise just implements a few lint-like checks that are specific to the
+    /// compiler itself.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let host = self.host;
+
+        let _folder = build.fold_output(|| "tidy");
+        println!("tidy check ({})", host);
+        let mut cmd = builder.tool_cmd(Tool::Tidy);
+        cmd.arg(build.src.join("src"));
+        // Vendored-source checks only make sense when vendoring is enabled.
+        if !build.config.vendor {
+            cmd.arg("--no-vendor");
+        }
+        if build.config.quiet_tests {
+            cmd.arg("--quiet");
+        }
+        try_run(build, &mut cmd);
+    }
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/tidy")
+    }
+
+    fn make_run(run: RunConfig) {
+        // Tidy always runs on the build host itself.
+        run.builder.ensure(Tidy {
+            host: run.builder.build.build,
+        });
+    }
+}
+
+/// Directory under the build output tree where test artifacts for `host` go.
+fn testdir(build: &Build, host: Interned<String>) -> PathBuf {
+    build.out.join(host).join("test")
+}
+
+/// One compiletest suite entry: the in-tree `path` used for CLI path matching,
+/// the compiletest `mode` (passed as `--mode`), and the `suite` directory name
+/// under `src/test` (passed via `--src-base`).
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct Test {
+    path: &'static str,
+    mode: &'static str,
+    suite: &'static str,
+}
+
+/// Compiletest suites that run for every target by default.
+static DEFAULT_COMPILETESTS: &[Test] = &[
+    Test { path: "src/test/ui", mode: "ui", suite: "ui" },
+    Test { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" },
+    Test { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" },
+    Test { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" },
+    Test { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" },
+    Test {
+        path: "src/test/run-pass-valgrind",
+        mode: "run-pass-valgrind",
+        suite: "run-pass-valgrind"
+    },
+    Test { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" },
+    Test { path: "src/test/codegen", mode: "codegen", suite: "codegen" },
+    Test { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" },
+    Test { path: "src/test/incremental", mode: "incremental", suite: "incremental" },
+
+    // What this runs varies depending on the native platform being apple
+    // ("debuginfo-XXX" is a placeholder resolved to -lldb or -gdb at run time).
+    Test { path: "src/test/debuginfo", mode: "debuginfo-XXX", suite: "debuginfo" },
+];
+
+/// Dispatcher for the suites in `DEFAULT_COMPILETESTS`: maps a requested path
+/// (or no path) to one or all entries, then forwards to `Compiletest`.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct DefaultCompiletest {
+    compiler: Compiler,
+    target: Interned<String>,
+    mode: &'static str,
+    suite: &'static str,
+}
+
+impl Step for DefaultCompiletest {
+    type Output = ();
+    const DEFAULT: bool = true;
+
+    fn should_run(mut run: ShouldRun) -> ShouldRun {
+        // Register every default suite path so any of them selects this step.
+        for test in DEFAULT_COMPILETESTS {
+            run = run.path(test.path);
+        }
+        run
+    }
+
+    fn make_run(run: RunConfig) {
+        let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+
+        // A concrete path selects exactly one suite; no path means "all".
+        let test = run.path.map(|path| {
+            DEFAULT_COMPILETESTS.iter().find(|&&test| {
+                path.ends_with(test.path)
+            }).unwrap_or_else(|| {
+                panic!("make_run in compile test to receive test path, received {:?}", path);
+            })
+        });
+
+        if let Some(test) = test {
+            run.builder.ensure(DefaultCompiletest {
+                compiler,
+                target: run.target,
+                mode: test.mode,
+                suite: test.suite,
+            });
+        } else {
+            for test in DEFAULT_COMPILETESTS {
+                run.builder.ensure(DefaultCompiletest {
+                    compiler,
+                    target: run.target,
+                    mode: test.mode,
+                    suite: test.suite
+                });
+            }
+        }
+    }
+
+    fn run(self, builder: &Builder) {
+        builder.ensure(Compiletest {
+            compiler: self.compiler,
+            target: self.target,
+            mode: self.mode,
+            suite: self.suite,
+        })
+    }
+}
+
+// Also default, but host-only.
+static HOST_COMPILETESTS: &[Test] = &[
+    Test { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" },
+    Test { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" },
+    Test { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" },
+    Test {
+        path: "src/test/compile-fail-fulldeps",
+        mode: "compile-fail",
+        suite: "compile-fail-fulldeps",
+    },
+    Test {
+        path: "src/test/incremental-fulldeps",
+        mode: "incremental",
+        suite: "incremental-fulldeps",
+    },
+    Test { path: "src/test/run-make", mode: "run-make", suite: "run-make" },
+    Test { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" },
+
+    // Pretty-printer runs over several suites; these entries share mode "pretty".
+    Test { path: "src/test/pretty", mode: "pretty", suite: "pretty" },
+    Test { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass" },
+    Test { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail" },
+    Test { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind" },
+    Test { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps" },
+    Test { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps" },
+];
+
+/// Dispatcher for `HOST_COMPILETESTS` (host-only suites); mirrors
+/// `DefaultCompiletest` but skips "pretty" suites unless requested by path.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct HostCompiletest {
+    compiler: Compiler,
+    target: Interned<String>,
+    mode: &'static str,
+    suite: &'static str,
+}
+
+impl Step for HostCompiletest {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(mut run: ShouldRun) -> ShouldRun {
+        for test in HOST_COMPILETESTS {
+            run = run.path(test.path);
+        }
+        run
+    }
+
+    fn make_run(run: RunConfig) {
+        let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+
+        // A concrete path selects exactly one suite; no path means "all".
+        let test = run.path.map(|path| {
+            HOST_COMPILETESTS.iter().find(|&&test| {
+                path.ends_with(test.path)
+            }).unwrap_or_else(|| {
+                panic!("make_run in compile test to receive test path, received {:?}", path);
+            })
+        });
+
+        if let Some(test) = test {
+            run.builder.ensure(HostCompiletest {
+                compiler,
+                target: run.target,
+                mode: test.mode,
+                suite: test.suite,
+            });
+        } else {
+            for test in HOST_COMPILETESTS {
+                // "pretty" suites only run when explicitly requested by path.
+                if test.mode == "pretty" {
+                    continue;
+                }
+                run.builder.ensure(HostCompiletest {
+                    compiler,
+                    target: run.target,
+                    mode: test.mode,
+                    suite: test.suite
+                });
+            }
+        }
+    }
+
+    fn run(self, builder: &Builder) {
+        builder.ensure(Compiletest {
+            compiler: self.compiler,
+            target: self.target,
+            mode: self.mode,
+            suite: self.suite,
+        })
+    }
+}
+
+/// The worker step that actually invokes the `compiletest` tool for one
+/// (mode, suite) pair. Never selected by path directly; only reached via
+/// `DefaultCompiletest` / `HostCompiletest`.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct Compiletest {
+    compiler: Compiler,
+    target: Interned<String>,
+    mode: &'static str,
+    suite: &'static str,
+}
+
+impl Step for Compiletest {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Executes the `compiletest` tool to run a suite of tests.
+    ///
+    /// Compiles all tests with `compiler` for `target` with the specified
+    /// compiletest `mode` and `suite` arguments. For example `mode` can be
+    /// "run-pass" or `suite` can be something like `debuginfo`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        let mode = self.mode;
+        let suite = self.suite;
+
+        // Skip codegen tests if they aren't enabled in configuration.
+        if !build.config.codegen_tests && suite == "codegen" {
+            return;
+        }
+
+        if suite == "debuginfo" {
+            // Skip debuginfo tests on MSVC
+            if build.build.contains("msvc") {
+                return;
+            }
+
+            // "debuginfo-XXX" is the placeholder from DEFAULT_COMPILETESTS;
+            // resolve it here: lldb on apple hosts, gdb everywhere else.
+            if mode == "debuginfo-XXX" {
+                return if build.build.contains("apple") {
+                    builder.ensure(Compiletest {
+                        mode: "debuginfo-lldb",
+                        ..self
+                    });
+                } else {
+                    builder.ensure(Compiletest {
+                        mode: "debuginfo-gdb",
+                        ..self
+                    });
+                };
+            }
+
+            builder.ensure(dist::DebuggerScripts {
+                sysroot: builder.sysroot(compiler),
+                host: target
+            });
+        }
+
+        if suite.ends_with("fulldeps") ||
+            // FIXME: Does pretty need librustc compiled? Note that there are
+            // fulldeps test suites with mode = pretty as well.
+            mode == "pretty" ||
+            mode == "rustdoc" ||
+            mode == "run-make" {
+            builder.ensure(compile::Rustc { compiler, target });
+        }
+
+        builder.ensure(compile::Test { compiler, target });
+        builder.ensure(native::TestHelpers { target });
+        builder.ensure(RemoteCopyLibs { compiler, target });
+
+        let _folder = build.fold_output(|| format!("test_{}", suite));
+        println!("Check compiletest suite={} mode={} ({} -> {})",
+                 suite, mode, &compiler.host, target);
+        let mut cmd = builder.tool_cmd(Tool::Compiletest);
+
+        // compiletest currently has... a lot of arguments, so let's just pass all
+        // of them!
+
+        cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler));
+        cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
+        cmd.arg("--rustc-path").arg(builder.rustc(compiler));
+
+        // Avoid depending on rustdoc when we don't need it.
+        if mode == "rustdoc" || mode == "run-make" {
+            cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host));
+        }
+
+        cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
+        cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+        cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+        cmd.arg("--mode").arg(mode);
+        cmd.arg("--target").arg(target);
+        cmd.arg("--host").arg(&*compiler.host);
+        cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build));
+
+        if let Some(ref nodejs) = build.config.nodejs {
+            cmd.arg("--nodejs").arg(nodejs);
+        }
+
+        // Flags shared by host and target rustc invocations.
+        let mut flags = vec!["-Crpath".to_string()];
+        if build.config.rust_optimize_tests {
+            flags.push("-O".to_string());
+        }
+        if build.config.rust_debuginfo_tests {
+            flags.push("-g".to_string());
+        }
+        flags.push("-Zmiri -Zunstable-options".to_string());
+        flags.push(build.config.cmd.rustc_args().join(" "));
+
+        if let Some(linker) = build.linker(target) {
+            cmd.arg("--linker").arg(linker);
+        }
+
+        let hostflags = flags.clone();
+        cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+
+        // Target flags additionally need the native test-helpers search path.
+        let mut targetflags = flags.clone();
+        targetflags.push(format!("-Lnative={}",
+                                 build.test_helpers_out(target).display()));
+        cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+
+        cmd.arg("--docck-python").arg(build.python());
+
+        if build.build.ends_with("apple-darwin") {
+            // Force /usr/bin/python on macOS for LLDB tests because we're loading the
+            // LLDB plugin's compiled module which only works with the system python
+            // (namely not Homebrew-installed python)
+            cmd.arg("--lldb-python").arg("/usr/bin/python");
+        } else {
+            cmd.arg("--lldb-python").arg(build.python());
+        }
+
+        if let Some(ref gdb) = build.config.gdb {
+            cmd.arg("--gdb").arg(gdb);
+        }
+        if let Some(ref vers) = build.lldb_version {
+            cmd.arg("--lldb-version").arg(vers);
+        }
+        if let Some(ref dir) = build.lldb_python_dir {
+            cmd.arg("--lldb-python-dir").arg(dir);
+        }
+
+        cmd.args(&build.config.cmd.test_args());
+
+        if build.is_verbose() {
+            cmd.arg("--verbose");
+        }
+
+        if build.config.quiet_tests {
+            cmd.arg("--quiet");
+        }
+
+        if build.config.llvm_enabled {
+            let llvm_config = build.llvm_config(target);
+            let llvm_version = output(Command::new(&llvm_config).arg("--version"));
+            cmd.arg("--llvm-version").arg(llvm_version);
+            if !build.is_rust_llvm(target) {
+                cmd.arg("--system-llvm");
+            }
+
+            // Only pass correct values for these flags for the `run-make` suite as it
+            // requires that a C++ compiler was configured which isn't always the case.
+            if suite == "run-make" {
+                let llvm_components = output(Command::new(&llvm_config).arg("--components"));
+                let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
+                cmd.arg("--cc").arg(build.cc(target))
+                   .arg("--cxx").arg(build.cxx(target).unwrap())
+                   .arg("--cflags").arg(build.cflags(target).join(" "))
+                   .arg("--llvm-components").arg(llvm_components.trim())
+                   .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
+                if let Some(ar) = build.ar(target) {
+                    cmd.arg("--ar").arg(ar);
+                }
+            }
+        }
+        // NOTE(review): "dont" should read "don't" in this user-facing message.
+        if suite == "run-make" && !build.config.llvm_enabled {
+            println!("Ignoring run-make test suite as they generally dont work without LLVM");
+            return;
+        }
+
+        // compiletest requires these flags to be present on every invocation,
+        // so pass empty values for every suite other than run-make.
+        if suite != "run-make" {
+            cmd.arg("--cc").arg("")
+               .arg("--cxx").arg("")
+               .arg("--cflags").arg("")
+               .arg("--llvm-components").arg("")
+               .arg("--llvm-cxxflags").arg("");
+        }
+
+        if build.remote_tested(target) {
+            cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient));
+        }
+
+        // Running a C compiler on MSVC requires a few env vars to be set, to be
+        // sure to set them here.
+        //
+        // Note that if we encounter `PATH` we make sure to append to our own `PATH`
+        // rather than stomp over it.
+        if target.contains("msvc") {
+            for &(ref k, ref v) in build.cc[&target].env() {
+                if k != "PATH" {
+                    cmd.env(k, v);
+                }
+            }
+        }
+        cmd.env("RUSTC_BOOTSTRAP", "1");
+        build.add_rust_test_threads(&mut cmd);
+
+        if build.config.sanitizers {
+            cmd.env("SANITIZER_SUPPORT", "1");
+        }
+
+        if build.config.profiler {
+            cmd.env("PROFILER_SUPPORT", "1");
+        }
+
+        cmd.arg("--adb-path").arg("adb");
+        cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
+        if target.contains("android") {
+            // Assume that cc for this target comes from the android sysroot
+            cmd.arg("--android-cross-path")
+               .arg(build.cc(target).parent().unwrap().parent().unwrap());
+        } else {
+            cmd.arg("--android-cross-path").arg("");
+        }
+
+        build.ci_env.force_coloring_in_ci(&mut cmd);
+
+        let _time = util::timeit();
+        try_run(build, &mut cmd);
+    }
+}
+
+/// Step that doctests all markdown files under `src/doc`.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Docs {
+    compiler: Compiler,
+}
+
+impl Step for Docs {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/doc")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Docs {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+        });
+    }
+
+    /// Run `rustdoc --test` for all documentation in `src/doc`.
+    ///
+    /// This will run all tests in our markdown documentation (e.g. the book)
+    /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
+    /// `compiler`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+
+        builder.ensure(compile::Test { compiler, target: compiler.host });
+
+        // Do a breadth-first traversal of the `src/doc` directory and just run
+        // tests for all files that end in `*.md`
+        let mut stack = vec![build.src.join("src/doc")];
+        let _time = util::timeit();
+        let _folder = build.fold_output(|| "test_docs");
+
+        while let Some(p) = stack.pop() {
+            if p.is_dir() {
+                stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
+                continue
+            }
+
+            if p.extension().and_then(|s| s.to_str()) != Some("md") {
+                continue;
+            }
+
+            // The nostarch directory in the book is for no starch, and so isn't
+            // guaranteed to build. We don't care if it doesn't build, so skip it.
+            if p.to_str().map_or(false, |p| p.contains("nostarch")) {
+                continue;
+            }
+
+            markdown_test(builder, compiler, &p);
+        }
+    }
+}
+
+/// Step that generates the error index as markdown and doctests it.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct ErrorIndex {
+    compiler: Compiler,
+}
+
+impl Step for ErrorIndex {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/error_index_generator")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(ErrorIndex {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+        });
+    }
+
+    /// Run the error index generator tool to execute the tests located in the error
+    /// index.
+    ///
+    /// The `error_index_generator` tool lives in `src/tools` and is used to
+    /// generate a markdown file from the error indexes of the code base which is
+    /// then passed to `rustdoc --test`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+
+        builder.ensure(compile::Std { compiler, target: compiler.host });
+
+        let _folder = build.fold_output(|| "test_error_index");
+        println!("Testing error-index stage{}", compiler.stage);
+
+        let dir = testdir(build, compiler.host);
+        t!(fs::create_dir_all(&dir));
+        let output = dir.join("error-index.md");
+
+        let _time = util::timeit();
+        // Generate the markdown error index, then doctest it below.
+        build.run(builder.tool_cmd(Tool::ErrorIndex)
+                    .arg("markdown")
+                    .arg(&output)
+                    .env("CFG_BUILD", &build.build)
+                    .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir()));
+
+        markdown_test(builder, compiler, &output);
+    }
+}
+
+/// Runs `rustdoc --test` on a single markdown file. Files containing no code
+/// fences (no "```") are skipped entirely.
+fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) {
+    let build = builder.build;
+    let mut file = t!(File::open(markdown));
+    let mut contents = String::new();
+    t!(file.read_to_string(&mut contents));
+    if !contents.contains("```") {
+        return;
+    }
+
+    println!("doc tests for: {}", markdown.display());
+    let mut cmd = builder.rustdoc_cmd(compiler.host);
+    build.add_rust_test_threads(&mut cmd);
+    cmd.arg("--test");
+    cmd.arg(markdown);
+    cmd.env("RUSTC_BOOTSTRAP", "1");
+
+    let test_args = build.config.cmd.test_args().join(" ");
+    cmd.arg("--test-args").arg(test_args);
+
+    if build.config.quiet_tests {
+        try_run_quiet(build, &mut cmd);
+    } else {
+        try_run(build, &mut cmd);
+    }
+}
+
+/// Test/bench step for compiler crates; forwards to `Crate` with
+/// `Mode::Librustc`. `krate: None` means "all crates in the rustc-main DAG".
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct CrateLibrustc {
+    compiler: Compiler,
+    target: Interned<String>,
+    test_kind: TestKind,
+    krate: Option<Interned<String>>,
+}
+
+impl Step for CrateLibrustc {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.krate("rustc-main")
+    }
+
+    fn make_run(run: RunConfig) {
+        let builder = run.builder;
+        let compiler = builder.compiler(builder.top_stage, run.host);
+
+        let make = |name: Option<Interned<String>>| {
+            // This step is registered for both `test` and `bench` subcommands.
+            let test_kind = if builder.kind == Kind::Test {
+                TestKind::Test
+            } else if builder.kind == Kind::Bench {
+                TestKind::Bench
+            } else {
+                panic!("unexpected builder.kind in crate: {:?}", builder.kind);
+            };
+
+            builder.ensure(CrateLibrustc {
+                compiler,
+                target: run.target,
+                test_kind,
+                krate: name,
+            });
+        };
+
+        // A path selects the matching crates; no path means the whole DAG.
+        if let Some(path) = run.path {
+            for (name, krate_path) in builder.crates("rustc-main") {
+                if path.ends_with(krate_path) {
+                    make(Some(name));
+                }
+            }
+        } else {
+            make(None);
+        }
+    }
+
+
+    fn run(self, builder: &Builder) {
+        builder.ensure(Crate {
+            compiler: self.compiler,
+            target: self.target,
+            mode: Mode::Librustc,
+            test_kind: self.test_kind,
+            krate: self.krate,
+        });
+    }
+}
+
+/// The worker step that runs `cargo test`/`cargo bench` for a library crate
+/// DAG (libstd, libtest, or librustc via `CrateLibrustc`).
+/// `krate: None` means every crate reachable from the DAG root.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Crate {
+    compiler: Compiler,
+    target: Interned<String>,
+    mode: Mode,
+    test_kind: TestKind,
+    krate: Option<Interned<String>>,
+}
+
+impl Step for Crate {
+    type Output = ();
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.krate("std").krate("test")
+    }
+
+    fn make_run(run: RunConfig) {
+        let builder = run.builder;
+        let compiler = builder.compiler(builder.top_stage, run.host);
+
+        let make = |mode: Mode, name: Option<Interned<String>>| {
+            // This step is registered for both `test` and `bench` subcommands.
+            let test_kind = if builder.kind == Kind::Test {
+                TestKind::Test
+            } else if builder.kind == Kind::Bench {
+                TestKind::Bench
+            } else {
+                panic!("unexpected builder.kind in crate: {:?}", builder.kind);
+            };
+
+            builder.ensure(Crate {
+                compiler,
+                target: run.target,
+                mode,
+                test_kind,
+                krate: name,
+            });
+        };
+
+        // A path selects matching crates from either DAG; no path runs both.
+        if let Some(path) = run.path {
+            for (name, krate_path) in builder.crates("std") {
+                if path.ends_with(krate_path) {
+                    make(Mode::Libstd, Some(name));
+                }
+            }
+            for (name, krate_path) in builder.crates("test") {
+                if path.ends_with(krate_path) {
+                    make(Mode::Libtest, Some(name));
+                }
+            }
+        } else {
+            make(Mode::Libstd, None);
+            make(Mode::Libtest, None);
+        }
+    }
+
+    /// Run all unit tests plus documentation tests for an entire crate DAG defined
+    /// by a `Cargo.toml`
+    ///
+    /// This is what runs tests for crates like the standard library, compiler, etc.
+    /// It essentially is the driver for running `cargo test`.
+    ///
+    /// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
+    /// arguments, and those arguments are discovered from `cargo metadata`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        let mode = self.mode;
+        let test_kind = self.test_kind;
+        let krate = self.krate;
+
+        builder.ensure(compile::Test { compiler, target });
+        builder.ensure(RemoteCopyLibs { compiler, target });
+
+        // If we're not doing a full bootstrap but we're testing a stage2 version of
+        // libstd, then what we're actually testing is the libstd produced in
+        // stage1. Reflect that here by updating the compiler that we're working
+        // with automatically.
+        let compiler = if build.force_use_stage1(compiler, target) {
+            builder.compiler(1, compiler.host)
+        } else {
+            compiler.clone()
+        };
+
+        let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand());
+        let (name, root) = match mode {
+            Mode::Libstd => {
+                compile::std_cargo(build, &compiler, target, &mut cargo);
+                ("libstd", "std")
+            }
+            Mode::Libtest => {
+                compile::test_cargo(build, &compiler, target, &mut cargo);
+                ("libtest", "test")
+            }
+            Mode::Librustc => {
+                builder.ensure(compile::Rustc { compiler, target });
+                compile::rustc_cargo(build, target, &mut cargo);
+                ("librustc", "rustc-main")
+            }
+            _ => panic!("can only test libraries"),
+        };
+        let root = INTERNER.intern_string(String::from(root));
+        let _folder = build.fold_output(|| {
+            format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
+        });
+        println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
+                 &compiler.host, target);
+
+        // Build up the base `cargo test` command.
+        //
+        // Pass in some standard flags then iterate over the graph we've discovered
+        // in `cargo metadata` with the maps above and figure out what `-p`
+        // arguments need to get passed.
+        if test_kind.subcommand() == "test" && !build.fail_fast {
+            cargo.arg("--no-fail-fast");
+        }
+
+        match krate {
+            Some(krate) => {
+                cargo.arg("-p").arg(krate);
+            }
+            None => {
+                // Walk the crate DAG from `root`, emitting a `-p` per crate.
+                let mut visited = HashSet::new();
+                let mut next = vec![root];
+                while let Some(name) = next.pop() {
+                    // Right now jemalloc and the sanitizer crates are
+                    // target-specific crate in the sense that it's not present
+                    // on all platforms. Custom skip it here for now, but if we
+                    // add more this probably wants to get more generalized.
+                    //
+                    // Also skip `build_helper` as it's not compiled normally
+                    // for target during the bootstrap and it's just meant to be
+                    // a helper crate, not tested. If it leaks through then it
+                    // ends up messing with various mtime calculations and such.
+                    if !name.contains("jemalloc") &&
+                       *name != *"build_helper" &&
+                       !(name.starts_with("rustc_") && name.ends_with("san")) &&
+                       name != "dlmalloc" {
+                        cargo.arg("-p").arg(&format!("{}:0.0.0", name));
+                    }
+                    for dep in build.crates[&name].deps.iter() {
+                        if visited.insert(dep) {
+                            next.push(*dep);
+                        }
+                    }
+                }
+            }
+        }
+
+        // The tests are going to run with the *target* libraries, so we need to
+        // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
+        //
+        // Note that to run the compiler we need to run with the *host* libraries,
+        // but our wrapper scripts arrange for that to be the case anyway.
+        let mut dylib_path = dylib_path();
+        dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target)));
+        cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
+
+        cargo.arg("--");
+        cargo.args(&build.config.cmd.test_args());
+
+        if build.config.quiet_tests {
+            cargo.arg("--quiet");
+        }
+
+        let _time = util::timeit();
+
+        // Emulated / remote targets need a runner that can execute the binaries.
+        if target.contains("emscripten") {
+            cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
+                      build.config.nodejs.as_ref().expect("nodejs not configured"));
+        } else if target.starts_with("wasm32") {
+            // On the wasm32-unknown-unknown target we're using LTO which is
+            // incompatible with `-C prefer-dynamic`, so disable that here
+            cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+            let node = build.config.nodejs.as_ref()
+                .expect("nodejs not configured");
+            let runner = format!("{} {}/src/etc/wasm32-shim.js",
+                                 node.display(),
+                                 build.src.display());
+            cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner);
+        } else if build.remote_tested(target) {
+            cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
+                      format!("{} run",
+                              builder.tool_exe(Tool::RemoteTestClient).display()));
+        }
+        try_run(build, &mut cargo);
+    }
+}
+
+/// Test/bench step for the `rustdoc` crate itself (`src/tools/rustdoc`).
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rustdoc {
+    host: Interned<String>,
+    test_kind: TestKind,
+}
+
+impl Step for Rustdoc {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/librustdoc").path("src/tools/rustdoc")
+    }
+
+    fn make_run(run: RunConfig) {
+        let builder = run.builder;
+
+        // This step is registered for both `test` and `bench` subcommands.
+        let test_kind = if builder.kind == Kind::Test {
+            TestKind::Test
+        } else if builder.kind == Kind::Bench {
+            TestKind::Bench
+        } else {
+            panic!("unexpected builder.kind in crate: {:?}", builder.kind);
+        };
+
+        builder.ensure(Rustdoc {
+            host: run.host,
+            test_kind,
+        });
+    }
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let test_kind = self.test_kind;
+
+        let compiler = builder.compiler(builder.top_stage, self.host);
+        let target = compiler.host;
+
+        let mut cargo = tool::prepare_tool_cargo(builder,
+                                                 compiler,
+                                                 target,
+                                                 test_kind.subcommand(),
+                                                 "src/tools/rustdoc");
+        let _folder = build.fold_output(|| {
+            format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage)
+        });
+        println!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage,
+                 &compiler.host, target);
+
+        if test_kind.subcommand() == "test" && !build.fail_fast {
+            cargo.arg("--no-fail-fast");
+        }
+
+        // Restrict the run to the rustdoc package only.
+        cargo.arg("-p").arg("rustdoc:0.0.0");
+
+        cargo.arg("--");
+        cargo.args(&build.config.cmd.test_args());
+
+        if build.config.quiet_tests {
+            cargo.arg("--quiet");
+        }
+
+        let _time = util::timeit();
+
+        try_run(build, &mut cargo);
+    }
+}
+
+/// Converts a target triple into the form Cargo uses inside environment
+/// variable names: `-` becomes `_` and all characters are uppercased
+/// (e.g. for `CARGO_TARGET_<triple>_RUNNER`).
+fn envify(s: &str) -> String {
+    s.chars().map(|c| {
+        match c {
+            '-' => '_',
+            c => c,
+        }
+    }).flat_map(|c| c.to_uppercase()).collect()
+}
+
+/// Some test suites are run inside emulators or on remote devices, and most
+/// of our test binaries are linked dynamically which means we need to ship
+/// the standard library and such to the emulator ahead of time. This step
+/// represents this and is a dependency of all test suites.
+///
+/// Most of the time this is a noop. For some steps such as shipping data to
+/// QEMU we have to build our own tools so we've got conditional dependencies
+/// on those programs as well. Note that the remote test client is built for
+/// the build target (us) and the server is built for the target.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct RemoteCopyLibs {
+    compiler: Compiler,
+    target: Interned<String>,
+}
+
+impl Step for RemoteCopyLibs {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        // No-op for targets that aren't remote-tested.
+        if !build.remote_tested(target) {
+            return
+        }
+
+        builder.ensure(compile::Test { compiler, target });
+
+        println!("REMOTE copy libs to emulator ({})", target);
+        t!(fs::create_dir_all(build.out.join("tmp")));
+
+        let server = builder.ensure(tool::RemoteTestServer { compiler, target });
+
+        // Spawn the emulator and wait for it to come online
+        let tool = builder.tool_exe(Tool::RemoteTestClient);
+        let mut cmd = Command::new(&tool);
+        cmd.arg("spawn-emulator")
+           .arg(target)
+           .arg(&server)
+           .arg(build.out.join("tmp"));
+        if let Some(rootfs) = build.qemu_rootfs(target) {
+            cmd.arg(rootfs);
+        }
+        build.run(&mut cmd);
+
+        // Push all our dylibs to the emulator
+        for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) {
+            let f = t!(f);
+            let name = f.file_name().into_string().unwrap();
+            if util::is_dylib(&name) {
+                build.run(Command::new(&tool)
+                          .arg("push")
+                          .arg(f.path()));
+            }
+        }
+    }
+}
+
+/// Step that validates the distribution tarballs: extracts the plain source
+/// tarball, runs `./configure && make check` from it, and checks that the
+/// rust-src tarball carries all of libstd's dependencies.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Distcheck;
+
+impl Step for Distcheck {
+    type Output = ();
+    const ONLY_BUILD: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("distcheck")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Distcheck);
+    }
+
+    /// Run "distcheck", a 'make check' from a tarball
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+
+        println!("Distcheck");
+        let dir = build.out.join("tmp").join("distcheck");
+        let _ = fs::remove_dir_all(&dir);
+        t!(fs::create_dir_all(&dir));
+
+        // Guarantee that these are built before we begin running.
+        builder.ensure(dist::PlainSourceTarball);
+        builder.ensure(dist::Src);
+
+        let mut cmd = Command::new("tar");
+        cmd.arg("-xzf")
+           .arg(builder.ensure(dist::PlainSourceTarball))
+           .arg("--strip-components=1")
+           .current_dir(&dir);
+        build.run(&mut cmd);
+        build.run(Command::new("./configure")
+                   .args(&build.config.configure_args)
+                   .arg("--enable-vendor")
+                   .current_dir(&dir));
+        build.run(Command::new(build_helper::make(&build.build))
+                   .arg("check")
+                   .current_dir(&dir));
+
+        // Now make sure that rust-src has all of libstd's dependencies
+        println!("Distcheck rust-src");
+        let dir = build.out.join("tmp").join("distcheck-src");
+        let _ = fs::remove_dir_all(&dir);
+        t!(fs::create_dir_all(&dir));
+
+        let mut cmd = Command::new("tar");
+        cmd.arg("-xzf")
+           .arg(builder.ensure(dist::Src))
+           .arg("--strip-components=1")
+           .current_dir(&dir);
+        build.run(&mut cmd);
+
+        // `generate-lockfile` fails if any dependency of libstd is missing.
+        let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
+        build.run(Command::new(&build.initial_cargo)
+                   .arg("generate-lockfile")
+                   .arg("--manifest-path")
+                   .arg(&toml)
+                   .current_dir(&dir));
+    }
+}
+
+/// Step that runs the bootstrap system's own `cargo test` suite.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Bootstrap;
+
+impl Step for Bootstrap {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+    const ONLY_BUILD: bool = true;
+
+    /// Test the build system itself
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let mut cmd = Command::new(&build.initial_cargo);
+        // Use the initial (stage0) toolchain: bootstrap itself is built with it.
+        cmd.arg("test")
+           .current_dir(build.src.join("src/bootstrap"))
+           .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
+           .env("RUSTC_BOOTSTRAP", "1")
+           .env("RUSTC", &build.initial_rustc);
+        if !build.fail_fast {
+            cmd.arg("--no-fail-fast");
+        }
+        cmd.arg("--").args(&build.config.cmd.test_args());
+        try_run(build, &mut cmd);
+    }
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/bootstrap")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Bootstrap);
+    }
+}
CC_x86_64_sun_solaris=x86_64-sun-solaris2.10-gcc \
CXX_x86_64_sun_solaris=x86_64-sun-solaris2.10-g++
-# FIXME(EdSchouten): Remove this once cc ≥1.0.4 has been merged. It can
-# automatically pick the right compiler path.
-ENV \
- AR_x86_64_unknown_cloudabi=x86_64-unknown-cloudabi-ar \
- CC_x86_64_unknown_cloudabi=x86_64-unknown-cloudabi-clang \
- CXX_x86_64_unknown_cloudabi=x86_64-unknown-cloudabi-clang++
-
ENV TARGETS=x86_64-unknown-fuchsia
ENV TARGETS=$TARGETS,aarch64-unknown-fuchsia
ENV TARGETS=$TARGETS,sparcv9-sun-solaris
ln -s ../lib/llvm-5.0/bin/lld /usr/bin/${target}-ld
ln -s ../../${target} /usr/lib/llvm-5.0/${target}
-# FIXME(EdSchouten): Remove this once cc ≥1.0.4 has been merged. It
-# can make use of ${target}-cc and ${target}-c++, without incorrectly
-# assuming it's MSVC.
-ln -s ../lib/llvm-5.0/bin/clang /usr/bin/${target}-clang
-ln -s ../lib/llvm-5.0/bin/clang /usr/bin/${target}-clang++
-
# Install the C++ runtime libraries from CloudABI Ports.
echo deb https://nuxi.nl/distfiles/cloudabi-ports/debian/ cloudabi cloudabi > \
/etc/apt/sources.list.d/cloudabi.list
-FROM ubuntu:16.04
+FROM ubuntu:18.04
RUN apt-get update && apt-get install -y --no-install-recommends \
clang \
An overview of how to use the `rustdoc` command is available [in the docs][1].
Further details are available from the command line with `rustdoc --help`.
-[1]: https://github.com/rust-lang/rust/blob/master/src/doc/book/documentation.md
+[1]: https://github.com/rust-lang/rust/blob/master/src/doc/rustdoc/src/what-is-rustdoc.md
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for BinaryHeapPlace<'a, T>
+unsafe impl<'a, T> Place<T> for BinaryHeapPlace<'a, T>
where T: Clone + Ord {
fn pointer(&mut self) -> *mut T {
self.place.pointer()
#[unstable(feature = "placement_in",
reason = "placement box design is still being worked out.",
issue = "27779")]
-impl<T> Place<T> for IntermediateBox<T> {
+unsafe impl<T> Place<T> for IntermediateBox<T> {
fn pointer(&mut self) -> *mut T {
self.ptr as *mut T
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for FrontPlace<'a, T> {
+unsafe impl<'a, T> Place<T> for FrontPlace<'a, T> {
fn pointer(&mut self) -> *mut T {
unsafe { &mut (*self.node.pointer()).element }
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for BackPlace<'a, T> {
+unsafe impl<'a, T> Place<T> for BackPlace<'a, T> {
fn pointer(&mut self) -> *mut T {
unsafe { &mut (*self.node.pointer()).element }
}
#![feature(pattern)]
#![feature(placement_in_syntax)]
#![feature(rand)]
-#![feature(repr_align)]
#![feature(slice_rotate)]
#![feature(splice)]
#![feature(str_escape)]
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for PlaceBack<'a, T> {
+unsafe impl<'a, T> Place<T> for PlaceBack<'a, T> {
fn pointer(&mut self) -> *mut T {
unsafe { self.vec.as_mut_ptr().offset(self.vec.len as isize) }
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for PlaceBack<'a, T> {
+unsafe impl<'a, T> Place<T> for PlaceBack<'a, T> {
fn pointer(&mut self) -> *mut T {
unsafe { self.vec_deque.ptr().offset(self.vec_deque.head as isize) }
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, T> Place<T> for PlaceFront<'a, T> {
+unsafe impl<'a, T> Place<T> for PlaceFront<'a, T> {
fn pointer(&mut self) -> *mut T {
let tail = self.vec_deque.wrap_sub(self.vec_deque.tail, 1);
unsafe { self.vec_deque.ptr().offset(tail as isize) }
-Subproject commit 0ba07e49264a54cb5bbd4856fcea083bb3fbec15
+Subproject commit 0a95675bab808c49f86208bacc89c5d9c53ac43f
//! Many functions in this module only handle normal numbers. The dec2flt routines conservatively
//! take the universally-correct slow path (Algorithm M) for very small and very large numbers.
//! That algorithm needs only next_float() which does handle subnormals and zeros.
-use u32;
use cmp::Ordering::{Less, Equal, Greater};
-use ops::{Mul, Div, Neg};
+use convert::{TryFrom, TryInto};
+use ops::{Add, Mul, Div, Neg};
use fmt::{Debug, LowerExp};
-use mem::transmute;
use num::diy_float::Fp;
use num::FpCategory::{Infinite, Zero, Subnormal, Normal, Nan};
use num::Float;
///
/// Should **never ever** be implemented for other types or be used outside the dec2flt module.
/// Inherits from `Float` because there is some overlap, but all the reused methods are trivial.
-pub trait RawFloat : Float + Copy + Debug + LowerExp
- + Mul<Output=Self> + Div<Output=Self> + Neg<Output=Self>
+pub trait RawFloat
+ : Float
+ + Copy
+ + Debug
+ + LowerExp
+ + Mul<Output=Self>
+ + Div<Output=Self>
+ + Neg<Output=Self>
+where
+ Self: Float<Bits = <Self as RawFloat>::RawBits>
{
const INFINITY: Self;
const NAN: Self;
const ZERO: Self;
+ /// Same as `Float::Bits` with extra traits.
+ type RawBits: Add<Output = Self::RawBits> + From<u8> + TryFrom<u64>;
+
/// Returns the mantissa, exponent and sign as integers.
fn integer_decode(self) -> (u64, i16, i8);
- /// Get the raw binary representation of the float.
- fn transmute(self) -> u64;
-
- /// Transmute the raw binary representation into a float.
- fn from_bits(bits: u64) -> Self;
-
/// Decode the float.
fn unpack(self) -> Unpacked;
}
impl RawFloat for f32 {
+ type RawBits = u32;
+
const SIG_BITS: u8 = 24;
const EXP_BITS: u8 = 8;
const CEIL_LOG5_OF_MAX_SIG: i16 = 11;
/// Returns the mantissa, exponent and sign as integers.
fn integer_decode(self) -> (u64, i16, i8) {
- let bits: u32 = unsafe { transmute(self) };
+ let bits = self.to_bits();
let sign: i8 = if bits >> 31 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 23) & 0xff) as i16;
let mantissa = if exponent == 0 {
(mantissa as u64, exponent, sign)
}
- fn transmute(self) -> u64 {
- let bits: u32 = unsafe { transmute(self) };
- bits as u64
- }
-
- fn from_bits(bits: u64) -> f32 {
- assert!(bits < u32::MAX as u64, "f32::from_bits: too many bits");
- unsafe { transmute(bits as u32) }
- }
-
fn unpack(self) -> Unpacked {
let (sig, exp, _sig) = self.integer_decode();
Unpacked::new(sig, exp)
impl RawFloat for f64 {
+ type RawBits = u64;
+
const SIG_BITS: u8 = 53;
const EXP_BITS: u8 = 11;
const CEIL_LOG5_OF_MAX_SIG: i16 = 23;
/// Returns the mantissa, exponent and sign as integers.
fn integer_decode(self) -> (u64, i16, i8) {
- let bits: u64 = unsafe { transmute(self) };
+ let bits = self.to_bits();
let sign: i8 = if bits >> 63 == 0 { 1 } else { -1 };
let mut exponent: i16 = ((bits >> 52) & 0x7ff) as i16;
let mantissa = if exponent == 0 {
(mantissa, exponent, sign)
}
- fn transmute(self) -> u64 {
- let bits: u64 = unsafe { transmute(self) };
- bits
- }
-
- fn from_bits(bits: u64) -> f64 {
- unsafe { transmute(bits) }
- }
-
fn unpack(self) -> Unpacked {
let (sig, exp, _sig) = self.integer_decode();
Unpacked::new(sig, exp)
"encode_normal: exponent out of range");
// Leave sign bit at 0 ("+"), our numbers are all positive
let bits = (k_enc as u64) << T::EXPLICIT_SIG_BITS | sig_enc;
- T::from_bits(bits)
+ T::from_bits(bits.try_into().unwrap_or_else(|_| unreachable!()))
}
/// Construct a subnormal. A mantissa of 0 is allowed and constructs zero.
pub fn encode_subnormal<T: RawFloat>(significand: u64) -> T {
assert!(significand < T::MIN_SIG, "encode_subnormal: not actually subnormal");
// Encoded exponent is 0, the sign bit is 0, so we just have to reinterpret the bits.
- T::from_bits(significand)
+ T::from_bits(significand.try_into().unwrap_or_else(|_| unreachable!()))
}
/// Approximate a bignum with an Fp. Rounds within 0.5 ULP with half-to-even.
// too is exactly what we want!
// Finally, f64::MAX + 1 = 7eff...f + 1 = 7ff0...0 = f64::INFINITY.
Zero | Subnormal | Normal => {
- let bits: u64 = x.transmute();
- T::from_bits(bits + 1)
+ T::from_bits(x.to_bits() + T::Bits::from(1u8))
}
}
}
reason = "stable interface is via `impl f{32,64}` in later crates",
issue = "32110")]
impl Float for f32 {
+ type Bits = u32;
+
/// Returns `true` if the number is NaN.
#[inline]
fn is_nan(self) -> bool {
const EXP_MASK: u32 = 0x7f800000;
const MAN_MASK: u32 = 0x007fffff;
- let bits: u32 = unsafe { mem::transmute(self) };
+ let bits = self.to_bits();
match (bits & MAN_MASK, bits & EXP_MASK) {
(0, 0) => Fp::Zero,
(_, 0) => Fp::Subnormal,
fn is_sign_negative(self) -> bool {
// IEEE754 says: isSignMinus(x) is true if and only if x has negative sign. isSignMinus
// applies to zeros and NaNs as well.
- #[repr(C)]
- union F32Bytes {
- f: f32,
- b: u32
- }
- unsafe { F32Bytes { f: self }.b & 0x8000_0000 != 0 }
+ self.to_bits() & 0x8000_0000 != 0
}
/// Returns the reciprocal (multiplicative inverse) of the number.
// multiplying by 1.0. Should switch to the `canonicalize` when it works.
(if other.is_nan() || self < other { self } else { other }) * 1.0
}
+
+ /// Raw transmutation to `u32`.
+ #[inline]
+ fn to_bits(self) -> u32 {
+ unsafe { mem::transmute(self) }
+ }
+
+ /// Raw transmutation from `u32`.
+ #[inline]
+ fn from_bits(v: u32) -> Self {
+ // It turns out the safety issues with sNaN were overblown! Hooray!
+ unsafe { mem::transmute(v) }
+ }
}
reason = "stable interface is via `impl f{32,64}` in later crates",
issue = "32110")]
impl Float for f64 {
+ type Bits = u64;
+
/// Returns `true` if the number is NaN.
#[inline]
fn is_nan(self) -> bool {
const EXP_MASK: u64 = 0x7ff0000000000000;
const MAN_MASK: u64 = 0x000fffffffffffff;
- let bits: u64 = unsafe { mem::transmute(self) };
+ let bits = self.to_bits();
match (bits & MAN_MASK, bits & EXP_MASK) {
(0, 0) => Fp::Zero,
(_, 0) => Fp::Subnormal,
/// negative sign bit and negative infinity.
#[inline]
fn is_sign_negative(self) -> bool {
- #[repr(C)]
- union F64Bytes {
- f: f64,
- b: u64
- }
- unsafe { F64Bytes { f: self }.b & 0x8000_0000_0000_0000 != 0 }
+ self.to_bits() & 0x8000_0000_0000_0000 != 0
}
/// Returns the reciprocal (multiplicative inverse) of the number.
// multiplying by 1.0. Should switch to the `canonicalize` when it works.
(if other.is_nan() || self < other { self } else { other }) * 1.0
}
+
+ /// Raw transmutation to `u64`.
+ #[inline]
+ fn to_bits(self) -> u64 {
+ unsafe { mem::transmute(self) }
+ }
+
+ /// Raw transmutation from `u64`.
+ #[inline]
+ fn from_bits(v: u64) -> Self {
+ // It turns out the safety issues with sNaN were overblown! Hooray!
+ unsafe { mem::transmute(v) }
+ }
}
reason = "stable interface is via `impl f{32,64}` in later crates",
issue = "32110")]
pub trait Float: Sized {
+ /// Type used by `to_bits` and `from_bits`.
+ #[stable(feature = "core_float_bits", since = "1.24.0")]
+ type Bits;
+
/// Returns `true` if this value is NaN and false otherwise.
#[stable(feature = "core", since = "1.6.0")]
fn is_nan(self) -> bool;
/// Returns the minimum of the two numbers.
#[stable(feature = "core_float_min_max", since="1.20.0")]
fn min(self, other: Self) -> Self;
+
+ /// Raw transmutation to integer.
+ #[stable(feature = "core_float_bits", since="1.24.0")]
+ fn to_bits(self) -> Self::Bits;
+ /// Raw transmutation from integer.
+ #[stable(feature = "core_float_bits", since="1.24.0")]
+ fn from_bits(v: Self::Bits) -> Self;
}
macro_rules! from_str_radix_int_impl {
/// implementation of Place to clean up any intermediate state
/// (e.g. deallocate box storage, pop a stack, etc).
#[unstable(feature = "placement_new_protocol", issue = "27779")]
-pub trait Place<Data: ?Sized> {
+pub unsafe trait Place<Data: ?Sized> {
/// Returns the address where the input value will be written.
/// Note that the data at this address is generally uninitialized,
/// and thus one should use `ptr::write` for initializing it.
+ ///
+ /// This function must return a pointer through which a value
+ /// of type `Data` can be written.
fn pointer(&mut self) -> *mut Data;
}
[input] TargetFeaturesWhitelist,
[] TargetFeaturesEnabled(DefId),
+ [] InstanceDefSizeEstimate { instance_def: InstanceDef<'tcx> },
);
trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
E0657, // `impl Trait` can only capture lifetimes bound at the fn level
E0687, // in-band lifetimes cannot be used in `fn`/`Fn` syntax
E0688, // in-band lifetimes cannot be mixed with explicit lifetime binders
+
+ E0906, // closures cannot be static
}
use syntax::abi::Abi;
use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute};
-use syntax::codemap::Spanned;
use syntax_pos::Span;
use hir::*;
use hir::def::Def;
fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) {
walk_variant(self, v, g, item_id)
}
+ fn visit_label(&mut self, label: &'v Label) {
+ walk_label(self, label)
+ }
fn visit_lifetime(&mut self, lifetime: &'v Lifetime) {
walk_lifetime(self, lifetime)
}
}
}
-pub fn walk_opt_name<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_name: Option<Name>) {
- if let Some(name) = opt_name {
- visitor.visit_name(span, name);
- }
-}
-
-pub fn walk_opt_sp_name<'v, V: Visitor<'v>>(visitor: &mut V, opt_sp_name: &Option<Spanned<Name>>) {
- if let Some(ref sp_name) = *opt_sp_name {
- visitor.visit_name(sp_name.span, sp_name.node);
- }
-}
-
/// Walks the contents of a crate. See also `Crate::visit_all_items`.
pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) {
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
walk_list!(visitor, visit_ty, &local.ty);
}
+pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
+ visitor.visit_name(label.span, label.name);
+}
+
pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
visitor.visit_id(lifetime.id);
match lifetime.name {
match item.node {
ItemExternCrate(opt_name) => {
visitor.visit_id(item.id);
- walk_opt_name(visitor, item.span, opt_name)
+ if let Some(name) = opt_name {
+ visitor.visit_name(item.span, name);
+ }
}
ItemUse(ref path, _) => {
visitor.visit_id(item.id);
visitor.visit_expr(if_block);
walk_list!(visitor, visit_expr, optional_else);
}
- ExprWhile(ref subexpression, ref block, ref opt_sp_name) => {
+ ExprWhile(ref subexpression, ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
- walk_opt_sp_name(visitor, opt_sp_name);
}
- ExprLoop(ref block, ref opt_sp_name, _) => {
+ ExprLoop(ref block, ref opt_label, _) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_block(block);
- walk_opt_sp_name(visitor, opt_sp_name);
}
ExprMatch(ref subexpression, ref arms, _) => {
visitor.visit_expr(subexpression);
ExprPath(ref qpath) => {
visitor.visit_qpath(qpath, expression.id, expression.span);
}
- ExprBreak(label, ref opt_expr) => {
- label.ident.map(|ident| {
- match label.target_id {
+ ExprBreak(ref destination, ref opt_expr) => {
+ if let Some(ref label) = destination.label {
+ visitor.visit_label(label);
+ match destination.target_id {
ScopeTarget::Block(node_id) |
ScopeTarget::Loop(LoopIdResult::Ok(node_id)) =>
visitor.visit_def_mention(Def::Label(node_id)),
ScopeTarget::Loop(LoopIdResult::Err(_)) => {},
};
- visitor.visit_name(ident.span, ident.node.name);
- });
+ }
walk_list!(visitor, visit_expr, opt_expr);
}
- ExprAgain(label) => {
- label.ident.map(|ident| {
- match label.target_id {
+ ExprAgain(ref destination) => {
+ if let Some(ref label) = destination.label {
+ visitor.visit_label(label);
+ match destination.target_id {
ScopeTarget::Block(_) => bug!("can't `continue` to a non-loop block"),
ScopeTarget::Loop(LoopIdResult::Ok(node_id)) =>
visitor.visit_def_mention(Def::Label(node_id)),
ScopeTarget::Loop(LoopIdResult::Err(_)) => {},
};
- visitor.visit_name(ident.span, ident.node.name);
- });
+ }
}
ExprRet(ref optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
*self.name_map.entry(ident).or_insert_with(|| Symbol::from_ident(ident))
}
- fn lower_opt_sp_ident(&mut self, o_id: Option<Spanned<Ident>>) -> Option<Spanned<Name>> {
- o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name))
+ fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> {
+ label.map(|label| hir::Label { name: label.ident.name, span: label.span })
}
- fn lower_loop_destination(&mut self, destination: Option<(NodeId, Spanned<Ident>)>)
+ fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>)
-> hir::Destination
{
match destination {
- Some((id, label_ident)) => {
+ Some((id, label)) => {
let target = if let Def::Label(loop_id) = self.expect_full_def(id) {
hir::LoopIdResult::Ok(self.lower_node_id(loop_id).node_id)
} else {
hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel)
};
hir::Destination {
- ident: Some(label_ident),
+ label: self.lower_label(Some(label)),
target_id: hir::ScopeTarget::Loop(target),
}
},
.map(|innermost_loop_id| *innermost_loop_id);
hir::Destination {
- ident: None,
+ label: None,
target_id: hir::ScopeTarget::Loop(
loop_id.map(|id| Ok(self.lower_node_id(id).node_id))
.unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
hir::ExprIf(P(self.lower_expr(cond)), P(then_expr), else_opt)
}
- ExprKind::While(ref cond, ref body, opt_ident) => {
+ ExprKind::While(ref cond, ref body, opt_label) => {
self.with_loop_scope(e.id, |this|
hir::ExprWhile(
this.with_loop_condition_scope(|this| P(this.lower_expr(cond))),
this.lower_block(body, false),
- this.lower_opt_sp_ident(opt_ident)))
+ this.lower_label(opt_label)))
}
- ExprKind::Loop(ref body, opt_ident) => {
+ ExprKind::Loop(ref body, opt_label) => {
self.with_loop_scope(e.id, |this|
hir::ExprLoop(this.lower_block(body, false),
- this.lower_opt_sp_ident(opt_ident),
+ this.lower_label(opt_label),
hir::LoopSource::Loop))
}
ExprKind::Catch(ref body) => {
arms.iter().map(|x| self.lower_arm(x)).collect(),
hir::MatchSource::Normal)
}
- ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => {
+ ExprKind::Closure(capture_clause, movability, ref decl, ref body, fn_decl_span) => {
self.with_new_scopes(|this| {
this.with_parent_def(e.id, |this| {
let mut is_generator = false;
is_generator = this.is_generator;
e
});
- if is_generator && !decl.inputs.is_empty() {
- span_err!(this.sess, fn_decl_span, E0628,
- "generators cannot have explicit arguments");
- this.sess.abort_if_errors();
- }
+ let generator_option = if is_generator {
+ if !decl.inputs.is_empty() {
+ span_err!(this.sess, fn_decl_span, E0628,
+ "generators cannot have explicit arguments");
+ this.sess.abort_if_errors();
+ }
+ Some(match movability {
+ Movability::Movable => hir::GeneratorMovability::Movable,
+ Movability::Static => hir::GeneratorMovability::Static,
+ })
+ } else {
+ if movability == Movability::Static {
+ span_err!(this.sess, fn_decl_span, E0906,
+ "closures cannot be static");
+ }
+ None
+ };
hir::ExprClosure(this.lower_capture_clause(capture_clause),
this.lower_fn_decl(decl, None, false),
body_id,
fn_decl_span,
- is_generator)
+ generator_option)
})
})
}
(&None, &Some(..), Closed) => "RangeToInclusive",
(&Some(..), &Some(..), Closed) => "RangeInclusive",
(_, &None, Closed) =>
- panic!(self.diagnostic().span_fatal(
- e.span, "inclusive range with no end")),
+ self.diagnostic().span_fatal(
+ e.span, "inclusive range with no end").raise(),
};
let fields =
hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional,
ImplTraitContext::Disallowed))
}
- ExprKind::Break(opt_ident, ref opt_expr) => {
- let label_result = if self.is_in_loop_condition && opt_ident.is_none() {
+ ExprKind::Break(opt_label, ref opt_expr) => {
+ let destination = if self.is_in_loop_condition && opt_label.is_none() {
hir::Destination {
- ident: opt_ident,
+ label: None,
target_id: hir::ScopeTarget::Loop(
Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
}
} else {
- self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident)))
+ self.lower_loop_destination(opt_label.map(|label| (e.id, label)))
};
hir::ExprBreak(
- label_result,
+ destination,
opt_expr.as_ref().map(|x| P(self.lower_expr(x))))
}
- ExprKind::Continue(opt_ident) =>
+ ExprKind::Continue(opt_label) =>
hir::ExprAgain(
- if self.is_in_loop_condition && opt_ident.is_none() {
+ if self.is_in_loop_condition && opt_label.is_none() {
hir::Destination {
- ident: opt_ident,
+ label: None,
target_id: hir::ScopeTarget::Loop(Err(
hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
}
} else {
- self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident)))
+ self.lower_loop_destination(opt_label.map(|label| (e.id, label)))
}),
ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))),
ExprKind::InlineAsm(ref asm) => {
// Desugar ExprWhileLet
// From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
- ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => {
+ ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_label) => {
// to:
//
// [opt_ident]: loop {
// `[opt_ident]: loop { ... }`
let loop_block = P(self.block_expr(P(match_expr)));
- let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
+ let loop_expr = hir::ExprLoop(loop_block, self.lower_label(opt_label),
hir::LoopSource::WhileLet);
// add attributes to the outer returned expr node
loop_expr
// Desugar ExprForLoop
// From: `[opt_ident]: for <pat> in <head> <body>`
- ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => {
+ ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
// to:
//
// {
None));
// `[opt_ident]: loop { ... }`
- let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
+ let loop_expr = hir::ExprLoop(loop_block, self.lower_label(opt_label),
hir::LoopSource::ForLoop);
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
let loop_expr = P(hir::Expr {
e.span,
hir::ExprBreak(
hir::Destination {
- ident: None,
+ label: None,
target_id: hir::ScopeTarget::Block(catch_node),
},
Some(from_err_expr)
use syntax_pos::{Span, DUMMY_SP};
use syntax::codemap::{self, Spanned};
use syntax::abi::Abi;
-use syntax::ast::{self, Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
+use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
use syntax::ext::hygiene::SyntaxContext;
use syntax::ptr::P;
pub const DUMMY_ITEM_LOCAL_ID: ItemLocalId = ItemLocalId(!0);
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
+pub struct Label {
+ pub name: Name,
+ pub span: Span,
+}
+
+impl fmt::Debug for Label {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "label({:?})", self.name)
+ }
+}
+
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
pub id: NodeId,
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
- ExprWhile(P<Expr>, P<Block>, Option<Spanned<Name>>),
+ ExprWhile(P<Expr>, P<Block>, Option<Label>),
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
- ExprLoop(P<Block>, Option<Spanned<Name>>, LoopSource),
+ ExprLoop(P<Block>, Option<Label>, LoopSource),
/// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind.
ExprMatch(P<Expr>, HirVec<Arm>, MatchSource),
///
/// This may also be a generator literal, indicated by the final boolean,
/// in that case there is an GeneratorClause.
- ExprClosure(CaptureClause, P<FnDecl>, BodyId, Span, bool),
+ ExprClosure(CaptureClause, P<FnDecl>, BodyId, Span, Option<GeneratorMovability>),
/// A block (`{ ... }`)
ExprBlock(P<Block>),
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct Destination {
// This is `Some(_)` iff there is an explicit user-specified `label
- pub ident: Option<Spanned<Ident>>,
+ pub label: Option<Label>,
// These errors are caught and then reported during the diagnostics pass in
// librustc_passes/loops.rs
pub target_id: ScopeTarget,
}
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+pub enum GeneratorMovability {
+ Static,
+ Movable,
+}
+
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum CaptureClause {
CaptureByValue,
hir::ExprIf(ref test, ref blk, ref elseopt) => {
self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
}
- hir::ExprWhile(ref test, ref blk, opt_sp_name) => {
- if let Some(sp_name) = opt_sp_name {
- self.print_name(sp_name.node)?;
+ hir::ExprWhile(ref test, ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_name(label.name)?;
self.word_space(":")?;
}
self.head("while")?;
self.s.space()?;
self.print_block(&blk)?;
}
- hir::ExprLoop(ref blk, opt_sp_name, _) => {
- if let Some(sp_name) = opt_sp_name {
- self.print_name(sp_name.node)?;
+ hir::ExprLoop(ref blk, opt_label, _) => {
+ if let Some(label) = opt_label {
+ self.print_name(label.name)?;
self.word_space(":")?;
}
self.head("loop")?;
hir::ExprPath(ref qpath) => {
self.print_qpath(qpath, true)?
}
- hir::ExprBreak(label, ref opt_expr) => {
+ hir::ExprBreak(destination, ref opt_expr) => {
self.s.word("break")?;
self.s.space()?;
- if let Some(label_ident) = label.ident {
- self.print_name(label_ident.node.name)?;
+ if let Some(label) = destination.label {
+ self.print_name(label.name)?;
self.s.space()?;
}
if let Some(ref expr) = *opt_expr {
self.s.space()?;
}
}
- hir::ExprAgain(label) => {
+ hir::ExprAgain(destination) => {
self.s.word("continue")?;
self.s.space()?;
- if let Some(label_ident) = label.ident {
- self.print_name(label_ident.node.name)?;
+ if let Some(label) = destination.label {
+ self.print_name(label.name)?;
self.s.space()?
}
}
Name(name)
});
+impl_stable_hash_for!(struct hir::Label {
+ span,
+ name
+});
+
impl_stable_hash_for!(struct hir::Lifetime {
id,
span,
}
}
+impl_stable_hash_for!(enum hir::GeneratorMovability {
+ Static,
+ Movable
+});
+
impl_stable_hash_for!(enum hir::CaptureClause {
CaptureByValue,
CaptureByRef
impl_stable_hash_for_spanned!(usize);
impl_stable_hash_for!(struct hir::Destination {
- ident,
+ label,
target_id
});
impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs });
-impl_stable_hash_for!(struct ty::GeneratorInterior<'tcx> { witness });
+impl_stable_hash_for!(struct ty::GeneratorInterior<'tcx> { witness, movable });
impl_stable_hash_for!(struct ty::GenericPredicates<'tcx> {
parent,
closure_substs.hash_stable(hcx, hasher);
interior.hash_stable(hcx, hasher);
}
+ TyGeneratorWitness(types) => {
+ types.hash_stable(hcx, hasher)
+ }
TyTuple(inner_tys, from_diverging_type_var) => {
inner_tys.hash_stable(hcx, hasher);
from_diverging_type_var.hash_stable(hcx, hasher);
if let Some(node_id) = hir.as_local_node_id(free_region.scope) {
match hir.get(node_id) {
NodeExpr(Expr {
- node: ExprClosure(_, _, _, closure_span, false),
+ node: ExprClosure(_, _, _, closure_span, None),
..
}) => {
let sup_sp = sup_origin.span();
ty::TyForeign(..) |
ty::TyParam(..) |
ty::TyClosure(..) |
+ ty::TyGeneratorWitness(..) |
ty::TyAnon(..) => {
t.super_fold_with(self)
}
/// Describes constraints between the region variables and other
/// regions, as well as other conditions that must be verified, or
/// assumptions that can be made.
-#[derive(Default)]
+#[derive(Debug, Default)]
pub struct RegionConstraintData<'tcx> {
/// Constraints of the form `A <= B`, where either `A` or `B` can
/// be a region variable (or neither, as it happens).
use syntax::ast;
use syntax::symbol::Symbol;
+use syntax_pos::Span;
use hir::itemlikevisit::ItemLikeVisitor;
use hir;
impl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
- if let Some(value) = extract(&item.attrs) {
+ if let Some((value, span)) = extract(&item.attrs) {
let item_index = self.item_refs.get(&*value.as_str()).cloned();
if let Some(item_index) = item_index {
let def_id = self.tcx.hir.local_def_id(item.id);
self.collect_item(item_index, def_id);
} else {
- let span = self.tcx.hir.span(item.id);
- span_err!(self.tcx.sess, span, E0522,
- "definition of an unknown language item: `{}`.",
- value);
+ let mut err = struct_span_err!(self.tcx.sess, span, E0522,
+ "definition of an unknown language item: `{}`",
+ value);
+ err.span_label(span, format!("definition of unknown language item `{}`", value));
+ err.emit();
}
}
}
}
}
-pub fn extract(attrs: &[ast::Attribute]) -> Option<Symbol> {
+pub fn extract(attrs: &[ast::Attribute]) -> Option<(Symbol, Span)> {
for attribute in attrs {
if attribute.check_name("lang") {
if let Some(value) = attribute.value_str() {
- return Some(value)
+ return Some((value, attribute.span));
}
}
}
terminating_scopes: FxHashSet<hir::ItemLocalId>,
}
+struct ExprLocatorVisitor {
+ id: ast::NodeId,
+ result: Option<usize>,
+ expr_and_pat_count: usize,
+}
+
+// This visitor has to have the same visit_expr calls as RegionResolutionVisitor
+// since `expr_count` is compared against the results there.
+impl<'tcx> Visitor<'tcx> for ExprLocatorVisitor {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+ NestedVisitorMap::None
+ }
+
+ fn visit_pat(&mut self, pat: &'tcx Pat) {
+ self.expr_and_pat_count += 1;
+
+ intravisit::walk_pat(self, pat);
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr) {
+ debug!("ExprLocatorVisitor - pre-increment {} expr = {:?}",
+ self.expr_and_pat_count,
+ expr);
+
+ intravisit::walk_expr(self, expr);
+
+ self.expr_and_pat_count += 1;
+
+ debug!("ExprLocatorVisitor - post-increment {} expr = {:?}",
+ self.expr_and_pat_count,
+ expr);
+
+ if expr.id == self.id {
+ self.result = Some(self.expr_and_pat_count);
+ }
+ }
+}
impl<'tcx> ScopeTree {
pub fn record_scope_parent(&mut self, child: Scope, parent: Option<Scope>) {
return true;
}
+ /// Returns the id of the innermost containing body
+ pub fn containing_body(&self, mut scope: Scope)-> Option<hir::ItemLocalId> {
+ loop {
+ if let ScopeData::CallSite(id) = scope.data() {
+ return Some(id);
+ }
+
+ match self.opt_encl_scope(scope) {
+ None => return None,
+ Some(parent) => scope = parent,
+ }
+ }
+ }
+
/// Finds the nearest common ancestor (if any) of two scopes. That is, finds the smallest
/// scope which is greater than or equal to both `scope_a` and `scope_b`.
pub fn nearest_common_ancestor(&self,
self.yield_in_scope.get(&scope).cloned()
}
+ /// Checks whether the given scope contains a `yield` and if that yield could execute
+ /// after `expr`. If so, it returns the span of that `yield`.
+ /// `scope` must be inside the body.
+ pub fn yield_in_scope_for_expr(&self,
+ scope: Scope,
+ expr: ast::NodeId,
+ body: &'tcx hir::Body) -> Option<Span> {
+ self.yield_in_scope(scope).and_then(|(span, count)| {
+ let mut visitor = ExprLocatorVisitor {
+ id: expr,
+ result: None,
+ expr_and_pat_count: 0,
+ };
+ visitor.visit_body(body);
+ if count >= visitor.result.unwrap() {
+ Some(span)
+ } else {
+ None
+ }
+ })
+ }
+
/// Gives the number of expressions visited in a body.
/// Used to sanity check visit_expr call count when
/// calculating generator interiors.
record_var_lifetime(visitor, pat.hir_id.local_id, pat.span);
}
+ debug!("resolve_pat - pre-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
+
intravisit::walk_pat(visitor, pat);
visitor.expr_and_pat_count += 1;
+
+ debug!("resolve_pat - post-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
}
fn resolve_stmt<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, stmt: &'tcx hir::Stmt) {
}
fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &'tcx hir::Expr) {
- debug!("resolve_expr(expr.id={:?})", expr.id);
+ debug!("resolve_expr - pre-increment {} expr = {:?}", visitor.expr_and_pat_count, expr);
let prev_cx = visitor.cx;
visitor.enter_node_scope_with_dtor(expr.hir_id.local_id);
visitor.expr_and_pat_count += 1;
+ debug!("resolve_expr post-increment {}, expr = {:?}", visitor.expr_and_pat_count, expr);
+
if let hir::ExprYield(..) = expr.node {
// Mark this expr's scope and all parent scopes as containing `yield`.
let mut scope = Scope::Node(expr.hir_id.local_id);
}
}
- if let Some(pat) = pat {
- visitor.visit_pat(pat);
- }
+ // Make sure we visit the initializer first, so expr_and_pat_count remains correct
if let Some(expr) = init {
visitor.visit_expr(expr);
}
+ if let Some(pat) = pat {
+ visitor.visit_pat(pat);
+ }
/// True if `pat` match the `P&` nonterminal:
///
fn expression_label(ex: &hir::Expr) -> Option<(ast::Name, Span)> {
match ex.node {
hir::ExprWhile(.., Some(label)) | hir::ExprLoop(_, Some(label), _) => {
- Some((label.node, label.span))
+ Some((label.name, label.span))
}
_ => None,
}
}
pub fn link_name(attrs: &[ast::Attribute]) -> Option<Symbol> {
- lang_items::extract(attrs).and_then(|name| {
+ lang_items::extract(attrs).and_then(|(name, _)| {
$(if name == stringify!($name) {
Some(Symbol::intern(stringify!($sym)))
} else)* {
}
fn visit_foreign_item(&mut self, i: &hir::ForeignItem) {
- if let Some(lang_item) = lang_items::extract(&i.attrs) {
+ if let Some((lang_item, _)) = lang_items::extract(&i.attrs) {
self.register(&lang_item.as_str(), i.span);
}
intravisit::walk_foreign_item(self, i)
use syntax::ast::NodeId;
use syntax::symbol::InternedString;
-use ty::Instance;
+use ty::{Instance, TyCtxt};
use util::nodemap::FxHashMap;
use rustc_data_structures::base_n;
use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult,
GlobalAsm(NodeId),
}
+impl<'tcx> MonoItem<'tcx> {
+ pub fn size_estimate<'a>(&self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) -> usize {
+ match *self {
+ MonoItem::Fn(instance) => {
+ // Estimate the size of a function based on how many statements
+ // it contains.
+ tcx.instance_def_size_estimate(instance.def)
+ },
+ // Conservatively estimate the size of a static declaration
+ // or assembly to be 1.
+ MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1,
+ }
+ }
+}
+
impl<'tcx> HashStable<StableHashingContext<'tcx>> for MonoItem<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'tcx>,
/// as well as the crate name and disambiguator.
name: InternedString,
items: FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)>,
+ size_estimate: Option<usize>,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
CodegenUnit {
name: name,
items: FxHashMap(),
+ size_estimate: None,
}
}
let hash = hash & ((1u128 << 80) - 1);
base_n::encode(hash, base_n::CASE_INSENSITIVE)
}
+
+ pub fn estimate_size<'a>(&mut self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) {
+ // Estimate the size of a codegen unit as (approximately) the number of MIR
+ // statements it corresponds to.
+ self.size_estimate = Some(self.items.keys().map(|mi| mi.size_estimate(tcx)).sum());
+ }
+
+ pub fn size_estimate(&self) -> usize {
+ // Should only be called if `estimate_size` has previously been called.
+ self.size_estimate.expect("estimate_size must be called before getting a size_estimate")
+ }
+
+ pub fn modify_size_estimate(&mut self, delta: usize) {
+ assert!(self.size_estimate.is_some());
+ if let Some(size_estimate) = self.size_estimate {
+ self.size_estimate = Some(size_estimate + delta);
+ }
+ }
}
impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> {
let CodegenUnit {
ref items,
name,
+ // The size estimate is not relevant to the hash
+ size_estimate: _,
} = *self;
name.hash_stable(hcx, hasher);
SizeMin, // -Oz
}
/// How much and what flavor of link-time optimization (LTO) to perform,
/// as selected by `-C lto[=thin|fat]` or computed from the defaults.
#[derive(Clone, Copy, PartialEq, Hash)]
pub enum Lto {
    /// Don't do any LTO whatsoever
    No,

    /// Do a full crate graph LTO. The flavor is determined by the compiler
    /// (currently the default is "fat").
    Yes,

    /// Do a full crate graph LTO with ThinLTO
    Thin,

    /// Do a local graph LTO with ThinLTO (only relevant for multiple codegen
    /// units).
    ThinLocal,

    /// Do a full crate graph LTO with "fat" LTO
    Fat,
}
+
#[derive(Clone, Copy, PartialEq, Hash)]
pub enum DebugInfoLevel {
NoDebugInfo,
// commands like `--emit llvm-ir` which they're often incompatible with
// if we otherwise use the defaults of rustc.
cli_forced_codegen_units: Option<usize> [UNTRACKED],
- cli_forced_thinlto: Option<bool> [UNTRACKED],
+ cli_forced_thinlto_off: bool [UNTRACKED],
}
);
debug_assertions: true,
actually_rustdoc: false,
cli_forced_codegen_units: None,
- cli_forced_thinlto: None,
+ cli_forced_thinlto_off: false,
}
}
Some("crate=integer");
pub const parse_unpretty: Option<&'static str> =
Some("`string` or `string=string`");
+ pub const parse_lto: Option<&'static str> =
+ Some("one of `thin`, `fat`, or omitted");
}
#[allow(dead_code)]
mod $mod_set {
- use super::{$struct_name, Passes, SomePasses, AllPasses, Sanitizer};
+ use super::{$struct_name, Passes, SomePasses, AllPasses, Sanitizer, Lto};
use rustc_back::{LinkerFlavor, PanicStrategy, RelroLevel};
use std::path::PathBuf;
_ => false,
}
}
+
+ fn parse_lto(slot: &mut Lto, v: Option<&str>) -> bool {
+ *slot = match v {
+ None => Lto::Yes,
+ Some("thin") => Lto::Thin,
+ Some("fat") => Lto::Fat,
+ Some(_) => return false,
+ };
+ true
+ }
}
) }
"extra arguments to append to the linker invocation (space separated)"),
link_dead_code: bool = (false, parse_bool, [UNTRACKED],
"don't let linker strip dead code (turning it on can be used for code coverage)"),
- lto: bool = (false, parse_bool, [TRACKED],
+ lto: Lto = (Lto::No, parse_lto, [TRACKED],
"perform LLVM link-time optimizations"),
target_cpu: Option<String> = (None, parse_opt_string, [TRACKED],
"select target processor (rustc --print target-cpus for details)"),
"treat all errors that occur as bugs"),
external_macro_backtrace: bool = (false, parse_bool, [UNTRACKED],
"show macro backtraces even for non-local macros"),
+ teach: bool = (false, parse_bool, [TRACKED],
+ "show extended diagnostic help"),
continue_parse_after_error: bool = (false, parse_bool, [TRACKED],
"attempt to recover from parse errors (experimental)"),
incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
sp.struct_fatal(&format!("Error loading target specification: {}", e))
.help("Use `--print target-list` for a list of built-in targets")
.emit();
- panic!(FatalError);
+ FatalError.raise();
}
};
"16" => (ast::IntTy::I16, ast::UintTy::U16),
"32" => (ast::IntTy::I32, ast::UintTy::U32),
"64" => (ast::IntTy::I64, ast::UintTy::U64),
- w => panic!(sp.fatal(&format!("target specification was invalid: \
- unrecognized target-pointer-width {}", w))),
+ w => sp.fatal(&format!("target specification was invalid: \
+ unrecognized target-pointer-width {}", w)).raise(),
};
Config {
let mut debugging_opts = build_debugging_options(matches, error_format);
if !debugging_opts.unstable_options && error_format == ErrorOutputType::Json(true) {
- early_error(ErrorOutputType::Json(false),
- "--error-format=pretty-json is unstable");
+ early_error(ErrorOutputType::Json(false), "--error-format=pretty-json is unstable");
}
let mut output_types = BTreeMap::new();
let mut cg = build_codegen_options(matches, error_format);
let mut codegen_units = cg.codegen_units;
- let mut thinlto = None;
+ let mut disable_thinlto = false;
// Issue #30063: if user requests llvm-related output to one
// particular path, disable codegen-units.
}
early_warn(error_format, "resetting to default -C codegen-units=1");
codegen_units = Some(1);
- thinlto = Some(false);
+ disable_thinlto = true;
}
}
_ => {
codegen_units = Some(1);
- thinlto = Some(false);
+ disable_thinlto = true;
}
}
}
(&None, &None) => None,
}.map(|m| PathBuf::from(m));
- if cg.lto && incremental.is_some() {
+ if cg.lto != Lto::No && incremental.is_some() {
early_error(error_format, "can't perform LTO when compiling incrementally");
}
debug_assertions,
actually_rustdoc: false,
cli_forced_codegen_units: codegen_units,
- cli_forced_thinlto: thinlto,
+ cli_forced_thinlto_off: disable_thinlto,
},
cfg)
}
use std::hash::Hash;
use std::path::PathBuf;
use std::collections::hash_map::DefaultHasher;
- use super::{Passes, CrateType, OptLevel, DebugInfoLevel,
+ use super::{Passes, CrateType, OptLevel, DebugInfoLevel, Lto,
OutputTypes, Externs, ErrorOutputType, Sanitizer};
use syntax::feature_gate::UnstableFeatures;
use rustc_back::{PanicStrategy, RelroLevel};
impl_dep_tracking_hash_via_hash!(RelroLevel);
impl_dep_tracking_hash_via_hash!(Passes);
impl_dep_tracking_hash_via_hash!(OptLevel);
+ impl_dep_tracking_hash_via_hash!(Lto);
impl_dep_tracking_hash_via_hash!(DebugInfoLevel);
impl_dep_tracking_hash_via_hash!(UnstableFeatures);
impl_dep_tracking_hash_via_hash!(Externs);
use lint;
use middle::cstore;
use session::config::{build_configuration, build_session_options_and_crate_config};
+ use session::config::Lto;
use session::build_session;
use std::collections::{BTreeMap, BTreeSet};
use std::iter::FromIterator;
// Make sure changing a [TRACKED] option changes the hash
opts = reference.clone();
- opts.cg.lto = true;
+ opts.cg.lto = Lto::Fat;
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
opts = reference.clone();
}
pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
- panic!(self.diagnostic().span_fatal(sp, msg))
+ self.diagnostic().span_fatal(sp, msg).raise()
}
pub fn span_fatal_with_code<S: Into<MultiSpan>>(
&self,
msg: &str,
code: DiagnosticId,
) -> ! {
- panic!(self.diagnostic().span_fatal_with_code(sp, msg, code))
+ self.diagnostic().span_fatal_with_code(sp, msg, code).raise()
}
pub fn fatal(&self, msg: &str) -> ! {
- panic!(self.diagnostic().fatal(msg))
+ self.diagnostic().fatal(msg).raise()
}
pub fn span_err_or_warn<S: Into<MultiSpan>>(&self, is_warning: bool, sp: S, msg: &str) {
if is_warning {
self.use_mir()
}
    /// Calculates the flavor of LTO to use for this compilation.
    pub fn lto(&self) -> config::Lto {
        // If our target has codegen requirements ignore the command line
        if self.target.target.options.requires_lto {
            return config::Lto::Fat
        }

        // If the user specified something, return that. If they only said `-C
        // lto` and we've for whatever reason forced off ThinLTO via the CLI,
        // then ensure we can't use a ThinLTO.
        match self.opts.cg.lto {
            config::Lto::No => {}
            config::Lto::Yes if self.opts.cli_forced_thinlto_off => {
                return config::Lto::Fat
            }
            other => return other,
        }

        // Ok at this point the target doesn't require anything and the user
        // hasn't asked for anything. Our next decision is whether or not
        // we enable "auto" ThinLTO where we use multiple codegen units and
        // then do ThinLTO over those codegen units. The logic below will
        // either return `No` or `ThinLocal`.

        // If processing command line options determined that we're incompatible
        // with ThinLTO (e.g. `-C lto --emit llvm-ir`) then return that option.
        if self.opts.cli_forced_thinlto_off {
            return config::Lto::No
        }

        // If `-Z thinlto` specified process that, but note that this is mostly
        // a deprecated option now that `-C lto=thin` exists.
        if let Some(enabled) = self.opts.debugging_opts.thinlto {
            if enabled {
                return config::Lto::ThinLocal
            } else {
                return config::Lto::No
            }
        }

        // If there's only one codegen unit then there's no need for ThinLTO,
        // so just return `No`.
        if self.codegen_units() == 1 {
            return config::Lto::No
        }

        // Right now ThinLTO isn't compatible with incremental compilation.
        if self.opts.incremental.is_some() {
            return config::Lto::No
        }

        // Now we're in "defaults" territory. By default we enable ThinLTO for
        // optimized compiles (anything greater than O0).
        match self.opts.optimize {
            config::OptLevel::No => config::Lto::No,
            _ => config::Lto::ThinLocal,
        }
    }
+
/// Returns the panic strategy for this compile session. If the user explicitly selected one
/// using '-C panic', use that, otherwise use the panic strategy defined by the target.
pub fn panic_strategy(&self) -> PanicStrategy {
16
}
- /// Returns whether ThinLTO is enabled for this compilation
- pub fn thinlto(&self) -> bool {
- // If processing command line options determined that we're incompatible
- // with ThinLTO (e.g. `-C lto --emit llvm-ir`) then return that option.
- if let Some(enabled) = self.opts.cli_forced_thinlto {
- return enabled
- }
-
- // If explicitly specified, use that with the next highest priority
- if let Some(enabled) = self.opts.debugging_opts.thinlto {
- return enabled
- }
-
- // If there's only one codegen unit and LTO isn't enabled then there's
- // no need for ThinLTO so just return false.
- if self.codegen_units() == 1 && !self.lto() {
- return false
- }
-
- // Right now ThinLTO isn't compatible with incremental compilation.
- if self.opts.incremental.is_some() {
- return false
- }
-
- // Now we're in "defaults" territory. By default we enable ThinLTO for
- // optimized compiles (anything greater than O0).
- match self.opts.optimize {
- config::OptLevel::No => false,
- _ => true,
- }
    /// Returns true if extended diagnostic "teach" output should be shown for
    /// `code`: the `-Z teach` flag is enabled and this diagnostic code has not
    /// already been emitted, so the extended help is shown only once per code.
    pub fn teach(&self, code: &DiagnosticId) -> bool {
        self.opts.debugging_opts.teach && !self.parse_sess.span_diagnostic.code_emitted(code)
    }
}
let host = match Target::search(config::host_triple()) {
Ok(t) => t,
Err(e) => {
- panic!(span_diagnostic.fatal(&format!("Error loading host specification: {}", e)));
+ span_diagnostic.fatal(&format!("Error loading host specification: {}", e)).raise();
}
};
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
let working_dir = match env::current_dir() {
Ok(dir) => dir,
Err(e) => {
- panic!(p_s.span_diagnostic.fatal(&format!("Current directory is invalid: {}", e)))
+ p_s.span_diagnostic.fatal(&format!("Current directory is invalid: {}", e)).raise()
}
};
let working_dir = file_path_mapping.map_prefix(working_dir);
};
let handler = errors::Handler::with_emitter(true, false, emitter);
handler.emit(&MultiSpan::new(), msg, errors::Level::Fatal);
- panic!(errors::FatalError);
+ errors::FatalError.raise();
}
pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
true
}
- ty::TyClosure(..) | ty::TyGenerator(..) | ty::TyAnon(..) => {
+ ty::TyClosure(..) |
+ ty::TyGenerator(..) |
+ ty::TyGeneratorWitness(..) |
+ ty::TyAnon(..) => {
bug!("ty_is_local invoked on unexpected type: {:?}", ty)
}
}
},
ty::TyGenerator(..) => Some(18),
ty::TyForeign(..) => Some(19),
+ ty::TyGeneratorWitness(..) => Some(20),
ty::TyInfer(..) | ty::TyError => None
}
}
pub err: ty::error::TypeError<'tcx>
}
-#[derive(PartialEq, Eq, Debug)]
+#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
enum ProjectionTyCandidate<'tcx> {
// from a where-clause in the env or object type
ParamEnv(ty::PolyProjectionPredicate<'tcx>),
Reveal::UserFacing => ty,
Reveal::All => {
+ let recursion_limit = self.tcx().sess.recursion_limit.get();
+ if self.depth >= recursion_limit {
+ let obligation = Obligation::with_depth(
+ self.cause.clone(),
+ recursion_limit,
+ self.param_env,
+ ty,
+ );
+ self.selcx.infcx().report_overflow_error(&obligation, true);
+ }
+
let generic_ty = self.tcx().type_of(def_id);
let concrete_ty = generic_ty.subst(self.tcx(), substs);
- self.fold_ty(concrete_ty)
+ self.depth += 1;
+ let folded_ty = self.fold_ty(concrete_ty);
+ self.depth -= 1;
+ folded_ty
}
}
}
// Drop duplicates.
//
// Note: `candidates.vec` seems to be on the critical path of the
- // compiler. Replacing it with an hash set was also tried, which would
- // render the following dedup unnecessary. It led to cleaner code but
- // prolonged compiling time of `librustc` from 5m30s to 6m in one test, or
- // ~9% performance lost.
- if candidates.vec.len() > 1 {
- let mut i = 0;
- while i < candidates.vec.len() {
- let has_dup = (0..i).any(|j| candidates.vec[i] == candidates.vec[j]);
- if has_dup {
- candidates.vec.swap_remove(i);
- } else {
- i += 1;
- }
- }
- }
+ // compiler. Replacing it with a HashSet was also tried, which would
+ // render the following dedup unnecessary. The original comment indicated
+ // that it was 9% slower, but that data is now obsolete and a new
+ // benchmark should be performed.
+ candidates.vec.sort_unstable();
+ candidates.vec.dedup();
// Prefer where-clauses. As in select, if there are multiple
// candidates, we prefer where-clause candidates over impls. This
ty::TyUint(_) | ty::TyInt(_) | ty::TyBool | ty::TyFloat(_) |
ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyRawPtr(..) |
ty::TyChar | ty::TyRef(..) | ty::TyGenerator(..) |
- ty::TyArray(..) | ty::TyClosure(..) | ty::TyNever |
- ty::TyError => {
+ ty::TyGeneratorWitness(..) | ty::TyArray(..) | ty::TyClosure(..) |
+ ty::TyNever | ty::TyError => {
// safe for everything
Where(ty::Binder(Vec::new()))
}
}
ty::TyDynamic(..) | ty::TyStr | ty::TySlice(..) |
- ty::TyGenerator(..) | ty::TyForeign(..) |
+ ty::TyGenerator(..) | ty::TyGeneratorWitness(..) | ty::TyForeign(..) |
ty::TyRef(_, ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => {
Never
}
}
ty::TyGenerator(def_id, ref substs, interior) => {
- let witness = iter::once(interior.witness);
- substs.upvar_tys(def_id, self.tcx()).chain(witness).collect()
+ substs.upvar_tys(def_id, self.tcx()).chain(iter::once(interior.witness)).collect()
+ }
+
+ ty::TyGeneratorWitness(types) => {
+ // This is sound because no regions in the witness can refer to
+ // the binder outside the witness. So we'll effectively reuse
+ // the implicit binder around the witness.
+ types.skip_binder().to_vec()
}
// for `PhantomData<T>`, we pass `T`
pub fn print_debug_stats(self) {
sty_debug_print!(
self,
- TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr, TyGenerator, TyForeign,
- TyDynamic, TyClosure, TyTuple, TyParam, TyInfer, TyProjection, TyAnon);
+ TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr,
+ TyGenerator, TyGeneratorWitness, TyDynamic, TyClosure, TyTuple,
+ TyParam, TyInfer, TyProjection, TyAnon, TyForeign);
println!("Substs interner: #{}", self.interners.substs.borrow().len());
println!("Region interner: #{}", self.interners.region.borrow().len());
self.mk_ty(TyGenerator(id, closure_substs, interior))
}
+ pub fn mk_generator_witness(self, types: ty::Binder<&'tcx Slice<Ty<'tcx>>>) -> Ty<'tcx> {
+ self.mk_ty(TyGeneratorWitness(types))
+ }
+
pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
self.mk_infer(TyVar(v))
}
}
ty::TyClosure(..) => "closure".to_string(),
ty::TyGenerator(..) => "generator".to_string(),
+ ty::TyGeneratorWitness(..) => "generator witness".to_string(),
ty::TyTuple(..) => "tuple".to_string(),
ty::TyInfer(ty::TyVar(_)) => "inferred type".to_string(),
ty::TyInfer(ty::IntVar(_)) => "integral variable".to_string(),
TraitSimplifiedType(D),
ClosureSimplifiedType(D),
GeneratorSimplifiedType(D),
+ GeneratorWitnessSimplifiedType(usize),
AnonSimplifiedType(D),
FunctionSimplifiedType(usize),
ParameterSimplifiedType,
ty::TyGenerator(def_id, _, _) => {
Some(GeneratorSimplifiedType(def_id))
}
+ ty::TyGeneratorWitness(ref tys) => {
+ Some(GeneratorWitnessSimplifiedType(tys.skip_binder().len()))
+ }
ty::TyNever => Some(NeverSimplifiedType),
ty::TyTuple(ref tys, _) => {
Some(TupleSimplifiedType(tys.len()))
TraitSimplifiedType(d) => TraitSimplifiedType(map(d)),
ClosureSimplifiedType(d) => ClosureSimplifiedType(map(d)),
GeneratorSimplifiedType(d) => GeneratorSimplifiedType(map(d)),
+ GeneratorWitnessSimplifiedType(n) => GeneratorWitnessSimplifiedType(n),
AnonSimplifiedType(d) => AnonSimplifiedType(map(d)),
FunctionSimplifiedType(n) => FunctionSimplifiedType(n),
ParameterSimplifiedType => ParameterSimplifiedType,
TraitSimplifiedType(d) => d.hash_stable(hcx, hasher),
ClosureSimplifiedType(d) => d.hash_stable(hcx, hasher),
GeneratorSimplifiedType(d) => d.hash_stable(hcx, hasher),
+ GeneratorWitnessSimplifiedType(n) => n.hash_stable(hcx, hasher),
AnonSimplifiedType(d) => d.hash_stable(hcx, hasher),
FunctionSimplifiedType(n) => n.hash_stable(hcx, hasher),
ForeignSimplifiedType(d) => d.hash_stable(hcx, hasher),
self.add_ty(interior.witness);
}
+ &ty::TyGeneratorWitness(ref ts) => {
+ let mut computation = FlagComputation::new();
+ computation.add_tys(&ts.skip_binder()[..]);
+ self.add_bound_computation(&computation);
+ }
+
&ty::TyClosure(_, ref substs) => {
self.add_flags(TypeFlags::HAS_TY_CLOSURE);
self.add_flags(TypeFlags::HAS_LOCAL_NAMES);
}))
},
TyArray(ty, len) => {
- if len.val.to_const_int().and_then(|i| i.to_u64()) == Some(0) {
- DefIdForest::empty()
- } else {
- ty.uninhabited_from(visited, tcx)
+ match len.val.to_const_int().and_then(|i| i.to_u64()) {
+ // If the array is definitely non-empty, it's uninhabited if
+ // the type of its elements is uninhabited.
+ Some(n) if n != 0 => ty.uninhabited_from(visited, tcx),
+ _ => DefIdForest::empty()
}
}
TyRef(_, ref tm) => {
ty::TyAnon(..) |
ty::TyInfer(_) |
ty::TyError |
+ ty::TyGeneratorWitness(..) |
ty::TyNever |
ty::TyFloat(_) => None,
}
ty::TyParam(_) => {
return Err(LayoutError::Unknown(ty));
}
- ty::TyInfer(_) | ty::TyError => {
+ ty::TyGeneratorWitness(..) | ty::TyInfer(_) | ty::TyError => {
bug!("LayoutDetails::compute: unexpected type `{}`", ty)
}
})
ty::TyFnPtr(_) |
ty::TyNever |
ty::TyFnDef(..) |
- ty::TyDynamic(..) |
- ty::TyForeign(..) => {
+ ty::TyGeneratorWitness(..) |
+ ty::TyForeign(..) |
+ ty::TyDynamic(..) => {
bug!("TyLayout::field_type({:?}): not applicable", self)
}
}
fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- id: SerializedDepNodeIndex)
- -> Option<Self::Value> {
+ id: SerializedDepNodeIndex)
+ -> Option<Self::Value> {
let mir: Option<::mir::Mir<'tcx>> = tcx.on_disk_query_result_cache
.try_load_query_result(tcx, id);
mir.map(|x| tcx.alloc_mir(x))
}
}
impl<'tcx> QueryDescription<'tcx> for queries::instance_def_size_estimate<'tcx> {
    // Human-readable description of the query, used in diagnostics
    // (e.g. cycle errors) and profiling output.
    fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String {
        format!("estimating size for `{}`", tcx.item_path_str(def.def_id()))
    }
}
+
impl<'tcx> QueryDescription<'tcx> for queries::generics_of<'tcx> {
    #[inline]
    fn cache_on_disk(def_id: Self::Key) -> bool {
        // Only cache results for local definitions in the incremental
        // on-disk query cache.
        def_id.is_local()
    }

    fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                              id: SerializedDepNodeIndex)
                              -> Option<Self::Value> {
        // Deserialize the cached `ty::Generics` (if present) and
        // arena-allocate it to give it the `'tcx` lifetime.
        let generics: Option<ty::Generics> = tcx.on_disk_query_result_cache
            .try_load_query_result(tcx, id);
        generics.map(|x| tcx.alloc_generics(x))
    }
}
+
macro_rules! impl_disk_cacheable_query(
($query_name:ident, |$key:tt| $cond:expr) => {
impl<'tcx> QueryDescription<'tcx> for queries::$query_name<'tcx> {
impl_disk_cacheable_query!(check_match, |def_id| def_id.is_local());
impl_disk_cacheable_query!(contains_extern_indicator, |_| true);
impl_disk_cacheable_query!(def_symbol_name, |_| true);
+impl_disk_cacheable_query!(type_of, |def_id| def_id.is_local());
+impl_disk_cacheable_query!(predicates_of, |def_id| def_id.is_local());
+impl_disk_cacheable_query!(used_trait_imports, |def_id| def_id.is_local());
target_features_whitelist_node(CrateNum) -> Rc<FxHashSet<String>>,
[] fn target_features_enabled: TargetFeaturesEnabled(DefId) -> Rc<Vec<String>>,
+ // Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.
+ [] fn instance_def_size_estimate: instance_def_size_estimate_dep_node(ty::InstanceDef<'tcx>)
+ -> usize,
}
//////////////////////////////////////////////////////////////////////
fn target_features_whitelist_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::TargetFeaturesWhitelist
}
+
/// Constructs the dep-node for the `instance_def_size_estimate` query,
/// keyed on the given `InstanceDef`.
fn instance_def_size_estimate_dep_node<'tcx>(instance_def: ty::InstanceDef<'tcx>)
                                             -> DepConstructor<'tcx> {
    DepConstructor::InstanceDefSizeEstimate {
        instance_def
    }
}
let enc = &mut encoder;
let qri = &mut query_result_index;
- // Encode TypeckTables
+ encode_query_results::<type_of, _>(tcx, enc, qri)?;
+ encode_query_results::<generics_of, _>(tcx, enc, qri)?;
+ encode_query_results::<predicates_of, _>(tcx, enc, qri)?;
+ encode_query_results::<used_trait_imports, _>(tcx, enc, qri)?;
encode_query_results::<typeck_tables_of, _>(tcx, enc, qri)?;
+ encode_query_results::<trans_fulfill_obligation, _>(tcx, enc, qri)?;
encode_query_results::<optimized_mir, _>(tcx, enc, qri)?;
encode_query_results::<unsafety_check_result, _>(tcx, enc, qri)?;
encode_query_results::<borrowck, _>(tcx, enc, qri)?;
encode_query_results::<const_is_rvalue_promotable_to_static, _>(tcx, enc, qri)?;
encode_query_results::<contains_extern_indicator, _>(tcx, enc, qri)?;
encode_query_results::<symbol_name, _>(tcx, enc, qri)?;
- encode_query_results::<trans_fulfill_obligation, _>(tcx, enc, qri)?;
encode_query_results::<check_match, _>(tcx, enc, qri)?;
}
DepKind::EraseRegionsTy |
DepKind::NormalizeTy |
DepKind::SubstituteNormalizeAndTestPredicates |
+ DepKind::InstanceDefSizeEstimate |
// This one should never occur in this context
DepKind::Null => {
ConstIsRvaluePromotableToStatic => const_is_rvalue_promotable_to_static,
ContainsExternIndicator => contains_extern_indicator,
CheckMatch => check_match,
+ TypeOfItem => type_of,
+ GenericsOfItem => generics_of,
+ PredicatesOfItem => predicates_of,
+ UsedTraitImports => used_trait_imports,
);
use hir::{map as hir_map, FreevarMap, TraitMap};
use hir::def::{Def, CtorKind, ExportMap};
-use hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE};
+use hir::def_id::{CrateNum, DefId, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use hir::map::DefPathData;
use hir::svh::Svh;
use ich::Fingerprint;
use serialize::{self, Encodable, Encoder};
use std::cell::RefCell;
-use std::collections::BTreeMap;
use std::cmp;
+use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
}
}
+impl<'tcx> Ord for TyS<'tcx> {
+ #[inline]
+ fn cmp(&self, other: &TyS<'tcx>) -> Ordering {
+ // (self as *const _).cmp(other as *const _)
+ (self as *const TyS<'tcx>).cmp(&(other as *const TyS<'tcx>))
+ }
+}
+impl<'tcx> PartialOrd for TyS<'tcx> {
+ #[inline]
+ fn partial_cmp(&self, other: &TyS<'tcx>) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
impl<'tcx> TyS<'tcx> {
pub fn is_primitive_ty(&self) -> bool {
match self.sty {
}
impl<T> Eq for Slice<T> {}
impl<T> Ord for Slice<T> {
    #[inline]
    fn cmp(&self, other: &Slice<T>) -> Ordering {
        // Compare the raw (fat) slice pointers. NOTE(review): this assumes
        // `Slice` values are interned (consistent with the field-free `Eq`
        // impl above), so pointer order is consistent with equality — but it
        // is not stable across compilation sessions.
        (&self.0 as *const [T]).cmp(&(&other.0 as *const [T]))
    }
}
impl<T> PartialOrd for Slice<T> {
    #[inline]
    fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> {
        // Total order exists, so delegate to `cmp`.
        Some(self.cmp(other))
    }
}
+
impl<T> Hash for Slice<T> {
fn hash<H: Hasher>(&self, s: &mut H) {
(self.as_ptr(), self.len()).hash(s)
pub regions: Vec<RegionParameterDef>,
pub types: Vec<TypeParameterDef>,
- /// Reverse map to each `TypeParameterDef`'s `index` field, from
- /// `def_id.index` (`def_id.krate` is the same as the item's).
- pub type_param_to_index: BTreeMap<DefIndex, u32>,
+ /// Reverse map to each `TypeParameterDef`'s `index` field
+ pub type_param_to_index: FxHashMap<DefId, u32>,
pub has_self: bool,
pub has_late_bound_regions: Option<Span>,
/// equality between arbitrary types. Processing an instance of
/// Form #2 eventually yields one of these `ProjectionPredicate`
/// instances to normalize the LHS.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct ProjectionPredicate<'tcx> {
pub projection_ty: ProjectionTy<'tcx>,
pub ty: Ty<'tcx>,
vec![]
}
- TyStr | TyDynamic(..) | TySlice(_) | TyForeign(..) | TyError => {
+ TyStr |
+ TyDynamic(..) |
+ TySlice(_) |
+ TyForeign(..) |
+ TyError |
+ TyGeneratorWitness(..) => {
// these are never sized - return the target type
vec![ty]
}
tcx.hir.crate_hash
}
+fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ instance_def: InstanceDef<'tcx>)
+ -> usize {
+ match instance_def {
+ InstanceDef::Item(..) |
+ InstanceDef::DropGlue(..) => {
+ let mir = tcx.instance_mir(instance_def);
+ mir.basic_blocks().iter().map(|bb| bb.statements.len()).sum()
+ },
+ // Estimate the size of other compiler-generated shims to be 1.
+ _ => 1
+ }
+}
+
pub fn provide(providers: &mut ty::maps::Providers) {
context::provide(providers);
erase_regions::provide(providers);
original_crate_name,
crate_hash,
trait_impls_of: trait_def::trait_impls_of_provider,
+ instance_def_size_estimate,
..*providers
};
}
}
}
- ty::TyGenerator(def_id, ref substs, ref interior) => {
+ ty::TyGenerator(def_id, ref substs, _) => {
// Same as the closure case
for upvar_ty in substs.upvar_tys(def_id, *self) {
self.compute_components(upvar_ty, out);
}
- // But generators can have additional interior types
- self.compute_components(interior.witness, out);
+ // We ignore regions in the generator interior as we don't
+ // want these to affect region inference
}
+ // All regions are bound inside a witness
+ ty::TyGeneratorWitness(..) => (),
+
// OutlivesTypeParameterEnv -- the actual checking that `X:'a`
// is implied by the environment is done in regionck.
ty::TyParam(p) => {
use traits::Reveal;
use ty::subst::{Kind, Substs};
use ty::{self, Ty, TyCtxt, TypeFoldable};
+use ty::fold::{TypeVisitor, TypeFolder};
use ty::error::{ExpectedFound, TypeError};
use util::common::ErrorReported;
use std::rc::Rc;
}
}
/// Helper newtype wrapping a generator witness's type list, so the whole
/// list can be folded/related as a single unit (it is placed inside a
/// `ty::Binder` by the relate code below).
#[derive(Debug, Clone)]
struct GeneratorWitness<'tcx>(&'tcx ty::Slice<Ty<'tcx>>);

impl<'tcx> TypeFoldable<'tcx> for GeneratorWitness<'tcx> {
    fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
        // Fold every type in the wrapped list.
        GeneratorWitness(self.0.fold_with(folder))
    }

    fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
        // Visit every type in the wrapped list.
        self.0.visit_with(visitor)
    }
}
+
+impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> {
+ fn relate<'a, 'gcx, R>(relation: &mut R,
+ a: &GeneratorWitness<'tcx>,
+ b: &GeneratorWitness<'tcx>)
+ -> RelateResult<'tcx, GeneratorWitness<'tcx>>
+ where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
+ {
+ assert!(a.0.len() == b.0.len());
+ let tcx = relation.tcx();
+ let types = tcx.mk_type_list(a.0.iter().zip(b.0).map(|(a, b)| relation.relate(a, b)))?;
+ Ok(GeneratorWitness(types))
+ }
+}
+
impl<'tcx> Relate<'tcx> for Ty<'tcx> {
fn relate<'a, 'gcx, R>(relation: &mut R,
a: &Ty<'tcx>,
Ok(tcx.mk_generator(a_id, substs, interior))
}
+ (&ty::TyGeneratorWitness(a_types), &ty::TyGeneratorWitness(b_types)) =>
+ {
+ // Wrap our types with a temporary GeneratorWitness struct
+ // inside the binder so we can relate them
+ let a_types = ty::Binder(GeneratorWitness(*a_types.skip_binder()));
+ let b_types = ty::Binder(GeneratorWitness(*b_types.skip_binder()));
+ // Then remove the GeneratorWitness for the result
+ let types = ty::Binder(relation.relate(&a_types, &b_types)?.skip_binder().0);
+ Ok(tcx.mk_generator_witness(types))
+ }
+
(&ty::TyClosure(a_id, a_substs),
&ty::TyClosure(b_id, b_substs))
if a_id == b_id =>
-> RelateResult<'tcx, ty::GeneratorInterior<'tcx>>
where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
{
- let interior = relation.relate(&a.witness, &b.witness)?;
- Ok(ty::GeneratorInterior::new(interior))
+ assert_eq!(a.movable, b.movable);
+ let witness = relation.relate(&a.witness, &b.witness)?;
+ Ok(ty::GeneratorInterior { witness, movable: a.movable })
}
}
type Lifted = ty::GeneratorInterior<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.witness).map(|witness| {
- ty::GeneratorInterior { witness }
+ ty::GeneratorInterior { witness, movable: self.movable }
})
}
}
ty::TyGenerator(did, substs, interior) => {
ty::TyGenerator(did, substs.fold_with(folder), interior.fold_with(folder))
}
+ ty::TyGeneratorWitness(types) => ty::TyGeneratorWitness(types.fold_with(folder)),
ty::TyClosure(did, substs) => ty::TyClosure(did, substs.fold_with(folder)),
ty::TyProjection(ref data) => ty::TyProjection(data.fold_with(folder)),
ty::TyAnon(did, substs) => ty::TyAnon(did, substs.fold_with(folder)),
ty::TyGenerator(_did, ref substs, ref interior) => {
substs.visit_with(visitor) || interior.visit_with(visitor)
}
+ ty::TyGeneratorWitness(ref types) => types.visit_with(visitor),
ty::TyClosure(_did, ref substs) => substs.visit_with(visitor),
ty::TyProjection(ref data) => data.visit_with(visitor),
ty::TyAnon(_, ref substs) => substs.visit_with(visitor),
impl<'tcx> TypeFoldable<'tcx> for ty::GeneratorInterior<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- ty::GeneratorInterior::new(self.witness.fold_with(folder))
+ ty::GeneratorInterior {
+ witness: self.witness.fold_with(folder),
+ movable: self.movable,
+ }
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
/// `|a| yield a`.
TyGenerator(DefId, ClosureSubsts<'tcx>, GeneratorInterior<'tcx>),
+ /// A type representing the types stored inside a generator.
+ /// This should only appear in GeneratorInteriors.
+ TyGeneratorWitness(Binder<&'tcx Slice<Ty<'tcx>>>),
+
/// The never type `!`
TyNever,
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct GeneratorInterior<'tcx> {
pub witness: Ty<'tcx>,
-}
-
-impl<'tcx> GeneratorInterior<'tcx> {
- pub fn new(witness: Ty<'tcx>) -> GeneratorInterior<'tcx> {
- GeneratorInterior { witness }
- }
-
- pub fn as_slice(&self) -> &'tcx Slice<Ty<'tcx>> {
- match self.witness.sty {
- ty::TyTuple(s, _) => s,
- _ => bug!(),
- }
- }
+ pub movable: bool,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
/// erase, or otherwise "discharge" these bound regions, we change the
/// type from `Binder<T>` to just `T` (see
/// e.g. `liberate_late_bound_regions`).
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Binder<T>(pub T);
impl<T> Binder<T> {
/// Represents the projection of an associated type. In explicit UFCS
/// form this would be written `<T as Trait<..>>::N`.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ProjectionTy<'tcx> {
/// The parameters of the associated item.
pub substs: &'tcx Substs<'tcx>,
}
TyFnDef(..) |
TyFnPtr(_) |
+ TyGeneratorWitness(..) |
TyBool |
TyChar |
TyInt(_) |
/// To reduce memory usage, a `Kind` is a interned pointer,
/// with the lowest 2 bits being reserved for a tag to
/// indicate the type (`Ty` or `Region`) it points to.
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Kind<'tcx> {
ptr: NonZero<usize>,
marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>)>
HashStable};
use rustc_data_structures::fx::FxHashMap;
use std::cmp;
-use std::iter;
use std::hash::Hash;
use std::intrinsics;
use syntax::ast::{self, Name};
let result = match ty.sty {
ty::TyBool | ty::TyChar | ty::TyInt(_) | ty::TyUint(_) |
ty::TyFloat(_) | ty::TyStr | ty::TyNever | ty::TyForeign(..) |
- ty::TyRawPtr(..) | ty::TyRef(..) | ty::TyFnDef(..) | ty::TyFnPtr(_) => {
+ ty::TyRawPtr(..) | ty::TyRef(..) | ty::TyFnDef(..) | ty::TyFnPtr(_) |
+ ty::TyGeneratorWitness(..) => {
// these types never have a destructor
Ok(ty::DtorckConstraint::empty())
}
}).collect()
}
- ty::TyGenerator(def_id, substs, interior) => {
- substs.upvar_tys(def_id, self).chain(iter::once(interior.witness)).map(|ty| {
+ ty::TyGenerator(def_id, substs, _) => {
+ // Note that the interior types are ignored here.
+ // Any type reachable inside the interior must also be reachable
+ // through the upvars.
+ substs.upvar_tys(def_id, self).map(|ty| {
self.dtorck_constraint_for_ty(span, for_ty, depth+1, ty)
}).collect()
}
self.def_id(d);
}
}
+ TyGeneratorWitness(tys) => {
+ self.hash(tys.skip_binder().len());
+ }
TyTuple(tys, defaulted) => {
self.hash(tys.len());
self.hash(defaulted);
// Fast-path for primitive types
ty::TyInfer(ty::FreshIntTy(_)) | ty::TyInfer(ty::FreshFloatTy(_)) |
ty::TyBool | ty::TyInt(_) | ty::TyUint(_) | ty::TyFloat(_) | ty::TyNever |
- ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyChar |
+ ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyChar | ty::TyGeneratorWitness(..) |
ty::TyRawPtr(_) | ty::TyRef(..) | ty::TyStr => false,
// Foreign types can never have destructors
stack.extend(substs.substs.types().rev());
}
ty::TyGenerator(_, ref substs, ref interior) => {
- stack.extend(substs.substs.types().rev());
stack.push(interior.witness);
+ stack.extend(substs.substs.types().rev());
+ }
+ ty::TyGeneratorWitness(ts) => {
+ stack.extend(ts.skip_binder().iter().cloned().rev());
}
ty::TyTuple(ts, _) => {
stack.extend(ts.iter().cloned().rev());
ty::TyFloat(..) |
ty::TyError |
ty::TyStr |
+ ty::TyGeneratorWitness(..) |
ty::TyNever |
ty::TyParam(_) |
ty::TyForeign(..) => {
use ty::{TyBool, TyChar, TyAdt};
use ty::{TyError, TyStr, TyArray, TySlice, TyFloat, TyFnDef, TyFnPtr};
use ty::{TyParam, TyRawPtr, TyRef, TyNever, TyTuple};
-use ty::{TyClosure, TyGenerator, TyForeign, TyProjection, TyAnon};
+use ty::{TyClosure, TyGenerator, TyGeneratorWitness, TyForeign, TyProjection, TyAnon};
use ty::{TyDynamic, TyInt, TyUint, TyInfer};
use ty::{self, Ty, TyCtxt, TypeFoldable};
use util::nodemap::FxHashSet;
}
}
+define_print! {
+ ('tcx) &'tcx ty::Slice<Ty<'tcx>>, (self, f, cx) {
+ display {
+ write!(f, "{{")?;
+ let mut tys = self.iter();
+ if let Some(&ty) = tys.next() {
+ print!(f, cx, print(ty))?;
+ for &ty in tys {
+ print!(f, cx, write(", "), print(ty))?;
+ }
+ }
+ write!(f, "}}")
+ }
+ }
+}
+
define_print! {
('tcx) ty::TypeAndMut<'tcx>, (self, f, cx) {
display {
TyStr => write!(f, "str"),
TyGenerator(did, substs, interior) => ty::tls::with(|tcx| {
let upvar_tys = substs.upvar_tys(did, tcx);
- write!(f, "[generator")?;
+ if interior.movable {
+ write!(f, "[generator")?;
+ } else {
+ write!(f, "[static generator")?;
+ }
if let Some(node_id) = tcx.hir.as_local_node_id(did) {
write!(f, "@{:?}", tcx.hir.span(node_id))?;
print!(f, cx, write(" "), print(interior), write("]"))
}),
+ TyGeneratorWitness(types) => {
+ ty::tls::with(|tcx| cx.in_binder(f, tcx, &types, tcx.lift(&types)))
+ }
TyClosure(did, substs) => ty::tls::with(|tcx| {
let upvar_tys = substs.upvar_tys(did, tcx);
write!(f, "[closure")?;
/// Relocation model to use in object file. Corresponds to `llc
/// -relocation-model=$relocation_model`. Defaults to "pic".
pub relocation_model: String,
- /// Code model to use. Corresponds to `llc -code-model=$code_model`. Defaults to "default".
- pub code_model: String,
+ /// Code model to use. Corresponds to `llc -code-model=$code_model`.
+ pub code_model: Option<String>,
/// TLS model to use. Options are "global-dynamic" (default), "local-dynamic", "initial-exec"
/// and "local-exec". This is similar to the -ftls-model option in GCC/Clang.
pub tls_model: String,
only_cdylib: false,
executables: false,
relocation_model: "pic".to_string(),
- code_model: "default".to_string(),
+ code_model: None,
tls_model: "global-dynamic".to_string(),
disable_redzone: false,
eliminate_frame_pointer: true,
key!(only_cdylib, bool);
key!(executables, bool);
key!(relocation_model);
- key!(code_model);
+ key!(code_model, optional);
key!(tls_model);
key!(disable_redzone, bool);
key!(eliminate_frame_pointer, bool);
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
use rustc::middle::region;
-use rustc::ty::{self, TyCtxt};
+use rustc::ty::{self, TyCtxt, RegionKind};
use syntax::ast;
use syntax_pos::Span;
use rustc::hir;
move_data: &'a move_data::FlowedMoveData<'a, 'tcx>,
all_loans: &'a [Loan<'tcx>],
param_env: ty::ParamEnv<'tcx>,
+ movable_generator: bool,
}
impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> {
}
self.check_for_conflicting_loans(hir_id.local_id);
+
+ self.check_for_loans_across_yields(cmt, loan_region, borrow_span);
}
fn mutate(&mut self,
debug!("check_loans(body id={})", body.value.id);
let def_id = bccx.tcx.hir.body_owner_def_id(body.id());
+
+ let node_id = bccx.tcx.hir.as_local_node_id(def_id).unwrap();
+ let movable_generator = !match bccx.tcx.hir.get(node_id) {
+ hir::map::Node::NodeExpr(&hir::Expr {
+ node: hir::ExprClosure(.., Some(hir::GeneratorMovability::Static)),
+ ..
+ }) => true,
+ _ => false,
+ };
+
let param_env = bccx.tcx.param_env(def_id);
let mut clcx = CheckLoanCtxt {
bccx,
move_data,
all_loans,
param_env,
+ movable_generator,
};
let rvalue_promotable_map = bccx.tcx.rvalue_promotable_map(def_id);
euv::ExprUseVisitor::new(&mut clcx,
return result;
}
+ pub fn check_for_loans_across_yields(&self,
+ cmt: mc::cmt<'tcx>,
+ loan_region: ty::Region<'tcx>,
+ borrow_span: Span) {
+ pub fn borrow_of_local_data<'tcx>(cmt: &mc::cmt<'tcx>) -> bool {
+ match cmt.cat {
+ // Borrows of static items are allowed
+ Categorization::StaticItem => false,
+ // Reborrow of already borrowed data is ignored
+ // Any errors will be caught on the initial borrow
+ Categorization::Deref(..) => false,
+
+ // By-ref upvars have Derefs so they will get ignored.
+ // Generators count as FnOnce so this leaves only
+ // by-move upvars, which is local data for generators
+ Categorization::Upvar(..) => true,
+
+ Categorization::Rvalue(region) => {
+ // Rvalues promoted to 'static are no longer local
+ if let RegionKind::ReStatic = *region {
+ false
+ } else {
+ true
+ }
+ }
+
+ // Borrow of local data must be checked
+ Categorization::Local(..) => true,
+
+ // For interior references and downcasts, find out if the base is local
+ Categorization::Downcast(ref cmt_base, _) |
+ Categorization::Interior(ref cmt_base, _) => borrow_of_local_data(&cmt_base),
+ }
+ }
+
+ if !self.movable_generator {
+ return;
+ }
+
+ if !borrow_of_local_data(&cmt) {
+ return;
+ }
+
+ let scope = match *loan_region {
+ // A concrete region in which we will look for a yield expression
+ RegionKind::ReScope(scope) => scope,
+
+ // There cannot be yields inside an empty region
+ RegionKind::ReEmpty => return,
+
+ // Local data cannot have these lifetimes
+ RegionKind::ReEarlyBound(..) |
+ RegionKind::ReLateBound(..) |
+ RegionKind::ReFree(..) |
+ RegionKind::ReStatic => {
+ self.bccx
+ .tcx
+ .sess.delay_span_bug(borrow_span,
+ &format!("unexpected region for local data {:?}",
+ loan_region));
+ return
+ }
+
+ // These cannot exist in borrowck
+ RegionKind::ReVar(..) |
+ RegionKind::ReSkolemized(..) |
+ RegionKind::ReClosureBound(..) |
+ RegionKind::ReErased => span_bug!(borrow_span,
+ "unexpected region in borrowck {:?}",
+ loan_region),
+ };
+
+ let body_id = self.bccx.body.value.hir_id.local_id;
+
+ if self.bccx.region_scope_tree.containing_body(scope) != Some(body_id) {
+ // We are borrowing local data longer than its storage.
+ // This should result in other borrowck errors.
+ self.bccx.tcx.sess.delay_span_bug(borrow_span,
+ "borrowing local data longer than its storage");
+ return;
+ }
+
+ if let Some(yield_span) = self.bccx
+ .region_scope_tree
+ .yield_in_scope_for_expr(scope,
+ cmt.id,
+ self.bccx.body) {
+ self.bccx.cannot_borrow_across_generator_yield(borrow_span,
+ yield_span,
+ Origin::Ast).emit();
+ }
+ }
+
pub fn check_for_conflicting_loans(&self, node: hir::ItemLocalId) {
//! Checks to see whether any of the loans that are issued
//! on entrance to `node` conflict with loans that have already been
}
};
- // When you have a borrow that lives across a yield,
- // that reference winds up captured in the generator
- // type. Regionck then constraints it to live as long
- // as the generator itself. If that borrow is borrowing
- // data owned by the generator, this winds up resulting in
- // an `err_out_of_scope` error:
- //
- // ```
- // {
- // let g = || {
- // let a = &3; // this borrow is forced to ... -+
- // yield (); // |
- // println!("{}", a); // |
- // }; // |
- // } <----------------------... live until here --------+
- // ```
- //
- // To detect this case, we look for cases where the
- // `super_scope` (lifetime of the value) is within the
- // body, but the `sub_scope` is not.
- debug!("err_out_of_scope: self.body.is_generator = {:?}",
- self.body.is_generator);
- let maybe_borrow_across_yield = if self.body.is_generator {
- let body_scope = region::Scope::Node(self.body.value.hir_id.local_id);
- debug!("err_out_of_scope: body_scope = {:?}", body_scope);
- debug!("err_out_of_scope: super_scope = {:?}", super_scope);
- debug!("err_out_of_scope: sub_scope = {:?}", sub_scope);
- match (super_scope, sub_scope) {
- (&ty::RegionKind::ReScope(value_scope),
- &ty::RegionKind::ReScope(loan_scope)) => {
- if {
- // value_scope <= body_scope &&
- self.region_scope_tree.is_subscope_of(value_scope, body_scope) &&
- // body_scope <= loan_scope
- self.region_scope_tree.is_subscope_of(body_scope, loan_scope)
- } {
- // We now know that this is a case
- // that fits the bill described above:
- // a borrow of something whose scope
- // is within the generator, but the
- // borrow is for a scope outside the
- // generator.
- //
- // Now look within the scope of the of
- // the value being borrowed (in the
- // example above, that would be the
- // block remainder that starts with
- // `let a`) for a yield. We can cite
- // that for the user.
- self.region_scope_tree.yield_in_scope(value_scope)
- } else {
- None
- }
- }
- _ => None,
- }
- } else {
- None
- };
-
- if let Some((yield_span, _)) = maybe_borrow_across_yield {
- debug!("err_out_of_scope: opt_yield_span = {:?}", yield_span);
- self.cannot_borrow_across_generator_yield(error_span, yield_span, Origin::Ast)
- .emit();
- return;
- }
-
let mut db = self.path_does_not_live_long_enough(error_span, &msg, Origin::Ast);
let value_kind = match err.cmt.cat {
mc::Categorization::Rvalue(..) => "temporary value",
end: RangeEnd,
},
- /// matches against a slice, checking the length and extracting elements
+ /// matches against a slice, checking the length and extracting elements.
+ /// irrefutable when there is a slice pattern and both `prefix` and `suffix` are empty.
+ /// e.g. `&[ref xs..]`.
Slice {
prefix: Vec<Pattern<'tcx>>,
slice: Option<Pattern<'tcx>>,
use std::ffi::OsString;
use std::io::{self, Read, Write};
use std::iter::repeat;
+use std::panic;
use std::path::PathBuf;
use std::process::{self, Command, Stdio};
use std::rc::Rc;
use std::str;
-use std::sync::{Arc, Mutex};
use std::thread;
use syntax::ast;
handler.emit(&MultiSpan::new(),
"aborting due to previous error(s)",
errors::Level::Fatal);
- exit_on_err();
+ panic::resume_unwind(Box::new(errors::FatalErrorMarker));
}
}
}
/// The diagnostic emitter yielded to the procedure should be used for reporting
/// errors of the compiler.
pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
- struct Sink(Arc<Mutex<Vec<u8>>>);
- impl Write for Sink {
- fn write(&mut self, data: &[u8]) -> io::Result<usize> {
- Write::write(&mut *self.0.lock().unwrap(), data)
- }
- fn flush(&mut self) -> io::Result<()> {
- Ok(())
- }
- }
-
- let data = Arc::new(Mutex::new(Vec::new()));
- let err = Sink(data.clone());
-
let result = in_rustc_thread(move || {
- io::set_panic(Some(box err));
f()
});
if let Err(value) = result {
// Thread panicked without emitting a fatal diagnostic
- if !value.is::<errors::FatalError>() {
+ if !value.is::<errors::FatalErrorMarker>() {
+ // Emit a newline
+ eprintln!("");
+
let emitter =
Box::new(errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
None,
¬e,
errors::Level::Note);
}
- if match env::var_os("RUST_BACKTRACE") {
- Some(val) => &val != "0",
- None => false,
- } {
- handler.emit(&MultiSpan::new(),
- "run with `RUST_BACKTRACE=1` for a backtrace",
- errors::Level::Note);
- }
-
- eprintln!("{}", str::from_utf8(&data.lock().unwrap()).unwrap());
}
- exit_on_err();
+ panic::resume_unwind(Box::new(errors::FatalErrorMarker));
}
}
-fn exit_on_err() -> ! {
- // Panic so the process returns a failure code, but don't pollute the
- // output with some unnecessary panic messages, we've already
- // printed everything that we needed to.
- io::set_panic(Some(box io::sink()));
- panic!();
-}
-
#[cfg(stage0)]
pub fn diagnostics_registry() -> errors::registry::Registry {
use errors::registry::Registry;
pub suggestions: Vec<CodeSuggestion>,
}
-#[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum DiagnosticId {
Error(String),
Lint(String),
self
}
+ pub fn get_code(&self) -> Option<DiagnosticId> {
+ self.code.clone()
+ }
+
pub fn message(&self) -> String {
self.message.iter().map(|i| i.0.to_owned()).collect::<String>()
}
#![cfg_attr(unix, feature(libc))]
#![feature(conservative_impl_trait)]
#![feature(i128_type)]
+#![feature(optin_builtin_traits)]
extern crate term;
#[cfg(unix)]
use std::{error, fmt};
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
+use std::panic;
mod diagnostic;
mod diagnostic_builder;
#[must_use]
pub struct FatalError;
+pub struct FatalErrorMarker;
+
+// Don't implement Send on FatalError. This makes it impossible to panic!(FatalError).
+// We don't want to invoke the panic handler and print a backtrace for fatal errors.
+impl !Send for FatalError {}
+
+impl FatalError {
+ pub fn raise(self) -> ! {
+ panic::resume_unwind(Box::new(FatalErrorMarker))
+ }
+}
+
impl fmt::Display for FatalError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser fatal error")
delayed_span_bug: RefCell<Option<Diagnostic>>,
tracked_diagnostics: RefCell<Option<Vec<Diagnostic>>>,
+ // This set contains the `DiagnosticId` of all emitted diagnostics to avoid
+ // emitting the same diagnostic with extended help (`--teach`) twice, which
+ // would be unnecessary repetition.
+ tracked_diagnostic_codes: RefCell<FxHashSet<DiagnosticId>>,
+
// This set contains a hash of every diagnostic that has been emitted by
// this handler. These hashes is used to avoid emitting the same error
// twice.
continue_after_error: Cell::new(true),
delayed_span_bug: RefCell::new(None),
tracked_diagnostics: RefCell::new(None),
+ tracked_diagnostic_codes: RefCell::new(FxHashSet()),
emitted_diagnostics: RefCell::new(FxHashSet()),
}
}
}
}
- panic!(self.fatal(&s));
+ self.fatal(&s).raise();
}
pub fn emit(&self, msp: &MultiSpan, msg: &str, lvl: Level) {
if lvl == Warning && !self.flags.can_emit_warnings {
(ret, diagnostics)
}
+ /// `true` if a diagnostic with this code has already been emitted in this handler.
+ ///
+ /// Used to suppress emitting the same error multiple times with extended explanation when
+ /// calling `-Zteach`.
+ pub fn code_emitted(&self, code: &DiagnosticId) -> bool {
+ self.tracked_diagnostic_codes.borrow().contains(code)
+ }
+
fn emit_db(&self, db: &DiagnosticBuilder) {
let diagnostic = &**db;
list.push(diagnostic.clone());
}
+ if let Some(ref code) = diagnostic.code {
+ self.tracked_diagnostic_codes.borrow_mut().insert(code.clone());
+ }
+
let diagnostic_hash = {
use std::hash::Hash;
let mut hasher = StableHasher::new();
ty::TyError |
ty::TyClosure(..) |
ty::TyGenerator(..) |
+ ty::TyGeneratorWitness(..) |
ty::TyProjection(..) |
ty::TyAnon(..) |
ty::TyFnDef(..) => bug!("Unexpected type in foreign function"),
#[repr(C)]
pub enum CodeModel {
Other,
- Default,
- JITDefault,
Small,
Kernel,
Medium,
Large,
+ None,
}
/// LLVMRustDiagnosticKind
pub enum ArchiveKind {
Other,
K_GNU,
- K_MIPS64,
K_BSD,
K_COFF,
}
const FlagStaticMember = (1 << 12);
const FlagLValueReference = (1 << 13);
const FlagRValueReference = (1 << 14);
+ const FlagExternalTypeRef = (1 << 15);
+ const FlagIntroducedVirtual = (1 << 18);
+ const FlagBitField = (1 << 19);
+ const FlagNoReturn = (1 << 20);
const FlagMainSubprogram = (1 << 21);
}
}
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"gnu" => Ok(ArchiveKind::K_GNU),
- "mips64" => Ok(ArchiveKind::K_MIPS64),
"bsd" => Ok(ArchiveKind::K_BSD),
"coff" => Ok(ArchiveKind::K_COFF),
_ => Err(()),
}
// Retrieve span of given borrow from the current MIR representation
- fn retrieve_borrow_span(&self, borrow: &BorrowData) -> Span {
+ pub fn retrieve_borrow_span(&self, borrow: &BorrowData) -> Span {
self.mir.source_info(borrow.location).span
}
};
let flow_inits = flow_inits; // remove mut
+ let movable_generator = !match tcx.hir.get(id) {
+ hir::map::Node::NodeExpr(&hir::Expr {
+ node: hir::ExprClosure(.., Some(hir::GeneratorMovability::Static)),
+ ..
+ }) => true,
+ _ => false,
+ };
+
let mut mbcx = MirBorrowckCtxt {
tcx: tcx,
mir: mir,
node_id: id,
move_data: &mdpe.move_data,
param_env: param_env,
+ movable_generator,
locals_are_invalidated_at_exit: match tcx.hir.body_owner_kind(id) {
hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => false,
hir::BodyOwnerKind::Fn => true,
node_id: ast::NodeId,
move_data: &'cx MoveData<'tcx>,
param_env: ParamEnv<'gcx>,
+ movable_generator: bool,
/// This keeps track of whether local variables are free-ed when the function
/// exits even without a `StorageDead`, which appears to be the case for
/// constants.
drop: _,
} => {
self.consume_operand(ContextKind::Yield.new(loc), (value, span), flow_state);
+
+ if self.movable_generator {
+ // Look for any active borrows to locals
+ let domain = flow_state.borrows.operator();
+ let data = domain.borrows();
+ flow_state.borrows.with_elems_outgoing(|borrows| {
+ for i in borrows {
+ let borrow = &data[i.borrow_index()];
+ self.check_for_local_borrow(borrow, span);
+ }
+ });
+ }
}
TerminatorKind::Resume | TerminatorKind::Return | TerminatorKind::GeneratorDrop => {
}
}
+ /// Reports an error if this is a borrow of local data.
+ /// This is called for all Yield statements on movable generators
+ fn check_for_local_borrow(
+ &mut self,
+ borrow: &BorrowData<'tcx>,
+ yield_span: Span)
+ {
+ fn borrow_of_local_data<'tcx>(place: &Place<'tcx>) -> bool {
+ match place {
+ Place::Static(..) => false,
+ Place::Local(..) => true,
+ Place::Projection(box proj) => {
+ match proj.elem {
+ // Reborrow of already borrowed data is ignored
+ // Any errors will be caught on the initial borrow
+ ProjectionElem::Deref => false,
+
+ // For interior references and downcasts, find out if the base is local
+ ProjectionElem::Field(..) |
+ ProjectionElem::Index(..) |
+ ProjectionElem::ConstantIndex { .. } |
+ ProjectionElem::Subslice { .. } |
+ ProjectionElem::Downcast(..) => {
+ borrow_of_local_data(&proj.base)
+ }
+ }
+ }
+ }
+ }
+
+ debug!("check_for_local_borrow({:?})", borrow);
+
+ if borrow_of_local_data(&borrow.borrowed_place) {
+ self.tcx.cannot_borrow_across_generator_yield(self.retrieve_borrow_span(borrow),
+ yield_span,
+ Origin::Mir).emit();
+ }
+ }
+
fn check_activations(
&mut self,
location: Location,
let data = self.infcx.take_and_reset_region_constraints();
if !data.is_empty() {
+ debug!("fully_perform_op: constraints generated at {:?} are {:#?}",
+ locations, data);
self.constraints
.outlives_sets
.push(OutlivesSet { locations, data });
where
T: fmt::Debug + TypeFoldable<'tcx>,
{
+ debug!("normalize(value={:?}, location={:?})", value, location);
self.fully_perform_op(location.at_self(), |this| {
let mut selcx = traits::SelectionContext::new(this.infcx);
let cause = this.misc(this.last_span);
Err(match_pair)
}
- PatternKind::Range { .. } |
- PatternKind::Slice { .. } => {
+ PatternKind::Range { .. } => {
Err(match_pair)
}
+ PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
+ if prefix.is_empty() && slice.is_some() && suffix.is_empty() {
+ // irrefutable
+ self.prefix_slice_suffix(&mut candidate.match_pairs,
+ &match_pair.place,
+ prefix,
+ slice.as_ref(),
+ suffix);
+ Ok(())
+ } else {
+ Err(match_pair)
+ }
+ }
+
PatternKind::Variant { adt_def, substs, variant_index, ref subpatterns } => {
let irrefutable = adt_def.variants.iter().enumerate().all(|(i, v)| {
i == variant_index || {
TyDynamic(..) => bug!("miri produced a trait object"),
TyClosure(..) => bug!("miri produced a closure"),
TyGenerator(..) => bug!("miri produced a generator"),
+ TyGeneratorWitness(..) => bug!("miri produced a generator witness"),
TyNever => bug!("miri produced a value of the never type"),
TyProjection(_) => bug!("miri produced a projection"),
TyAnon(..) => bug!("miri produced an impl Trait type"),
ty::TyInfer(_) |
ty::TyProjection(..) |
ty::TyParam(_) |
+ ty::TyGeneratorWitness(_) |
ty::TyAnon(..) => {
bug!("DefPathBasedNames: Trying to create type name for \
unexpected type: {:?}", t);
use syntax::symbol::{Symbol, InternedString};
use rustc::mir::mono::MonoItem;
use monomorphize::item::{MonoItemExt, InstantiationMode};
+use core::usize;
pub use rustc::mir::mono::CodegenUnit;
let mut initial_partitioning = place_root_translation_items(tcx,
trans_items);
+ initial_partitioning.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(&tcx));
+
debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter());
// If the partitioning should produce a fixed count of codegen units, merge
let mut post_inlining = place_inlined_translation_items(initial_partitioning,
inlining_map);
+ post_inlining.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(&tcx));
+
debug_dump(tcx, "POST INLINING:", post_inlining.codegen_units.iter());
// Next we try to make as many symbols "internal" as possible, so LLVM has
codegen_units.sort_by_key(|cgu| cgu.name().clone());
// Merge the two smallest codegen units until the target size is reached.
- // Note that "size" is estimated here rather inaccurately as the number of
- // translation items in a given unit. This could be improved on.
while codegen_units.len() > target_cgu_count {
// Sort small cgus to the back
- codegen_units.sort_by_key(|cgu| -(cgu.items().len() as i64));
+ codegen_units.sort_by_key(|cgu| usize::MAX - cgu.size_estimate());
let mut smallest = codegen_units.pop().unwrap();
let second_smallest = codegen_units.last_mut().unwrap();
+ second_smallest.modify_size_estimate(smallest.size_estimate());
for (k, v) in smallest.items_mut().drain() {
second_smallest.items_mut().insert(k, v);
}
&AggregateKind::Array(..) |
&AggregateKind::Tuple |
&AggregateKind::Adt(..) => {}
- &AggregateKind::Closure(def_id, _) |
- &AggregateKind::Generator(def_id, _, _) => {
+ &AggregateKind::Closure(def_id, _) => {
let UnsafetyCheckResult {
violations, unsafe_blocks
} = self.tcx.unsafety_check_result(def_id);
self.register_violations(&violations, &unsafe_blocks);
}
+ &AggregateKind::Generator(def_id, _, interior) => {
+ let UnsafetyCheckResult {
+ violations, unsafe_blocks
+ } = self.tcx.unsafety_check_result(def_id);
+ self.register_violations(&violations, &unsafe_blocks);
+ if !interior.movable {
+ self.require_unsafe("construction of immovable generator")
+ }
+ }
}
}
self.super_rvalue(rvalue, location);
}
}
+struct BorrowedLocals(liveness::LocalSet);
+
+fn mark_as_borrowed<'tcx>(place: &Place<'tcx>, locals: &mut BorrowedLocals) {
+ match *place {
+ Place::Local(l) => { locals.0.add(&l); },
+ Place::Static(..) => (),
+ Place::Projection(ref proj) => {
+ match proj.elem {
+ // For derefs we don't look any further.
+ // If it pointed to a Local, it would already be borrowed elsewhere
+ ProjectionElem::Deref => (),
+ _ => mark_as_borrowed(&proj.base, locals)
+ }
+ }
+ }
+}
+
+impl<'tcx> Visitor<'tcx> for BorrowedLocals {
+ fn visit_rvalue(&mut self,
+ rvalue: &Rvalue<'tcx>,
+ location: Location) {
+ if let Rvalue::Ref(_, _, ref place) = *rvalue {
+ mark_as_borrowed(place, self);
+ }
+
+ self.super_rvalue(rvalue, location)
+ }
+}
+
fn locals_live_across_suspend_points<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &Mir<'tcx>,
- source: MirSource) ->
+ source: MirSource,
+ movable: bool) ->
(liveness::LocalSet,
HashMap<BasicBlock, liveness::LocalSet>) {
let dead_unwinds = IdxSetBuf::new_empty(mir.basic_blocks().len());
let mut ignored = StorageIgnored(IdxSetBuf::new_filled(mir.local_decls.len()));
ignored.visit_mir(mir);
+ let mut borrowed_locals = BorrowedLocals(IdxSetBuf::new_empty(mir.local_decls.len()));
+ borrowed_locals.visit_mir(mir);
+
let mut set = liveness::LocalSet::new_empty(mir.local_decls.len());
- let liveness = liveness::liveness_of_locals(mir, LivenessMode {
+ let mut liveness = liveness::liveness_of_locals(mir, LivenessMode {
include_regular_use: true,
include_drops: true,
});
// Mark locals without storage statements as always having live storage
live_locals.union(&ignored.0);
+ if !movable {
+ // For immovable generators we consider borrowed locals to always be live.
+ // This effectively makes those locals use just the storage liveness.
+ liveness.outs[block].union(&borrowed_locals.0);
+ }
+
// Locals live are live at this point only if they are used across suspension points
// and their storage is live
live_locals.intersect(&liveness.outs[block]);
fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
source: MirSource,
+ upvars: Vec<Ty<'tcx>>,
interior: GeneratorInterior<'tcx>,
mir: &mut Mir<'tcx>)
-> (HashMap<Local, (Ty<'tcx>, usize)>,
HashMap<BasicBlock, liveness::LocalSet>)
{
// Use a liveness analysis to compute locals which are live across a suspension point
- let (live_locals, storage_liveness) = locals_live_across_suspend_points(tcx, mir, source);
-
+ let (live_locals, storage_liveness) = locals_live_across_suspend_points(tcx,
+ mir,
+ source,
+ interior.movable);
// Erase regions from the types passed in from typeck so we can compare them with
// MIR types
- let allowed = tcx.erase_regions(&interior.as_slice());
+ let allowed_upvars = tcx.erase_regions(&upvars);
+ let allowed = match interior.witness.sty {
+ ty::TyGeneratorWitness(s) => tcx.erase_late_bound_regions(&s),
+ _ => bug!(),
+ };
for (local, decl) in mir.local_decls.iter_enumerated() {
// Ignore locals which are internal or not live
// Sanity check that typeck knows about the type of locals which are
// live across a suspension point
- if !allowed.contains(&decl.ty) {
+ if !allowed.contains(&decl.ty) && !allowed_upvars.contains(&decl.ty) {
span_bug!(mir.span,
"Broken MIR: generator contains type {} in MIR, \
but typeck only knows about {}",
assert!(mir.generator_drop.is_none());
let def_id = source.def_id;
- let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
- let hir_id = tcx.hir.node_to_hir_id(node_id);
-
- // Get the interior types which typeck computed
- let tables = tcx.typeck_tables_of(def_id);
- let interior = match tables.node_id_to_type(hir_id).sty {
- ty::TyGenerator(_, _, interior) => interior,
- ref t => bug!("type of generator not a generator: {:?}", t),
- };
// The first argument is the generator type passed by value
let gen_ty = mir.local_decls.raw[1].ty;
+ // Get the interior types and substs which typeck computed
+ let (upvars, interior) = match gen_ty.sty {
+ ty::TyGenerator(_, substs, interior) => {
+ (substs.upvar_tys(def_id, tcx).collect(), interior)
+ }
+ _ => bug!(),
+ };
+
// Compute GeneratorState<yield_ty, return_ty>
let state_did = tcx.lang_items().gen_state().unwrap();
let state_adt_ref = tcx.adt_def(state_did);
// Extract locals which are live across suspension point into `layout`
// `remap` gives a mapping from local indices onto generator struct indices
// `storage_liveness` tells us which locals have live storage at suspension points
- let (remap, layout, storage_liveness) = compute_layout(tcx, source, interior, mir);
+ let (remap, layout, storage_liveness) = compute_layout(tcx, source, upvars, interior, mir);
let state_field = mir.upvar_decls.len();
self.super_constant(constant, location);
let Constant { span, ty, literal } = constant;
self.push(&format!("mir::Constant"));
- self.push(&format!("â”” span: {:?}", span));
- self.push(&format!("â”” ty: {:?}", ty));
- self.push(&format!("â”” literal: {:?}", literal));
+ self.push(&format!("+ span: {:?}", span));
+ self.push(&format!("+ ty: {:?}", ty));
+ self.push(&format!("+ literal: {:?}", literal));
}
fn visit_const(&mut self, constant: &&'tcx ty::Const<'tcx>, _: Location) {
self.super_const(constant);
let ty::Const { ty, val } = constant;
self.push(&format!("ty::Const"));
- self.push(&format!("â”” ty: {:?}", ty));
- self.push(&format!("â”” val: {:?}", val));
+ self.push(&format!("+ ty: {:?}", ty));
+ self.push(&format!("+ val: {:?}", val));
}
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
Rvalue::Aggregate(kind, _) => match **kind {
AggregateKind::Closure(def_id, substs) => {
self.push(&format!("closure"));
- self.push(&format!("â”” def_id: {:?}", def_id));
- self.push(&format!("â”” substs: {:#?}", substs));
+ self.push(&format!("+ def_id: {:?}", def_id));
+ self.push(&format!("+ substs: {:#?}", substs));
}
AggregateKind::Generator(def_id, substs, interior) => {
self.push(&format!("generator"));
- self.push(&format!("â”” def_id: {:?}", def_id));
- self.push(&format!("â”” substs: {:#?}", substs));
- self.push(&format!("â”” interior: {:?}", interior));
+ self.push(&format!("+ def_id: {:?}", def_id));
+ self.push(&format!("+ substs: {:#?}", substs));
+ self.push(&format!("+ interior: {:?}", interior));
}
_ => {}
impl<'a> Visitor<'a> for AstValidator<'a> {
fn visit_expr(&mut self, expr: &'a Expr) {
match expr.node {
- ExprKind::While(.., Some(ident)) |
- ExprKind::Loop(_, Some(ident)) |
- ExprKind::WhileLet(.., Some(ident)) |
- ExprKind::ForLoop(.., Some(ident)) |
- ExprKind::Break(Some(ident), _) |
- ExprKind::Continue(Some(ident)) => {
- self.check_label(ident.node, ident.span);
- }
ExprKind::InlineAsm(..) if !self.session.target.target.options.allow_asm => {
span_err!(self.session, expr.span, E0472, "asm! is unsupported on this target");
}
visit::walk_use_tree(self, use_tree, id);
}
+ fn visit_label(&mut self, label: &'a Label) {
+ self.check_label(label.ident, label.span);
+ visit::walk_label(self, label);
+ }
+
fn visit_lifetime(&mut self, lifetime: &'a Lifetime) {
self.check_lifetime(lifetime);
visit::walk_lifetime(self, lifetime);
}
if let ast::UseTreeKind::Nested(ref items) = use_tree.kind {
+ // If it's the parent group, cover the entire use item
+ let span = if nested {
+ use_tree.span
+ } else {
+ self.item_span
+ };
+
if items.len() == 0 {
self.unused_imports
.entry(self.base_id)
.or_insert_with(NodeMap)
- .insert(id, self.item_span);
+ .insert(id, span);
}
} else {
let base_id = self.base_id;
use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind};
use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, GenericParam, Generics};
use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind};
-use syntax::ast::{Local, Mutability, Pat, PatKind, Path};
+use syntax::ast::{Label, Local, Mutability, Pat, PatKind, Path};
use syntax::ast::{QSelf, TraitItemKind, TraitRef, Ty, TyKind};
use syntax::feature_gate::{feature_err, emit_feature_err, GateIssue};
use syntax::parse::token;
segments: vec![],
span: use_tree.span,
};
- self.resolve_use_tree(item, use_tree, &path);
+ self.resolve_use_tree(item.id, use_tree, &path);
}
ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) | ItemKind::GlobalAsm(_) => {
}
}
- fn resolve_use_tree(&mut self, item: &Item, use_tree: &ast::UseTree, prefix: &Path) {
+ fn resolve_use_tree(&mut self, id: NodeId, use_tree: &ast::UseTree, prefix: &Path) {
match use_tree.kind {
ast::UseTreeKind::Nested(ref items) => {
let path = Path {
if items.len() == 0 {
// Resolve prefix of an import with empty braces (issue #28388).
- self.smart_resolve_path(item.id, None, &path, PathSource::ImportPrefix);
+ self.smart_resolve_path(id, None, &path, PathSource::ImportPrefix);
} else {
- for &(ref tree, _) in items {
- self.resolve_use_tree(item, tree, &path);
+ for &(ref tree, nested_id) in items {
+ self.resolve_use_tree(nested_id, tree, &path);
}
}
}
}
}
- fn with_resolved_label<F>(&mut self, label: Option<SpannedIdent>, id: NodeId, f: F)
+ fn with_resolved_label<F>(&mut self, label: Option<Label>, id: NodeId, f: F)
where F: FnOnce(&mut Resolver)
{
if let Some(label) = label {
let def = Def::Label(id);
self.with_label_rib(|this| {
- this.label_ribs.last_mut().unwrap().bindings.insert(label.node, def);
+ this.label_ribs.last_mut().unwrap().bindings.insert(label.ident, def);
f(this);
});
} else {
}
}
- fn resolve_labeled_block(&mut self, label: Option<SpannedIdent>, id: NodeId, block: &Block) {
+ fn resolve_labeled_block(&mut self, label: Option<Label>, id: NodeId, block: &Block) {
self.with_resolved_label(label, id, |this| this.visit_block(block));
}
}
ExprKind::Break(Some(label), _) | ExprKind::Continue(Some(label)) => {
- match self.search_label(label.node, |rib, id| rib.bindings.get(&id).cloned()) {
+ match self.search_label(label.ident, |rib, id| rib.bindings.get(&id).cloned()) {
None => {
// Search again for close matches...
// Picks the first label that is "close enough", which is not necessarily
// the closest match
- let close_match = self.search_label(label.node, |rib, ident| {
+ let close_match = self.search_label(label.ident, |rib, ident| {
let names = rib.bindings.iter().map(|(id, _)| &id.name);
find_best_match_for_name(names, &*ident.name.as_str(), None)
});
self.record_def(expr.id, err_path_resolution());
resolve_error(self,
label.span,
- ResolutionError::UndeclaredLabel(&label.node.name.as_str(),
+ ResolutionError::UndeclaredLabel(&label.ident.name.as_str(),
close_match));
}
Some(def @ Def::Label(_)) => {
_ => span_bug!(ex.span, "Expected struct or tuple type, found {:?}", ty),
}
}
- ast::ExprKind::Closure(_, ref decl, ref body, _fn_decl_span) => {
+ ast::ExprKind::Closure(_, _, ref decl, ref body, _fn_decl_span) => {
let mut id = String::from("$");
id.push_str(&ex.id.to_string());
use super::rpath;
use metadata::METADATA_FILENAME;
use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType, PrintRequest};
-use rustc::session::config::RUST_CGU_EXT;
+use rustc::session::config::{RUST_CGU_EXT, Lto};
use rustc::session::filesearch;
use rustc::session::search_paths::PathKind;
use rustc::session::Session;
});
ab.add_rlib(path,
&name.as_str(),
- sess.lto() && !ignored_for_lto(sess, &trans.crate_info, cnum),
+ is_full_lto_enabled(sess) &&
+ !ignored_for_lto(sess, &trans.crate_info, cnum),
skip_object_files).unwrap();
all_native_libs.extend(trans.crate_info.native_libraries[&cnum].iter().cloned());
lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib)
});
- if (!sess.lto() || ignored_for_lto(sess, &trans.crate_info, cnum)) &&
+ if (!is_full_lto_enabled(sess) ||
+ ignored_for_lto(sess, &trans.crate_info, cnum)) &&
crate_type != config::CrateTypeDylib &&
!skip_native {
cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath));
// file, then we don't need the object file as it's part of the
// LTO module. Note that `#![no_builtins]` is excluded from LTO,
// though, so we let that object file slide.
- let skip_because_lto = sess.lto() &&
+ let skip_because_lto = is_full_lto_enabled(sess) &&
is_rust_object &&
(sess.target.target.options.no_builtins ||
!trans.crate_info.is_no_builtins.contains(&cnum));
fn add_dynamic_crate(cmd: &mut Linker, sess: &Session, cratepath: &Path) {
// If we're performing LTO, then it should have been previously required
// that all upstream rust dependencies were available in an rlib format.
- assert!(!sess.lto());
+ assert!(!is_full_lto_enabled(sess));
// Just need to tell the linker about where the library lives and
// what its name is
e));
}
}
+
+fn is_full_lto_enabled(sess: &Session) -> bool {
+ match sess.lto() {
+ Lto::Yes |
+ Lto::Thin |
+ Lto::Fat => true,
+ Lto::No |
+ Lto::ThinLocal => false,
+ }
+}
use llvm;
use rustc::hir::def_id::LOCAL_CRATE;
use rustc::middle::exported_symbols::SymbolExportLevel;
-use rustc::session::config;
+use rustc::session::config::{self, Lto};
use rustc::util::common::time;
use time_graph::Timeline;
use {ModuleTranslation, ModuleLlvm, ModuleKind, ModuleSource};
}
}
-pub enum LTOMode {
- WholeCrateGraph,
- JustThisCrate,
-}
-
pub(crate) fn run(cgcx: &CodegenContext,
- modules: Vec<ModuleTranslation>,
- mode: LTOMode,
- timeline: &mut Timeline)
+ modules: Vec<ModuleTranslation>,
+ timeline: &mut Timeline)
-> Result<Vec<LtoModuleTranslation>, FatalError>
{
let diag_handler = cgcx.create_diag_handler();
- let export_threshold = match mode {
- LTOMode::WholeCrateGraph => {
+ let export_threshold = match cgcx.lto {
+ // We're just doing LTO for our one crate
+ Lto::ThinLocal => SymbolExportLevel::Rust,
+
+ // We're doing LTO for the entire crate graph
+ Lto::Yes | Lto::Fat | Lto::Thin => {
symbol_export::crates_export_threshold(&cgcx.crate_types)
}
- LTOMode::JustThisCrate => {
- SymbolExportLevel::Rust
- }
+
+ Lto::No => panic!("didn't request LTO but we're doing LTO"),
};
let symbol_filter = &|&(ref name, _, level): &(String, _, SymbolExportLevel)| {
// We save off all the bytecode and LLVM module ids for later processing
// with either fat or thin LTO
let mut upstream_modules = Vec::new();
- if let LTOMode::WholeCrateGraph = mode {
+ if cgcx.lto != Lto::ThinLocal {
if cgcx.opts.cg.prefer_dynamic {
diag_handler.struct_err("cannot prefer dynamic linking when performing LTO")
.note("only 'staticlib', 'bin', and 'cdylib' outputs are \
}
let arr = symbol_white_list.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
- match mode {
- LTOMode::WholeCrateGraph if !cgcx.thinlto => {
+ match cgcx.lto {
+ Lto::Yes | // `-C lto` == fat LTO by default
+ Lto::Fat => {
fat_lto(cgcx, &diag_handler, modules, upstream_modules, &arr, timeline)
}
- _ => {
+ Lto::Thin |
+ Lto::ThinLocal => {
thin_lto(&diag_handler, modules, upstream_modules, &arr, timeline)
}
+ Lto::No => unreachable!(),
}
}
use back::bytecode::{self, RLIB_BYTECODE_EXTENSION};
use back::lto::{self, ModuleBuffer, ThinBuffer};
use back::link::{self, get_linker, remove};
+use back::command::Command;
use back::linker::LinkerInfo;
use back::symbol_export::ExportedSymbols;
use base;
use rustc_incremental::{save_trans_partition, in_incr_comp_dir};
use rustc::dep_graph::{DepGraph, WorkProductFileKind};
use rustc::middle::cstore::{LinkMeta, EncodedMetadata};
-use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses,
- AllPasses, Sanitizer};
+use rustc::session::config::{self, OutputFilenames, OutputType, Passes, SomePasses,
+ AllPasses, Sanitizer, Lto};
use rustc::session::Session;
use rustc::util::nodemap::FxHashMap;
use rustc_back::LinkerFlavor;
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc::ty::TyCtxt;
use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry};
-use rustc::util::fs::{link_or_copy, rename_or_copy_remove};
+use rustc::util::fs::{link_or_copy};
use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
use errors::emitter::{Emitter};
use syntax::attr;
("ropi-rwpi", llvm::RelocMode::ROPI_RWPI),
];
-pub const CODE_GEN_MODEL_ARGS : [(&'static str, llvm::CodeModel); 5] = [
- ("default", llvm::CodeModel::Default),
+pub const CODE_GEN_MODEL_ARGS: &[(&str, llvm::CodeModel)] = &[
("small", llvm::CodeModel::Small),
("kernel", llvm::CodeModel::Kernel),
("medium", llvm::CodeModel::Medium),
pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
target_machine_factory(sess)().unwrap_or_else(|err| {
- panic!(llvm_err(sess.diagnostic(), err))
+ llvm_err(sess.diagnostic(), err).raise()
})
}
let ffunction_sections = sess.target.target.options.function_sections;
let fdata_sections = ffunction_sections;
- let code_model_arg = match sess.opts.cg.code_model {
- Some(ref s) => &s,
- None => &sess.target.target.options.code_model,
- };
-
- let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
- |&&arg| arg.0 == code_model_arg) {
- Some(x) => x.1,
- _ => {
- sess.err(&format!("{:?} is not a valid code model",
- code_model_arg));
- sess.abort_if_errors();
- bug!();
+ let code_model_arg = sess.opts.cg.code_model.as_ref().or(
+ sess.target.target.options.code_model.as_ref(),
+ );
+
+ let code_model = match code_model_arg {
+ Some(s) => {
+ match CODE_GEN_MODEL_ARGS.iter().find(|arg| arg.0 == s) {
+ Some(x) => x.1,
+ _ => {
+ sess.err(&format!("{:?} is not a valid code model",
+ code_model_arg));
+ sess.abort_if_errors();
+ bug!();
+ }
+ }
}
+ None => llvm::CodeModel::None,
};
let singlethread = sess.target.target.options.singlethread;
// make the object file bitcode. Provides easy compatibility with
// emscripten's ecc compiler, when used as the linker.
obj_is_bitcode: bool,
+ no_integrated_as: bool,
}
impl ModuleConfig {
emit_asm: false,
emit_obj: false,
obj_is_bitcode: false,
+ no_integrated_as: false,
no_verify: false,
no_prepopulate_passes: false,
}
}
+/// Assembler name and command used by codegen when no_integrated_as is enabled
+struct AssemblerCommand {
+ name: PathBuf,
+ cmd: Command,
+}
+
/// Additional resources used by optimize_and_codegen (not module specific)
#[derive(Clone)]
pub struct CodegenContext {
// Resouces needed when running LTO
pub time_passes: bool,
- pub lto: bool,
- pub thinlto: bool,
+ pub lto: Lto,
pub no_landing_pads: bool,
pub save_temps: bool,
pub fewer_names: bool,
// A reference to the TimeGraph so we can register timings. None means that
// measuring is disabled.
time_graph: Option<TimeGraph>,
+ // The assembler command if no_integrated_as option is enabled, None otherwise
+ assembler_cmd: Option<Arc<AssemblerCommand>>,
}
impl CodegenContext {
TRANS_WORK_PACKAGE_KIND,
"generate lto")
}).unwrap_or(Timeline::noop());
- let mode = if cgcx.lto {
- lto::LTOMode::WholeCrateGraph
- } else {
- lto::LTOMode::JustThisCrate
- };
- let lto_modules = lto::run(cgcx, modules, mode, &mut timeline)
- .unwrap_or_else(|e| panic!(e));
+ let lto_modules = lto::run(cgcx, modules, &mut timeline)
+ .unwrap_or_else(|e| e.raise());
lto_modules.into_iter().map(|module| {
let cost = module.cost();
!cgcx.crate_types.contains(&config::CrateTypeRlib) &&
mtrans.kind == ModuleKind::Regular;
+ // If we don't have the integrated assembler, then we need to emit asm
+ // from LLVM and use `gcc` to create the object file.
+ let asm_to_obj = config.emit_obj && config.no_integrated_as;
+
// Change what we write and cleanup based on whether obj files are
// just llvm bitcode. In that case write bitcode, and possibly
// delete the bitcode if it wasn't requested. Don't generate the
// machine code, instead copy the .o file from the .bc
let write_bc = config.emit_bc || (config.obj_is_bitcode && !asm2wasm);
let rm_bc = !config.emit_bc && config.obj_is_bitcode && !asm2wasm;
- let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm;
+ let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm && !asm_to_obj;
let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode && !asm2wasm;
let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
timeline.record("ir");
}
- if config.emit_asm || (asm2wasm && config.emit_obj) {
+ if config.emit_asm || (asm2wasm && config.emit_obj) || asm_to_obj {
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
// We can't use the same module for asm and binary output, because that triggers
// various errors like invalid IR or broken binaries, so we might have to clone the
// module to produce the asm output
- let llmod = if config.emit_obj {
+ let llmod = if config.emit_obj && !asm2wasm {
llvm::LLVMCloneModule(llmod)
} else {
llmod
write_output_file(diag_handler, tm, cpm, llmod, &path,
llvm::FileType::AssemblyFile)
})?;
- if config.emit_obj {
+ if config.emit_obj && !asm2wasm {
llvm::LLVMDisposeModule(llmod);
}
timeline.record("asm");
llvm::FileType::ObjectFile)
})?;
timeline.record("obj");
+ } else if asm_to_obj {
+ let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
+ run_assembler(cgcx, diag_handler, &assembly, &obj_out);
+ timeline.record("asm_to_obj");
+
+ if !config.emit_asm && !cgcx.save_temps {
+ drop(fs::remove_file(&assembly));
+ }
}
Ok(())
total_cgus: usize)
-> OngoingCrateTranslation {
let sess = tcx.sess;
- let crate_output = tcx.output_filenames(LOCAL_CRATE);
let crate_name = tcx.crate_name(LOCAL_CRATE);
let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins");
let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs,
subsystem.to_string()
});
- let no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
- (tcx.sess.target.target.options.no_integrated_as &&
- (crate_output.outputs.contains_key(&OutputType::Object) ||
- crate_output.outputs.contains_key(&OutputType::Exe)));
let linker_info = LinkerInfo::new(tcx);
let crate_info = CrateInfo::new(tcx);
- let output_types_override = if no_integrated_as {
- OutputTypes::new(&[(OutputType::Assembly, None)])
- } else {
- sess.opts.output_types.clone()
- };
-
// Figure out what we actually need to build.
let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone());
let mut metadata_config = ModuleConfig::new(vec![]);
allocator_config.emit_bc_compressed = true;
}
- for output_type in output_types_override.keys() {
+ modules_config.no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
+ tcx.sess.target.target.options.no_integrated_as;
+
+ for output_type in sess.opts.output_types.keys() {
match *output_type {
OutputType::Bitcode => { modules_config.emit_bc = true; }
OutputType::LlvmAssembly => { modules_config.emit_ir = true; }
metadata,
windows_subsystem,
linker_info,
- no_integrated_as,
crate_info,
time_graph,
unsafe {
optimize(cgcx, &diag_handler, &mtrans, config, timeline)?;
- let lto = cgcx.lto;
+ // After we've done the initial round of optimizations we need to
+ // decide whether to synchronously codegen this module or ship it
+ // back to the coordinator thread for further LTO processing (which
+ // has to wait for all the initial modules to be optimized).
+ //
+ // Here we dispatch based on the `cgcx.lto` and kind of module we're
+ // translating...
+ let needs_lto = match cgcx.lto {
+ Lto::No => false,
+
+ // Here we've got a full crate graph LTO requested. We ignore
+ // this, however, if the crate type is only an rlib as there's
+ // no full crate graph to process, that'll happen later.
+ //
+ // This use case currently comes up primarily for targets that
+ // require LTO so the request for LTO is always unconditionally
+ // passed down to the backend, but we don't actually want to do
+ // anything about it yet until we've got a final product.
+ Lto::Yes | Lto::Fat | Lto::Thin => {
+ cgcx.crate_types.len() != 1 ||
+ cgcx.crate_types[0] != config::CrateTypeRlib
+ }
- let auto_thin_lto =
- cgcx.thinlto &&
- cgcx.total_cgus > 1 &&
- mtrans.kind != ModuleKind::Allocator;
+ // When we're automatically doing ThinLTO for multi-codegen-unit
+ // builds we don't actually want to LTO the allocator modules if
+ // it shows up. This is due to various linker shenanigans that
+ // we'll encounter later.
+ //
+ // Additionally here's where we also factor in the current LLVM
+ // version. If it doesn't support ThinLTO we skip this.
+ Lto::ThinLocal => {
+ mtrans.kind != ModuleKind::Allocator &&
+ llvm::LLVMRustThinLTOAvailable()
+ }
+ };
- // If we're a metadata module we never participate in LTO.
- //
- // If LTO was explicitly requested on the command line, we always
- // LTO everything else.
- //
- // If LTO *wasn't* explicitly requested and we're not a metdata
- // module, then we may automatically do ThinLTO if we've got
- // multiple codegen units. Note, however, that the allocator module
- // doesn't participate here automatically because of linker
- // shenanigans later on.
- if mtrans.kind == ModuleKind::Metadata || (!lto && !auto_thin_lto) {
+ // Metadata modules never participate in LTO regardless of the lto
+ // settings.
+ let needs_lto = needs_lto && mtrans.kind != ModuleKind::Metadata;
+
+ if needs_lto {
+ Ok(WorkItemResult::NeedsLTO(mtrans))
+ } else {
let module = codegen(cgcx, &diag_handler, mtrans, config, timeline)?;
Ok(WorkItemResult::Compiled(module))
- } else {
- Ok(WorkItemResult::NeedsLTO(mtrans))
}
}
}
each_linked_rlib_for_lto.push((cnum, path.to_path_buf()));
}));
- let crate_types = sess.crate_types.borrow();
- let only_rlib = crate_types.len() == 1 &&
- crate_types[0] == config::CrateTypeRlib;
-
let wasm_import_memory =
attr::contains_name(&tcx.hir.krate().attrs, "wasm_import_memory");
+ let assembler_cmd = if modules_config.no_integrated_as {
+ // HACK: currently we use linker (gcc) as our assembler
+ let (name, mut cmd, _) = get_linker(sess);
+ cmd.args(&sess.target.target.options.asm_args);
+ Some(Arc::new(AssemblerCommand {
+ name,
+ cmd,
+ }))
+ } else {
+ None
+ };
+
let cgcx = CodegenContext {
crate_types: sess.crate_types.borrow().clone(),
each_linked_rlib_for_lto,
- // If we're only building an rlibc then allow the LTO flag to be passed
- // but don't actually do anything, the full LTO will happen later
- lto: sess.lto() && !only_rlib,
-
- // Enable ThinLTO if requested, but only if the target we're compiling
- // for doesn't require full LTO. Some targets require one LLVM module
- // (they effectively don't have a linker) so it's up to us to use LTO to
- // link everything together.
- thinlto: sess.thinlto() &&
- !sess.target.target.options.requires_lto &&
- unsafe { llvm::LLVMRustThinLTOAvailable() },
-
+ lto: sess.lto(),
no_landing_pads: sess.no_landing_pads(),
fewer_names: sess.fewer_names(),
save_temps: sess.opts.cg.save_temps,
binaryen_linker: tcx.sess.linker_flavor() == LinkerFlavor::Binaryen,
debuginfo: tcx.sess.opts.debuginfo,
wasm_import_memory: wasm_import_memory,
+ assembler_cmd,
};
// This is the "main loop" of parallel work happening for parallel codegen.
});
}
-pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
- let (pname, mut cmd, _) = get_linker(sess);
-
- for arg in &sess.target.target.options.asm_args {
- cmd.arg(arg);
- }
+pub fn run_assembler(cgcx: &CodegenContext, handler: &Handler, assembly: &Path, object: &Path) {
+ let assembler = cgcx.assembler_cmd
+ .as_ref()
+ .expect("cgcx.assembler_cmd is missing?");
- cmd.arg("-c").arg("-o").arg(&outputs.path(OutputType::Object))
- .arg(&outputs.temp_path(OutputType::Assembly, None));
+ let pname = &assembler.name;
+ let mut cmd = assembler.cmd.clone();
+ cmd.arg("-c").arg("-o").arg(object).arg(assembly);
debug!("{:?}", cmd);
match cmd.output() {
let mut note = prog.stderr.clone();
note.extend_from_slice(&prog.stdout);
- sess.struct_err(&format!("linking with `{}` failed: {}",
- pname.display(),
- prog.status))
+ handler.struct_err(&format!("linking with `{}` failed: {}",
+ pname.display(),
+ prog.status))
.note(&format!("{:?}", &cmd))
.note(str::from_utf8(¬e[..]).unwrap())
.emit();
- sess.abort_if_errors();
+ handler.abort_if_errors();
}
},
Err(e) => {
- sess.err(&format!("could not exec the linker `{}`: {}", pname.display(), e));
- sess.abort_if_errors();
+ handler.err(&format!("could not exec the linker `{}`: {}", pname.display(), e));
+ handler.abort_if_errors();
}
}
}
metadata: EncodedMetadata,
windows_subsystem: Option<String>,
linker_info: LinkerInfo,
- no_integrated_as: bool,
crate_info: CrateInfo,
time_graph: Option<TimeGraph>,
coordinator_send: Sender<Box<Any + Send>>,
metadata_module: compiled_modules.metadata_module,
};
- if self.no_integrated_as {
- run_assembler(sess, &self.output_filenames);
-
- // HACK the linker expects the object file to be named foo.0.o but
- // `run_assembler` produces an object named just foo.o. Rename it if we
- // are going to build an executable
- if sess.opts.output_types.contains_key(&OutputType::Exe) {
- let f = self.output_filenames.path(OutputType::Object);
- rename_or_copy_remove(&f,
- f.with_file_name(format!("{}.0.o",
- f.file_stem().unwrap().to_string_lossy()))).unwrap();
- }
-
- // Remove assembly source, unless --save-temps was specified
- if !sess.opts.cg.save_temps {
- fs::remove_file(&self.output_filenames
- .temp_path(OutputType::Assembly, None)).unwrap();
- }
- }
-
trans
}
use std::str;
use std::sync::Arc;
use std::time::{Instant, Duration};
-use std::i32;
+use std::{i32, usize};
use std::iter;
use std::sync::mpsc;
use syntax_pos::Span;
ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module);
// We sort the codegen units by size. This way we can schedule work for LLVM
- // a bit more efficiently. Note that "size" is defined rather crudely at the
- // moment as it is just the number of TransItems in the CGU, not taking into
- // account the size of each TransItem.
+ // a bit more efficiently.
let codegen_units = {
let mut codegen_units = codegen_units;
- codegen_units.sort_by_key(|cgu| -(cgu.items().len() as isize));
+ codegen_units.sort_by_key(|cgu| usize::MAX - cgu.size_estimate());
codegen_units
};
use rustc::ty::layout::{self, TyLayout, Size};
-#[derive(Clone, Copy, PartialEq, Debug)]
+/// Classification of "eightbyte" components.
+// NB: the order of the variants is from general to specific,
+// such that `unify(a, b)` is the "smaller" of `a` and `b`.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class {
- None,
Int,
Sse,
SseUp
const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;
fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>)
- -> Result<[Class; MAX_EIGHTBYTES], Memory> {
- fn unify(cls: &mut [Class],
- off: Size,
- c: Class) {
- let i = (off.bytes() / 8) as usize;
- let to_write = match (cls[i], c) {
- (Class::None, _) => c,
- (_, Class::None) => return,
-
- (Class::Int, _) |
- (_, Class::Int) => Class::Int,
-
- (Class::Sse, _) |
- (_, Class::Sse) => Class::Sse,
-
- (Class::SseUp, Class::SseUp) => Class::SseUp
- };
- cls[i] = to_write;
- }
-
+ -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory> {
fn classify<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
layout: TyLayout<'tcx>,
- cls: &mut [Class],
+ cls: &mut [Option<Class>],
off: Size)
-> Result<(), Memory> {
if !off.is_abi_aligned(layout.align) {
return Ok(());
}
- match layout.abi {
- layout::Abi::Uninhabited => {}
+ let mut c = match layout.abi {
+ layout::Abi::Uninhabited => return Ok(()),
layout::Abi::Scalar(ref scalar) => {
- let reg = match scalar.value {
+ match scalar.value {
layout::Int(..) |
layout::Pointer => Class::Int,
layout::F32 |
layout::F64 => Class::Sse
- };
- unify(cls, off, reg);
- }
-
- layout::Abi::Vector { ref element, count } => {
- unify(cls, off, Class::Sse);
-
- // everything after the first one is the upper
- // half of a register.
- let stride = element.value.size(cx);
- for i in 1..count {
- let field_off = off + stride * i;
- unify(cls, field_off, Class::SseUp);
}
}
+ layout::Abi::Vector { .. } => Class::Sse,
+
layout::Abi::ScalarPair(..) |
layout::Abi::Aggregate { .. } => {
match layout.variants {
let field_off = off + layout.fields.offset(i);
classify(cx, layout.field(cx, i), cls, field_off)?;
}
+ return Ok(());
}
layout::Variants::Tagged { .. } |
layout::Variants::NicheFilling { .. } => return Err(Memory),
}
}
+ };
+
+ // Fill in `cls` for scalars (Int/Sse) and vectors (Sse).
+ let first = (off.bytes() / 8) as usize;
+ let last = ((off.bytes() + layout.size.bytes() - 1) / 8) as usize;
+ for cls in &mut cls[first..=last] {
+ *cls = Some(cls.map_or(c, |old| old.min(c)));
+
+ // Everything after the first Sse "eightbyte"
+ // component is the upper half of a register.
+ if c == Class::Sse {
+ c = Class::SseUp;
+ }
}
Ok(())
return Err(Memory);
}
- let mut cls = [Class::None; MAX_EIGHTBYTES];
+ let mut cls = [None; MAX_EIGHTBYTES];
classify(cx, arg.layout, &mut cls, Size::from_bytes(0))?;
if n > 2 {
- if cls[0] != Class::Sse {
+ if cls[0] != Some(Class::Sse) {
return Err(Memory);
}
- if cls[1..n].iter().any(|&c| c != Class::SseUp) {
+ if cls[1..n].iter().any(|&c| c != Some(Class::SseUp)) {
return Err(Memory);
}
} else {
let mut i = 0;
while i < n {
- if cls[i] == Class::SseUp {
- cls[i] = Class::Sse;
- } else if cls[i] == Class::Sse {
+ if cls[i] == Some(Class::SseUp) {
+ cls[i] = Some(Class::Sse);
+ } else if cls[i] == Some(Class::Sse) {
i += 1;
- while i != n && cls[i] == Class::SseUp { i += 1; }
+ while i != n && cls[i] == Some(Class::SseUp) { i += 1; }
} else {
i += 1;
}
Ok(cls)
}
-fn reg_component(cls: &[Class], i: &mut usize, size: Size) -> Option<Reg> {
+fn reg_component(cls: &[Option<Class>], i: &mut usize, size: Size) -> Option<Reg> {
if *i >= cls.len() {
return None;
}
match cls[*i] {
- Class::None => None,
- Class::Int => {
+ None => None,
+ Some(Class::Int) => {
*i += 1;
Some(match size.bytes() {
1 => Reg::i8(),
_ => Reg::i64()
})
}
- Class::Sse => {
- let vec_len = 1 + cls[*i+1..].iter().take_while(|&&c| c == Class::SseUp).count();
+ Some(Class::Sse) => {
+ let vec_len = 1 + cls[*i+1..].iter()
+ .take_while(|&&c| c == Some(Class::SseUp))
+ .count();
*i += vec_len;
Some(if vec_len == 1 {
match size.bytes() {
}
})
}
- c => bug!("reg_component: unhandled class {:?}", c)
+ Some(c) => bug!("reg_component: unhandled class {:?}", c)
}
}
-fn cast_target(cls: &[Class], size: Size) -> CastTarget {
+fn cast_target(cls: &[Option<Class>], size: Size) -> CastTarget {
let mut i = 0;
let lo = reg_component(cls, &mut i, size).unwrap();
let offset = Size::from_bytes(8) * (i as u64);
- let target = if size <= offset {
- CastTarget::from(lo)
- } else {
- let hi = reg_component(cls, &mut i, size - offset).unwrap();
- CastTarget::Pair(lo, hi)
- };
+ let mut target = CastTarget::from(lo);
+ if size > offset {
+ if let Some(hi) = reg_component(cls, &mut i, size - offset) {
+ target = CastTarget::Pair(lo, hi);
+ }
+ }
assert_eq!(reg_component(cls, &mut i, Size::from_bytes(0)), None);
target
}
let mut sse_regs = 8; // XMM0-7
let mut x86_64_ty = |arg: &mut ArgType<'tcx>, is_arg: bool| {
- let cls = classify_arg(cx, arg);
+ let mut cls_or_mem = classify_arg(cx, arg);
let mut needed_int = 0;
let mut needed_sse = 0;
- let in_mem = match cls {
- Err(Memory) => true,
- Ok(ref cls) if is_arg => {
- for &c in cls {
+ if is_arg {
+ if let Ok(cls) = cls_or_mem {
+ for &c in &cls {
match c {
- Class::Int => needed_int += 1,
- Class::Sse => needed_sse += 1,
+ Some(Class::Int) => needed_int += 1,
+ Some(Class::Sse) => needed_sse += 1,
_ => {}
}
}
- arg.layout.is_aggregate() &&
- (int_regs < needed_int || sse_regs < needed_sse)
+ if arg.layout.is_aggregate() {
+ if int_regs < needed_int || sse_regs < needed_sse {
+ cls_or_mem = Err(Memory);
+ }
+ }
}
- Ok(_) => false
- };
+ }
- if in_mem {
- if is_arg {
- arg.make_indirect_byval();
- } else {
- // `sret` parameter thus one less integer register available
- arg.make_indirect();
- int_regs -= 1;
+ match cls_or_mem {
+ Err(Memory) => {
+ if is_arg {
+ arg.make_indirect_byval();
+ } else {
+ // `sret` parameter thus one less integer register available
+ arg.make_indirect();
+ int_regs -= 1;
+ }
}
- } else {
- // split into sized chunks passed individually
- int_regs -= needed_int;
- sse_regs -= needed_sse;
-
- if arg.layout.is_aggregate() {
- let size = arg.layout.size;
- arg.cast_to(cast_target(cls.as_ref().unwrap(), size))
- } else {
- arg.extend_integer_width_to(32);
+ Ok(ref cls) => {
+ // split into sized chunks passed individually
+ int_regs -= needed_int;
+ sse_regs -= needed_sse;
+
+ if arg.layout.is_aggregate() {
+ let size = arg.layout.size;
+ arg.cast_to(cast_target(cls, size))
+ } else {
+ arg.extend_integer_width_to(32);
+ }
}
}
};
}
None => {}
};
+ if sig.output().is_never() {
+ flags = flags | DIFlags::FlagNoReturn;
+ }
let fn_metadata = unsafe {
llvm::LLVMRustDIBuilderCreateFunction(
ty::TyInfer(_) |
ty::TyProjection(..) |
ty::TyAnon(..) |
+ ty::TyGeneratorWitness(..) |
ty::TyParam(_) => {
bug!("debuginfo: Trying to create type name for \
unexpected type: {:?}", t);
let item_id = tcx.hir.get_parent_node(node_id);
let item_def_id = tcx.hir.local_def_id(item_id);
let generics = tcx.generics_of(item_def_id);
- let index = generics.type_param_to_index[&tcx.hir.local_def_id(node_id).index];
+ let index = generics.type_param_to_index[&tcx.hir.local_def_id(node_id)];
tcx.mk_param(index, tcx.hir.name(node_id))
}
Def::SelfTy(_, Some(def_id)) => {
let output = bare_fn_ty.output();
let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output);
for br in late_bound_in_ret.difference(&late_bound_in_args) {
- let br_name = match *br {
- ty::BrNamed(_, name) => name,
- _ => {
- span_bug!(
- decl.output.span(),
- "anonymous bound region {:?} in return but not args",
- br);
- }
+ let lifetime_name = match *br {
+ ty::BrNamed(_, name) => format!("lifetime `{}`,", name),
+ ty::BrAnon(_) | ty::BrFresh(_) | ty::BrEnv => format!("an anonymous lifetime"),
};
- struct_span_err!(tcx.sess,
- decl.output.span(),
- E0581,
- "return type references lifetime `{}`, \
- which does not appear in the fn input types",
- br_name)
- .emit();
+ let mut err = struct_span_err!(tcx.sess,
+ decl.output.span(),
+ E0581,
+ "return type references {} \
+ which is not constrained by the fn input types",
+ lifetime_name);
+ if let ty::BrAnon(_) = *br {
+ // The only way for an anonymous lifetime to wind up
+ // in the return type but **also** be unconstrained is
+ // if it only appears in "associated types" in the
+ // input. See #47511 for an example. In this case,
+ // though we can easily give a hint that ought to be
+ // relevant.
+ err.note("lifetimes appearing in an associated type \
+ are not considered constrained");
+ }
+ err.emit();
}
bare_fn_ty
ty::TyInfer(_) => None,
ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
- ty::TyFloat(_) | ty::TyArray(..) |
+ ty::TyFloat(_) | ty::TyArray(..) | ty::TyGeneratorWitness(..) |
ty::TyRawPtr(_) | ty::TyRef(..) | ty::TyFnDef(..) |
ty::TyFnPtr(..) | ty::TyClosure(..) | ty::TyGenerator(..) |
ty::TyAdt(..) | ty::TyNever | ty::TyError => {
.emit();
}
CastError::SizedUnsizedCast => {
- type_error_struct!(fcx.tcx.sess, self.span, self.expr_ty, E0607,
- "cannot cast thin pointer `{}` to fat pointer `{}`",
- self.expr_ty,
- fcx.ty_to_string(self.cast_ty)).emit();
+ use structured_errors::{SizedUnsizedCastError, StructuredDiagnostic};
+ SizedUnsizedCastError::new(&fcx.tcx.sess,
+ self.span,
+ self.expr_ty,
+ fcx.ty_to_string(self.cast_ty))
+ .diagnostic().emit();
}
CastError::UnknownCastPtrKind |
CastError::UnknownExprPtrKind => {
_capture: hir::CaptureClause,
decl: &'gcx hir::FnDecl,
body_id: hir::BodyId,
+ gen: Option<hir::GeneratorMovability>,
expected: Expectation<'tcx>,
) -> Ty<'tcx> {
debug!(
None => (None, None),
};
let body = self.tcx.hir.body(body_id);
- self.check_closure(expr, expected_kind, decl, body, expected_sig)
+ self.check_closure(expr, expected_kind, decl, body, gen, expected_sig)
}
fn check_closure(
opt_kind: Option<ty::ClosureKind>,
decl: &'gcx hir::FnDecl,
body: &'gcx hir::Body,
+ gen: Option<hir::GeneratorMovability>,
expected_sig: Option<ty::FnSig<'tcx>>,
) -> Ty<'tcx> {
debug!(
decl,
expr.id,
body,
- true,
+ gen,
).1;
// Create type variables (for now) to represent the transformed
use rustc::traits::ObligationCause;
use syntax::ast;
-use syntax::util::parser::AssocOp;
+use syntax::util::parser::PREC_POSTFIX;
use syntax_pos::{self, Span};
use rustc::hir;
use rustc::hir::print;
// For now, don't suggest casting with `as`.
let can_cast = false;
- let needs_paren = expr.precedence().order() < (AssocOp::As.precedence() as i8);
+ let needs_paren = expr.precedence().order() < (PREC_POSTFIX as i8);
if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
let msg = format!("you can cast an `{}` to `{}`", checked_ty, expected_ty);
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::hir::{self, Pat, PatKind, Expr};
use rustc::middle::region;
-use rustc::ty::Ty;
+use rustc::ty::{self, Ty, GeneratorInterior};
use std::rc::Rc;
+use syntax_pos::Span;
use super::FnCtxt;
use util::nodemap::FxHashMap;
}
impl<'a, 'gcx, 'tcx> InteriorVisitor<'a, 'gcx, 'tcx> {
- fn record(&mut self, ty: Ty<'tcx>, scope: Option<region::Scope>, expr: Option<&'tcx Expr>) {
+ fn record(&mut self,
+ ty: Ty<'tcx>,
+ scope: Option<region::Scope>,
+ expr: Option<&'tcx Expr>,
+ source_span: Span) {
use syntax_pos::DUMMY_SP;
let live_across_yield = scope.map_or(Some(DUMMY_SP), |s| {
- self.region_scope_tree.yield_in_scope(s).and_then(|(span, expr_count)| {
+ self.region_scope_tree.yield_in_scope(s).and_then(|(yield_span, expr_count)| {
// If we are recording an expression that is the last yield
// in the scope, or that has a postorder CFG index larger
// than the one of all of the yields, then its value can't
// be storage-live (and therefore live) at any of the yields.
//
// See the mega-comment at `yield_in_scope` for a proof.
+
+ debug!("comparing counts yield: {} self: {}, source_span = {:?}",
+ expr_count, self.expr_count, source_span);
+
if expr_count >= self.expr_count {
- Some(span)
+ Some(yield_span)
} else {
None
}
})
});
- if let Some(span) = live_across_yield {
+ if let Some(yield_span) = live_across_yield {
let ty = self.fcx.resolve_type_vars_if_possible(&ty);
- debug!("type in expr = {:?}, scope = {:?}, type = {:?}, span = {:?}",
- expr, scope, ty, span);
-
- // Map the type to the number of types added before it
- let entries = self.types.len();
- self.types.entry(&ty).or_insert(entries);
+ debug!("type in expr = {:?}, scope = {:?}, type = {:?}, count = {}, yield_span = {:?}",
+ expr, scope, ty, self.expr_count, yield_span);
+
+ if self.fcx.any_unresolved_type_vars(&ty) {
+ let mut err = struct_span_err!(self.fcx.tcx.sess, source_span, E0907,
+ "type inside generator must be known in this context");
+ err.span_note(yield_span,
+ "the type is part of the generator because of this `yield`");
+ err.emit();
+ } else {
+ // Map the type to the number of types added before it
+ let entries = self.types.len();
+ self.types.entry(&ty).or_insert(entries);
+ }
} else {
- debug!("no type in expr = {:?}, span = {:?}", expr, expr.map(|e| e.span));
+ debug!("no type in expr = {:?}, count = {:?}, span = {:?}",
+ expr, self.expr_count, expr.map(|e| e.span));
}
}
}
pub fn resolve_interior<'a, 'gcx, 'tcx>(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
def_id: DefId,
body_id: hir::BodyId,
- witness: Ty<'tcx>) {
+ interior: GeneratorInterior<'tcx>) {
let body = fcx.tcx.hir.body(body_id);
let mut visitor = InteriorVisitor {
fcx,
types.sort_by_key(|t| t.1);
// Extract type components
- let types: Vec<_> = types.into_iter().map(|t| t.0).collect();
-
- let tuple = fcx.tcx.intern_tup(&types, false);
-
- debug!("Types in generator {:?}, span = {:?}", tuple, body.value.span);
-
- // Unify the tuple with the witness
- match fcx.at(&fcx.misc(body.value.span), fcx.param_env).eq(witness, tuple) {
+ let type_list = fcx.tcx.mk_type_list(types.into_iter().map(|t| t.0));
+
+ // The types in the generator interior contain lifetimes local to the generator itself,
+ // which should not be exposed outside of the generator. Therefore, we replace these
+ // lifetimes with existentially-bound lifetimes, which reflect the exact value of the
+ // lifetimes not being known by users.
+ //
+ // These lifetimes are used in auto trait impl checking (for example,
+ // if a Sync generator contains an &'α T, we need to check whether &'α T: Sync),
+ // so knowledge of the exact relationships between them isn't particularly important.
+
+ debug!("Types in generator {:?}, span = {:?}", type_list, body.value.span);
+
+ // Replace all regions inside the generator interior with late bound regions
+ // Note that each region slot in the types gets a new fresh late bound region,
+ // which means that none of the regions inside relate to any other, even if
+ // typeck had previously found contraints that would cause them to be related.
+ let mut counter = 0;
+ let type_list = fcx.tcx.fold_regions(&type_list, &mut false, |_, current_depth| {
+ counter += 1;
+ fcx.tcx.mk_region(ty::ReLateBound(ty::DebruijnIndex::new(current_depth),
+ ty::BrAnon(counter)))
+ });
+
+ let witness = fcx.tcx.mk_generator_witness(ty::Binder(type_list));
+
+ debug!("Types in generator after region replacement {:?}, span = {:?}",
+ witness, body.value.span);
+
+ // Unify the type variable inside the generator with the new witness
+ match fcx.at(&fcx.misc(body.value.span), fcx.param_env).eq(interior.witness, witness) {
Ok(ok) => fcx.register_infer_ok_obligations(ok),
_ => bug!(),
- }
+ }
}
// This visitor has to have the same visit_expr calls as RegionResolutionVisitor in
if let PatKind::Binding(..) = pat.node {
let scope = self.region_scope_tree.var_scope(pat.hir_id.local_id);
let ty = self.fcx.tables.borrow().pat_ty(pat);
- self.record(ty, Some(scope), None);
+ self.record(ty, Some(scope), None, pat.span);
}
self.expr_count += 1;
let scope = self.region_scope_tree.temporary_scope(expr.hir_id.local_id);
let ty = self.fcx.tables.borrow().expr_ty_adjusted(expr);
- self.record(ty, scope, Some(expr));
+ self.record(ty, scope, Some(expr), expr.span);
}
}
// this isn't perfect (that is, there are cases when
// implementing a trait would be legal but is rejected
// here).
- (type_is_local || info.def_id.is_local())
- && self.associated_item(info.def_id, item_name, Namespace::Value).is_some()
+ (type_is_local || info.def_id.is_local()) &&
+ self.associated_item(info.def_id, item_name, Namespace::Value)
+ .filter(|item| {
+ // We only want to suggest public or local traits (#45781).
+ item.vis == ty::Visibility::Public || info.def_id.is_local()
+ })
+ .is_some()
})
.collect::<Vec<_>>();
use rustc::ty::util::{Representability, IntTypeExt};
use rustc::ty::layout::LayoutOf;
use errors::{DiagnosticBuilder, DiagnosticId};
+
use require_c_abi_if_variadic;
use session::{CompileIncomplete, config, Session};
use TypeAndSubsts;
deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
- deferred_generator_interiors: RefCell<Vec<(hir::BodyId, Ty<'tcx>)>>,
+ deferred_generator_interiors: RefCell<Vec<(hir::BodyId, ty::GeneratorInterior<'tcx>)>>,
// Anonymized types found in explicit return types and their
// associated fresh inference variable. Writeback resolves these
param_env,
&fn_sig);
- let fcx = check_fn(&inh, param_env, fn_sig, decl, id, body, false).0;
+ let fcx = check_fn(&inh, param_env, fn_sig, decl, id, body, None).0;
fcx
} else {
let fcx = FnCtxt::new(&inh, param_env, body.value.id);
decl: &'gcx hir::FnDecl,
fn_id: ast::NodeId,
body: &'gcx hir::Body,
- can_be_generator: bool)
+ can_be_generator: Option<hir::GeneratorMovability>)
-> (FnCtxt<'a, 'gcx, 'tcx>, Option<GeneratorTypes<'tcx>>)
{
let mut fn_sig = fn_sig.clone();
let span = body.value.span;
- if body.is_generator && can_be_generator {
+ if body.is_generator && can_be_generator.is_some() {
fcx.yield_ty = Some(fcx.next_ty_var(TypeVariableOrigin::TypeInference(span)));
}
}
let fn_hir_id = fcx.tcx.hir.node_to_hir_id(fn_id);
- let gen_ty = if can_be_generator && body.is_generator {
+ inherited.tables.borrow_mut().liberated_fn_sigs_mut().insert(fn_hir_id, fn_sig);
+
+ fcx.check_return_expr(&body.value);
+
+ // We insert the deferred_generator_interiors entry after visiting the body.
+ // This ensures that all nested generators appear before the entry of this generator.
+ // resolve_generator_interiors relies on this property.
+ let gen_ty = if can_be_generator.is_some() && body.is_generator {
let witness = fcx.next_ty_var(TypeVariableOrigin::MiscVariable(span));
- fcx.deferred_generator_interiors.borrow_mut().push((body.id(), witness));
- let interior = ty::GeneratorInterior::new(witness);
+ let interior = ty::GeneratorInterior {
+ witness,
+ movable: can_be_generator.unwrap() == hir::GeneratorMovability::Movable,
+ };
+ fcx.deferred_generator_interiors.borrow_mut().push((body.id(), interior));
Some(GeneratorTypes { yield_ty: fcx.yield_ty.unwrap(), interior: interior })
} else {
None
};
- inherited.tables.borrow_mut().liberated_fn_sigs_mut().insert(fn_hir_id, fn_sig);
-
- fcx.check_return_expr(&body.value);
// Finalize the return check by taking the LUB of the return types
// we saw and assigning it to the expected return type. This isn't
let item_id = tcx.hir.ty_param_owner(node_id);
let item_def_id = tcx.hir.local_def_id(item_id);
let generics = tcx.generics_of(item_def_id);
- let index = generics.type_param_to_index[&def_id.index];
+ let index = generics.type_param_to_index[&def_id];
ty::GenericPredicates {
parent: None,
predicates: self.param_env.caller_bounds.iter().filter(|predicate| {
}
fn resolve_generator_interiors(&self, def_id: DefId) {
- let mut deferred_generator_interiors = self.deferred_generator_interiors.borrow_mut();
- for (body_id, witness) in deferred_generator_interiors.drain(..) {
- generator_interior::resolve_interior(self, def_id, body_id, witness);
+ let mut generators = self.deferred_generator_interiors.borrow_mut();
+ for (body_id, interior) in generators.drain(..) {
+ self.select_obligations_where_possible();
+ generator_interior::resolve_interior(self, def_id, body_id, interior);
}
}
// arguments which we skipped above.
if variadic {
fn variadic_error<'tcx>(s: &Session, span: Span, t: Ty<'tcx>, cast_ty: &str) {
- type_error_struct!(s, span, t, E0617,
- "can't pass `{}` to variadic function, cast to `{}`",
- t, cast_ty).emit();
+ use structured_errors::{VariadicError, StructuredDiagnostic};
+ VariadicError::new(s, span, t, cast_ty).diagnostic().emit();
}
for arg in args.iter().skip(expected_arg_count) {
hir::ExprMatch(ref discrim, ref arms, match_src) => {
self.check_match(expr, &discrim, arms, expected, match_src)
}
- hir::ExprClosure(capture, ref decl, body_id, _, _) => {
- self.check_expr_closure(expr, capture, &decl, body_id, expected)
+ hir::ExprClosure(capture, ref decl, body_id, _, gen) => {
+ self.check_expr_closure(expr, capture, &decl, body_id, gen, expected)
}
hir::ExprBlock(ref body) => {
self.check_block_with_expected(&body, expected)
fn visit_expr(&mut self, expr: &'gcx hir::Expr) {
match expr.node {
- hir::ExprClosure(cc, _, body_id, _, is_generator) => {
+ hir::ExprClosure(cc, _, body_id, _, gen) => {
let body = self.fcx.tcx.hir.body(body_id);
self.visit_body(body);
self.fcx
- .analyze_closure(expr.id, expr.hir_id, expr.span, body, cc, is_generator);
+ .analyze_closure(expr.id, expr.hir_id, expr.span, body, cc, gen);
}
_ => {}
span: Span,
body: &hir::Body,
capture_clause: hir::CaptureClause,
- is_generator: bool,
+ gen: Option<hir::GeneratorMovability>,
) {
/*!
* Analysis starting point.
}
};
- let infer_kind = if is_generator {
+ let infer_kind = if gen.is_some() {
false
} else {
self.closure_kind(closure_def_id, closure_substs).is_none()
use rustc_const_math::ConstInt;
-use std::collections::BTreeMap;
-
use syntax::{abi, ast};
use syntax::codemap::Spanned;
use syntax::symbol::{Symbol, keywords};
let param_owner = tcx.hir.ty_param_owner(param_id);
let param_owner_def_id = tcx.hir.local_def_id(param_owner);
let generics = tcx.generics_of(param_owner_def_id);
- let index = generics.type_param_to_index[&def_id.index];
+ let index = generics.type_param_to_index[&def_id];
let ty = tcx.mk_param(index, tcx.hir.ty_param_name(param_id));
// Don't look for bounds where the type parameter isn't in scope.
});
}
- let mut type_param_to_index = BTreeMap::new();
- for param in &types {
- type_param_to_index.insert(param.def_id.index, param.index);
- }
+ let type_param_to_index = types.iter()
+ .map(|param| (param.def_id, param.index))
+ .collect();
tcx.alloc_generics(ty::Generics {
parent: parent_def_id,
NodeField(field) => icx.to_ty(&field.ty),
- NodeExpr(&hir::Expr { node: hir::ExprClosure(.., is_generator), .. }) => {
- if is_generator {
+ NodeExpr(&hir::Expr { node: hir::ExprClosure(.., gen), .. }) => {
+ if gen.is_some() {
let hir_id = tcx.hir.node_to_hir_id(node_id);
return tcx.typeck_tables_of(def_id).node_id_to_type(hir_id);
}
// argument position.
E0641, // cannot cast to/from a pointer with an unknown kind
E0645, // trait aliases not finished
+ E0907, // type inside generator must be known in this context
}
#![feature(advanced_slice_patterns)]
#![feature(box_patterns)]
#![feature(box_syntax)]
-#![feature(crate_visibility_modifier)]
#![feature(conservative_impl_trait)]
#![feature(copy_closures, clone_closures)]
+#![feature(crate_visibility_modifier)]
#![feature(from_ref)]
#![feature(match_default_bindings)]
#![feature(never_type)]
+#![feature(option_filter)]
#![feature(quote)]
#![feature(refcell_replace_swap)]
#![feature(rustc_diagnostic_macros)]
// registered before they are used.
mod diagnostics;
+mod astconv;
mod check;
mod check_unused;
-mod astconv;
+mod coherence;
mod collect;
mod constrained_type_params;
+mod structured_errors;
mod impl_wf_check;
-mod coherence;
+mod namespace;
mod outlives;
mod variance;
-mod namespace;
pub struct TypeAndSubsts<'tcx> {
substs: &'tcx Substs<'tcx>,
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::session::Session;
+use syntax_pos::Span;
+use errors::{DiagnosticId, DiagnosticBuilder};
+use rustc::ty::{Ty, TypeFoldable};
+
+pub trait StructuredDiagnostic<'tcx> {
+ fn session(&self) -> &Session;
+
+ fn code(&self) -> DiagnosticId;
+
+ fn common(&self) -> DiagnosticBuilder<'tcx>;
+
+ fn diagnostic(&self) -> DiagnosticBuilder<'tcx> {
+ let err = self.common();
+ if self.session().teach(&self.code()) {
+ self.extended(err)
+ } else {
+ self.regular(err)
+ }
+ }
+
+ fn regular(&self, err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> {
+ err
+ }
+
+ fn extended(&self, err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> {
+ err
+ }
+}
+
+pub struct VariadicError<'tcx> {
+ sess: &'tcx Session,
+ span: Span,
+ t: Ty<'tcx>,
+ cast_ty: &'tcx str,
+}
+
+impl<'tcx> VariadicError<'tcx> {
+ pub fn new(sess: &'tcx Session,
+ span: Span,
+ t: Ty<'tcx>,
+ cast_ty: &'tcx str) -> VariadicError<'tcx> {
+ VariadicError { sess, span, t, cast_ty }
+ }
+}
+
+impl<'tcx> StructuredDiagnostic<'tcx> for VariadicError<'tcx> {
+ fn session(&self) -> &Session { self.sess }
+
+ fn code(&self) -> DiagnosticId {
+ __diagnostic_used!(E0617);
+ DiagnosticId::Error("E0617".to_owned())
+ }
+
+ fn common(&self) -> DiagnosticBuilder<'tcx> {
+ let mut err = if self.t.references_error() {
+ self.sess.diagnostic().struct_dummy()
+ } else {
+ self.sess.struct_span_fatal_with_code(
+ self.span,
+ &format!("can't pass `{}` to variadic function", self.t),
+ self.code(),
+ )
+ };
+ if let Ok(snippet) = self.sess.codemap().span_to_snippet(self.span) {
+ err.span_suggestion(self.span,
+ &format!("cast the value to `{}`", self.cast_ty),
+ format!("{} as {}", snippet, self.cast_ty));
+ } else {
+ err.help(&format!("cast the value to `{}`", self.cast_ty));
+ }
+ err
+ }
+
+ fn extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> {
+ err.note(&format!("certain types, like `{}`, must be cast before passing them to a \
+ variadic function, because of arcane ABI rules dictated by the C \
+ standard",
+ self.t));
+ err
+ }
+}
+
+pub struct SizedUnsizedCastError<'tcx> {
+ sess: &'tcx Session,
+ span: Span,
+ expr_ty: Ty<'tcx>,
+ cast_ty: String,
+}
+
+impl<'tcx> SizedUnsizedCastError<'tcx> {
+ pub fn new(sess: &'tcx Session,
+ span: Span,
+ expr_ty: Ty<'tcx>,
+ cast_ty: String) -> SizedUnsizedCastError<'tcx> {
+ SizedUnsizedCastError { sess, span, expr_ty, cast_ty }
+ }
+}
+
+impl<'tcx> StructuredDiagnostic<'tcx> for SizedUnsizedCastError<'tcx> {
+ fn session(&self) -> &Session { self.sess }
+
+ fn code(&self) -> DiagnosticId {
+ __diagnostic_used!(E0607);
+ DiagnosticId::Error("E0607".to_owned())
+ }
+
+ fn common(&self) -> DiagnosticBuilder<'tcx> {
+ if self.expr_ty.references_error() {
+ self.sess.diagnostic().struct_dummy()
+ } else {
+ self.sess.struct_span_fatal_with_code(
+ self.span,
+ &format!("cannot cast thin pointer `{}` to fat pointer `{}`",
+ self.expr_ty,
+ self.cast_ty),
+ self.code(),
+ )
+ }
+ }
+
+ fn extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> {
+ err.help(
+ "Thin pointers are \"simple\" pointers: they are purely a reference to a
+memory address.
+
+Fat pointers are pointers referencing \"Dynamically Sized Types\" (also
+called DST). DST don't have a statically known size, therefore they can
+only exist behind some kind of pointers that contain additional
+information. Slices and trait objects are DSTs. In the case of slices,
+the additional information the fat pointer holds is their size.
+
+To fix this error, don't try to cast directly between thin and fat
+pointers.
+
+For more information about casts, take a look at The Book:
+https://doc.rust-lang.org/book/first-edition/casting-between-types.html");
+ err
+ }
+}
//! We walk the set of items and, for each member, generate new constraints.
use hir::def_id::DefId;
-use rustc::dep_graph::{DepGraphSafe, DepKind, DepNodeColor};
-use rustc::ich::StableHashingContext;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use syntax::ast;
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use rustc_data_structures::stable_hasher::StableHashingContextProvider;
-
use super::terms::*;
use super::terms::VarianceTerm::*;
}
}
-impl<'a, 'tcx> StableHashingContextProvider for ConstraintContext<'a, 'tcx> {
- type ContextType = StableHashingContext<'tcx>;
-
- fn create_stable_hashing_context(&self) -> Self::ContextType {
- self.terms_cx.tcx.create_stable_hashing_context()
- }
-}
-
-impl<'a, 'tcx> DepGraphSafe for ConstraintContext<'a, 'tcx> {}
-
impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
fn visit_node_helper(&mut self, id: ast::NodeId) {
let tcx = self.terms_cx.tcx;
let def_id = tcx.hir.local_def_id(id);
-
- // Encapsulate constructing the constraints into a task we can
- // reference later. This can go away once the red-green
- // algorithm is in place.
- //
- // See README.md for a detailed discussion
- // on dep-graph management.
- let dep_node = def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints);
-
- if let Some(DepNodeColor::Green(_)) = tcx.dep_graph.node_color(&dep_node) {
- // If the corresponding node has already been marked as green, the
- // appropriate portion of the DepGraph has already been loaded from
- // the previous graph, so we don't do any dep-tracking. Since we
- // don't cache any values though, we still have to re-run the
- // computation.
- tcx.dep_graph.with_ignore(|| {
- self.build_constraints_for_item(def_id);
- });
- } else {
- tcx.dep_graph.with_task(dep_node,
- self,
- def_id,
- visit_item_task);
- }
-
- fn visit_item_task<'a, 'tcx>(ccx: &mut ConstraintContext<'a, 'tcx>,
- def_id: DefId)
- {
- ccx.build_constraints_for_item(def_id);
- }
+ self.build_constraints_for_item(def_id);
}
fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
// types, where we use TyError as the Self type
}
+ ty::TyGeneratorWitness(..) |
ty::TyInfer(..) => {
bug!("unexpected type encountered in \
variance inference: {}",
//! parameters. See README.md for details.
use arena;
-use rustc::dep_graph::DepKind;
use rustc::hir;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::ty::{self, CrateVariancesMap, TyCtxt};
// Everything else must be inferred.
let crate_map = tcx.crate_variances(LOCAL_CRATE);
- let dep_node = item_def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints);
- tcx.dep_graph.read(dep_node);
-
crate_map.variances.get(&item_def_id)
.unwrap_or(&crate_map.empty_variance)
.clone()
pub other_attrs: Vec<ast::Attribute>,
pub cfg: Option<Rc<Cfg>>,
pub span: Option<syntax_pos::Span>,
- pub links: Vec<(String, DefId)>,
+ /// map from Rust paths to resolved defs and potential URL fragments
+ pub links: Vec<(String, DefId, Option<String>)>,
}
impl Attributes {
/// Cache must be populated before call
pub fn links(&self) -> Vec<(String, String)> {
use html::format::href;
- self.links.iter().filter_map(|&(ref s, did)| {
- if let Some((href, ..)) = href(did) {
+ self.links.iter().filter_map(|&(ref s, did, ref fragment)| {
+ if let Some((mut href, ..)) = href(did) {
+ if let Some(ref fragment) = *fragment {
+ href.push_str("#");
+ href.push_str(fragment);
+ }
Some((s.clone(), href))
} else {
None
/// they exist in both namespaces (structs and modules)
fn value_ns_kind(def: Def, path_str: &str) -> Option<(&'static str, String)> {
match def {
- // structs and mods exist in both namespaces. skip them
- Def::StructCtor(..) | Def::Mod(..) => None,
- Def::Variant(..) | Def::VariantCtor(..)
- => Some(("variant", format!("{}()", path_str))),
+ // structs, variants, and mods exist in both namespaces. skip them
+ Def::StructCtor(..) | Def::Mod(..) | Def::Variant(..) | Def::VariantCtor(..) => None,
Def::Fn(..)
=> Some(("function", format!("{}()", path_str))),
Def::Method(..)
let sp = attrs.doc_strings.first()
.map_or(DUMMY_SP, |a| a.span());
cx.sess()
- .struct_span_err(sp,
- &format!("`{}` is both {} {} and {} {}",
- path_str, article1, kind1,
- article2, kind2))
+ .struct_span_warn(sp,
+ &format!("`{}` is both {} {} and {} {}",
+ path_str, article1, kind1,
+ article2, kind2))
.help(&format!("try `{}` if you want to select the {}, \
or `{}` if you want to \
select the {}",
.emit();
}
+/// Given an enum variant's def, return the def of its enum and the associated fragment
+fn handle_variant(cx: &DocContext, def: Def) -> Result<(Def, Option<String>), ()> {
+ use rustc::ty::DefIdTree;
+
+ let parent = if let Some(parent) = cx.tcx.parent(def.def_id()) {
+ parent
+ } else {
+ return Err(())
+ };
+ let parent_def = Def::Enum(parent);
+ let variant = cx.tcx.expect_variant_def(def);
+ Ok((parent_def, Some(format!("{}.v", variant.name))))
+}
+
/// Resolve a given string as a path, along with whether or not it is
-/// in the value namespace
-fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<hir::Path, ()> {
+/// in the value namespace. Also returns an optional URL fragment in the case
+/// of variants and methods
+fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<(Def, Option<String>), ()> {
// In case we're in a module, try to resolve the relative
// path
if let Some(id) = cx.mod_ids.borrow().last() {
- cx.resolver.borrow_mut()
- .with_scope(*id, |resolver| {
- resolver.resolve_str_path_error(DUMMY_SP,
- &path_str, is_val)
- })
+ let result = cx.resolver.borrow_mut()
+ .with_scope(*id,
+ |resolver| {
+ resolver.resolve_str_path_error(DUMMY_SP,
+ &path_str, is_val)
+ });
+
+ if let Ok(result) = result {
+ // In case this is a trait item, skip the
+ // early return and try looking for the trait
+ let value = match result.def {
+ Def::Method(_) | Def::AssociatedConst(_) => true,
+ Def::AssociatedTy(_) => false,
+ Def::Variant(_) => return handle_variant(cx, result.def),
+ // not a trait item, just return what we found
+ _ => return Ok((result.def, None))
+ };
+
+ if value != is_val {
+ return Err(())
+ }
+ } else {
+ // If resolution failed, it may still be a method
+ // because methods are not handled by the resolver
+ // If so, bail when we're not looking for a value
+ if !is_val {
+ return Err(())
+ }
+ }
+
+ // Try looking for methods and associated items
+ let mut split = path_str.rsplitn(2, "::");
+ let mut item_name = if let Some(first) = split.next() {
+ first
+ } else {
+ return Err(())
+ };
+
+ let mut path = if let Some(second) = split.next() {
+ second
+ } else {
+ return Err(())
+ };
+
+ let ty = cx.resolver.borrow_mut()
+ .with_scope(*id,
+ |resolver| {
+ resolver.resolve_str_path_error(DUMMY_SP,
+ &path, false)
+ })?;
+ match ty.def {
+ Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
+ let item = cx.tcx.inherent_impls(did).iter()
+ .flat_map(|imp| cx.tcx.associated_items(*imp))
+ .find(|item| item.name == item_name);
+ if let Some(item) = item {
+ if item.kind == ty::AssociatedKind::Method && is_val {
+ Ok((ty.def, Some(format!("method.{}", item_name))))
+ } else {
+ Err(())
+ }
+ } else {
+ Err(())
+ }
+ }
+ Def::Trait(did) => {
+ let item = cx.tcx.associated_item_def_ids(did).iter()
+ .map(|item| cx.tcx.associated_item(*item))
+ .find(|item| item.name == item_name);
+ if let Some(item) = item {
+ let kind = match item.kind {
+ ty::AssociatedKind::Const if is_val => "associatedconstant",
+ ty::AssociatedKind::Type if !is_val => "associatedtype",
+ ty::AssociatedKind::Method if is_val => "tymethod",
+ _ => return Err(())
+ };
+
+ Ok((ty.def, Some(format!("{}.{}", kind, item_name))))
+ } else {
+ Err(())
+ }
+ }
+ _ => Err(())
+ }
+
} else {
- // FIXME(Manishearth) this branch doesn't seem to ever be hit, really
- cx.resolver.borrow_mut()
- .resolve_str_path_error(DUMMY_SP, &path_str, is_val)
+ Err(())
}
}
if UnstableFeatures::from_environment().is_nightly_build() {
let dox = attrs.collapsed_doc_value().unwrap_or_else(String::new);
for link in markdown_links(&dox, cx.render_type) {
- let def = {
+ let (def, fragment) = {
let mut kind = PathKind::Unknown;
let path_str = if let Some(prefix) =
["struct@", "enum@", "type@",
link.trim_left_matches(prefix)
} else if let Some(prefix) =
["const@", "static@",
- "value@", "function@", "mod@", "fn@", "module@"]
+ "value@", "function@", "mod@",
+ "fn@", "module@", "method@"]
.iter().find(|p| link.starts_with(**p)) {
kind = PathKind::Value;
link.trim_left_matches(prefix)
match kind {
PathKind::Value => {
- if let Ok(path) = resolve(cx, path_str, true) {
- path.def
+ if let Ok(def) = resolve(cx, path_str, true) {
+ def
} else {
// this could just be a normal link or a broken link
// we could potentially check if something is
}
}
PathKind::Type => {
- if let Ok(path) = resolve(cx, path_str, false) {
- path.def
+ if let Ok(def) = resolve(cx, path_str, false) {
+ def
} else {
// this could just be a normal link
continue;
PathKind::Unknown => {
// try everything!
if let Some(macro_def) = macro_resolve(cx, path_str) {
- if let Ok(type_path) = resolve(cx, path_str, false) {
+ if let Ok(type_def) = resolve(cx, path_str, false) {
let (type_kind, article, type_disambig)
- = type_ns_kind(type_path.def, path_str);
+ = type_ns_kind(type_def.0, path_str);
ambiguity_error(cx, &attrs, path_str,
article, type_kind, &type_disambig,
"a", "macro", &format!("macro@{}", path_str));
continue;
- } else if let Ok(value_path) = resolve(cx, path_str, true) {
+ } else if let Ok(value_def) = resolve(cx, path_str, true) {
let (value_kind, value_disambig)
- = value_ns_kind(value_path.def, path_str)
+ = value_ns_kind(value_def.0, path_str)
.expect("struct and mod cases should have been \
caught in previous branch");
ambiguity_error(cx, &attrs, path_str,
"a", value_kind, &value_disambig,
"a", "macro", &format!("macro@{}", path_str));
}
- macro_def
- } else if let Ok(type_path) = resolve(cx, path_str, false) {
+ (macro_def, None)
+ } else if let Ok(type_def) = resolve(cx, path_str, false) {
// It is imperative we search for not-a-value first
// Otherwise we will find struct ctors for when we are looking
// for structs, and the link won't work.
// if there is something in both namespaces
- if let Ok(value_path) = resolve(cx, path_str, true) {
- let kind = value_ns_kind(value_path.def, path_str);
+ if let Ok(value_def) = resolve(cx, path_str, true) {
+ let kind = value_ns_kind(value_def.0, path_str);
if let Some((value_kind, value_disambig)) = kind {
let (type_kind, article, type_disambig)
- = type_ns_kind(type_path.def, path_str);
+ = type_ns_kind(type_def.0, path_str);
ambiguity_error(cx, &attrs, path_str,
article, type_kind, &type_disambig,
"a", value_kind, &value_disambig);
continue;
}
}
- type_path.def
- } else if let Ok(value_path) = resolve(cx, path_str, true) {
- value_path.def
+ type_def
+ } else if let Ok(value_def) = resolve(cx, path_str, true) {
+ value_def
} else {
// this could just be a normal link
continue;
}
PathKind::Macro => {
if let Some(def) = macro_resolve(cx, path_str) {
- def
+ (def, None)
} else {
continue
}
let id = register_def(cx, def);
- attrs.links.push((link, id));
+ attrs.links.push((link, id, fragment));
}
cx.sess().abort_if_errors();
ty::TyClosure(..) | ty::TyGenerator(..) => Tuple(vec![]), // FIXME(pcwalton)
+ ty::TyGeneratorWitness(..) => panic!("TyGeneratorWitness"),
ty::TyInfer(..) => panic!("TyInfer"),
ty::TyError => panic!("TyError"),
}
{sidebar}
</nav>
- <button id="theme-picker">
- <img src="{root_path}brush.svg" width="18" alt="Pick another theme!">
+ <div class="theme-picker">
+ <button id="theme-picker" aria-label="Pick another theme!">
+ <img src="{root_path}brush.svg" width="18" alt="Pick another theme!">
+ </button>
<div id="theme-choices"></div>
- </button>
+ </div>
<script src="{root_path}theme.js"></script>
<nav class="sub">
<form class="search-form js-only">
}}
}};
[{}].forEach(function(item) {{
- var div = document.createElement('div');
- div.innerHTML = item;
- div.onclick = function(el) {{
+ var but = document.createElement('button');
+ but.innerHTML = item;
+ but.onclick = function(el) {{
switchTheme(currentTheme, mainTheme, item);
}};
- themes.appendChild(div);
+ themes.appendChild(but);
}});
"#, themes.iter()
.map(|s| format!("\"{}\"", s))
border: solid 1px;
border-radius: 3px;
box-shadow: inset 0 -1px 0;
+ cursor: default;
}
-#theme-picker {
+.theme-picker {
position: absolute;
left: 211px;
- top: 17px;
+ top: 19px;
+}
+
+#theme-picker {
padding: 4px;
+ width: 27px;
+ height: 29px;
border: 1px solid;
border-radius: 3px;
cursor: pointer;
#theme-choices {
display: none;
position: absolute;
- left: -1px;
- top: 30px;
+ left: 0;
+ top: 28px;
border: 1px solid;
border-radius: 3px;
z-index: 1;
+ cursor: pointer;
}
-#theme-choices > div {
- border-top: 1px solid;
+#theme-choices > button {
+ border: none;
+ width: 100%;
padding: 4px;
text-align: center;
+ background: rgba(0,0,0,0);
+}
+
+#theme-choices > button:not(:first-child) {
+ border-top: 1px solid;
}
@media (max-width: 700px) {
- #theme-picker {
+ .theme-picker {
left: 109px;
top: 7px;
z-index: 1;
#help dt {
border-color: #bfbfbf;
- background: #fff;
+ background: rgba(0,0,0,0);
color: black;
}
}
kbd {
- color: #444d56;
+ color: #000;
background-color: #fafbfc;
border-color: #d1d5da;
border-bottom-color: #c6cbd1;
background: #f0f0f0;
}
+#theme-picker:hover, #theme-picker:focus {
+ border-color: #ffb900;
+}
+
#theme-choices {
border-color: #e0e0e0;
background-color: #353535;
}
-#theme-choices > div {
- border-top: #e0e0e0;
+#theme-choices > button:not(:first-child) {
+ border-top-color: #e0e0e0;
}
-#theme-choices > div:hover {
+#theme-choices > button:hover, #theme-choices > button:focus {
background-color: #444;
}
.stab.deprecated { background: #F3DFFF; border-color: #7F0087; }
.stab.portability { background: #C4ECFF; border-color: #7BA5DB; }
+.module-item .stab {
+ color: #000;
+}
+
#help > div {
background: #e9e9e9;
border-color: #bfbfbf;
}
kbd {
- color: #444d56;
+ color: #000;
background-color: #fafbfc;
border-color: #d1d5da;
border-bottom-color: #c6cbd1;
#theme-picker {
border-color: #e0e0e0;
+ background-color: #fff;
+}
+
+#theme-picker:hover, #theme-picker:focus {
+ border-color: #717171;
}
#theme-choices {
background-color: #fff;
}
-#theme-choices > div {
- border-top: #e0e0e0;
+#theme-choices > button:not(:first-child) {
+ border-top-color: #e0e0e0;
}
-#theme-choices > div:hover {
+#theme-choices > button:hover, #theme-choices > button:focus {
background-color: #eee;
}
#[unstable(feature = "collection_placement",
reason = "placement protocol is subject to change",
issue = "30172")]
-impl<'a, K, V> Place<V> for EntryPlace<'a, K, V> {
+unsafe impl<'a, K, V> Place<V> for EntryPlace<'a, K, V> {
fn pointer(&mut self) -> *mut V {
self.bucket.read_mut().1
}
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn to_bits(self) -> u32 {
- unsafe { ::mem::transmute(self) }
+ num::Float::to_bits(self)
}
/// Raw transmutation from `u32`.
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn from_bits(v: u32) -> Self {
- // It turns out the safety issues with sNaN were overblown! Hooray!
- unsafe { ::mem::transmute(v) }
+ num::Float::from_bits(v)
}
}
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn to_bits(self) -> u64 {
- unsafe { ::mem::transmute(self) }
+ num::Float::to_bits(self)
}
/// Raw transmutation from `u64`.
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn from_bits(v: u64) -> Self {
- // It turns out the safety issues with sNaN were overblown! Hooray!
- unsafe { ::mem::transmute(v) }
+ num::Float::from_bits(v)
}
}
#![feature(ptr_internals)]
#![feature(rand)]
#![feature(raw)]
-#![feature(repr_align)]
#![feature(rustc_attrs)]
#![feature(sip_hash_13)]
#![feature(slice_bytes)]
#![feature(doc_spotlight)]
#![cfg_attr(test, feature(update_panic_count))]
#![cfg_attr(windows, feature(used))]
+#![cfg_attr(stage0, feature(repr_align))]
#![default_lib_allocator]
/// ```
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
- pub fn from_secs(secs: u64) -> Duration {
+ pub const fn from_secs(secs: u64) -> Duration {
Duration { secs: secs, nanos: 0 }
}
/// ```
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
- pub fn from_millis(millis: u64) -> Duration {
- let secs = millis / MILLIS_PER_SEC;
- let nanos = ((millis % MILLIS_PER_SEC) as u32) * NANOS_PER_MILLI;
- Duration { secs: secs, nanos: nanos }
+ pub const fn from_millis(millis: u64) -> Duration {
+ Duration {
+ secs: millis / MILLIS_PER_SEC,
+ nanos: ((millis % MILLIS_PER_SEC) as u32) * NANOS_PER_MILLI,
+ }
}
/// Creates a new `Duration` from the specified number of microseconds.
/// ```
#[unstable(feature = "duration_from_micros", issue = "44400")]
#[inline]
- pub fn from_micros(micros: u64) -> Duration {
- let secs = micros / MICROS_PER_SEC;
- let nanos = ((micros % MICROS_PER_SEC) as u32) * NANOS_PER_MICRO;
- Duration { secs: secs, nanos: nanos }
+ pub const fn from_micros(micros: u64) -> Duration {
+ Duration {
+ secs: micros / MICROS_PER_SEC,
+ nanos: ((micros % MICROS_PER_SEC) as u32) * NANOS_PER_MICRO,
+ }
}
/// Creates a new `Duration` from the specified number of nanoseconds.
/// ```
#[unstable(feature = "duration_extras", issue = "46507")]
#[inline]
- pub fn from_nanos(nanos: u64) -> Duration {
- let secs = nanos / (NANOS_PER_SEC as u64);
- let nanos = (nanos % (NANOS_PER_SEC as u64)) as u32;
- Duration { secs: secs, nanos: nanos }
+ pub const fn from_nanos(nanos: u64) -> Duration {
+ Duration {
+ secs: nanos / (NANOS_PER_SEC as u64),
+ nanos: (nanos % (NANOS_PER_SEC as u64)) as u32,
+ }
}
/// Returns the number of _whole_ seconds contained by this `Duration`.
use std::rc::Rc;
use std::u32;
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
+pub struct Label {
+ pub ident: Ident,
+ pub span: Span,
+}
+
+impl fmt::Debug for Label {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "label({:?})", self.ident)
+ }
+}
+
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
pub id: NodeId,
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
- While(P<Expr>, P<Block>, Option<SpannedIdent>),
+ While(P<Expr>, P<Block>, Option<Label>),
/// A while-let loop, with an optional label
///
/// `'label: while let pat = expr { block }`
///
/// This is desugared to a combination of `loop` and `match` expressions.
- WhileLet(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>),
+ WhileLet(P<Pat>, P<Expr>, P<Block>, Option<Label>),
/// A for loop, with an optional label
///
/// `'label: for pat in expr { block }`
///
/// This is desugared to a combination of `loop` and `match` expressions.
- ForLoop(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>),
+ ForLoop(P<Pat>, P<Expr>, P<Block>, Option<Label>),
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
- Loop(P<Block>, Option<SpannedIdent>),
+ Loop(P<Block>, Option<Label>),
/// A `match` block.
Match(P<Expr>, Vec<Arm>),
/// A closure (for example, `move |a, b, c| a + b + c`)
///
/// The final span is the span of the argument block `|...|`
- Closure(CaptureBy, P<FnDecl>, P<Expr>, Span),
+ Closure(CaptureBy, Movability, P<FnDecl>, P<Expr>, Span),
/// A block (`{ ... }`)
Block(P<Block>),
/// A catch block (`catch { ... }`)
/// A referencing operation (`&a` or `&mut a`)
AddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break, and an optional expression
- Break(Option<SpannedIdent>, Option<P<Expr>>),
+ Break(Option<Label>, Option<P<Expr>>),
/// A `continue`, with an optional label
- Continue(Option<SpannedIdent>),
+ Continue(Option<Label>),
/// A `return`, with an optional value to be returned
Ret(Option<P<Expr>>),
Ref,
}
+/// The movability of a generator / closure literal
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+pub enum Movability {
+ Static,
+ Movable,
+}
+
pub type Mac = Spanned<Mac_>;
/// Represents a macro invocation. The Path indicates which macro
/// substitute; we never hit resolve/type-checking so the dummy
/// value doesn't have to match anything)
pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
- panic!(self.parse_sess.span_diagnostic.span_fatal(sp, msg));
+ self.parse_sess.span_diagnostic.span_fatal(sp, msg).raise();
}
/// Emit `msg` attached to `sp`, without immediately stopping
fn_decl_span: Span) // span of the `|...|` part
-> P<ast::Expr> {
self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref,
+ ast::Movability::Movable,
fn_decl,
body,
fn_decl_span))
// part of the lambda, but it probably (maybe?) corresponds to
// the entire lambda body. Probably we should extend the API
// here, but that's not entirely clear.
- self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, fn_decl, body, span))
+ self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref,
+ ast::Movability::Movable,
+ fn_decl,
+ body,
+ span))
}
fn lambda0(&self, span: Span, body: P<ast::Expr>) -> P<ast::Expr> {
suggested_limit));
err.emit();
self.cx.trace_macros_diag();
- panic!(FatalError);
+ FatalError.raise();
}
Some(result)
while self.p.token != token::Eof {
match panictry!(self.p.parse_item()) {
Some(item) => ret.push(item),
- None => panic!(self.p.diagnostic().span_fatal(self.p.span,
+ None => self.p.diagnostic().span_fatal(self.p.span,
&format!("expected item, found `{}`",
- self.p.this_token_to_string())))
+ self.p.this_token_to_string()))
+ .raise()
}
}
Some(ret)
Some(i) => token::NtItem(i),
None => {
p.fatal("expected an item keyword").emit();
- panic!(FatalError);
+ FatalError.raise();
}
},
"block" => token::NtBlock(panictry!(p.parse_block())),
Some(s) => token::NtStmt(s),
None => {
p.fatal("expected a statement").emit();
- panic!(FatalError);
+ FatalError.raise();
}
},
"pat" => token::NtPat(panictry!(p.parse_pat())),
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}",
&token_str[..])).emit();
- panic!(FatalError)
+ FatalError.raise()
}
},
"path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))),
Success(m) => m,
Failure(sp, tok) => {
let s = parse_failure_msg(tok);
- panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s));
+ sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
}
Error(sp, s) => {
- panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s));
+ sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
}
};
// Allows the `catch {...}` expression
(active, catch_expr, "1.17.0", Some(31436)),
- // Allows `repr(align(u16))` struct attribute (RFC 1358)
- (active, repr_align, "1.17.0", Some(33626)),
-
// Used to preserve symbols (see llvm.used)
(active, used, "1.18.0", Some(40289)),
// Allows the sysV64 ABI to be specified on all platforms
// instead of just the platforms on which it is the C ABI
(accepted, abi_sysv64, "1.24.0", Some(36167)),
+ // Allows `repr(align(16))` struct attribute (RFC 1358)
+ (accepted, repr_align, "1.24.0", Some(33626)),
);
// If you change this, please modify src/doc/unstable-book as well. You must
}
}
+ // Allow non-string literals (e.g. the integer in `#[repr(align(16))]`) without
+ // tripping the `attr_literals` feature gate, since `repr(align)` is now stable.
+ let mut is_repr_align = false;
+ if attr.path == "repr" {
+ if let Some(content) = attr.meta_item_list() {
+ is_repr_align = content.iter().any(|c| c.check_name("align"));
+ }
+ }
+
if self.context.features.proc_macro && attr::is_known(attr) {
return
}
- let meta = panictry!(attr.parse_meta(self.context.parse_sess));
- if contains_novel_literal(&meta) {
- gate_feature_post!(&self, attr_literals, attr.span,
- "non-string literals in attributes, or string \
- literals in top-level positions, are experimental");
+ if !is_repr_align {
+ let meta = panictry!(attr.parse_meta(self.context.parse_sess));
+ if contains_novel_literal(&meta) {
+ gate_feature_post!(&self, attr_literals, attr.span,
+ "non-string literals in attributes, or string \
+ literals in top-level positions, are experimental");
+ }
}
}
gate_feature_post!(&self, repr_simd, attr.span,
"SIMD types are experimental and possibly buggy");
}
- if item.check_name("align") {
- gate_feature_post!(&self, repr_align, attr.span,
- "the struct `#[repr(align(u16))]` attribute \
- is experimental");
- }
if item.check_name("transparent") {
gate_feature_post!(&self, repr_transparent, attr.span,
"the `#[repr(transparent)]` attribute \
.span_note(ca_span, "`#![feature(custom_attribute)]` declared here")
.emit();
- panic!(FatalError);
+ FatalError.raise();
}
if let (Some(span), None) = (self.copy_closures, self.clone_closures) {
.span_note(span, "`#![feature(copy_closures)]` declared here")
.emit();
- panic!(FatalError);
+ FatalError.raise();
}
}
}
noop_fold_macro_def(def, self)
}
+ fn fold_label(&mut self, label: Label) -> Label {
+ noop_fold_label(label, self)
+ }
+
fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime {
noop_fold_lifetime(l, self)
}
params.move_map(|p| fld.fold_generic_param(p))
}
+pub fn noop_fold_label<T: Folder>(label: Label, fld: &mut T) -> Label {
+ Label {
+ ident: fld.fold_ident(label.ident),
+ span: fld.new_span(label.span),
+ }
+}
+
pub fn noop_fold_lifetime<T: Folder>(l: Lifetime, fld: &mut T) -> Lifetime {
Lifetime {
id: fld.new_id(l.id),
folder.fold_block(tr),
fl.map(|x| folder.fold_expr(x)))
}
- ExprKind::While(cond, body, opt_ident) => {
+ ExprKind::While(cond, body, opt_label) => {
ExprKind::While(folder.fold_expr(cond),
folder.fold_block(body),
- opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))))
+ opt_label.map(|label| folder.fold_label(label)))
}
- ExprKind::WhileLet(pat, expr, body, opt_ident) => {
+ ExprKind::WhileLet(pat, expr, body, opt_label) => {
ExprKind::WhileLet(folder.fold_pat(pat),
folder.fold_expr(expr),
folder.fold_block(body),
- opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))))
+ opt_label.map(|label| folder.fold_label(label)))
}
- ExprKind::ForLoop(pat, iter, body, opt_ident) => {
+ ExprKind::ForLoop(pat, iter, body, opt_label) => {
ExprKind::ForLoop(folder.fold_pat(pat),
folder.fold_expr(iter),
folder.fold_block(body),
- opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))))
+ opt_label.map(|label| folder.fold_label(label)))
}
- ExprKind::Loop(body, opt_ident) => {
+ ExprKind::Loop(body, opt_label) => {
ExprKind::Loop(folder.fold_block(body),
- opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))))
+ opt_label.map(|label| folder.fold_label(label)))
}
ExprKind::Match(expr, arms) => {
ExprKind::Match(folder.fold_expr(expr),
arms.move_map(|x| folder.fold_arm(x)))
}
- ExprKind::Closure(capture_clause, decl, body, span) => {
+ ExprKind::Closure(capture_clause, movability, decl, body, span) => {
ExprKind::Closure(capture_clause,
+ movability,
folder.fold_fn_decl(decl),
folder.fold_expr(body),
folder.new_span(span))
});
ExprKind::Path(qself, folder.fold_path(path))
}
- ExprKind::Break(opt_ident, opt_expr) => {
- ExprKind::Break(opt_ident.map(|label| respan(folder.new_span(label.span),
- folder.fold_ident(label.node))),
+ ExprKind::Break(opt_label, opt_expr) => {
+ ExprKind::Break(opt_label.map(|label| folder.fold_label(label)),
opt_expr.map(|e| folder.fold_expr(e)))
}
- ExprKind::Continue(opt_ident) => ExprKind::Continue(opt_ident.map(|label|
- respan(folder.new_span(label.span),
- folder.fold_ident(label.node)))
- ),
+ ExprKind::Continue(opt_label) => {
+ ExprKind::Continue(opt_label.map(|label| folder.fold_label(label)))
+ }
ExprKind::Ret(e) => ExprKind::Ret(e.map(|x| folder.fold_expr(x))),
ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm.map(|asm| {
InlineAsm {
Ok(e) => e,
Err(mut e) => {
e.emit();
- panic!(FatalError);
+ FatalError.raise()
}
}
})
while level > 0 {
debug!("=== block comment level {}", level);
if rdr.is_eof() {
- panic!(rdr.fatal("unterminated block comment"));
+ rdr.fatal("unterminated block comment").raise();
}
if rdr.ch_is('\n') {
trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col);
Ok(tok) => tok,
Err(_) => {
self.emit_fatal_errors();
- panic!(FatalError);
+ FatalError.raise();
}
}
}
let mut sr = StringReader::new_raw(sess, filemap);
if sr.advance_token().is_err() {
sr.emit_fatal_errors();
- panic!(FatalError);
+ FatalError.raise();
}
sr
}
if sr.advance_token().is_err() {
sr.emit_fatal_errors();
- panic!(FatalError);
+ FatalError.raise();
}
sr
}
"unterminated block comment"
};
let last_bpos = self.pos;
- panic!(self.fatal_span_(start_bpos, last_bpos, msg));
+ self.fatal_span_(start_bpos, last_bpos, msg).raise();
}
let n = self.ch.unwrap();
match n {
for _ in 0..n_digits {
if self.is_eof() {
let last_bpos = self.pos;
- panic!(self.fatal_span_(start_bpos,
- last_bpos,
- "unterminated numeric character escape"));
+ self.fatal_span_(start_bpos,
+ last_bpos,
+ "unterminated numeric character escape").raise();
}
if self.ch_is(delim) {
let last_bpos = self.pos;
}
},
None => {
- panic!(self.fatal_span_(start_bpos,
- self.pos,
- "unterminated unicode escape (found EOF)"));
+ self.fatal_span_(start_bpos,
+ self.pos,
+ "unterminated unicode escape (found EOF)").raise();
}
}
self.bump();
// lifetimes shouldn't end with a single quote
// if we find one, then this is an invalid character literal
if self.ch_is('\'') {
- panic!(self.fatal_span_verbose(
- start_with_quote, self.next_pos,
- String::from("character literal may only contain one codepoint")));
+ self.fatal_span_verbose(start_with_quote, self.next_pos,
+ String::from("character literal may only contain one codepoint"))
+ .raise();
}
break;
}
}
- panic!(self.fatal_span_verbose(
- start_with_quote, pos,
- String::from("character literal may only contain one codepoint")));
+ self.fatal_span_verbose(start_with_quote, pos,
+ String::from("character literal may only contain one codepoint")).raise();
}
let id = if valid {
while !self.ch_is('"') {
if self.is_eof() {
let last_bpos = self.pos;
- panic!(self.fatal_span_(start_bpos,
- last_bpos,
- "unterminated double quote string"));
+ self.fatal_span_(start_bpos,
+ last_bpos,
+ "unterminated double quote string").raise();
}
let ch_start = self.pos;
if self.is_eof() {
let last_bpos = self.pos;
- panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string"));
+ self.fatal_span_(start_bpos, last_bpos, "unterminated raw string").raise();
} else if !self.ch_is('"') {
let last_bpos = self.pos;
let curr_char = self.ch.unwrap();
- panic!(self.fatal_span_char(start_bpos,
- last_bpos,
- "found invalid character; only `#` is allowed \
- in raw string delimitation",
- curr_char));
+ self.fatal_span_char(start_bpos,
+ last_bpos,
+ "found invalid character; only `#` is allowed \
+ in raw string delimitation",
+ curr_char).raise();
}
self.bump();
let content_start_bpos = self.pos;
'outer: loop {
if self.is_eof() {
let last_bpos = self.pos;
- panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string"));
+ self.fatal_span_(start_bpos, last_bpos, "unterminated raw string").raise();
}
// if self.ch_is('"') {
// content_end_bpos = self.pos;
// character before position `start` are an
// ascii single quote and ascii 'b'.
let pos = self.pos;
- panic!(self.fatal_span_verbose(start - BytePos(2),
- pos,
- "unterminated byte constant".to_string()));
+ self.fatal_span_verbose(start - BytePos(2),
+ pos,
+ "unterminated byte constant".to_string()).raise();
}
let id = if valid {
while !self.ch_is('"') {
if self.is_eof() {
let pos = self.pos;
- panic!(self.fatal_span_(start, pos, "unterminated double quote byte string"));
+ self.fatal_span_(start, pos, "unterminated double quote byte string").raise();
}
let ch_start = self.pos;
if self.is_eof() {
let pos = self.pos;
- panic!(self.fatal_span_(start_bpos, pos, "unterminated raw string"));
+ self.fatal_span_(start_bpos, pos, "unterminated raw string").raise();
} else if !self.ch_is('"') {
let pos = self.pos;
let ch = self.ch.unwrap();
- panic!(self.fatal_span_char(start_bpos,
+ self.fatal_span_char(start_bpos,
pos,
"found invalid character; only `#` is allowed in raw \
string delimitation",
- ch));
+ ch).raise();
}
self.bump();
let content_start_bpos = self.pos;
match self.ch {
None => {
let pos = self.pos;
- panic!(self.fatal_span_(start_bpos, pos, "unterminated raw string"))
+ self.fatal_span_(start_bpos, pos, "unterminated raw string").raise()
}
Some('"') => {
content_end_bpos = self.pos;
Err(e) => {
let msg = format!("couldn't read {:?}: {}", path.display(), e);
match spanopt {
- Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, &msg)),
- None => panic!(sess.span_diagnostic.fatal(&msg))
+ Some(sp) => sess.span_diagnostic.span_fatal(sp, &msg).raise(),
+ None => sess.span_diagnostic.fatal(&msg).raise()
}
}
}
use ast::Unsafety;
use ast::{Mod, Arg, Arm, Attribute, BindingMode, TraitItemKind};
use ast::Block;
-use ast::{BlockCheckMode, CaptureBy};
+use ast::{BlockCheckMode, CaptureBy, Movability};
use ast::{Constness, Crate};
use ast::Defaultness;
use ast::EnumDef;
use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use ast::GenericParam;
use ast::{Ident, ImplItem, IsAuto, Item, ItemKind};
-use ast::{Lifetime, LifetimeDef, Lit, LitKind, UintTy};
+use ast::{Label, Lifetime, LifetimeDef, Lit, LitKind, UintTy};
use ast::Local;
use ast::MacStmtStyle;
use ast::Mac_;
self.check_keyword(keywords::Extern)
}
- fn get_label(&mut self) -> ast::Ident {
- match self.token {
+ fn eat_label(&mut self) -> Option<Label> {
+ let ident = match self.token {
token::Lifetime(ref ident) => *ident,
token::Interpolated(ref nt) => match nt.0 {
token::NtLifetime(lifetime) => lifetime.ident,
- _ => self.bug("not a lifetime"),
+ _ => return None,
},
- _ => self.bug("not a lifetime"),
- }
+ _ => return None,
+ };
+ self.bump();
+ Some(Label { ident, span: self.prev_span })
}
/// parse a TyKind::BareFn type:
return self.parse_block_expr(lo, BlockCheckMode::Default, attrs);
}
token::BinOp(token::Or) | token::OrOr => {
- let lo = self.span;
- return self.parse_lambda_expr(lo, CaptureBy::Ref, attrs);
+ return self.parse_lambda_expr(attrs);
}
token::OpenDelim(token::Bracket) => {
self.bump();
hi = path.span;
return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
}
- if self.eat_keyword(keywords::Move) {
- let lo = self.prev_span;
- return self.parse_lambda_expr(lo, CaptureBy::Value, attrs);
+ if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
+ return self.parse_lambda_expr(attrs);
}
if self.eat_keyword(keywords::If) {
return self.parse_if_expr(attrs);
let lo = self.prev_span;
return self.parse_while_expr(None, lo, attrs);
}
- if self.token.is_lifetime() {
- let label = Spanned { node: self.get_label(),
- span: self.span };
- let lo = self.span;
- self.bump();
+ if let Some(label) = self.eat_label() {
+ let lo = label.span;
self.expect(&token::Colon)?;
if self.eat_keyword(keywords::While) {
return self.parse_while_expr(Some(label), lo, attrs)
return self.parse_loop_expr(None, lo, attrs);
}
if self.eat_keyword(keywords::Continue) {
- let ex = if self.token.is_lifetime() {
- let ex = ExprKind::Continue(Some(Spanned{
- node: self.get_label(),
- span: self.span
- }));
- self.bump();
- ex
- } else {
- ExprKind::Continue(None)
- };
+ let label = self.eat_label();
+ let ex = ExprKind::Continue(label);
let hi = self.prev_span;
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
ex = ExprKind::Ret(None);
}
} else if self.eat_keyword(keywords::Break) {
- let lt = if self.token.is_lifetime() {
- let spanned_lt = Spanned {
- node: self.get_label(),
- span: self.span
- };
- self.bump();
- Some(spanned_lt)
- } else {
- None
- };
+ let label = self.eat_label();
let e = if self.token.can_begin_expr()
&& !(self.token == token::OpenDelim(token::Brace)
&& self.restrictions.contains(
} else {
None
};
- ex = ExprKind::Break(lt, e);
+ ex = ExprKind::Break(label, e);
hi = self.prev_span;
} else if self.eat_keyword(keywords::Yield) {
if self.token.can_begin_expr() {
// `move |args| expr`
pub fn parse_lambda_expr(&mut self,
- lo: Span,
- capture_clause: CaptureBy,
attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
+ let lo = self.span;
+ let movability = if self.eat_keyword(keywords::Static) {
+ Movability::Static
+ } else {
+ Movability::Movable
+ };
+ let capture_clause = if self.eat_keyword(keywords::Move) {
+ CaptureBy::Value
+ } else {
+ CaptureBy::Ref
+ };
let decl = self.parse_fn_block_decl()?;
let decl_hi = self.prev_span;
let body = match decl.output {
Ok(self.mk_expr(
lo.to(body.span),
- ExprKind::Closure(capture_clause, decl, body, lo.to(decl_hi)),
+ ExprKind::Closure(capture_clause, movability, decl, body, lo.to(decl_hi)),
attrs))
}
}
/// Parse a 'for' .. 'in' expression ('for' token already eaten)
- pub fn parse_for_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+ pub fn parse_for_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
attrs.extend(iattrs);
let hi = self.prev_span;
- Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_ident), attrs))
+ Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
}
/// Parse a 'while' or 'while let' expression ('while' token already eaten)
- pub fn parse_while_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+ pub fn parse_while_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
if self.token.is_keyword(keywords::Let) {
- return self.parse_while_let_expr(opt_ident, span_lo, attrs);
+ return self.parse_while_let_expr(opt_label, span_lo, attrs);
}
let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
- return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_ident), attrs));
+ return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
}
/// Parse a 'while let' expression ('while' token already eaten)
- pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+ pub fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
self.expect_keyword(keywords::Let)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
- return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs));
+ return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_label), attrs));
}
// parse `loop {...}`, `loop` token already eaten
- pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+ pub fn parse_loop_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
- Ok(self.mk_expr(span, ExprKind::Loop(body, opt_ident), attrs))
+ Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
}
/// Parse a `do catch {...}` expression (`do catch` token already eaten)
}
}
+ fn is_static_global(&mut self) -> bool {
+ if self.check_keyword(keywords::Static) {
+ // Check if this could be a closure
+ !self.look_ahead(1, |token| {
+ if token.is_keyword(keywords::Move) {
+ return true;
+ }
+ match *token {
+ token::BinOp(token::Or) | token::OrOr => true,
+ _ => false,
+ }
+ })
+ } else {
+ false
+ }
+ }
+
/// Parse one of the items allowed by the flags.
/// NB: this function no longer parses the items inside an
/// extern crate.
self.unexpected()?;
}
- if self.eat_keyword(keywords::Static) {
+ if self.is_static_global() {
+ self.bump();
// STATIC ITEM
let m = if self.eat_keyword(keywords::Mut) {
Mutability::Mutable
ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
}
- ast::ExprKind::While(ref test, ref blk, opt_ident) => {
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ ast::ExprKind::While(ref test, ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("while")?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
- ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("while let")?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
- ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("for")?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
- ast::ExprKind::Loop(ref blk, opt_ident) => {
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ ast::ExprKind::Loop(ref blk, opt_label) => {
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("loop")?;
}
self.bclose_(expr.span, INDENT_UNIT)?;
}
- ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => {
+ ast::ExprKind::Closure(capture_clause, movability, ref decl, ref body, _) => {
+ self.print_movability(movability)?;
self.print_capture_clause(capture_clause)?;
self.print_fn_block_args(decl)?;
ast::ExprKind::Path(Some(ref qself), ref path) => {
self.print_qpath(path, qself, true)?
}
- ast::ExprKind::Break(opt_ident, ref opt_expr) => {
+ ast::ExprKind::Break(opt_label, ref opt_expr) => {
self.s.word("break")?;
self.s.space()?;
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.s.space()?;
}
if let Some(ref expr) = *opt_expr {
self.s.space()?;
}
}
- ast::ExprKind::Continue(opt_ident) => {
+ ast::ExprKind::Continue(opt_label) => {
self.s.word("continue")?;
self.s.space()?;
- if let Some(ident) = opt_ident {
- self.print_ident(ident.node)?;
+ if let Some(label) = opt_label {
+ self.print_ident(label.ident)?;
self.s.space()?
}
}
}
}
+ pub fn print_movability(&mut self, movability: ast::Movability)
+ -> io::Result<()> {
+ match movability {
+ ast::Movability::Static => self.word_space("static"),
+ ast::Movability::Movable => Ok(()),
+ }
+ }
+
pub fn print_capture_clause(&mut self, capture_clause: ast::CaptureBy)
-> io::Result<()> {
match capture_clause {
match i.node {
ast::ItemKind::Fn(_, ast::Unsafety::Unsafe, _, _, _, _) => {
let diag = self.cx.span_diagnostic;
- panic!(diag.span_fatal(i.span, "unsafe functions cannot be used for tests"));
+ diag.span_fatal(i.span, "unsafe functions cannot be used for tests").raise();
}
_ => {
debug!("this is a test function");
fn visit_variant(&mut self, v: &'ast Variant, g: &'ast Generics, item_id: NodeId) {
walk_variant(self, v, g, item_id)
}
+ fn visit_label(&mut self, label: &'ast Label) {
+ walk_label(self, label)
+ }
fn visit_lifetime(&mut self, lifetime: &'ast Lifetime) {
walk_lifetime(self, lifetime)
}
}
}
-pub fn walk_opt_name<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, opt_name: Option<Name>) {
- if let Some(name) = opt_name {
- visitor.visit_name(span, name);
- }
-}
-
-pub fn walk_opt_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, opt_ident: Option<Ident>) {
- if let Some(ident) = opt_ident {
- visitor.visit_ident(span, ident);
- }
-}
-
-pub fn walk_opt_sp_ident<'a, V: Visitor<'a>>(visitor: &mut V,
- opt_sp_ident: &Option<Spanned<Ident>>) {
- if let Some(ref sp_ident) = *opt_sp_ident {
- visitor.visit_ident(sp_ident.span, sp_ident.node);
- }
-}
-
pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, ident: Ident) {
visitor.visit_name(span, ident.name);
}
walk_list!(visitor, visit_expr, &local.init);
}
+pub fn walk_label<'a, V: Visitor<'a>>(visitor: &mut V, label: &'a Label) {
+ visitor.visit_ident(label.span, label.ident);
+}
+
pub fn walk_lifetime<'a, V: Visitor<'a>>(visitor: &mut V, lifetime: &'a Lifetime) {
visitor.visit_ident(lifetime.span, lifetime.ident);
}
visitor.visit_ident(item.span, item.ident);
match item.node {
ItemKind::ExternCrate(opt_name) => {
- walk_opt_name(visitor, item.span, opt_name)
+ if let Some(name) = opt_name {
+ visitor.visit_name(item.span, name);
+ }
}
ItemKind::Use(ref use_tree) => {
visitor.visit_use_tree(use_tree, item.id, false)
pub fn walk_struct_field<'a, V: Visitor<'a>>(visitor: &mut V, struct_field: &'a StructField) {
visitor.visit_vis(&struct_field.vis);
- walk_opt_ident(visitor, struct_field.span, struct_field.ident);
+ if let Some(ident) = struct_field.ident {
+ visitor.visit_ident(struct_field.span, ident);
+ }
visitor.visit_ty(&struct_field.ty);
walk_list!(visitor, visit_attribute, &struct_field.attrs);
}
visitor.visit_block(if_block);
walk_list!(visitor, visit_expr, optional_else);
}
- ExprKind::While(ref subexpression, ref block, ref opt_sp_ident) => {
+ ExprKind::While(ref subexpression, ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
- walk_opt_sp_ident(visitor, opt_sp_ident);
}
ExprKind::IfLet(ref pattern, ref subexpression, ref if_block, ref optional_else) => {
visitor.visit_pat(pattern);
visitor.visit_block(if_block);
walk_list!(visitor, visit_expr, optional_else);
}
- ExprKind::WhileLet(ref pattern, ref subexpression, ref block, ref opt_sp_ident) => {
+ ExprKind::WhileLet(ref pattern, ref subexpression, ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_pat(pattern);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
- walk_opt_sp_ident(visitor, opt_sp_ident);
}
- ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_sp_ident) => {
+ ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_pat(pattern);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
- walk_opt_sp_ident(visitor, opt_sp_ident);
}
- ExprKind::Loop(ref block, ref opt_sp_ident) => {
+ ExprKind::Loop(ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
visitor.visit_block(block);
- walk_opt_sp_ident(visitor, opt_sp_ident);
}
ExprKind::Match(ref subexpression, ref arms) => {
visitor.visit_expr(subexpression);
walk_list!(visitor, visit_arm, arms);
}
- ExprKind::Closure(_, ref function_declaration, ref body, _decl_span) => {
+ ExprKind::Closure(_, _, ref function_declaration, ref body, _decl_span) => {
visitor.visit_fn(FnKind::Closure(body),
function_declaration,
expression.span,
}
visitor.visit_path(path, expression.id)
}
- ExprKind::Break(ref opt_sp_ident, ref opt_expr) => {
- walk_opt_sp_ident(visitor, opt_sp_ident);
+ ExprKind::Break(ref opt_label, ref opt_expr) => {
+ walk_list!(visitor, visit_label, opt_label);
walk_list!(visitor, visit_expr, opt_expr);
}
- ExprKind::Continue(ref opt_sp_ident) => {
- walk_opt_sp_ident(visitor, opt_sp_ident);
+ ExprKind::Continue(ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
}
ExprKind::Ret(ref optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
}
err.emit();
- panic!(FatalError);
+ FatalError.raise();
}
};
// fail if there have been errors emitted
Ok(_) if ecx.parse_sess.span_diagnostic.err_count() > error_count_before => {
ecx.struct_span_fatal(span, msg).emit();
- panic!(FatalError);
+ FatalError.raise();
}
Ok(new_items) => new_items.into_iter().map(Annotatable::Item).collect(),
Err(_) => {
// FIXME: handle this better
ecx.struct_span_fatal(span, msg).emit();
- panic!(FatalError);
+ FatalError.raise();
}
}
})
}
err.emit();
- panic!(FatalError);
+ FatalError.raise();
}
}
}
}
err.emit();
- panic!(FatalError);
+ FatalError.raise();
}
}
}
enum class LLVMRustArchiveKind {
Other,
GNU,
- MIPS64,
BSD,
COFF,
};
switch (Kind) {
case LLVMRustArchiveKind::GNU:
return Archive::K_GNU;
- case LLVMRustArchiveKind::MIPS64:
- return Archive::K_MIPS64;
case LLVMRustArchiveKind::BSD:
return Archive::K_BSD;
case LLVMRustArchiveKind::COFF:
Members.push_back(std::move(*MOrErr));
}
}
- auto Pair = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false);
- if (!Pair.second)
+ auto Result = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false);
+#if LLVM_VERSION_GE(6, 0)
+ if (!Result)
return LLVMRustResult::Success;
- LLVMRustSetLastError(Pair.second.message().c_str());
+ LLVMRustSetLastError(toString(std::move(Result)).c_str());
+#else
+ if (!Result.second)
+ return LLVMRustResult::Success;
+ LLVMRustSetLastError(Result.second.message().c_str());
+#endif
+
return LLVMRustResult::Failure;
}
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/Host.h"
#include "llvm/Target/TargetMachine.h"
-#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Transforms/IPO/PassManagerBuilder.h"
+#if LLVM_VERSION_GE(6, 0)
+#include "llvm/CodeGen/TargetSubtargetInfo.h"
+#include "llvm/IR/IntrinsicInst.h"
+#else
+#include "llvm/Target/TargetSubtargetInfo.h"
+#endif
+
#if LLVM_VERSION_GE(4, 0)
#include "llvm/Transforms/IPO/AlwaysInliner.h"
#include "llvm/Transforms/IPO/FunctionImport.h"
enum class LLVMRustCodeModel {
Other,
- Default,
- JITDefault,
Small,
Kernel,
Medium,
Large,
+ None,
};
static CodeModel::Model fromRust(LLVMRustCodeModel Model) {
switch (Model) {
- case LLVMRustCodeModel::Default:
- return CodeModel::Default;
- case LLVMRustCodeModel::JITDefault:
- return CodeModel::JITDefault;
case LLVMRustCodeModel::Small:
return CodeModel::Small;
case LLVMRustCodeModel::Kernel:
bool TrapUnreachable,
bool Singlethread) {
- auto CM = fromRust(RustCM);
auto OptLevel = fromRust(RustOptLevel);
auto RM = fromRust(RustReloc);
Options.ThreadModel = ThreadModel::Single;
}
+#if LLVM_VERSION_GE(6, 0)
+ Optional<CodeModel::Model> CM;
+#else
+ CodeModel::Model CM = CodeModel::Model::Default;
+#endif
+ if (RustCM != LLVMRustCodeModel::None)
+ CM = fromRust(RustCM);
TargetMachine *TM = TheTarget->createTargetMachine(
Trip.getTriple(), RealCPU, Feature, Options, RM, CM, OptLevel);
return wrap(TM);
// enable fpmath flag UnsafeAlgebra
extern "C" void LLVMRustSetHasUnsafeAlgebra(LLVMValueRef V) {
if (auto I = dyn_cast<Instruction>(unwrap<Value>(V))) {
+#if LLVM_VERSION_GE(6, 0)
+ I->setFast(true);
+#else
I->setHasUnsafeAlgebra(true);
+#endif
}
}
FlagStaticMember = (1 << 12),
FlagLValueReference = (1 << 13),
FlagRValueReference = (1 << 14),
- FlagMainSubprogram = (1 << 21),
+ FlagExternalTypeRef = (1 << 15),
+ FlagIntroducedVirtual = (1 << 18),
+ FlagBitField = (1 << 19),
+ FlagNoReturn = (1 << 20),
+ FlagMainSubprogram = (1 << 21),
// Do not add values that are not supported by the minimum LLVM
- // version we support!
+ // version we support! see llvm/include/llvm/IR/DebugInfoFlags.def
};
inline LLVMRustDIFlags operator&(LLVMRustDIFlags A, LLVMRustDIFlags B) {
if (isSet(Flags & LLVMRustDIFlags::FlagRValueReference)) {
Result |= DINode::DIFlags::FlagRValueReference;
}
+ if (isSet(Flags & LLVMRustDIFlags::FlagExternalTypeRef)) {
+ Result |= DINode::DIFlags::FlagExternalTypeRef;
+ }
+ if (isSet(Flags & LLVMRustDIFlags::FlagIntroducedVirtual)) {
+ Result |= DINode::DIFlags::FlagIntroducedVirtual;
+ }
+ if (isSet(Flags & LLVMRustDIFlags::FlagBitField)) {
+ Result |= DINode::DIFlags::FlagBitField;
+ }
#if LLVM_RUSTLLVM || LLVM_VERSION_GE(4, 0)
+ if (isSet(Flags & LLVMRustDIFlags::FlagNoReturn)) {
+ Result |= DINode::DIFlags::FlagNoReturn;
+ }
if (isSet(Flags & LLVMRustDIFlags::FlagMainSubprogram)) {
Result |= DINode::DIFlags::FlagMainSubprogram;
}
#![crate_type = "lib"]
-#![feature(attr_literals)]
-#![feature(repr_align)]
-
#[repr(align(64))]
pub struct Align64(i32);
// CHECK: %Align64 = type { [0 x i32], i32, [15 x i32] }
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// min-llvm-version 4.0
+
+// compile-flags: -g -C no-prepopulate-passes
+
+// CHECK: {{.*}}DISubprogram{{.*}}name: "foo"{{.*}}DIFlagNoReturn
+
+fn foo() -> ! {
+ loop {}
+}
+
+pub fn main() {
+ foo();
+}
#![feature(lang_items)]
#[lang = "cookie"]
-fn cookie() -> ! { //~ E0522
+fn cookie() -> ! {
+//~^^ ERROR definition of an unknown language item: `cookie` [E0522]
loop {}
}
fn main() {
unsafe {
printf(::std::ptr::null(), 0f32);
- //~^ ERROR can't pass `f32` to variadic function, cast to `c_double` [E0617]
+ //~^ ERROR can't pass `f32` to variadic function
+ //~| HELP cast the value to `c_double`
printf(::std::ptr::null(), 0i8);
- //~^ ERROR can't pass `i8` to variadic function, cast to `c_int` [E0617]
+ //~^ ERROR can't pass `i8` to variadic function
+ //~| HELP cast the value to `c_int`
printf(::std::ptr::null(), 0i16);
- //~^ ERROR can't pass `i16` to variadic function, cast to `c_int` [E0617]
+ //~^ ERROR can't pass `i16` to variadic function
+ //~| HELP cast the value to `c_int`
printf(::std::ptr::null(), 0u8);
- //~^ ERROR can't pass `u8` to variadic function, cast to `c_uint` [E0617]
+ //~^ ERROR can't pass `u8` to variadic function
+ //~| HELP cast the value to `c_uint`
printf(::std::ptr::null(), 0u16);
- //~^ ERROR can't pass `u16` to variadic function, cast to `c_uint` [E0617]
+ //~^ ERROR can't pass `u16` to variadic function
+ //~| HELP cast the value to `c_uint`
printf(::std::ptr::null(), printf);
- //~^ ERROR can't pass `unsafe extern "C" fn(*const i8, ...) {printf}` to variadic function, cast to `unsafe extern "C" fn(*const i8, ...)` [E0617]
+ //~^ ERROR can't pass `unsafe extern "C" fn(*const i8, ...) {printf}` to variadic function
+ //~| HELP cast the value to `unsafe extern "C" fn(*const i8, ...)`
}
}
// except according to those terms.
#![allow(dead_code)]
-#![feature(attr_literals)]
-#![feature(repr_align)]
#[repr(C)]
enum A { A }
f: T
}
-#[rustc_if_this_changed]
+#[rustc_if_this_changed(Krate)]
type TypeAlias<T> = Foo<T>;
#[rustc_then_this_would_need(ItemVariances)] //~ ERROR OK
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(never_type)]
+
+enum Helper<T, U> {
+ T(T, [!; 0]),
+ #[allow(dead_code)]
+ U(U),
+}
+
+fn transmute<T, U>(t: T) -> U {
+ let Helper::U(u) = Helper::T(t, []);
+ //~^ ERROR refutable pattern in local binding: `T(_, _)` not covered
+ u
+}
+
+fn main() {
+ println!("{:?}", transmute::<&str, (*const u8, u64)>("type safety"));
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that attempts to construct infinite types via impl trait fail
+// in a graceful way.
+//
+// Regression test for #38064.
+
+// error-pattern:overflow evaluating the requirement `impl Quux`
+
+#![feature(conservative_impl_trait)]
+
+trait Quux {}
+
+fn foo() -> impl Quux {
+ struct Foo<T>(T);
+ impl<T> Quux for Foo<T> {}
+ Foo(bar())
+}
+
+fn bar() -> impl Quux {
+ struct Bar<T>(T);
+ impl<T> Quux for Bar<T> {}
+ Bar(foo())
+}
+
+// effectively:
+// struct Foo(Bar);
+// struct Bar(Foo);
+// should produce an error about infinite size
+
+fn main() { foo(); }
fn main() {
unsafe {
foo(0, bar);
- //~^ ERROR can't pass `fn(*const u8) {bar}` to variadic function, cast to `fn(*const u8)`
+ //~^ ERROR can't pass `fn(*const u8) {bar}` to variadic function
+ //~| HELP cast the value to `fn(*const u8)`
}
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![feature(panic_runtime)]
+#![crate_type = "rlib"]
+#![panic_runtime]
+#![no_std]
+
+extern crate needs_panic_runtime;
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// no-prefer-dynamic
-
-#![feature(panic_runtime)]
-#![crate_type = "rlib"]
-#![panic_runtime]
-#![no_std]
-
-extern crate needs_panic_runtime;
// except according to those terms.
// aux-build:needs-panic-runtime.rs
-// aux-build:runtime-depending-on-panic-runtime.rs
+// aux-build:depends.rs
// error-pattern:cannot depend on a crate that needs a panic runtime
-extern crate runtime_depending_on_panic_runtime;
+extern crate depends;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
-#![feature(attr_literals)]
-#![feature(repr_align)]
#[repr(align(16.0))] //~ ERROR: invalid `repr(align)` attribute: not an unsuffixed integer
struct A(i32);
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(attr_literals)]
-#![feature(repr_align)]
#![feature(untagged_unions)]
#![allow(dead_code)]
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ static || {};
+ //~^ ERROR closures cannot be static
+}
$(RUSTC) -C extra-filename=foo dummy.rs 2>&1
#Option taking no argument
$(RUSTC) -C lto= dummy.rs 2>&1 | \
- $(CGREP) 'codegen option `lto` takes no value'
+ $(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or'
$(RUSTC) -C lto=1 dummy.rs 2>&1 | \
- $(CGREP) 'codegen option `lto` takes no value'
+ $(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or'
$(RUSTC) -C lto=foo dummy.rs 2>&1 | \
- $(CGREP) 'codegen option `lto` takes no value'
+ $(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or'
$(RUSTC) -C lto dummy.rs
# Should not link dead code...
--- /dev/null
+-include ../tools.mk
+
+all:
+ifeq ($(TARGET),x86_64-unknown-linux-gnu)
+ $(RUSTC) hello.rs -C no_integrated_as
+ $(call RUN,hello)
+endif
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ println!("Hello, world!");
+}
variadic: false,
});
iter_exprs(depth - 1, &mut |e| g(
- ExprKind::Closure(CaptureBy::Value, decl.clone(), e, DUMMY_SP)));
+ ExprKind::Closure(CaptureBy::Value,
+ Movability::Movable,
+ decl.clone(),
+ e,
+ DUMMY_SP)));
},
10 => {
iter_exprs(depth - 1, &mut |e| g(ExprKind::Assign(e, make_x())));
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(attr_literals)]
-#![feature(repr_align)]
#![feature(box_syntax)]
use std::mem;
#![feature(attr_literals)]
#[repr(align(16))]
-pub struct A {
- y: i64,
-}
+pub struct A(i64);
pub extern "C" fn foo(x: A) {}
-fn main() {}
+fn main() {
+ foo(A(0));
+}
+++ /dev/null
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-
-pub trait Foo {
- fn f(&self) -> isize;
-}
-
-pub struct A {
- pub x: isize
-}
-
-impl Foo for A {
- fn f(&self) -> isize { 10 }
-}
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+pub trait Foo {
+ fn f(&self) -> isize;
+}
+
+pub struct A {
+ pub x: isize
+}
+
+impl Foo for A {
+ fn f(&self) -> isize { 10 }
+}
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -Z borrowck=mir -Z nll
-
-// This example comes from the NLL RFC.
-
-struct List<T> {
- value: T,
- next: Option<Box<List<T>>>,
-}
-
-fn to_refs<T>(list: &mut List<T>) -> Vec<&mut T> {
- let mut list = list;
- let mut result = vec![];
- loop {
- result.push(&mut list.value);
- if let Some(n) = list.next.as_mut() {
- list = n;
- } else {
- return result;
- }
- }
-}
-
-fn main() {
-}
#[cfg(target_arch = "mips64")]
pub fn main() { }
+#[cfg(target_arch = "powerpc")]
+pub fn main() { }
+
#[cfg(target_arch = "powerpc64")]
pub fn main() { }
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Clto=fat
+// no-prefer-dynamic
+
+fn main() {
+ println!("hello!");
+}
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(conservative_impl_trait, generators, generator_trait)]
+
+use std::ops::{ Generator, GeneratorState };
+
+fn foo(_: &str) -> String {
+ String::new()
+}
+
+fn bar(baz: String) -> impl Generator<Yield = String, Return = ()> {
+ move || {
+ yield foo(&baz);
+ }
+}
+
+fn foo2(_: &str) -> Result<String, ()> {
+ Err(())
+}
+
+fn bar2(baz: String) -> impl Generator<Yield = String, Return = ()> {
+ move || {
+ if let Ok(quux) = foo2(&baz) {
+ yield quux;
+ }
+ }
+}
+
+fn main() {
+ assert_eq!(bar(String::new()).resume(), GeneratorState::Yielded(String::new()));
+ assert_eq!(bar2(String::new()).resume(), GeneratorState::Complete(()));
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(generators, generator_trait)]
+
+use std::ops::Generator;
+
+fn main() {
+ let b = |_| 3;
+ let mut a = || {
+ b(yield);
+ };
+ a.resume();
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(generators)]
+#![feature(generator_trait)]
+
+use std::ops::Generator;
+use std::ops::GeneratorState;
+
+fn main() {
+ let _generator = || {
+ let mut sub_generator = || {
+ yield 2;
+ };
+
+ match sub_generator.resume() {
+ GeneratorState::Yielded(x) => {
+ yield x;
+ }
+ _ => panic!(),
+ };
+ };
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(generators)]
+
+fn _run(bar: &mut i32) {
+ || {
+ {
+ let _baz = &*bar;
+ yield;
+ }
+
+ *bar = 2;
+ };
+}
+
+fn main() {}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(generators, generator_trait)]
+
+use std::ops::{Generator, GeneratorState};
+
+fn main() {
+ let mut generator = unsafe {
+ static || {
+ let a = true;
+ let b = &a;
+ yield;
+ assert_eq!(b as *const _, &a as *const _);
+ }
+ };
+ assert_eq!(generator.resume(), GeneratorState::Yielded(()));
+ assert_eq!(generator.resume(), GeneratorState::Complete(()));
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// #47096
+
+#![feature(slice_patterns)]
+
+fn foo(s: &[i32]) -> &[i32] {
+ let &[ref xs..] = s;
+ xs
+}
+
+fn main() {
+ let x = [1, 2, 3];
+ let y = foo(&x);
+ assert_eq!(x, y);
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-emscripten
+
+#![feature(i128_type)]
+
+#[repr(C)]
+pub struct Foo(i128);
+
+#[no_mangle]
+pub extern "C" fn foo(x: Foo) -> Foo { x }
+
+fn main() {
+ foo(Foo(1));
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(use_nested_groups)]
+#![allow(unused_imports)]
+
+use {{}, {}};
+
+fn main() {}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(nll)]
+
+struct List<T> {
+ value: T,
+ next: Option<Box<List<T>>>,
+}
+
+fn to_refs<T>(mut list: &mut List<T>) -> Vec<&mut T> {
+ let mut result = vec![];
+ loop {
+ result.push(&mut list.value);
+ if let Some(n) = list.next.as_mut() {
+ list = n;
+ } else {
+ return result;
+ }
+ }
+}
+
+fn main() {
+ let mut list = List { value: 1, next: None };
+ let vec = to_refs(&mut list);
+ assert_eq!(vec![&mut 1], vec);
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Clto=thin
+// no-prefer-dynamic
+// min-llvm-version 4.0
+
+fn main() {
+ println!("hello!");
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z thinlto -C codegen-units=8 -O -C lto
+// compile-flags: -C codegen-units=8 -O -C lto=thin
// aux-build:thin-lto-inlines-aux.rs
// min-llvm-version 4.0
// no-prefer-dynamic
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// aux-build:trait_inheritance_cross_trait_call_xc_aux.rs
+// aux-build:trait_xc_call_aux.rs
-extern crate trait_inheritance_cross_trait_call_xc_aux as aux;
+extern crate trait_xc_call_aux as aux;
use aux::Foo;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(attr_literals)]
-#![feature(repr_align)]
#![feature(untagged_unions)]
use std::mem::{size_of, size_of_val, align_of, align_of_val};
// @has intra_links/index.html
// @has - '//a/@href' '../intra_links/struct.ThisType.html'
+// @has - '//a/@href' '../intra_links/struct.ThisType.html#method.this_method'
// @has - '//a/@href' '../intra_links/enum.ThisEnum.html'
+// @has - '//a/@href' '../intra_links/enum.ThisEnum.html#ThisVariant.v'
+// @has - '//a/@href' '../intra_links/trait.ThisTrait.html'
+// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#tymethod.this_associated_method'
+// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#associatedtype.ThisAssociatedType'
+// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#associatedconstant.THIS_ASSOCIATED_CONST'
// @has - '//a/@href' '../intra_links/trait.ThisTrait.html'
// @has - '//a/@href' '../intra_links/type.ThisAlias.html'
// @has - '//a/@href' '../intra_links/union.ThisUnion.html'
//! In this crate we would like to link to:
//!
//! * [`ThisType`](ThisType)
+//! * [`ThisType::this_method`](ThisType::this_method)
//! * [`ThisEnum`](ThisEnum)
+//! * [`ThisEnum::ThisVariant`](ThisEnum::ThisVariant)
//! * [`ThisTrait`](ThisTrait)
+//! * [`ThisTrait::this_associated_method`](ThisTrait::this_associated_method)
+//! * [`ThisTrait::ThisAssociatedType`](ThisTrait::ThisAssociatedType)
+//! * [`ThisTrait::THIS_ASSOCIATED_CONST`](ThisTrait::THIS_ASSOCIATED_CONST)
//! * [`ThisAlias`](ThisAlias)
//! * [`ThisUnion`](ThisUnion)
//! * [`this_function`](this_function())
}
pub struct ThisType;
+
+impl ThisType {
+ pub fn this_method() {}
+}
pub enum ThisEnum { ThisVariant, }
-pub trait ThisTrait {}
+pub trait ThisTrait {
+ type ThisAssociatedType;
+ const THIS_ASSOCIATED_CONST: u8;
+ fn this_associated_method();
+}
pub type ThisAlias = Result<(), ()>;
pub union ThisUnion { this_field: usize, }
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-#![feature(attr_literals)]
-
-#[repr(align(64))] //~ error: the struct `#[repr(align(u16))]` attribute is experimental
-struct Foo(u64, u64);
-
-fn main() {}
+++ /dev/null
-error[E0658]: the struct `#[repr(align(u16))]` attribute is experimental (see issue #33626)
- --> $DIR/feature-gate-repr_align.rs:12:1
- |
-12 | #[repr(align(64))] //~ error: the struct `#[repr(align(u16))]` attribute is experimental
- | ^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(repr_align)] to the crate attributes to enable
-
-error: aborting due to previous error
-
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(generators)]
+#![feature(optin_builtin_traits)]
+
+auto trait Foo {}
+
+struct No;
+
+impl !Foo for No {}
+
+struct A<'a, 'b>(&'a mut bool, &'b mut bool, No);
+
+impl<'a, 'b: 'a> Foo for A<'a, 'b> {}
+
+struct OnlyFooIfStaticRef(No);
+impl Foo for &'static OnlyFooIfStaticRef {}
+
+struct OnlyFooIfRef(No);
+impl<'a> Foo for &'a OnlyFooIfRef {}
+
+fn assert_foo<T: Foo>(f: T) {}
+
+fn main() {
+ // Make sure 'static is erased for generator interiors so we can't match it in trait selection
+ let x: &'static _ = &OnlyFooIfStaticRef(No);
+ let gen = || {
+ let x = x;
+ yield;
+ assert_foo(x);
+ };
+ assert_foo(gen); //~ ERROR the trait bound `No: Foo` is not satisfied
+
+ // Allow impls which matches any lifetime
+ let x = &OnlyFooIfRef(No);
+ let gen = || {
+ let x = x;
+ yield;
+ assert_foo(x);
+ };
+ assert_foo(gen); // ok
+
+ // Disallow impls which relates lifetimes in the generator interior
+ let gen = || {
+ let a = A(&mut true, &mut true, No);
+ yield;
+ assert_foo(a);
+ };
+ assert_foo(gen); //~ ERROR the requirement `for<'r, 's> 'r : 's` is not satisfied
+}
--- /dev/null
+error[E0277]: the trait bound `No: Foo` is not satisfied in `[generator@$DIR/auto-trait-regions.rs:35:15: 39:6 x:&&'static OnlyFooIfStaticRef for<'r> {&'r OnlyFooIfStaticRef, ()}]`
+ --> $DIR/auto-trait-regions.rs:40:5
+ |
+40 | assert_foo(gen); //~ ERROR the trait bound `No: Foo` is not satisfied
+ | ^^^^^^^^^^ within `[generator@$DIR/auto-trait-regions.rs:35:15: 39:6 x:&&'static OnlyFooIfStaticRef for<'r> {&'r OnlyFooIfStaticRef, ()}]`, the trait `Foo` is not implemented for `No`
+ |
+ = help: the following implementations were found:
+ <No as Foo>
+ = note: required because it appears within the type `OnlyFooIfStaticRef`
+ = note: required because it appears within the type `&OnlyFooIfStaticRef`
+ = note: required because it appears within the type `for<'r> {&'r OnlyFooIfStaticRef, ()}`
+ = note: required because it appears within the type `[generator@$DIR/auto-trait-regions.rs:35:15: 39:6 x:&&'static OnlyFooIfStaticRef for<'r> {&'r OnlyFooIfStaticRef, ()}]`
+note: required by `assert_foo`
+ --> $DIR/auto-trait-regions.rs:30:1
+ |
+30 | fn assert_foo<T: Foo>(f: T) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error[E0279]: the requirement `for<'r, 's> 'r : 's` is not satisfied (`expected bound lifetime parameter, found concrete lifetime`)
+ --> $DIR/auto-trait-regions.rs:57:5
+ |
+57 | assert_foo(gen); //~ ERROR the requirement `for<'r, 's> 'r : 's` is not satisfied
+ | ^^^^^^^^^^
+ |
+ = note: required because of the requirements on the impl of `for<'r, 's> Foo` for `A<'_, '_>`
+ = note: required because it appears within the type `for<'r, 's> {A<'r, 's>, ()}`
+ = note: required because it appears within the type `[generator@$DIR/auto-trait-regions.rs:52:15: 56:6 for<'r, 's> {A<'r, 's>, ()}]`
+note: required by `assert_foo`
+ --> $DIR/auto-trait-regions.rs:30:1
+ |
+30 | fn assert_foo<T: Foo>(f: T) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(generators, generator_trait, box_leak)]
+
+use std::cell::RefCell;
+use std::ops::Generator;
+
+fn main() {
+ let (cell, mut gen);
+ cell = Box::new(RefCell::new(0));
+ let ref_ = Box::leak(Box::new(Some(cell.borrow_mut())));
+ // the upvar is the non-dropck `&mut Option<Ref<'a, i32>>`.
+ gen = || {
+ // but the generator can use it to drop a `Ref<'a, i32>`.
+ let _d = ref_.take(); //~ ERROR `ref_` does not live long enough
+ yield;
+ };
+ gen.resume();
+ // drops the RefCell and then the Ref, leading to use-after-free
+}
--- /dev/null
+error[E0597]: `ref_` does not live long enough
+ --> $DIR/dropck.rs:23:18
+ |
+21 | gen = || {
+ | -- capture occurs here
+22 | // but the generator can use it to drop a `Ref<'a, i32>`.
+23 | let _d = ref_.take(); //~ ERROR `ref_` does not live long enough
+ | ^^^^ borrowed value does not live long enough
+...
+28 | }
+ | - borrowed value dropped before borrower
+ |
+ = note: values in a scope are dropped in the opposite order they are created
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z borrowck=compare
+
+#![feature(generators)]
+#![feature(nll)]
+
+fn main() {
+ || {
+        // The reference in `_a` is legal with NLL since it ends before the yield
+ let _a = &mut true; //~ ERROR borrow may still be in use when generator yields (Ast)
+ let b = &mut true; //~ ERROR borrow may still be in use when generator yields (Ast)
+ //~^ borrow may still be in use when generator yields (Mir)
+ yield ();
+ println!("{}", b);
+ };
+}
--- /dev/null
+error[E0626]: borrow may still be in use when generator yields (Mir)
+ --> $DIR/generator-with-nll.rs:20:17
+ |
+20 | let b = &mut true; //~ ERROR borrow may still be in use when generator yields (Ast)
+ | ^^^^^^^^^
+21 | //~^ borrow may still be in use when generator yields (Mir)
+22 | yield ();
+ | -------- possible yield occurs here
+
+error[E0626]: borrow may still be in use when generator yields (Ast)
+ --> $DIR/generator-with-nll.rs:19:23
+ |
+19 | let _a = &mut true; //~ ERROR borrow may still be in use when generator yields (Ast)
+ | ^^^^
+...
+22 | yield ();
+ | -------- possible yield occurs here
+
+error[E0626]: borrow may still be in use when generator yields (Ast)
+ --> $DIR/generator-with-nll.rs:20:22
+ |
+20 | let b = &mut true; //~ ERROR borrow may still be in use when generator yields (Ast)
+ | ^^^^
+21 | //~^ borrow may still be in use when generator yields (Mir)
+22 | yield ();
+ | -------- possible yield occurs here
+
+error: aborting due to 3 previous errors
+
17 | fn assert_send<T: Send>(_: T) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error[E0277]: the trait bound `std::cell::Cell<i32>: std::marker::Sync` is not satisfied in `[generator@$DIR/not-send-sync.rs:19:17: 23:6 (std::cell::Cell<i32>, ())]`
+error[E0277]: the trait bound `std::cell::Cell<i32>: std::marker::Sync` is not satisfied in `[generator@$DIR/not-send-sync.rs:19:17: 23:6 {std::cell::Cell<i32>, ()}]`
--> $DIR/not-send-sync.rs:19:5
|
19 | assert_sync(|| {
| ^^^^^^^^^^^ `std::cell::Cell<i32>` cannot be shared between threads safely
|
- = help: within `[generator@$DIR/not-send-sync.rs:19:17: 23:6 (std::cell::Cell<i32>, ())]`, the trait `std::marker::Sync` is not implemented for `std::cell::Cell<i32>`
- = note: required because it appears within the type `(std::cell::Cell<i32>, ())`
- = note: required because it appears within the type `[generator@$DIR/not-send-sync.rs:19:17: 23:6 (std::cell::Cell<i32>, ())]`
+ = help: within `[generator@$DIR/not-send-sync.rs:19:17: 23:6 {std::cell::Cell<i32>, ()}]`, the trait `std::marker::Sync` is not implemented for `std::cell::Cell<i32>`
+ = note: required because it appears within the type `{std::cell::Cell<i32>, ()}`
+ = note: required because it appears within the type `[generator@$DIR/not-send-sync.rs:19:17: 23:6 {std::cell::Cell<i32>, ()}]`
note: required by `main::assert_sync`
--> $DIR/not-send-sync.rs:16:5
|
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(generators)]
+
+fn main() {
+ static || { //~ ERROR: construction of immovable generator requires unsafe
+ yield;
+ };
+}
--- /dev/null
+error[E0133]: construction of immovable generator requires unsafe function or block
+ --> $DIR/unsafe-immovable.rs:14:5
+ |
+14 | / static || { //~ ERROR: construction of immovable generator requires unsafe
+15 | | yield;
+16 | | };
+ | |_____^ construction of immovable generator
+
+error: aborting due to previous error
+
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// compile-flags: -Z borrowck=compare
+
#![feature(generators, generator_trait)]
use std::ops::{GeneratorState, Generator};
// (This error occurs because the region shows up in the type of
// `b` and gets extended by region inference.)
let mut b = move || {
- let a = &3;
+ let a = &mut 3;
+ //~^ ERROR borrow may still be in use when generator yields (Ast)
+ //~| ERROR borrow may still be in use when generator yields (Mir)
yield();
println!("{}", a);
};
// No error here -- `a` is not in scope at the point of `yield`.
let mut b = move || {
{
- let a = &3;
+ let a = &mut 3;
}
yield();
};
let mut b = move || {
let a = 3;
{
- let b = &a; //~ ERROR
+ let b = &a;
+ //~^ ERROR borrow may still be in use when generator yields (Ast)
+ //~| ERROR borrow may still be in use when generator yields (Mir)
yield();
println!("{}", b);
}
-error[E0626]: borrow may still be in use when generator yields
- --> $DIR/yield-while-local-borrowed.rs:48:22
+error[E0626]: borrow may still be in use when generator yields (Mir)
+ --> $DIR/yield-while-local-borrowed.rs:24:17
|
-48 | let b = &a; //~ ERROR
+24 | let a = &mut 3;
+ | ^^^^^^
+...
+27 | yield();
+ | ------- possible yield occurs here
+
+error[E0626]: borrow may still be in use when generator yields (Ast)
+ --> $DIR/yield-while-local-borrowed.rs:24:22
+ |
+24 | let a = &mut 3;
+ | ^
+...
+27 | yield();
+ | ------- possible yield occurs here
+
+error[E0626]: borrow may still be in use when generator yields (Ast)
+ --> $DIR/yield-while-local-borrowed.rs:52:22
+ |
+52 | let b = &a;
| ^
-49 | yield();
+...
+55 | yield();
+ | ------- possible yield occurs here
+
+error[E0626]: borrow may still be in use when generator yields (Mir)
+ --> $DIR/yield-while-local-borrowed.rs:52:21
+ |
+52 | let b = &a;
+ | ^^
+...
+55 | yield();
| ------- possible yield occurs here
-error: aborting due to previous error
+error: aborting due to 4 previous errors
= note: the following traits define an item `method`, perhaps you need to implement one of them:
candidate #1: `foo::Bar`
candidate #2: `no_method_suggested_traits::foo::PubPub`
- candidate #3: `no_method_suggested_traits::bar::PubPriv`
- candidate #4: `no_method_suggested_traits::qux::PrivPub`
- candidate #5: `no_method_suggested_traits::quz::PrivPriv`
- candidate #6: `no_method_suggested_traits::Reexported`
+ candidate #3: `no_method_suggested_traits::qux::PrivPub`
+ candidate #4: `no_method_suggested_traits::Reexported`
error[E0599]: no method named `method` found for type `std::rc::Rc<&mut std::boxed::Box<&Foo>>` in the current scope
--> $DIR/no-method-suggested-traits.rs:52:43
= note: the following traits define an item `method`, perhaps you need to implement one of them:
candidate #1: `foo::Bar`
candidate #2: `no_method_suggested_traits::foo::PubPub`
- candidate #3: `no_method_suggested_traits::bar::PubPriv`
- candidate #4: `no_method_suggested_traits::qux::PrivPub`
- candidate #5: `no_method_suggested_traits::quz::PrivPriv`
- candidate #6: `no_method_suggested_traits::Reexported`
+ candidate #3: `no_method_suggested_traits::qux::PrivPub`
+ candidate #4: `no_method_suggested_traits::Reexported`
error[E0599]: no method named `method2` found for type `u64` in the current scope
--> $DIR/no-method-suggested-traits.rs:55:10
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #47511: anonymous lifetimes can appear
+// unconstrained in a return type, but only if they appear just once
+// in the input, as the input to a projection.
+
+fn f(_: X) -> X {
+ //~^ ERROR return type references an anonymous lifetime
+ unimplemented!()
+}
+
+fn g<'a>(_: X<'a>) -> X<'a> {
+ //~^ ERROR return type references lifetime `'a`, which is not constrained
+ unimplemented!()
+}
+
+type X<'a> = <&'a () as Trait>::Value;
+
+trait Trait {
+ type Value;
+}
+
+impl<'a> Trait for &'a () {
+ type Value = ();
+}
+
+fn main() {}
--- /dev/null
+error[E0581]: return type references an anonymous lifetime which is not constrained by the fn input types
+ --> $DIR/issue-47511.rs:15:15
+ |
+15 | fn f(_: X) -> X {
+ | ^
+ |
+ = note: lifetimes appearing in an associated type are not considered constrained
+
+error[E0581]: return type references lifetime `'a`, which is not constrained by the fn input types
+ --> $DIR/issue-47511.rs:20:23
+ |
+20 | fn g<'a>(_: X<'a>) -> X<'a> {
+ | ^^^^^
+
+error: aborting due to 2 previous errors
+
`&mut Foo : std::iter::Iterator`
= help: items from traits can only be used if the trait is implemented and in scope
= note: the following traits define an item `take`, perhaps you need to implement one of them:
- candidate #1: `std::collections::hash::Recover`
- candidate #2: `std::io::Read`
- candidate #3: `std::iter::Iterator`
- candidate #4: `alloc::btree::Recover`
+ candidate #1: `std::io::Read`
+ candidate #2: `std::iter::Iterator`
error: aborting due to 4 previous errors
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(use_nested_groups)]
-#![deny(unused_imports)]
-
-mod foo {
- pub enum Bar {}
-}
-
-use foo::{*, *}; //~ ERROR unused import: `*`
-
-fn main() {
- let _: Bar;
-}
+++ /dev/null
-error: unused import: `*`
- --> $DIR/owl-import-generates-unused-import-lint.rs:18:14
- |
-18 | use foo::{*, *}; //~ ERROR unused import: `*`
- | ^
- |
-note: lint level defined here
- --> $DIR/owl-import-generates-unused-import-lint.rs:12:9
- |
-12 | #![deny(unused_imports)]
- | ^^^^^^^^^^^^^^
-
-error: aborting due to previous error
-
// It avoids using u64/i64 because on some targets that is only 4-byte
// aligned (while on most it is 8-byte aligned) and so the resulting
// padding and overall computed sizes can be quite different.
-#![feature(attr_literals)]
-#![feature(repr_align)]
#![feature(start)]
#![allow(dead_code)]
#![feature(attr_literals)]
-#[repr(align(16))] //~ ERROR is experimental
+#[repr(align(16))]
struct Gem {
mohs_hardness: u8,
poofed: bool,
-error[E0658]: the struct `#[repr(align(u16))]` attribute is experimental (see issue #33626)
- --> $DIR/gated-features-attr-spans.rs:13:1
- |
-13 | #[repr(align(16))] //~ ERROR is experimental
- | ^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(repr_align)] to the crate attributes to enable
-
error[E0658]: SIMD types are experimental and possibly buggy (see issue #27731)
--> $DIR/gated-features-attr-spans.rs:20:1
|
|
= help: add #![feature(fn_must_use)] to the crate attributes to enable
-error: aborting due to 2 previous errors
+error: aborting due to previous error
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct T;
+
+fn main() {
+ T::new();
+ //~^ ERROR no function or associated item named `new` found for type `T` in the current scope
+}
--- /dev/null
+error[E0599]: no function or associated item named `new` found for type `T` in the current scope
+ --> $DIR/dont-suggest-private-trait-method.rs:14:5
+ |
+11 | struct T;
+ | --------- function or associated item `new` not found for this
+...
+14 | T::new();
+ | ^^^^^^ function or associated item not found in `T`
+
+error: aborting due to previous error
+
foo::<f32>(x_f64);
//~^ ERROR mismatched types
foo::<f32>(x_f32);
+
+ foo::<u32>(x_u8 as u16);
+ //~^ ERROR mismatched types
+ foo::<i32>(-x_i8);
+ //~^ ERROR mismatched types
}
312 | foo::<f32>(x_f64);
| ^^^^^ expected f32, found f64
-error: aborting due to 132 previous errors
+error[E0308]: mismatched types
+ --> $DIR/numeric-cast.rs:316:16
+ |
+316 | foo::<u32>(x_u8 as u16);
+ | ^^^^^^^^^^^ expected u32, found u16
+help: you can cast an `u16` to `u32`, which will zero-extend the source value
+ |
+316 | foo::<u32>((x_u8 as u16).into());
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error[E0308]: mismatched types
+ --> $DIR/numeric-cast.rs:318:16
+ |
+318 | foo::<i32>(-x_i8);
+ | ^^^^^ expected i32, found i8
+help: you can cast an `i8` to `i32`, which will sign-extend the source value
+ |
+318 | foo::<i32>((-x_i8).into());
+ | ^^^^^^^^^^^^^^
+
+error: aborting due to 134 previous errors
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(unused)]
+#![feature(lang_items)]
+
+#[lang = "foo"]
+fn bar() -> ! {
+//~^^ ERROR definition of an unknown language item: `foo`
+ loop {}
+}
+
+fn main() {}
--- /dev/null
+error[E0522]: definition of an unknown language item: `foo`
+ --> $DIR/unknown-language-item.rs:14:1
+ |
+14 | #[lang = "foo"]
+ | ^^^^^^^^^^^^^^^ definition of unknown language item `foo`
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(use_nested_groups)]
+#![allow(dead_code)]
+#![deny(unused_imports)]
+
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Bar();
+ }
+ pub mod foobar {}
+ }
+
+ pub struct Foo();
+}
+
+use foo::{Foo, bar::{baz::{}, foobar::*}, *};
+ //~^ ERROR unused imports: `*`, `Foo`, `baz::{}`, `foobar::*`
+use foo::bar::baz::{*, *};
+ //~^ ERROR unused import: `*`
+use foo::{};
+ //~^ ERROR unused import: `use foo::{};`
+
+fn main() {
+ let _: Bar;
+}
--- /dev/null
+error: unused imports: `*`, `Foo`, `baz::{}`, `foobar::*`
+ --> $DIR/use-nested-groups-unused-imports.rs:26:11
+ |
+26 | use foo::{Foo, bar::{baz::{}, foobar::*}, *};
+ | ^^^ ^^^^^^^ ^^^^^^^^^ ^
+ |
+note: lint level defined here
+ --> $DIR/use-nested-groups-unused-imports.rs:13:9
+ |
+13 | #![deny(unused_imports)]
+ | ^^^^^^^^^^^^^^
+
+error: unused import: `*`
+ --> $DIR/use-nested-groups-unused-imports.rs:28:24
+ |
+28 | use foo::bar::baz::{*, *};
+ | ^
+
+error: unused import: `use foo::{};`
+ --> $DIR/use-nested-groups-unused-imports.rs:30:1
+ |
+30 | use foo::{};
+ | ^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
//~| expected type `extern "C" fn(isize, u8, ...)`
//~| found type `extern "C" fn(isize, u8) {bar}`
- foo(1, 2, 3f32); //~ ERROR can't pass `f32` to variadic function, cast to `c_double`
- foo(1, 2, true); //~ ERROR can't pass `bool` to variadic function, cast to `c_int`
- foo(1, 2, 1i8); //~ ERROR can't pass `i8` to variadic function, cast to `c_int`
- foo(1, 2, 1u8); //~ ERROR can't pass `u8` to variadic function, cast to `c_uint`
- foo(1, 2, 1i16); //~ ERROR can't pass `i16` to variadic function, cast to `c_int`
- foo(1, 2, 1u16); //~ ERROR can't pass `u16` to variadic function, cast to `c_uint`
+ foo(1, 2, 3f32); //~ ERROR can't pass `f32` to variadic function
+ foo(1, 2, true); //~ ERROR can't pass `bool` to variadic function
+ foo(1, 2, 1i8); //~ ERROR can't pass `i8` to variadic function
+ foo(1, 2, 1u8); //~ ERROR can't pass `u8` to variadic function
+ foo(1, 2, 1i16); //~ ERROR can't pass `i16` to variadic function
+ foo(1, 2, 1u16); //~ ERROR can't pass `u16` to variadic function
}
}
= note: expected type `extern "C" fn(isize, u8, ...)`
found type `extern "C" fn(isize, u8) {bar}`
-error[E0617]: can't pass `f32` to variadic function, cast to `c_double`
+error[E0617]: can't pass `f32` to variadic function
--> $DIR/variadic-ffi-3.rs:34:19
|
-34 | foo(1, 2, 3f32); //~ ERROR can't pass `f32` to variadic function, cast to `c_double`
- | ^^^^
+34 | foo(1, 2, 3f32); //~ ERROR can't pass `f32` to variadic function
+ | ^^^^ help: cast the value to `c_double`: `3f32 as c_double`
-error[E0617]: can't pass `bool` to variadic function, cast to `c_int`
+error[E0617]: can't pass `bool` to variadic function
--> $DIR/variadic-ffi-3.rs:35:19
|
-35 | foo(1, 2, true); //~ ERROR can't pass `bool` to variadic function, cast to `c_int`
- | ^^^^
+35 | foo(1, 2, true); //~ ERROR can't pass `bool` to variadic function
+ | ^^^^ help: cast the value to `c_int`: `true as c_int`
-error[E0617]: can't pass `i8` to variadic function, cast to `c_int`
+error[E0617]: can't pass `i8` to variadic function
--> $DIR/variadic-ffi-3.rs:36:19
|
-36 | foo(1, 2, 1i8); //~ ERROR can't pass `i8` to variadic function, cast to `c_int`
- | ^^^
+36 | foo(1, 2, 1i8); //~ ERROR can't pass `i8` to variadic function
+ | ^^^ help: cast the value to `c_int`: `1i8 as c_int`
-error[E0617]: can't pass `u8` to variadic function, cast to `c_uint`
+error[E0617]: can't pass `u8` to variadic function
--> $DIR/variadic-ffi-3.rs:37:19
|
-37 | foo(1, 2, 1u8); //~ ERROR can't pass `u8` to variadic function, cast to `c_uint`
- | ^^^
+37 | foo(1, 2, 1u8); //~ ERROR can't pass `u8` to variadic function
+ | ^^^ help: cast the value to `c_uint`: `1u8 as c_uint`
-error[E0617]: can't pass `i16` to variadic function, cast to `c_int`
+error[E0617]: can't pass `i16` to variadic function
--> $DIR/variadic-ffi-3.rs:38:19
|
-38 | foo(1, 2, 1i16); //~ ERROR can't pass `i16` to variadic function, cast to `c_int`
- | ^^^^
+38 | foo(1, 2, 1i16); //~ ERROR can't pass `i16` to variadic function
+ | ^^^^ help: cast the value to `c_int`: `1i16 as c_int`
-error[E0617]: can't pass `u16` to variadic function, cast to `c_uint`
+error[E0617]: can't pass `u16` to variadic function
--> $DIR/variadic-ffi-3.rs:39:19
|
-39 | foo(1, 2, 1u16); //~ ERROR can't pass `u16` to variadic function, cast to `c_uint`
- | ^^^^
+39 | foo(1, 2, 1u16); //~ ERROR can't pass `u16` to variadic function
+ | ^^^^ help: cast the value to `c_uint`: `1u16 as c_uint`
error: aborting due to 10 previous errors
-Subproject commit 6a8eb71f6d226f9ac869dbacd5ff6aa76deef1c4
+Subproject commit 91e36aa86c7037de50642f2fec1cf47c3d18af02