[submodule "src/tools/clippy"]
path = src/tools/clippy
url = https://github.com/rust-lang-nursery/rust-clippy.git
+[submodule "src/tools/rustfmt"]
+ path = src/tools/rustfmt
+ url = https://github.com/rust-lang-nursery/rustfmt.git
+[submodule "src/tools/miri"]
+ path = src/tools/miri
+ url = https://github.com/solson/miri.git
install:
- case "$TRAVIS_OS_NAME" in
linux)
- travis_retry curl -fo $HOME/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-unknown-linux-musl &&
+ travis_retry curl -fo $HOME/stamp https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-17-stamp-x86_64-unknown-linux-musl &&
chmod +x $HOME/stamp &&
export PATH=$PATH:$HOME
;;
travis_retry brew update &&
travis_retry brew install xz;
fi &&
- travis_retry curl -fo /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-apple-darwin &&
+ travis_retry curl -fo /usr/local/bin/sccache https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-12-sccache-x86_64-apple-darwin &&
chmod +x /usr/local/bin/sccache &&
- travis_retry curl -fo /usr/local/bin/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
+ travis_retry curl -fo /usr/local/bin/stamp https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
chmod +x /usr/local/bin/stamp
;;
esac
deploy:
- provider: s3
- bucket: rust-lang-ci
+ bucket: rust-lang-ci2
skip_cleanup: true
local_dir: deploy
upload_dir: rustc-builds
acl: public_read
- region: us-east-1
+ region: us-west-1
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
condition: $DEPLOY = 1
- provider: s3
- bucket: rust-lang-ci
+ bucket: rust-lang-ci2
skip_cleanup: true
local_dir: deploy
upload_dir: rustc-builds-try
acl: public_read
- region: us-east-1
+ region: us-west-1
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
# this is the same as the above deployment provider except that it uploads to
# a slightly different directory and has a different trigger
- provider: s3
- bucket: rust-lang-ci
+ bucket: rust-lang-ci2
skip_cleanup: true
local_dir: deploy
upload_dir: rustc-builds-alt
acl: public_read
- region: us-east-1
+ region: us-west-1
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
SCRIPT: python x.py test
- MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
+ MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
- MSYS_BITS: 64
SCRIPT: python x.py test
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
- MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
+ MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-extended
SCRIPT: python x.py dist
- MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
+ MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
DEPLOY: 1
- MSYS_BITS: 64
SCRIPT: python x.py dist
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-extended
- MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
+ MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
DEPLOY: 1
- set PATH=C:\Python27;%PATH%
# Download and install sccache
- - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-pc-windows-msvc
+ - appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-12-sccache-x86_64-pc-windows-msvc
- mv 2017-05-12-sccache-x86_64-pc-windows-msvc sccache.exe
- set PATH=%PATH%;%CD%
# Download and install ninja
#
# Note that this is originally from the github releases page of Ninja
- - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-15-ninja-win.zip
+ - appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-15-ninja-win.zip
- 7z x 2017-03-15-ninja-win.zip
- set RUST_CONFIGURE_ARGS=%RUST_CONFIGURE_ARGS% --enable-ninja
# - set PATH=%PATH%;%CD% -- this already happens above for sccache
# Install InnoSetup to get `iscc` used to produce installers
- - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-08-22-is.exe
+ - appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-08-22-is.exe
- 2017-08-22-is.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /SP-
- set PATH="C:\Program Files (x86)\Inno Setup 5";%PATH%
# Help debug some handle issues on AppVeyor
- - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-15-Handle.zip
+ - appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-15-Handle.zip
- mkdir handle
- 7z x -ohandle 2017-05-15-Handle.zip
- set PATH=%PATH%;%CD%\handle
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: tQWIE+DJHjXaV4np/3YeETkEmXngtIuIgAO/LYKQaUshGLgN8cBCFGG3cHx5lKLt
- bucket: rust-lang-ci
+ bucket: rust-lang-ci2
set_public: true
- region: us-east-1
+ region: us-west-1
artifact: /.*/
folder: rustc-builds
on:
access_key_id: AKIAJVBODR3IA4O72THQ
secret_access_key:
secure: tQWIE+DJHjXaV4np/3YeETkEmXngtIuIgAO/LYKQaUshGLgN8cBCFGG3cHx5lKLt
- bucket: rust-lang-ci
+ bucket: rust-lang-ci2
set_public: true
- region: us-east-1
+ region: us-west-1
artifact: /.*/
folder: rustc-builds-alt
on:
# Whether or not to create a source tarball when creating distribution tarballs.
#dist-src = false
+# Whether to also run the Miri test suite when running tests.
+# As a side effect, this also generates MIR for all libraries.
+#test-miri = false
+
# =============================================================================
# Options for specific targets
#
+++ /dev/null
-
-fn foo(x: fn(&u8, &u8), y: Vec<&u8>, z: &u8) {
-// Debruijn 1 1 1 1
-// Anon-Index 0 1 0 1
-// ------
-// debruijn indices are shifted by 1 in here
- y.push(z); // index will be zero or one
-}
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "bitflags"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "bootstrap"
version = "0.0.0"
"rls-rustc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-vfs 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustfmt-nightly 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustfmt-nightly 0.2.5",
"serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.0.0"
dependencies = [
"arena 0.0.0",
+ "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
"fmt_macros 0.0.0",
"graphviz 0.0.0",
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_back 0.0.0",
- "rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
name = "rustc_apfloat"
version = "0.0.0"
dependencies = [
- "rustc_bitflags 0.0.0",
+ "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc_cratesio_shim 0.0.0",
]
[[package]]
"syntax 0.0.0",
]
-[[package]]
-name = "rustc_bitflags"
-version = "0.0.0"
-
[[package]]
name = "rustc_borrowck"
version = "0.0.0"
"syntax 0.0.0",
]
+[[package]]
+name = "rustc_cratesio_shim"
+version = "0.0.0"
+dependencies = [
+ "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "rustc_data_structures"
version = "0.0.0"
name = "rustc_llvm"
version = "0.0.0"
dependencies = [
+ "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"build_helper 0.1.0",
"gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc_bitflags 0.0.0",
+ "rustc_cratesio_shim 0.0.0",
]
[[package]]
name = "rustc_mir"
version = "0.0.0"
dependencies = [
+ "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"graphviz 0.0.0",
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
- "rustc_bitflags 0.0.0",
"rustc_const_eval 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
name = "rustc_trans"
version = "0.0.0"
dependencies = [
+ "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
"gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)",
"jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_allocator 0.0.0",
"rustc_back 0.0.0",
- "rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
[[package]]
name = "rustfmt-nightly"
version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
name = "syntax"
version = "0.0.0"
dependencies = [
- "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc_cratesio_shim 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"serialize 0.0.0",
"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
"checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4"
"checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5"
+"checksum bitflags 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5cde24d1b2e2216a726368b2363a273739c91f4e3eb4e0dd12d672d396ad989"
"checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32"
"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
"checksum clap 2.26.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2267a8fdd4dce6956ba6649e130f62fb279026e5e84b92aa939ac8f85ce3f9f0"
"checksum rls-vfs 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ffd34691a510938bb67fe0444fb363103c73ffb31c121d1e16bc92d8945ea8ff"
"checksum rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "aee45432acc62f7b9a108cc054142dac51f979e69e71ddce7d6fc7adf29e817e"
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
-"checksum rustfmt-nightly 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7d6dbb39239e54df780a850721fba87b3fdb2e645b39041742ec111369cec6af"
"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
"checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d"
"checksum scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57"
"tools/cargo",
"tools/rustdoc",
"tools/rls",
+ "tools/rustfmt",
# FIXME(https://github.com/rust-lang/cargo/issues/4089): move these to exclude
"tools/rls/test_data/borrow_error",
"tools/rls/test_data/completion",
debug = false
debug-assertions = false
-[patch.'https://github.com/rust-lang/cargo']
+[patch."https://github.com/rust-lang/cargo"]
cargo = { path = "tools/cargo" }
+
+# Override rustfmt dependencies both on the repo and the crate (the RLS
+# sometimes uses either).
+# FIXME should only need the crates.io patch, long term.
+[patch."https://github.com/rust-lang-nursery/rustfmt"]
+rustfmt-nightly = { path = "tools/rustfmt" }
+[patch.crates-io]
+rustfmt-nightly = { path = "tools/rustfmt" }
}
}
+ // When running miri tests, we need to generate MIR for all libraries
+ if env::var("TEST_MIRI").ok().map_or(false, |val| val == "true") {
+ cmd.arg("-Zalways-encode-mir");
+ cmd.arg("-Zmir-emit-validate=1");
+ }
+
// Force all crates compiled by this compiler to (a) be unstable and (b)
// allow the `rustc_private` feature to link to other unstable crates
// also in the sysroot.
def build_triple(self):
"""Build triple as in LLVM"""
- default_encoding = sys.getdefaultencoding()
config = self.get_toml('build')
if config:
return config
return
print('Updating submodules')
default_encoding = sys.getdefaultencoding()
- run(["git", "submodule", "-q", "sync"], cwd=self.rust_root)
+ run(["git", "submodule", "-q", "sync"], cwd=self.rust_root, verbose=self.verbose)
submodules = [s.split(' ', 1)[1] for s in subprocess.check_output(
["git", "config", "--file",
os.path.join(self.rust_root, ".gitmodules"),
try:
with open(args.config or 'config.toml') as config:
build.config_toml = config.read()
- except:
+ except OSError:
pass
if '\nverbose = 2' in build.config_toml:
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::any::Any;
+use std::cell::RefCell;
+use std::collections::BTreeSet;
+use std::env;
use std::fmt::Debug;
+use std::fs;
use std::hash::Hash;
-use std::cell::RefCell;
+use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::process::Command;
-use std::fs;
-use std::ops::Deref;
-use std::any::Any;
-use std::collections::BTreeSet;
use compile;
use install;
tool::UnstableBookGen, tool::Tidy, tool::Linkchecker, tool::CargoTest,
tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient,
tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy,
- native::Llvm),
+ native::Llvm, tool::Rustfmt, tool::Miri),
Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest,
check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Rustdoc,
check::Linkcheck, check::Cargotest, check::Cargo, check::Rls, check::Docs,
- check::ErrorIndex, check::Distcheck),
+ check::ErrorIndex, check::Distcheck, check::Rustfmt, check::Miri),
Kind::Bench => describe!(check::Crate, check::CrateLibrustc),
Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon,
doc::Reference, doc::Rustdoc, doc::CargoBook),
Kind::Dist => describe!(dist::Docs, dist::Mingw, dist::Rustc, dist::DebuggerScripts,
dist::Std, dist::Analysis, dist::Src, dist::PlainSourceTarball, dist::Cargo,
- dist::Rls, dist::Extended, dist::HashSign),
+ dist::Rls, dist::Extended, dist::HashSign, dist::DontDistWithMiriEnabled),
Kind::Install => describe!(install::Docs, install::Std, install::Cargo, install::Rls,
install::Analysis, install::Src, install::Rustc),
}
let out_dir = self.stage_out(compiler, mode);
cargo.env("CARGO_TARGET_DIR", out_dir)
.arg(cmd)
- .arg("-j").arg(self.jobs().to_string())
.arg("--target").arg(target);
+ // If we were invoked from `make` then that's already got a jobserver
+ // set up for us so no need to tell Cargo about jobs all over again.
+ if env::var_os("MAKEFLAGS").is_none() && env::var_os("MFLAGS").is_none() {
+ cargo.arg("-j").arg(self.jobs().to_string());
+ }
+
// FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005
// Force cargo to output binaries with disambiguating hashes in the name
cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.channel);
} else {
PathBuf::from("/path/to/nowhere/rustdoc/not/required")
})
+ .env("TEST_MIRI", self.config.test_miri.to_string())
.env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
if mode != Mode::Tool {
use std::process::Command;
use std::io::Read;
-use build_helper::{self, output};
+use build_helper::{self, output, BuildExpectation};
use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step};
use cache::{INTERNER, Interned};
use tool::{self, Tool};
use util::{self, dylib_path, dylib_path_var};
use {Build, Mode};
+use toolstate::ToolState;
const ADB_TEST_DIR: &str = "/data/tmp/work";
}
}
-fn try_run(build: &Build, cmd: &mut Command) {
+fn try_run_expecting(build: &Build, cmd: &mut Command, expect: BuildExpectation) {
if !build.fail_fast {
- if !build.try_run(cmd) {
- let failures = build.delayed_failures.get();
- build.delayed_failures.set(failures + 1);
+ if !build.try_run(cmd, expect) {
+ let mut failures = build.delayed_failures.borrow_mut();
+ failures.push(format!("{:?}", cmd));
}
} else {
- build.run(cmd);
+ build.run_expecting(cmd, expect);
}
}
+fn try_run(build: &Build, cmd: &mut Command) {
+ try_run_expecting(build, cmd, BuildExpectation::None)
+}
+
fn try_run_quiet(build: &Build, cmd: &mut Command) {
if !build.fail_fast {
if !build.try_run_quiet(cmd) {
- let failures = build.delayed_failures.get();
- build.delayed_failures.set(failures + 1);
+ let mut failures = build.delayed_failures.borrow_mut();
+ failures.push(format!("{:?}", cmd));
}
} else {
build.run_quiet(cmd);
}
}
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rustfmt {
+ stage: u32,
+ host: Interned<String>,
+}
+
+impl Step for Rustfmt {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/rustfmt")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Rustfmt {
+ stage: run.builder.top_stage,
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for rustfmt.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let stage = self.stage;
+ let host = self.host;
+ let compiler = builder.compiler(stage, host);
+
+ builder.ensure(tool::Rustfmt { compiler, target: self.host });
+ let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
+ cargo.arg("--manifest-path").arg(build.src.join("src/tools/rustfmt/Cargo.toml"));
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+ builder.add_rustc_lib_path(compiler, &mut cargo);
+
+ try_run(build, &mut cargo);
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Miri {
+ host: Interned<String>,
+}
+
+impl Step for Miri {
+ type Output = ();
+ const ONLY_HOSTS: bool = true;
+ const DEFAULT: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ let test_miri = run.builder.build.config.test_miri;
+ run.path("src/tools/miri").default_condition(test_miri)
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Miri {
+ host: run.target,
+ });
+ }
+
+ /// Runs `cargo test` for miri.
+ fn run(self, builder: &Builder) {
+ let build = builder.build;
+ let host = self.host;
+ let compiler = builder.compiler(1, host);
+
+ let miri = builder.ensure(tool::Miri { compiler, target: self.host });
+ let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
+ cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml"));
+
+ // Don't build tests dynamically, just a pain to work with
+ cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+ // miri tests need to know about the stage sysroot
+ cargo.env("MIRI_SYSROOT", builder.sysroot(compiler));
+ cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
+ cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
+ cargo.env("MIRI_PATH", miri);
+
+ builder.add_rustc_lib_path(compiler, &mut cargo);
+
+ try_run_expecting(
+ build,
+ &mut cargo,
+ builder.build.config.toolstate.miri.passes(ToolState::Testing),
+ );
+ }
+}
+
+
fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString {
// Configure PATH to find the right rustc. NB. we have to use PATH
// and not RUSTC because the Cargo test suite has tests that will
use cache::{INTERNER, Interned};
use flags::Flags;
pub use flags::Subcommand;
+use toolstate::ToolStates;
/// Global configuration for the entire build and/or bootstrap.
///
pub low_priority: bool,
pub channel: String,
pub quiet_tests: bool,
+ pub test_miri: bool,
// Fallback musl-root for all targets
pub musl_root: Option<PathBuf>,
pub prefix: Option<PathBuf>,
// These are either the stage0 downloaded binaries or the locally installed ones.
pub initial_cargo: PathBuf,
pub initial_rustc: PathBuf,
+
+ pub toolstate: ToolStates,
}
/// Per-target configuration stored in the global configuration structure.
debug: Option<bool>,
dist_src: Option<bool>,
quiet_tests: Option<bool>,
+ test_miri: Option<bool>,
}
/// TOML representation of how each build target is configured.
config.codegen_tests = true;
config.ignore_git = false;
config.rust_dist_src = true;
+ config.test_miri = false;
config.on_fail = flags.on_fail;
config.stage = flags.stage;
}
}).unwrap_or_else(|| TomlConfig::default());
+ let toolstate_toml_path = config.src.join("src/tools/toolstate.toml");
+ let parse_toolstate = || -> Result<_, Box<::std::error::Error>> {
+ let mut f = File::open(toolstate_toml_path)?;
+ let mut contents = String::new();
+ f.read_to_string(&mut contents)?;
+ Ok(toml::from_str(&contents)?)
+ };
+ config.toolstate = parse_toolstate().unwrap_or_else(|err| {
+ println!("failed to parse TOML configuration 'toolstate.toml': {}", err);
+ process::exit(2);
+ });
+
let build = toml.build.clone().unwrap_or(Build::default());
set(&mut config.build, build.build.clone().map(|x| INTERNER.intern_string(x)));
set(&mut config.build, flags.build);
set(&mut config.channel, rust.channel.clone());
set(&mut config.rust_dist_src, rust.dist_src);
set(&mut config.quiet_tests, rust.quiet_tests);
+ set(&mut config.test_miri, rust.test_miri);
config.rustc_default_linker = rust.default_linker.clone();
config.rustc_default_ar = rust.default_ar.clone();
config.musl_root = rust.musl_root.clone().map(PathBuf::from);
o("docs", "build.docs", "build standard library documentation")
o("compiler-docs", "build.compiler-docs", "build compiler documentation")
o("optimize-tests", "rust.optimize-tests", "build tests with optimizations")
+o("test-miri", "rust.test-miri", "run miri's test suite")
o("debuginfo-tests", "rust.debuginfo-tests", "build tests with debugger metadata")
o("quiet-tests", "rust.quiet-tests", "enable quieter output when running tests")
o("ccache", "llvm.ccache", "invoke gcc/clang via ccache to reuse object files between builds")
}
}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct DontDistWithMiriEnabled;
+
+impl Step for DontDistWithMiriEnabled {
+ type Output = PathBuf;
+ const DEFAULT: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ let build_miri = run.builder.build.config.test_miri;
+ run.default_condition(build_miri)
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(DontDistWithMiriEnabled);
+ }
+
+ fn run(self, _: &Builder) -> PathBuf {
+ panic!("Do not distribute with miri enabled.\n\
+ The distributed libraries would include all MIR (increasing binary size).
+ The distributed MIR would include validation statements.");
+ }
+}
+
+
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Extended {
stage: u32,
#[cfg(unix)]
extern crate libc;
-use std::cell::Cell;
+use std::cell::RefCell;
use std::collections::{HashSet, HashMap};
use std::env;
use std::fs::{self, File};
use std::io::Read;
use std::path::{PathBuf, Path};
-use std::process::Command;
+use std::process::{self, Command};
use std::slice;
-use build_helper::{run_silent, run_suppressed, try_run_silent, try_run_suppressed, output, mtime};
+use build_helper::{run_silent, run_suppressed, try_run_silent, try_run_suppressed, output, mtime,
+ BuildExpectation};
use util::{exe, libdir, OutputFolder, CiEnv};
mod builder;
mod cache;
mod tool;
+mod toolstate;
#[cfg(windows)]
mod job;
crates: HashMap<Interned<String>, Crate>,
is_sudo: bool,
ci_env: CiEnv,
- delayed_failures: Cell<usize>,
+ delayed_failures: RefCell<Vec<String>>,
}
#[derive(Debug)]
lldb_python_dir: None,
is_sudo,
ci_env: CiEnv::current(),
- delayed_failures: Cell::new(0),
+ delayed_failures: RefCell::new(Vec::new()),
}
}
metadata::build(self);
builder::Builder::run(&self);
+
+ // Check for postponed failures from `test --no-fail-fast`.
+ let failures = self.delayed_failures.borrow();
+ if failures.len() > 0 {
+ println!("\n{} command(s) did not execute successfully:\n", failures.len());
+ for failure in failures.iter() {
+ println!(" - {}\n", failure);
+ }
+ process::exit(1);
+ }
}
/// Clear out `dir` if `input` is newer.
.join(libdir(&self.config.build))
}
+ /// Runs a command, printing out nice contextual information if its build
+ /// status is not the expected one
+ fn run_expecting(&self, cmd: &mut Command, expect: BuildExpectation) {
+ self.verbose(&format!("running: {:?}", cmd));
+ run_silent(cmd, expect)
+ }
+
/// Runs a command, printing out nice contextual information if it fails.
fn run(&self, cmd: &mut Command) {
- self.verbose(&format!("running: {:?}", cmd));
- run_silent(cmd)
+ self.run_expecting(cmd, BuildExpectation::None)
}
/// Runs a command, printing out nice contextual information if it fails.
fn run_quiet(&self, cmd: &mut Command) {
self.verbose(&format!("running: {:?}", cmd));
- run_suppressed(cmd)
+ run_suppressed(cmd, BuildExpectation::None)
}
- /// Runs a command, printing out nice contextual information if it fails.
- /// Exits if the command failed to execute at all, otherwise returns its
- /// `status.success()`.
- fn try_run(&self, cmd: &mut Command) -> bool {
+ /// Runs a command, printing out nice contextual information if its build
+ /// status is not the expected one.
+ /// Exits if the command failed to execute at all, otherwise returns whether
+ /// the expectation was met
+ fn try_run(&self, cmd: &mut Command, expect: BuildExpectation) -> bool {
self.verbose(&format!("running: {:?}", cmd));
- try_run_silent(cmd)
+ try_run_silent(cmd, expect)
}
/// Runs a command, printing out nice contextual information if it fails.
/// `status.success()`.
fn try_run_quiet(&self, cmd: &mut Command) -> bool {
self.verbose(&format!("running: {:?}", cmd));
- try_run_suppressed(cmd)
+ try_run_suppressed(cmd, BuildExpectation::None)
}
pub fn is_verbose(&self) -> bool {
src/tools/cargotest \
src/tools/cargo \
src/tools/rls \
+ src/tools/rustfmt \
+ src/tools/miri \
src/test/pretty \
src/test/run-pass/pretty \
src/test/run-fail/pretty \
if !tarball.exists() {
let tmp = tarball.with_extension("tmp");
// originally from https://www.openssl.org/source/...
- let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
+ let url = format!("https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/{}",
name);
let mut ok = false;
for _ in 0..3 {
use native;
use channel::GitInfo;
use cache::Interned;
+use toolstate::ToolState;
+use build_helper::BuildExpectation;
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct CleanTools {
tool: &'static str,
path: &'static str,
mode: Mode,
+ expectation: BuildExpectation,
}
impl Step for ToolBuild {
let target = self.target;
let tool = self.tool;
let path = self.path;
+ let expectation = self.expectation;
match self.mode {
Mode::Libstd => builder.ensure(compile::Std { compiler, target }),
println!("Building stage{} tool {} ({})", compiler.stage, tool, target);
let mut cargo = prepare_tool_cargo(builder, compiler, target, "build", path);
- build.run(&mut cargo);
+ build.run_expecting(&mut cargo, expectation);
build.cargo_out(compiler, Mode::Tool, target).join(exe(tool, &compiler.host))
}
}
tool: $tool_name,
mode: $mode,
path: $path,
+ expectation: BuildExpectation::None,
})
}
}
tool: "remote-test-server",
mode: Mode::Libstd,
path: "src/tools/remote-test-server",
+ expectation: BuildExpectation::None,
})
}
}
tool: "cargo",
mode: Mode::Librustc,
path: "src/tools/cargo",
+ expectation: BuildExpectation::None,
})
}
}
tool: "clippy",
mode: Mode::Librustc,
path: "src/tools/clippy",
+ expectation: BuildExpectation::None,
})
}
}
tool: "rls",
mode: Mode::Librustc,
path: "src/tools/rls",
+ expectation: BuildExpectation::None,
+ })
+ }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Rustfmt {
+ pub compiler: Compiler,
+ pub target: Interned<String>,
+}
+
+impl Step for Rustfmt {
+ type Output = PathBuf;
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ let builder = run.builder;
+ run.path("src/tools/rustfmt").default_condition(builder.build.config.extended)
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Rustfmt {
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ target: run.target,
+ });
+ }
+
+ fn run(self, builder: &Builder) -> PathBuf {
+ builder.ensure(ToolBuild {
+ compiler: self.compiler,
+ target: self.target,
+ tool: "rustfmt",
+ mode: Mode::Librustc,
+ path: "src/tools/rustfmt",
+ expectation: BuildExpectation::None,
+ })
+ }
+}
+
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Miri {
+ pub compiler: Compiler,
+ pub target: Interned<String>,
+}
+
+impl Step for Miri {
+ type Output = PathBuf;
+ const DEFAULT: bool = true;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ let build_miri = run.builder.build.config.test_miri;
+ run.path("src/tools/miri").default_condition(build_miri)
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Miri {
+ compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+ target: run.target,
+ });
+ }
+
+ fn run(self, builder: &Builder) -> PathBuf {
+ builder.ensure(ToolBuild {
+ compiler: self.compiler,
+ target: self.target,
+ tool: "miri",
+ mode: Mode::Librustc,
+ path: "src/tools/miri",
+ expectation: builder.build.config.toolstate.miri.passes(ToolState::Compiling),
})
}
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use build_helper::BuildExpectation;
+
+#[derive(Copy, Clone, Debug, Deserialize, PartialEq, Eq)]
+/// Whether a tool can be compiled, tested or neither
+pub enum ToolState {
+ /// The tool compiles successfully, but the test suite fails
+ Compiling = 1,
+ /// The tool compiles successfully and its test suite passes
+ Testing = 2,
+ /// The tool can't even be compiled
+ Broken = 0,
+}
+
+impl ToolState {
+    /// Whether a tool at the current toolstate is expected to work when
+    /// the given toolstate is required.
+ pub fn passes(self, other: ToolState) -> BuildExpectation {
+ if self as usize >= other as usize {
+ BuildExpectation::Succeeding
+ } else {
+ BuildExpectation::Failing
+ }
+ }
+}
+
+impl Default for ToolState {
+ fn default() -> Self {
+ // err on the safe side
+ ToolState::Broken
+ }
+}
+
+#[derive(Copy, Clone, Debug, Deserialize, Default)]
+/// Used to express which tools should (not) be compiled or tested.
+/// This is created from `toolstate.toml`.
+pub struct ToolStates {
+ pub miri: ToolState,
+}
})
}
-pub fn run(cmd: &mut Command) {
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub enum BuildExpectation {
+ Succeeding,
+ Failing,
+ None,
+}
+
+pub fn run(cmd: &mut Command, expect: BuildExpectation) {
println!("running: {:?}", cmd);
- run_silent(cmd);
+ run_silent(cmd, expect);
}
-pub fn run_silent(cmd: &mut Command) {
- if !try_run_silent(cmd) {
+pub fn run_silent(cmd: &mut Command, expect: BuildExpectation) {
+ if !try_run_silent(cmd, expect) {
std::process::exit(1);
}
}
-pub fn try_run_silent(cmd: &mut Command) -> bool {
+pub fn try_run_silent(cmd: &mut Command, expect: BuildExpectation) -> bool {
let status = match cmd.status() {
Ok(status) => status,
Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}",
cmd, e)),
};
- if !status.success() {
- println!("\n\ncommand did not execute successfully: {:?}\n\
- expected success, got: {}\n\n",
- cmd,
- status);
+ process_status(
+ cmd,
+ status.success(),
+ expect,
+ || println!("\n\ncommand did not execute successfully: {:?}\n\
+ expected success, got: {}\n\n",
+ cmd,
+ status))
+}
+
+fn process_status<F: FnOnce()>(
+ cmd: &Command,
+ success: bool,
+ expect: BuildExpectation,
+ f: F,
+) -> bool {
+ use BuildExpectation::*;
+ match (expect, success) {
+ (None, false) => { f(); false },
+ // Non-tool build succeeds, everything is good
+ (None, true) => true,
+ // Tool expected to work and is working
+ (Succeeding, true) => true,
+ // Tool expected to fail and is failing
+ (Failing, false) => {
+ println!("This failure is expected (see `src/tools/toolstate.toml`)");
+ true
+ },
+ // Tool expected to work, but is failing
+ (Succeeding, false) => {
+ f();
+ println!("You can disable the tool in `src/tools/toolstate.toml`");
+ false
+ },
+ // Tool expected to fail, but is working
+ (Failing, true) => {
+ println!("Expected `{:?}` to fail, but it succeeded.\n\
+ Please adjust `src/tools/toolstate.toml` accordingly", cmd);
+ false
+ }
}
- status.success()
}
-pub fn run_suppressed(cmd: &mut Command) {
- if !try_run_suppressed(cmd) {
+pub fn run_suppressed(cmd: &mut Command, expect: BuildExpectation) {
+ if !try_run_suppressed(cmd, expect) {
std::process::exit(1);
}
}
-pub fn try_run_suppressed(cmd: &mut Command) -> bool {
+pub fn try_run_suppressed(cmd: &mut Command, expect: BuildExpectation) -> bool {
let output = match cmd.output() {
Ok(status) => status,
Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}",
cmd, e)),
};
- if !output.status.success() {
- println!("\n\ncommand did not execute successfully: {:?}\n\
+ process_status(
+ cmd,
+ output.status.success(),
+ expect,
+ || println!("\n\ncommand did not execute successfully: {:?}\n\
expected success, got: {}\n\n\
stdout ----\n{}\n\
stderr ----\n{}\n\n",
cmd,
output.status,
String::from_utf8_lossy(&output.stdout),
- String::from_utf8_lossy(&output.stderr));
- }
- output.status.success()
+ String::from_utf8_lossy(&output.stderr)))
}
pub fn gnu_target(target: &str) -> String {
- Each directory, excluding `scripts` and `disabled`, corresponds to a docker image
- `scripts` contains files shared by docker images
-- `disabled` contains images that are not build travis
+- `disabled` contains images that are not built on travis
## Cross toolchains
# originally from
# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/
# OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2
-URL="https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror"
+URL="https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror"
FILE="OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2"
curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mips-linux-musl --strip-components=2
# Note that this originally came from:
# https://downloads.openwrt.org/snapshots/trunk/malta/generic/
# OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
-URL="https://s3.amazonaws.com/rust-lang-ci/libc"
+URL="https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc"
FILE="OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2"
curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ autoconf \
+ automake \
+ bison \
+ bzip2 \
+ ca-certificates \
+ cmake \
+ curl \
+ file \
+ flex \
+ g++ \
+ gawk \
+ git \
+ libcurl4-openssl-dev \
+ libssl-dev \
+ make \
+ nasm \
+ pkg-config \
+ python2.7 \
+ sudo \
+ texinfo \
+ wget \
+ xz-utils \
+ zlib1g-dev
+
+COPY dist-x86_64-haiku/llvm-config.sh /bin/llvm-config-haiku
+
+ENV ARCH=x86_64
+
+WORKDIR /tmp
+COPY dist-x86_64-haiku/build-toolchain.sh /tmp/
+RUN /tmp/build-toolchain.sh $ARCH
+
+COPY dist-x86_64-haiku/fetch-packages.sh /tmp/
+RUN /tmp/fetch-packages.sh
+
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+ENV HOST=x86_64-unknown-haiku
+ENV TARGET=target.$HOST
+
+ENV RUST_CONFIGURE_ARGS --host=$HOST --target=$HOST --disable-jemalloc \
+ --set=$TARGET.cc=x86_64-unknown-haiku-gcc \
+ --set=$TARGET.cxx=x86_64-unknown-haiku-g++ \
+ --set=$TARGET.llvm-config=/bin/llvm-config-haiku
+ENV SCRIPT python2.7 ../x.py dist
--- /dev/null
+#!/bin/bash
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+ARCH=$1
+
+TOP=$(pwd)
+
+BUILDTOOLS=$TOP/buildtools
+HAIKU=$TOP/haiku
+OUTPUT=/tools
+SYSROOT=$OUTPUT/cross-tools-$ARCH/sysroot
+PACKAGE_ROOT=/system
+
+hide_output() {
+ set +x
+ on_err="
+echo ERROR: An error was encountered with the build.
+cat /tmp/build.log
+exit 1
+"
+ trap "$on_err" ERR
+ bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
+ PING_LOOP_PID=$!
+ "$@" &> /tmp/build.log
+ trap - ERR
+ kill $PING_LOOP_PID
+ set -x
+}
+
+# First up, build a cross-compiler
+git clone --depth=1 https://git.haiku-os.org/haiku
+git clone --depth=1 https://git.haiku-os.org/buildtools
+cd $BUILDTOOLS/jam
+hide_output make
+hide_output ./jam0 install
+mkdir -p $OUTPUT
+cd $OUTPUT
+hide_output $HAIKU/configure --build-cross-tools $ARCH $TOP/buildtools
+
+# Set up sysroot to redirect to /system
+mkdir -p $SYSROOT/boot
+mkdir -p $PACKAGE_ROOT
+ln -s $PACKAGE_ROOT $SYSROOT/boot/system
+
+# Build needed packages and tools for the cross-compiler
+hide_output jam -q haiku.hpkg haiku_devel.hpkg '<build>package'
+
+# Set up our sysroot
+cp $OUTPUT/objects/linux/lib/*.so /lib/x86_64-linux-gnu
+cp $OUTPUT/objects/linux/x86_64/release/tools/package/package /bin/
+find $SYSROOT/../bin/ -type f -exec ln -s {} /bin/ \;
+
+# Extract packages
+package extract -C $PACKAGE_ROOT $OUTPUT/objects/haiku/$ARCH/packaging/packages/haiku.hpkg
+package extract -C $PACKAGE_ROOT $OUTPUT/objects/haiku/$ARCH/packaging/packages/haiku_devel.hpkg
+find $OUTPUT/download/ -name '*.hpkg' -exec package extract -C $PACKAGE_ROOT {} \;
+
+# Fix libgcc_s so we can link to it
+cd $PACKAGE_ROOT/develop/lib
+ln -s ../../lib/libgcc_s.so libgcc_s.so
+
+# Clean up
+rm -rf $BUILDTOOLS $HAIKU $OUTPUT/Jamfile $OUTPUT/attributes $OUTPUT/build \
+ $OUTPUT/build_packages $OUTPUT/download $OUTPUT/objects
--- /dev/null
+#!/bin/bash
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+wget http://packages.haiku-os.org/haikuports/master/hpkg/llvm-4.0.1-2-x86_64.hpkg
+wget http://packages.haiku-os.org/haikuports/master/hpkg/llvm_libs-4.0.1-2-x86_64.hpkg
+
+package extract -C /system llvm-4.0.1-2-x86_64.hpkg
+package extract -C /system llvm_libs-4.0.1-2-x86_64.hpkg
+
+rm -f *.hpkg
--- /dev/null
+#!/bin/sh
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+case $1 in
+--version) echo 4.0.1;;
+--prefix) echo $SCRATCH/haiku-cross/sysroot/boot/system;;
+--bindir) echo $SCRATCH/haiku-cross/sysroot/boot/system/bin;;
+--includedir) echo $SCRATCH/haiku-cross/sysroot/boot/system/develop/headers;;
+--libdir) echo $SCRATCH/haiku-cross/sysroot/boot/system/develop/lib;;
+--cmakedir) echo $SCRATCH/haiku-cross/sysroot/boot/system/develop/lib/cmake/llvm;;
+--cppflags) echo -I$SCRATCH/haiku-cross/sysroot/boot/system/develop/headers \
+ -D__STDC_CONSTANT_MACROS -D__STDC_FORMAT_MACROS -D__STDC_LIMIT_MACROS;;
+--cflags) echo -I$SCRATCH/haiku-cross/sysroot/boot/system/develop/headers \
+ -fPIC -Wall -W -Wno-unused-parameter -Wwrite-strings \
+ -Wno-missing-field-initializers -pedantic -Wno-long-long -Wno-comment \
+ -Werror=date-time -ffunction-sections -fdata-sections -O3 -DNDEBUG \
+ -D__STDC_CONSTANT_MACROS -D__STDC_FORMAT_MACROS -D__STDC_LIMIT_MACROS;;
+--cxxflags) echo -I$SCRATCH/haiku-cross/sysroot/boot/system/develop/headers \
+ -fPIC -fvisibility-inlines-hidden -Wall -W -Wno-unused-parameter \
+ -Wwrite-strings -Wcast-qual -Wno-missing-field-initializers -pedantic \
+ -Wno-long-long -Wno-maybe-uninitialized -Wdelete-non-virtual-dtor \
+ -Wno-comment -Werror=date-time -std=c++11 -ffunction-sections \
+ -fdata-sections -O3 -DNDEBUG -fno-exceptions \
+ -D__STDC_CONSTANT_MACROS -D__STDC_FORMAT_MACROS -D__STDC_LIMIT_MACROS;;
+--ldflags) echo -L$SCRATCH/haiku-cross/sysroot/boot/system/develop/lib ;;
+--system-libs) echo ;;
+--libs) echo -lLLVM-4.0;;
+--libfiles) echo $SCRATCH/haiku-cross/sysroot/boot/system/develop/lib/libLLVM-4.0.so;;
+--components) echo aarch64 aarch64asmparser aarch64asmprinter aarch64codegen \
+ aarch64desc aarch64disassembler aarch64info aarch64utils all \
+ all-targets amdgpu amdgpuasmparser amdgpuasmprinter amdgpucodegen \
+ amdgpudesc amdgpudisassembler amdgpuinfo amdgpuutils analysis arm \
+ armasmparser armasmprinter armcodegen armdesc armdisassembler \
+ arminfo asmparser asmprinter bitreader bitwriter bpf bpfasmprinter \
+ bpfcodegen bpfdesc bpfdisassembler bpfinfo codegen core coroutines \
+ coverage debuginfocodeview debuginfodwarf debuginfomsf debuginfopdb \
+ demangle engine executionengine globalisel hexagon hexagonasmparser \
+ hexagoncodegen hexagondesc hexagondisassembler hexagoninfo \
+ instcombine instrumentation interpreter ipo irreader lanai \
+ lanaiasmparser lanaicodegen lanaidesc lanaidisassembler lanaiinfo \
+ lanaiinstprinter libdriver lineeditor linker lto mc mcdisassembler \
+ mcjit mcparser mips mipsasmparser mipsasmprinter mipscodegen \
+ mipsdesc mipsdisassembler mipsinfo mirparser msp430 msp430asmprinter \
+ msp430codegen msp430desc msp430info native nativecodegen nvptx \
+ nvptxasmprinter nvptxcodegen nvptxdesc nvptxinfo objcarcopts object \
+ objectyaml option orcjit passes powerpc powerpcasmparser \
+ powerpcasmprinter powerpccodegen powerpcdesc powerpcdisassembler \
+ powerpcinfo profiledata riscv riscvcodegen riscvdesc riscvinfo \
+ runtimedyld scalaropts selectiondag sparc sparcasmparser \
+ sparcasmprinter sparccodegen sparcdesc sparcdisassembler sparcinfo \
+ support symbolize systemz systemzasmparser systemzasmprinter \
+ systemzcodegen systemzdesc systemzdisassembler systemzinfo tablegen \
+ target transformutils vectorize x86 x86asmparser x86asmprinter \
+ x86codegen x86desc x86disassembler x86info x86utils xcore \
+ xcoreasmprinter xcorecodegen xcoredesc xcoredisassembler xcoreinfo;;
+--host-target) echo x86_64-unknown-haiku;;
+--has-rtti) echo YES;;
+--shared-mode) echo shared;;
+esac
source shared.sh
VERSION=1.0.2k
-URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/openssl-$VERSION.tar.gz
+URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/openssl-$VERSION.tar.gz
curl $URL | tar xzf -
source shared.sh
VERSION=1.0.2k
-URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/openssl-$VERSION.tar.gz
+URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/openssl-$VERSION.tar.gz
curl $URL | tar xzf -
mkdir -p /x-tools/x86_64-unknown-netbsd/sysroot
-URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
+URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
# Originally from ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-$BSD/source/sets/*.tgz
curl $URL/2017-03-17-netbsd-src.tgz | tar xzf -
# option. This file may not be copied, modified, or distributed
# except according to those terms.
+# ignore-tidy-linelength
+
set -ex
curl -fo /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl
+ https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl
chmod +x /usr/local/bin/sccache
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
-ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu
+ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --enable-test-miri
ENV RUST_CHECK_TARGET check-aux
--- /dev/null
+# `doc_masked`
+
+The tracking issue for this feature is: [#44027]
+
+-----
+
+The `doc_masked` feature allows a crate to exclude types from a given crate from appearing in lists
+of trait implementations. The specifics of the feature are as follows:
+
+1. When rustdoc encounters an `extern crate` statement annotated with a `#[doc(masked)]` attribute,
+ it marks the crate as being masked.
+
+2. When listing traits a given type implements, rustdoc ensures that traits from masked crates are
+ not emitted into the documentation.
+
+3. When listing types that implement a given trait, rustdoc ensures that types from masked crates
+ are not emitted into the documentation.
+
+This feature was introduced in PR [#44026] to ensure that compiler-internal and
+implementation-specific types and traits were not included in the standard library's documentation.
+Such types would introduce broken links into the documentation.
+
+[#44026]: https://github.com/rust-lang/rust/pull/44026
+[#44027]: https://github.com/rust-lang/rust/pull/44027
--- /dev/null
+# `fn_must_use`
+
+The tracking issue for this feature is [#43302].
+
+[#43302]: https://github.com/rust-lang/rust/issues/43302
+
+------------------------
+
+The `fn_must_use` feature allows functions and methods to be annotated with
+`#[must_use]`, indicating that the `unused_must_use` lint should require their
+return values to be used (similarly to how types annotated with `must_use`,
+most notably `Result`, are linted if not used).
+
+## Examples
+
+```rust
+#![feature(fn_must_use)]
+
+#[must_use]
+fn double(x: i32) -> i32 {
+ 2 * x
+}
+
+fn main() {
+ double(4); // warning: unused return value of `double` which must be used
+
+ let _ = double(4); // (no warning)
+}
+
+```
+++ /dev/null
-# `compiler_fences`
-
-The tracking issue for this feature is: [#41091]
-
-[#41091]: https://github.com/rust-lang/rust/issues/41091
-
-------------------------
-
-The `compiler_fences` feature exposes the `compiler_fence` function
-in `std::sync::atomic`. This function is conceptually similar to C++'s
-`atomic_signal_fence`, which can currently only be accessed in nightly
-Rust using the `atomic_singlethreadfence_*` instrinsic functions in
-`core`, or through the mostly equivalent literal assembly:
-
-```rust
-#![feature(asm)]
-unsafe { asm!("" ::: "memory" : "volatile") };
-```
-
-A `compiler_fence` restricts the kinds of memory re-ordering the
-compiler is allowed to do. Specifically, depending on the given ordering
-semantics, the compiler may be disallowed from moving reads or writes
-from before or after the call to the other side of the call to
-`compiler_fence`. Note that it does **not** prevent the *hardware*
-from doing such re-ordering. This is not a problem in a single-threaded,
-execution context, but when other threads may modify memory at the same
-time, stronger synchronization primitives are required.
-
-## Examples
-
-`compiler_fence` is generally only useful for preventing a thread from
-racing *with itself*. That is, if a given thread is executing one piece
-of code, and is then interrupted, and starts executing code elsewhere
-(while still in the same thread, and conceptually still on the same
-core). In traditional programs, this can only occur when a signal
-handler is registered. In more low-level code, such situations can also
-arise when handling interrupts, when implementing green threads with
-pre-emption, etc.
-
-To give a straightforward example of when a `compiler_fence` is
-necessary, consider the following example:
-
-```rust
-# use std::sync::atomic::{AtomicBool, AtomicUsize};
-# use std::sync::atomic::{ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT};
-# use std::sync::atomic::Ordering;
-static IMPORTANT_VARIABLE: AtomicUsize = ATOMIC_USIZE_INIT;
-static IS_READY: AtomicBool = ATOMIC_BOOL_INIT;
-
-fn main() {
- IMPORTANT_VARIABLE.store(42, Ordering::Relaxed);
- IS_READY.store(true, Ordering::Relaxed);
-}
-
-fn signal_handler() {
- if IS_READY.load(Ordering::Relaxed) {
- assert_eq!(IMPORTANT_VARIABLE.load(Ordering::Relaxed), 42);
- }
-}
-```
-
-The way it is currently written, the `assert_eq!` is *not* guaranteed to
-succeed, despite everything happening in a single thread. To see why,
-remember that the compiler is free to swap the stores to
-`IMPORTANT_VARIABLE` and `IS_READ` since they are both
-`Ordering::Relaxed`. If it does, and the signal handler is invoked right
-after `IS_READY` is updated, then the signal handler will see
-`IS_READY=1`, but `IMPORTANT_VARIABLE=0`.
-
-Using a `compiler_fence`, we can remedy this situation:
-
-```rust
-#![feature(compiler_fences)]
-# use std::sync::atomic::{AtomicBool, AtomicUsize};
-# use std::sync::atomic::{ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT};
-# use std::sync::atomic::Ordering;
-use std::sync::atomic::compiler_fence;
-
-static IMPORTANT_VARIABLE: AtomicUsize = ATOMIC_USIZE_INIT;
-static IS_READY: AtomicBool = ATOMIC_BOOL_INIT;
-
-fn main() {
- IMPORTANT_VARIABLE.store(42, Ordering::Relaxed);
- // prevent earlier writes from being moved beyond this point
- compiler_fence(Ordering::Release);
- IS_READY.store(true, Ordering::Relaxed);
-}
-
-fn signal_handler() {
- if IS_READY.load(Ordering::Relaxed) {
- assert_eq!(IMPORTANT_VARIABLE.load(Ordering::Relaxed), 42);
- }
-}
-```
-
-A deeper discussion of compiler barriers with various re-ordering
-semantics (such as `Ordering::SeqCst`) is beyond the scope of this text.
-Curious readers are encouraged to read the Linux kernel's discussion of
-[memory barriers][1], the C++ references on [`std::memory_order`][2] and
-[`atomic_signal_fence`][3], and [this StackOverflow answer][4] for
-further details.
-
-[1]: https://www.kernel.org/doc/Documentation/memory-barriers.txt
-[2]: http://en.cppreference.com/w/cpp/atomic/memory_order
-[3]: http://www.cplusplus.com/reference/atomic/atomic_signal_fence/
-[4]: http://stackoverflow.com/a/18454971/472927
+++ /dev/null
-# `iterator_for_each`
-
-The tracking issue for this feature is: [#42986]
-
-[#42986]: https://github.com/rust-lang/rust/issues/42986
-
-------------------------
-
-To call a closure on each element of an iterator, you can use `for_each`:
-
-```rust
-#![feature(iterator_for_each)]
-
-fn main() {
- (0..10).for_each(|i| println!("{}", i));
-}
-```
# `splice`
-The tracking issue for this feature is: [#32310]
+The tracking issue for this feature is: [#44643]
-[#32310]: https://github.com/rust-lang/rust/issues/32310
+[#44643]: https://github.com/rust-lang/rust/issues/44643
------------------------
-The `splice()` method on `Vec` and `String` allows you to replace a range
-of values in a vector or string with another range of values, and returns
-the replaced values.
+The `splice()` method on `String` allows you to replace a range
+of values in a string with another range of values.
A simple example:
// Replace the range up until the β from the string
s.splice(..beta_offset, "Α is capital alpha; ");
assert_eq!(s, "Α is capital alpha; β is beta");
-```
\ No newline at end of file
+```
use core::fmt;
use core::cmp::Ordering;
use core::intrinsics::abort;
-use core::mem::{self, size_of_val, uninitialized};
+use core::mem::{self, align_of_val, size_of_val, uninitialized};
use core::ops::Deref;
use core::ops::CoerceUnsized;
use core::ptr::{self, Shared};
Ok(elem)
}
}
+}
+impl<T: ?Sized> Arc<T> {
/// Consumes the `Arc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Arc` using
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
- // To find the corresponding pointer to the `ArcInner` we need to subtract the offset of the
- // `data` field from the pointer.
- let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
+ // Align the unsized value to the end of the ArcInner.
+ // Because it is ?Sized, it will always be the last field in memory.
+ let align = align_of_val(&*ptr);
+ let layout = Layout::new::<ArcInner<()>>();
+ let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
+
+ // Reverse the offset to find the original ArcInner.
+ let fake_ptr = ptr as *mut ArcInner<T>;
+ let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+
Arc {
- ptr: Shared::new_unchecked(ptr as *mut u8 as *mut _),
+ ptr: Shared::new_unchecked(arc_ptr),
}
}
-}
-impl<T: ?Sized> Arc<T> {
/// Creates a new [`Weak`][weak] pointer to this value.
///
/// [weak]: struct.Weak.html
}
}
+ #[test]
+ fn test_into_from_raw_unsized() {
+ use std::fmt::Display;
+ use std::string::ToString;
+
+ let arc: Arc<str> = Arc::from("foo");
+
+ let ptr = Arc::into_raw(arc.clone());
+ let arc2 = unsafe { Arc::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }, "foo");
+ assert_eq!(arc, arc2);
+
+ let arc: Arc<Display> = Arc::new(123);
+
+ let ptr = Arc::into_raw(arc.clone());
+ let arc2 = unsafe { Arc::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }.to_string(), "123");
+ assert_eq!(arc2.to_string(), "123");
+ }
+
#[test]
fn test_cowarc_clone_make_mut() {
let mut cow0 = Arc::new(75);
macro_rules! format {
($($arg:tt)*) => ($crate::fmt::format(format_args!($($arg)*)))
}
-
-// Private macro to get the offset of a struct field in bytes from the address of the struct.
-macro_rules! offset_of {
- ($container:path, $field:ident) => {{
- // Make sure the field actually exists. This line ensures that a compile-time error is
- // generated if $field is accessed through a Deref impl.
- let $container { $field : _, .. };
-
- // Create an (invalid) instance of the container and calculate the offset to its
- // field. Using a null pointer might be UB if `&(*(0 as *const T)).field` is interpreted to
- // be nullptr deref.
- let invalid: $container = ::core::mem::uninitialized();
- let offset = &invalid.$field as *const _ as usize - &invalid as *const _ as usize;
-
- // Do not run destructors on the made up invalid instance.
- ::core::mem::forget(invalid);
- offset as isize
- }};
-}
use core::intrinsics::abort;
use core::marker;
use core::marker::Unsize;
-use core::mem::{self, forget, size_of_val, uninitialized};
+use core::mem::{self, align_of_val, forget, size_of_val, uninitialized};
use core::ops::Deref;
use core::ops::CoerceUnsized;
use core::ptr::{self, Shared};
Err(this)
}
}
+}
+impl<T: ?Sized> Rc<T> {
/// Consumes the `Rc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
- // To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
- // `value` field from the pointer.
+ // Align the unsized value to the end of the RcBox.
+ // Because it is ?Sized, it will always be the last field in memory.
+ let align = align_of_val(&*ptr);
+ let layout = Layout::new::<RcBox<()>>();
+ let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
+
+ // Reverse the offset to find the original RcBox.
+ let fake_ptr = ptr as *mut RcBox<T>;
+ let rc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
- let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
Rc {
- ptr: Shared::new_unchecked(ptr as *mut u8 as *mut _)
+ ptr: Shared::new_unchecked(rc_ptr),
}
}
-}
-impl<T: ?Sized> Rc<T> {
/// Creates a new [`Weak`][weak] pointer to this value.
///
/// [weak]: struct.Weak.html
}
}
+ #[test]
+ fn test_into_from_raw_unsized() {
+ use std::fmt::Display;
+ use std::string::ToString;
+
+ let rc: Rc<str> = Rc::from("foo");
+
+ let ptr = Rc::into_raw(rc.clone());
+ let rc2 = unsafe { Rc::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }, "foo");
+ assert_eq!(rc, rc2);
+
+ let rc: Rc<Display> = Rc::new(123);
+
+ let ptr = Rc::into_raw(rc.clone());
+ let rc2 = unsafe { Rc::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }.to_string(), "123");
+ assert_eq!(rc2.to_string(), "123");
+ }
+
#[test]
fn get_mut() {
let mut x = Rc::new(3);
/// s.splice(..beta_offset, "Α is capital alpha; ");
/// assert_eq!(s, "Α is capital alpha; β is beta");
/// ```
- #[unstable(feature = "splice", reason = "recently added", issue = "32310")]
+ #[unstable(feature = "splice", reason = "recently added", issue = "44643")]
pub fn splice<R>(&mut self, range: R, replace_with: &str)
where R: RangeArgument<usize>
{
/// # Examples
///
/// ```
- /// #![feature(splice)]
/// let mut v = vec![1, 2, 3];
/// let new = [7, 8];
/// let u: Vec<_> = v.splice(..2, new.iter().cloned()).collect();
/// assert_eq!(u, &[1, 2]);
/// ```
#[inline]
- #[unstable(feature = "splice", reason = "recently added", issue = "32310")]
+ #[stable(feature = "vec_splice", since = "1.22.0")]
pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<I::IntoIter>
where R: RangeArgument<usize>, I: IntoIterator<Item=T>
{
/// [`splice()`]: struct.Vec.html#method.splice
/// [`Vec`]: struct.Vec.html
#[derive(Debug)]
-#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
+#[stable(feature = "vec_splice", since = "1.22.0")]
pub struct Splice<'a, I: Iterator + 'a> {
drain: Drain<'a, I::Item>,
replace_with: I,
}
-#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
+#[stable(feature = "vec_splice", since = "1.22.0")]
impl<'a, I: Iterator> Iterator for Splice<'a, I> {
type Item = I::Item;
}
}
-#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
+#[stable(feature = "vec_splice", since = "1.22.0")]
impl<'a, I: Iterator> DoubleEndedIterator for Splice<'a, I> {
fn next_back(&mut self) -> Option<Self::Item> {
self.drain.next_back()
}
}
-#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
+#[stable(feature = "vec_splice", since = "1.22.0")]
impl<'a, I: Iterator> ExactSizeIterator for Splice<'a, I> {}
-#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
+#[stable(feature = "vec_splice", since = "1.22.0")]
impl<'a, I: Iterator> Drop for Splice<'a, I> {
fn drop(&mut self) {
// exhaust drain first
use std::env;
use std::path::PathBuf;
use std::process::Command;
-use build_helper::{run, native_lib_boilerplate};
+use build_helper::{run, native_lib_boilerplate, BuildExpectation};
fn main() {
// FIXME: This is a hack to support building targets that don't
cmd.arg("--with-lg-quantum=4");
}
- run(&mut cmd);
+ run(&mut cmd, BuildExpectation::None);
let mut make = Command::new(build_helper::make(&host));
make.current_dir(&native.out_dir)
.arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set"));
}
- run(&mut make);
+ run(&mut make, BuildExpectation::None);
// The pthread_atfork symbols is used by jemalloc on android but the really
// old android we're building on doesn't have them defined, so just make
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(generic_param_attrs)]
-#![feature(needs_drop)]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]
-Subproject commit 38ffaf97aa418cc369ca0197a72a0b927cc0f622
+Subproject commit ef4951582f620c589cd9e18ec182538bf116bce3
}
#[cfg(stage0)]
-/// Computes the byte offset that needs to be applied to `ptr` in order to
-/// make it aligned to `align`.
-/// If it is not possible to align `ptr`, the implementation returns
-/// `usize::max_value()`.
-///
-/// There are no guarantees whatsover that offsetting the pointer will not
-/// overflow or go beyond the allocation that `ptr` points into.
-/// It is up to the caller to ensure that the returned offset is correct
-/// in all terms other than alignment.
-///
-/// # Examples
-///
-/// Accessing adjacent `u8` as `u16`
-///
-/// ```
-/// # #![feature(core_intrinsics)]
-/// # fn foo(n: usize) {
-/// # use std::intrinsics::align_offset;
-/// # use std::mem::align_of;
-/// # unsafe {
-/// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
-/// let ptr = &x[n] as *const u8;
-/// let offset = align_offset(ptr as *const (), align_of::<u16>());
-/// if offset < x.len() - n - 1 {
-/// let u16_ptr = ptr.offset(offset as isize) as *const u16;
-/// assert_ne!(*u16_ptr, 500);
-/// } else {
-/// // while the pointer can be aligned via `offset`, it would point
-/// // outside the allocation
-/// }
-/// # } }
-/// ```
+/// remove me after the next release
pub unsafe fn align_offset(ptr: *const (), align: usize) -> usize {
let offset = ptr as usize % align;
if offset == 0 {
/// Basic usage:
///
/// ```
- /// #![feature(iterator_for_each)]
- ///
/// use std::sync::mpsc::channel;
///
/// let (tx, rx) = channel();
/// might be preferable to keep a functional style with longer iterators:
///
/// ```
- /// #![feature(iterator_for_each)]
- ///
/// (0..5).flat_map(|x| x * 100 .. x * 110)
/// .enumerate()
/// .filter(|&(i, x)| (i + x) % 3 == 0)
/// .for_each(|(i, x)| println!("{}:{}", i, x));
/// ```
#[inline]
- #[unstable(feature = "iterator_for_each", issue = "42986")]
+ #[stable(feature = "iterator_for_each", since = "1.22.0")]
fn for_each<F>(self, mut f: F) where
Self: Sized, F: FnMut(Self::Item),
{
/// assert_eq!(i as i32, n);
/// }
/// ```
+///
+/// It is common to use `IntoIterator` as a trait bound. This allows
+/// the input collection type to change, so long as it is still an
+/// iterator. Additional bounds can be specified by restricting on
+/// `Item`:
+///
+/// ```rust
+/// fn collect_as_strings<T>(collection: T) -> Vec<String>
+/// where T: IntoIterator,
+/// T::Item : std::fmt::Debug,
+/// {
+/// collection
+/// .into_iter()
+/// .map(|item| format!("{:?}", item))
+/// .collect()
+/// }
+/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait IntoIterator {
/// The type of the elements being iterated over.
/// Here's an example of how a collection might make use of needs_drop:
///
/// ```
-/// #![feature(needs_drop)]
/// use std::{mem, ptr};
///
/// pub struct MyCollection<T> {
/// }
/// ```
#[inline]
-#[unstable(feature = "needs_drop", issue = "41890")]
+#[stable(feature = "needs_drop", since = "1.22.0")]
pub fn needs_drop<T>() -> bool {
unsafe { intrinsics::needs_drop::<T>() }
}
copy_nonoverlapping(self, dest, count)
}
-
+ /// Computes the byte offset that needs to be applied in order to
+ /// make the pointer aligned to `align`.
+ /// If it is not possible to align the pointer, the implementation returns
+ /// `usize::max_value()`.
+ ///
+ /// There are no guarantees whatsoever that offsetting the pointer will not
+ /// overflow or go beyond the allocation that the pointer points into.
+ /// It is up to the caller to ensure that the returned offset is correct
+ /// in all terms other than alignment.
+ ///
+ /// # Examples
+ ///
+ /// Accessing adjacent `u8` as `u16`
+ ///
+ /// ```
+ /// # #![feature(align_offset)]
+ /// # fn foo(n: usize) {
+ /// # use std::mem::align_of;
+ /// # unsafe {
+ /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
+ /// let ptr = &x[n] as *const u8;
+ /// let offset = ptr.align_offset(align_of::<u16>());
+ /// if offset < x.len() - n - 1 {
+ /// let u16_ptr = ptr.offset(offset as isize) as *const u16;
+ /// assert_ne!(*u16_ptr, 500);
+ /// } else {
+ /// // while the pointer can be aligned via `offset`, it would point
+ /// // outside the allocation
+ /// }
+ /// # } }
+ /// ```
+ #[unstable(feature = "align_offset", issue = "44488")]
+ pub fn align_offset(self, align: usize) -> usize {
+ unsafe {
+ intrinsics::align_offset(self as *const _, align)
+ }
+ }
}
#[lang = "mut_ptr"]
}
}
+ /// Computes the byte offset that needs to be applied in order to
+ /// make the pointer aligned to `align`.
+ /// If it is not possible to align the pointer, the implementation returns
+ /// `usize::max_value()`.
+ ///
+ /// There are no guarantees whatsoever that offsetting the pointer will not
+ /// overflow or go beyond the allocation that the pointer points into.
+ /// It is up to the caller to ensure that the returned offset is correct
+ /// in all terms other than alignment.
+ ///
+ /// # Examples
+ ///
+ /// Accessing adjacent `u8` as `u16`
+ ///
+ /// ```
+ /// # #![feature(align_offset)]
+ /// # fn foo(n: usize) {
+ /// # use std::mem::align_of;
+ /// # unsafe {
+ /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
+ /// let ptr = &x[n] as *const u8;
+ /// let offset = ptr.align_offset(align_of::<u16>());
+ /// if offset < x.len() - n - 1 {
+ /// let u16_ptr = ptr.offset(offset as isize) as *const u16;
+ /// assert_ne!(*u16_ptr, 500);
+ /// } else {
+ /// // while the pointer can be aligned via `offset`, it would point
+ /// // outside the allocation
+ /// }
+ /// # } }
+ /// ```
+ #[unstable(feature = "align_offset", issue = "44488")]
+ pub fn align_offset(self, align: usize) -> usize {
+ unsafe {
+ intrinsics::align_offset(self as *const _, align)
+ }
+ }
/// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
///
use iter::{Map, Cloned, FusedIterator};
use slice::{self, SliceIndex};
use mem;
-use intrinsics::align_offset;
pub mod pattern;
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_utf8_unchecked(v: &[u8]) -> &str {
- mem::transmute(v)
+ &*(v as *const [u8] as *const str)
}
/// Converts a slice of bytes to a string slice without checking
#[inline]
#[stable(feature = "str_mut_extras", since = "1.20.0")]
pub unsafe fn from_utf8_unchecked_mut(v: &mut [u8]) -> &mut str {
- mem::transmute(v)
+ &mut *(v as *mut [u8] as *mut str)
}
#[stable(feature = "rust1", since = "1.0.0")]
let ptr = v.as_ptr();
let align = unsafe {
// the offset is safe, because `index` is guaranteed inbounds
- align_offset(ptr.offset(index as isize) as *const (), usize_bytes)
+ ptr.offset(index as isize).align_offset(usize_bytes)
};
if align == 0 {
while index < blocks_end {
#[inline]
fn as_bytes(&self) -> &[u8] {
- unsafe { mem::transmute(self) }
+ unsafe { &*(self as *const str as *const [u8]) }
}
#[inline]
unsafe fn as_bytes_mut(&mut self) -> &mut [u8] {
- mem::transmute(self)
+ &mut *(self as *mut str as *mut [u8])
}
fn find<'a, P: Pattern<'a>>(&'a self, pat: P) -> Option<usize> {
/// A compiler memory fence.
///
-/// `compiler_fence` does not emit any machine code, but prevents the compiler from re-ordering
-/// memory operations across this point. Which reorderings are disallowed is dictated by the given
-/// [`Ordering`]. Note that `compiler_fence` does *not* introduce inter-thread memory
-/// synchronization; for that, a [`fence`] is needed.
+/// `compiler_fence` does not emit any machine code, but restricts the kinds
+/// of memory re-ordering the compiler is allowed to do. Specifically, depending on
+/// the given [`Ordering`] semantics, the compiler may be disallowed from moving reads
+/// or writes from before or after the call to the other side of the call to
+/// `compiler_fence`. Note that it does **not** prevent the *hardware*
+/// from doing such re-ordering. This is not a problem in a single-threaded
+/// execution context, but when other threads may modify memory at the same
+/// time, stronger synchronization primitives such as [`fence`] are required.
///
/// The re-ordering prevented by the different ordering semantics are:
///
/// - with [`Acquire`], subsequent reads and writes cannot be moved ahead of preceding reads.
/// - with [`AcqRel`], both of the above rules are enforced.
///
+/// `compiler_fence` is generally only useful for preventing a thread from
+/// racing *with itself*. That is, if a given thread is executing one piece
+/// of code, and is then interrupted, and starts executing code elsewhere
+/// (while still in the same thread, and conceptually still on the same
+/// core). In traditional programs, this can only occur when a signal
+/// handler is registered. In more low-level code, such situations can also
+/// arise when handling interrupts, when implementing green threads with
+/// pre-emption, etc. Curious readers are encouraged to read the Linux kernel's
+/// discussion of [memory barriers].
+///
/// # Panics
///
/// Panics if `order` is [`Relaxed`].
///
+/// # Examples
+///
+/// Without `compiler_fence`, the `assert_eq!` in the following code
+/// is *not* guaranteed to succeed, despite everything happening in a single thread.
+/// To see why, remember that the compiler is free to swap the stores to
+/// `IMPORTANT_VARIABLE` and `IS_READY` since they are both
+/// `Ordering::Relaxed`. If it does, and the signal handler is invoked right
+/// after `IS_READY` is updated, then the signal handler will see
+/// `IS_READY=1`, but `IMPORTANT_VARIABLE=0`.
+/// Using a `compiler_fence` remedies this situation.
+///
+/// ```
+/// use std::sync::atomic::{AtomicBool, AtomicUsize};
+/// use std::sync::atomic::{ATOMIC_BOOL_INIT, ATOMIC_USIZE_INIT};
+/// use std::sync::atomic::Ordering;
+/// use std::sync::atomic::compiler_fence;
+///
+/// static IMPORTANT_VARIABLE: AtomicUsize = ATOMIC_USIZE_INIT;
+/// static IS_READY: AtomicBool = ATOMIC_BOOL_INIT;
+///
+/// fn main() {
+/// IMPORTANT_VARIABLE.store(42, Ordering::Relaxed);
+/// // prevent earlier writes from being moved beyond this point
+/// compiler_fence(Ordering::Release);
+/// IS_READY.store(true, Ordering::Relaxed);
+/// }
+///
+/// fn signal_handler() {
+/// if IS_READY.load(Ordering::Relaxed) {
+/// assert_eq!(IMPORTANT_VARIABLE.load(Ordering::Relaxed), 42);
+/// }
+/// }
+/// ```
+///
/// [`fence`]: fn.fence.html
/// [`Ordering`]: enum.Ordering.html
/// [`Acquire`]: enum.Ordering.html#variant.Acquire
/// [`Release`]: enum.Ordering.html#variant.Release
/// [`AcqRel`]: enum.Ordering.html#variant.AcqRel
/// [`Relaxed`]: enum.Ordering.html#variant.Relaxed
+/// [memory barriers]: https://www.kernel.org/doc/Documentation/memory-barriers.txt
#[inline]
-#[unstable(feature = "compiler_fences", issue = "41091")]
+#[stable(feature = "compiler_fences", since = "1.22.0")]
pub fn compiler_fence(order: Ordering) {
unsafe {
match order {
use syntax::ast;
use syntax::errors::DiagnosticBuilder;
-use syntax::parse::{self, token, parse_stream_from_source_str};
-use syntax::print::pprust;
+use syntax::parse::{self, token};
use syntax::symbol::Symbol;
use syntax::tokenstream;
use syntax_pos::DUMMY_SP;
Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),
- Interpolated(ref nt) => {
- // An `Interpolated` token means that we have a `Nonterminal`
- // which is often a parsed AST item. At this point we now need
- // to convert the parsed AST to an actual token stream, e.g.
- // un-parse it basically.
- //
- // Unfortunately there's not really a great way to do that in a
- // guaranteed lossless fashion right now. The fallback here is
- // to just stringify the AST node and reparse it, but this loses
- // all span information.
- //
- // As a result, some AST nodes are annotated with the token
- // stream they came from. Attempt to extract these lossless
- // token streams before we fall back to the stringification.
- let mut tokens = None;
-
- match nt.0 {
- Nonterminal::NtItem(ref item) => {
- tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
- }
- Nonterminal::NtTraitItem(ref item) => {
- tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
- }
- Nonterminal::NtImplItem(ref item) => {
- tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
- }
- _ => {}
- }
-
- tokens.map(|tokens| {
- TokenNode::Group(Delimiter::None,
- TokenStream(tokens.clone()))
- }).unwrap_or_else(|| {
- __internal::with_sess(|(sess, _)| {
- TokenNode::Group(Delimiter::None, TokenStream(nt.1.force(|| {
- // FIXME(jseyfried): Avoid this pretty-print + reparse hack
- let name = "<macro expansion>".to_owned();
- let source = pprust::token_to_string(&token);
- parse_stream_from_source_str(name, source, sess, Some(span))
- })))
- })
+ Interpolated(_) => {
+ __internal::with_sess(|(sess, _)| {
+ let tts = token.interpolated_to_tokenstream(sess, span);
+ TokenNode::Group(Delimiter::None, TokenStream(tts))
})
}
}
}
-fn prepend_attrs(attrs: &[ast::Attribute],
- tokens: Option<&tokenstream::TokenStream>,
- span: syntax_pos::Span)
- -> Option<tokenstream::TokenStream>
-{
- let tokens = match tokens {
- Some(tokens) => tokens,
- None => return None,
- };
- if attrs.len() == 0 {
- return Some(tokens.clone())
- }
- let mut builder = tokenstream::TokenStreamBuilder::new();
- for attr in attrs {
- assert_eq!(attr.style, ast::AttrStyle::Outer,
- "inner attributes should prevent cached tokens from existing");
- let stream = __internal::with_sess(|(sess, _)| {
- // FIXME: Avoid this pretty-print + reparse hack as bove
- let name = "<macro expansion>".to_owned();
- let source = pprust::attr_to_string(attr);
- parse_stream_from_source_str(name, source, sess, Some(span))
- });
- builder.push(stream);
- }
- builder.push(tokens.clone());
- Some(builder.build())
-}
-
/// Permanently unstable internal implementation details of this crate. This
/// should not be used.
///
[dependencies]
arena = { path = "../libarena" }
+bitflags = "1.0"
fmt_macros = { path = "../libfmt_macros" }
graphviz = { path = "../libgraphviz" }
jobserver = "0.1"
log = "0.3"
owning_ref = "0.3.3"
rustc_back = { path = "../librustc_back" }
-rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
Your concerns are probably the same as someone else's.
+You may also be interested in the
+[Rust Forge](https://forge.rust-lang.org/), which includes a number of
+interesting bits of information.
+
+Finally, at the end of this file is a GLOSSARY defining a number of
+common (and not necessarily obvious!) names that are used in the Rust
+compiler code. If you see some funky name and you'd like to know what
+it stands for, check there!
+
The crates of rustc
===================
-Rustc consists of a number of crates, including `libsyntax`,
-`librustc`, `librustc_back`, `librustc_trans`, and `librustc_driver`
-(the names and divisions are not set in stone and may change;
-in general, a finer-grained division of crates is preferable):
-
-- [`libsyntax`][libsyntax] contains those things concerned purely with syntax –
- that is, the AST, parser, pretty-printer, lexer, macro expander, and
- utilities for traversing ASTs – are in a separate crate called
- "syntax", whose files are in `./../libsyntax`, where `.` is the
- current directory (that is, the parent directory of front/, middle/,
- back/, and so on).
-
-- `librustc` (the current directory) contains the high-level analysis
- passes, such as the type checker, borrow checker, and so forth.
- It is the heart of the compiler.
-
-- [`librustc_back`][back] contains some very low-level details that are
- specific to different LLVM targets and so forth.
-
-- [`librustc_trans`][trans] contains the code to convert from Rust IR into LLVM
- IR, and then from LLVM IR into machine code, as well as the main
- driver that orchestrates all the other passes and various other bits
- of miscellany. In general it contains code that runs towards the
- end of the compilation process.
-
-- [`librustc_driver`][driver] invokes the compiler from
- [`libsyntax`][libsyntax], then the analysis phases from `librustc`, and
- finally the lowering and codegen passes from [`librustc_trans`][trans].
-
-Roughly speaking the "order" of the three crates is as follows:
-
- librustc_driver
- |
- +-----------------+-------------------+
- | |
- libsyntax -> librustc -> librustc_trans
-
-
-The compiler process:
-=====================
-
-The Rust compiler is comprised of six main compilation phases.
-
-1. Parsing input
-2. Configuration & expanding (cfg rules & syntax extension expansion)
-3. Running analysis passes
-4. Translation to LLVM
-5. LLVM passes
-6. Linking
-
-Phase one is responsible for parsing & lexing the input to the compiler. The
-output of this phase is an abstract syntax tree (AST). The AST at this point
-includes all macro uses & attributes. This means code which will be later
-expanded and/or removed due to `cfg` attributes is still present in this
-version of the AST. Parsing abstracts away details about individual files which
-have been read into the AST.
-
-Phase two handles configuration and macro expansion. You can think of this
-phase as a function acting on the AST from the previous phase. The input for
-this phase is the unexpanded AST from phase one, and the output is an expanded
-version of the same AST. This phase will expand all macros & syntax
-extensions and will evaluate all `cfg` attributes, potentially removing some
-code. The resulting AST will not contain any macros or `macro_use` statements.
-
-The code for these first two phases is in [`libsyntax`][libsyntax].
-
-After this phase, the compiler allocates IDs to each node in the AST
-(technically not every node, but most of them). If we are writing out
-dependencies, that happens now.
-
-The third phase is analysis. This is the most complex phase in the compiler,
-and makes up much of the code. This phase included name resolution, type
-checking, borrow checking, type & lifetime inference, trait selection, method
-selection, linting and so on. Most of the error detection in the compiler comes
-from this phase (with the exception of parse errors which arise during
-parsing). The "output" of this phase is a set of side tables containing
-semantic information about the source program. The analysis code is in
-[`librustc`][rustc] and some other crates with the `librustc_` prefix.
-
-The fourth phase is translation. This phase translates the AST (and the side
-tables from the previous phase) into LLVM IR (intermediate representation).
-This is achieved by calling into the LLVM libraries. The code for this is in
-[`librustc_trans`][trans].
-
-Phase five runs the LLVM backend. This runs LLVM's optimization passes on the
-generated IR and generates machine code resulting in object files. This phase
-is not really part of the Rust compiler, as LLVM carries out all the work.
-The interface between LLVM and Rust is in [`librustc_llvm`][llvm].
-
-The final phase, phase six, links the object files into an executable. This is
-again outsourced to other tools and not performed by the Rust compiler
-directly. The interface is in [`librustc_back`][back] (which also contains some
-things used primarily during translation).
-
-A module called the driver coordinates all these phases. It handles all the
-highest level coordination of compilation from parsing command line arguments
-all the way to invoking the linker to produce an executable.
-
-Modules in the librustc crate
-=============================
-
-The librustc crate itself consists of the following submodules
-(mostly, but not entirely, in their own directories):
-
-- session: options and data that pertain to the compilation session as
- a whole
-- middle: middle-end: name resolution, typechecking, LLVM code
- generation
-- metadata: encoder and decoder for data required by separate
- compilation
-- plugin: infrastructure for compiler plugins
-- lint: infrastructure for compiler warnings
-- util: ubiquitous types and helper functions
-- lib: bindings to LLVM
-
-The entry-point for the compiler is main() in the [`librustc_driver`][driver]
-crate.
-
-The 3 central data structures:
-------------------------------
-
-1. `./../libsyntax/ast.rs` defines the AST. The AST is treated as
- immutable after parsing, but it depends on mutable context data
- structures (mainly hash maps) to give it meaning.
-
- - Many – though not all – nodes within this data structure are
- wrapped in the type `spanned<T>`, meaning that the front-end has
- marked the input coordinates of that node. The member `node` is
- the data itself, the member `span` is the input location (file,
- line, column; both low and high).
-
- - Many other nodes within this data structure carry a
- `def_id`. These nodes represent the 'target' of some name
- reference elsewhere in the tree. When the AST is resolved, by
- `middle/resolve.rs`, all names wind up acquiring a def that they
- point to. So anything that can be pointed-to by a name winds
- up with a `def_id`.
-
-2. `middle/ty.rs` defines the datatype `sty`. This is the type that
- represents types after they have been resolved and normalized by
- the middle-end. The typeck phase converts every ast type to a
- `ty::sty`, and the latter is used to drive later phases of
- compilation. Most variants in the `ast::ty` tag have a
- corresponding variant in the `ty::sty` tag.
-
-3. `./../librustc_llvm/lib.rs` defines the exported types
- `ValueRef`, `TypeRef`, `BasicBlockRef`, and several others.
- Each of these is an opaque pointer to an LLVM type,
- manipulated through the `lib::llvm` interface.
-
-[libsyntax]: https://github.com/rust-lang/rust/tree/master/src/libsyntax/
-[trans]: https://github.com/rust-lang/rust/tree/master/src/librustc_trans/
-[llvm]: https://github.com/rust-lang/rust/tree/master/src/librustc_llvm/
-[back]: https://github.com/rust-lang/rust/tree/master/src/librustc_back/
-[rustc]: https://github.com/rust-lang/rust/tree/master/src/librustc/
-[driver]: https://github.com/rust-lang/rust/tree/master/src/librustc_driver
+Rustc consists of a number of crates, including `syntax`,
+`rustc`, `rustc_back`, `rustc_trans`, `rustc_driver`, and
+many more. The source for each crate can be found in a directory
+like `src/libXXX`, where `XXX` is the crate name.
+
+(NB. The names and divisions of these crates are not set in
+stone and may change over time -- for the time being, we tend towards
+a finer-grained division to help with compilation time, though as
+incremental improves that may change.)
+
+The dependency structure of these crates is roughly a diamond:
+
+```
+ rustc_driver
+ / | \
+ / | \
+ / | \
+ / v \
+rustc_trans rustc_borrowck ... rustc_metadata
+ \ | /
+ \ | /
+ \ | /
+ \ v /
+ rustc
+ |
+ v
+ syntax
+ / \
+ / \
+ syntax_pos syntax_ext
+```
+
+The `rustc_driver` crate, at the top of this lattice, is effectively
+the "main" function for the rust compiler. It doesn't have much "real
+code", but instead ties together all of the code defined in the other
+crates and defines the overall flow of execution. (As we transition
+more and more to the [query model](ty/maps/README.md), however, the
+"flow" of compilation is becoming less centrally defined.)
+
+At the other extreme, the `rustc` crate defines the common and
+pervasive data structures that all the rest of the compiler uses
+(e.g., how to represent types, traits, and the program itself). It
+also contains some amount of the compiler itself, although that is
+relatively limited.
+
+Finally, all the crates in the bulge in the middle define the bulk of
+the compiler -- they all depend on `rustc`, so that they can make use
+of the various types defined there, and they export public routines
+that `rustc_driver` will invoke as needed (more and more, what these
+crates export are "query definitions", but those are covered later
+on).
+
+Below `rustc` lie various crates that make up the parser and error
+reporting mechanism. For historical reasons, these crates do not have
+the `rustc_` prefix, but they are really just as much an internal part
+of the compiler and not intended to be stable (though they do wind up
+getting used by some crates in the wild; a practice we hope to
+gradually phase out).
+
+Each crate has a `README.md` file that describes, at a high-level,
+what it contains, and tries to give some kind of explanation (some
+better than others).
+
+The compiler process
+====================
+
+The Rust compiler is in a bit of transition right now. It used to be a
+purely "pass-based" compiler, where we ran a number of passes over the
+entire program, and each did a particular check or transformation.
+
+We are gradually replacing this pass-based code with an alternative
+setup based on on-demand **queries**. In the query-model, we work
+backwards, executing a *query* that expresses our ultimate goal (e.g.,
+"compile this crate"). This query in turn may make other queries
+(e.g., "get me a list of all modules in the crate"). Those queries
+make other queries that ultimately bottom out in the base operations,
+like parsing the input, running the type-checker, and so forth. This
+on-demand model permits us to do exciting things like only do the
+minimal amount of work needed to type-check a single function. It also
+helps with incremental compilation. (For details on defining queries,
+check out `src/librustc/ty/maps/README.md`.)
+
+Regardless of the general setup, the basic operations that the
+compiler must perform are the same. The only thing that changes is
+whether these operations are invoked front-to-back, or on demand. In
+order to compile a Rust crate, these are the general steps that we
+take:
+
+1. **Parsing input**
+ - this processes the `.rs` files and produces the AST ("abstract syntax tree")
+ - the AST is defined in `syntax/ast.rs`. It is intended to match the lexical
+ syntax of the Rust language quite closely.
+2. **Name resolution, macro expansion, and configuration**
+ - once parsing is complete, we process the AST recursively, resolving paths
+ and expanding macros. This same process also processes `#[cfg]` nodes, and hence
+ may strip things out of the AST as well.
+3. **Lowering to HIR**
+ - Once name resolution completes, we convert the AST into the HIR,
+ or "high-level IR". The HIR is defined in `src/librustc/hir/`; that module also includes
+ the lowering code.
+ - The HIR is a lightly desugared variant of the AST. It is more processed than the
+ AST and more suitable for the analyses that follow. It is **not** required to match
+ the syntax of the Rust language.
+ - As a simple example, in the **AST**, we preserve the parentheses
+ that the user wrote, so `((1 + 2) + 3)` and `1 + 2 + 3` parse
+ into distinct trees, even though they are equivalent. In the
+ HIR, however, parentheses nodes are removed, and those two
+ expressions are represented in the same way.
+4. **Type-checking and subsequent analyses**
+   - An important step in processing the HIR is to perform type
+     checking. This process assigns types to every HIR expression,
+     for example, and also is responsible for resolving some
+     "type-dependent" paths, such as field accesses (`x.f` -- we
+     can't know what field `f` is being accessed until we know the
+     type of `x`) and associated type references (`T::Item` -- we
+     can't know what type `Item` is until we know what `T` is).
+   - Type checking creates "side-tables" (`TypeckTables`) that include
+     the types of expressions, the way to resolve methods, and so forth.
+   - After type-checking, we can do other analyses, such as privacy checking.
+5. **Lowering to MIR and post-processing**
+   - Once type-checking is done, we can lower the HIR into MIR ("middle IR"), which
+     is a **very** desugared version of Rust, well suited to the borrowck but also
+     certain high-level optimizations.
+6. **Translation to LLVM and LLVM optimizations**
+   - From MIR, we can produce LLVM IR.
+   - LLVM then runs its various optimizations, which produces a number of `.o` files
+     (one for each "codegen unit").
+7. **Linking**
+   - Finally, those `.o` files are linked together.
+
+Glossary
+========
+
+The compiler uses a number of...idiosyncratic abbreviations and
+things. This glossary attempts to list them and give you a few
+pointers for understanding them better.
+
+- AST -- the **abstract syntax tree** produced by the `syntax` crate; reflects user syntax
+ very closely.
+- codegen unit -- when we produce LLVM IR, we group the Rust code into a number of codegen
+ units. Each of these units is processed by LLVM independently from one another,
+ enabling parallelism. They are also the unit of incremental re-use.
+- cx -- we tend to use "cx" as an abbreviation for context. See also tcx, infcx, etc.
+- `DefId` -- an index identifying a **definition** (see `librustc/hir/def_id.rs`). Uniquely
+ identifies a `DefPath`.
+- HIR -- the **High-level IR**, created by lowering and desugaring the AST. See `librustc/hir`.
+- `HirId` -- identifies a particular node in the HIR by combining a
+ def-id with an "intra-definition offset".
+- `'gcx` -- the lifetime of the global arena (see `librustc/ty`).
+- generics -- the set of generic type parameters defined on a type or item
+- ICE -- internal compiler error. When the compiler crashes.
+- infcx -- the inference context (see `librustc/infer`)
+- MIR -- the **Mid-level IR** that is created after type-checking for use by borrowck and trans.
+ Defined in the `src/librustc/mir/` module, but much of the code that manipulates it is
+ found in `src/librustc_mir`.
+- obligation -- something that must be proven by the trait system; see `librustc/traits`.
+- local crate -- the crate currently being compiled.
+- node-id or `NodeId` -- an index identifying a particular node in the
+ AST or HIR; gradually being phased out and replaced with `HirId`.
+- query -- perhaps some sub-computation during compilation; see `librustc/maps`.
+- provider -- the function that executes a query; see `librustc/maps`.
+- sess -- the **compiler session**, which stores global data used throughout compilation
+- side tables -- because the AST and HIR are immutable once created, we often carry extra
+ information about them in the form of hashtables, indexed by the id of a particular node.
+- span -- a location in the user's source code, used for error
+ reporting primarily. These are like a file-name/line-number/column
+ tuple on steroids: they carry a start/end point, and also track
+ macro expansions and compiler desugaring. All while being packed
+ into a few bytes (really, it's an index into a table). See the
+ `Span` datatype for more.
+- substs -- the **substitutions** for a given generic type or item
+ (e.g., the `i32, u32` in `HashMap<i32, u32>`)
+- tcx -- the "typing context", main data structure of the compiler (see `librustc/ty`).
+- trans -- the code to **translate** MIR into LLVM IR.
+- trait reference -- a trait and values for its type parameters (see `librustc/ty`).
+- ty -- the internal representation of a **type** (see `librustc/ty`).
use ich::StableHashingContext;
use std::fmt;
use std::hash::Hash;
+use syntax_pos::symbol::InternedString;
// erase!() just makes tokens go away. It's used to specify which macro argument
// is repeated (i.e. which sub-expression of the macro we are in) but don't need
[] GetPanicStrategy(CrateNum),
[] IsNoBuiltins(CrateNum),
[] ImplDefaultness(DefId),
- [] ExportedSymbols(CrateNum),
+ [] ExportedSymbolIds(CrateNum),
[] NativeLibraries(CrateNum),
[] PluginRegistrarFn(CrateNum),
[] DeriveRegistrarFn(CrateNum),
[] MaybeUnusedExternCrates,
[] StabilityIndex,
[] AllCrateNums,
+ [] ExportedSymbols(CrateNum),
+ [] CollectAndPartitionTranslationItems,
+ [] ExportName(DefId),
+ [] ContainsExternIndicator(DefId),
+ [] IsTranslatedFunction(DefId),
+ [] CodegenUnit(InternedString),
+ [] CompileCodegenUnit(InternedString),
+ [] OutputFilenames,
);
trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
}
impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a, T> DepNodeParams<'a, 'gcx, 'tcx> for T
- where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + fmt::Debug
+ where T: HashStable<StableHashingContext<'gcx>> + fmt::Debug
{
default const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
default fn to_fingerprint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Fingerprint {
- let mut hcx = StableHashingContext::new(tcx);
+ let mut hcx = tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();
self.hash_stable(&mut hcx, &mut hasher);
}
}
+impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for (DefIndex,) {
+ const CAN_RECONSTRUCT_QUERY_KEY: bool = true;
+
+ fn to_fingerprint(&self, tcx: TyCtxt) -> Fingerprint {
+ tcx.hir.definitions().def_path_hash(self.0).0
+ }
+
+ fn to_debug_str(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> String {
+ tcx.item_path_str(DefId::local(self.0))
+ }
+}
+
impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for (DefId, DefId) {
const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
// except according to those terms.
use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
+ StableHashingContextProvider};
use session::config::OutputType;
use std::cell::{Ref, RefCell};
use std::rc::Rc;
use util::common::{ProfileQueriesMsg, profq_msg};
+use ich::Fingerprint;
+
use super::dep_node::{DepNode, DepKind, WorkProductId};
use super::query::DepGraphQuery;
use super::raii;
self.data.as_ref().map(|data| raii::IgnoreTask::new(&data.edges))
}
- pub fn in_task<'graph>(&'graph self, key: DepNode) -> Option<raii::DepTask<'graph>> {
- self.data.as_ref().map(|data| raii::DepTask::new(&data.edges, key))
- }
-
pub fn with_ignore<OP,R>(&self, op: OP) -> R
where OP: FnOnce() -> R
{
/// `arg` parameter.
///
/// [README]: README.md
- pub fn with_task<C, A, R>(&self,
- key: DepNode,
- cx: C,
- arg: A,
- task: fn(C, A) -> R)
- -> (R, DepNodeIndex)
- where C: DepGraphSafe
+ pub fn with_task<C, A, R, HCX>(&self,
+ key: DepNode,
+ cx: C,
+ arg: A,
+ task: fn(C, A) -> R)
+ -> (R, DepNodeIndex)
+ where C: DepGraphSafe + StableHashingContextProvider<ContextType=HCX>,
+ R: HashStable<HCX>,
{
if let Some(ref data) = self.data {
data.edges.borrow_mut().push_task(key);
if cfg!(debug_assertions) {
profq_msg(ProfileQueriesMsg::TaskBegin(key.clone()))
};
+
+ // In incremental mode, hash the result of the task. We don't
+ // do anything with the hash yet, but we are computing it
+ // anyway so that
+ // - we make sure that the infrastructure works and
+ // - we can get an idea of the runtime cost.
+ let mut hcx = cx.create_stable_hashing_context();
+
let result = task(cx, arg);
if cfg!(debug_assertions) {
profq_msg(ProfileQueriesMsg::TaskEnd)
};
let dep_node_index = data.edges.borrow_mut().pop_task(key);
+
+ let mut stable_hasher = StableHasher::new();
+ result.hash_stable(&mut hcx, &mut stable_hasher);
+ let _: Fingerprint = stable_hasher.finish();
+
(result, dep_node_index)
} else {
(task(cx, arg), DepNodeIndex::INVALID)
{
}
+/// Mut ref to dep-graph-safe stuff should still be dep-graph-safe.
+impl<'a, A> DepGraphSafe for &'a mut A
+ where A: DepGraphSafe,
+{
+}
+
+
/// No data here! :)
impl DepGraphSafe for () {
}
--- /dev/null
+# Introduction to the HIR
+
+The HIR -- "High-level IR" -- is the primary IR used in most of
+rustc. It is a desugared version of the "abstract syntax tree" (AST)
+that is generated after parsing, macro expansion, and name resolution
+have completed. Many parts of HIR resemble Rust surface syntax quite
+closely, with the exception that some of Rust's expression forms have
+been desugared away (as an example, `for` loops are converted into a
+`loop` and do not appear in the HIR).
+
+This README covers the main concepts of the HIR.
+
+### Out-of-band storage and the `Crate` type
+
+The top-level data-structure in the HIR is the `Crate`, which stores
+the contents of the crate currently being compiled (we only ever
+construct HIR for the current crate). Whereas in the AST the crate
+data structure basically just contains the root module, the HIR
+`Crate` structure contains a number of maps and other things that
+serve to organize the content of the crate for easier access.
+
+For example, the contents of individual items (e.g., modules,
+functions, traits, impls, etc) in the HIR are not immediately
+accessible in the parents. So, for example, if we had a module item `foo`
+containing a function `bar()`:
+
+```
+mod foo {
+ fn bar() { }
+}
+```
+
+Then in the HIR the representation of module `foo` (the `Mod`
+struct) would have only the **`ItemId`** `I` of `bar()`. To get the
+details of the function `bar()`, we would lookup `I` in the
+`items` map.
+
+One nice result from this representation is that one can iterate
+over all items in the crate by iterating over the key-value pairs
+in these maps (without the need to trawl through the IR in total).
+There are similar maps for things like trait items and impl items,
+as well as "bodies" (explained below).
+
+The other reason to set up the representation this way is for better
+integration with incremental compilation. This way, if you gain access
+to a `&hir::Item` (e.g. for the mod `foo`), you do not immediately
+gain access to the contents of the function `bar()`. Instead, you only
+gain access to the **id** for `bar()`, and you must invoke some
+function to lookup the contents of `bar()` given its id; this gives us
+a chance to observe that you accessed the data for `bar()` and record
+the dependency.
+
+### Identifiers in the HIR
+
+Most of the code that has to deal with things in HIR tends not to
+carry around references into the HIR, but rather to carry around
+*identifier numbers* (or just "ids"). Right now, you will find four
+sorts of identifiers in active use:
+
+- `DefId`, which primarily names "definitions" or top-level items.
+ - You can think of a `DefId` as being shorthand for a very explicit
+ and complete path, like `std::collections::HashMap`. However,
+ these paths are able to name things that are not nameable in
+ normal Rust (e.g., impls), and they also include extra information
+ about the crate (such as its version number, as two versions of
+ the same crate can co-exist).
+ - A `DefId` really consists of two parts, a `CrateNum` (which
+ identifies the crate) and a `DefIndex` (which indexes into a list
+ of items that is maintained per crate).
+- `HirId`, which combines the index of a particular item with an
+ offset within that item.
+ - the key point of a `HirId` is that it is *relative* to some item (which is named
+ via a `DefId`).
+- `BodyId`, this is an absolute identifier that refers to a specific
+ body (definition of a function or constant) in the crate. It is currently
+ effectively a "newtype'd" `NodeId`.
+- `NodeId`, which is an absolute id that identifies a single node in the HIR tree.
+ - While these are still in common use, **they are being slowly phased out**.
+ - Since they are absolute within the crate, adding a new node
+ anywhere in the tree causes the node-ids of all subsequent code in
+ the crate to change. This is terrible for incremental compilation,
+ as you can perhaps imagine.
+
+### HIR Map
+
+Most of the time when you are working with the HIR, you will do so via
+the **HIR Map**, accessible in the tcx via `tcx.hir` (and defined in
+the `hir::map` module). The HIR map contains a number of methods to
+convert between ids of various kinds and to lookup data associated
+with a HIR node.
+
+For example, if you have a `DefId`, and you would like to convert it
+to a `NodeId`, you can use `tcx.hir.as_local_node_id(def_id)`. This
+returns an `Option<NodeId>` -- this will be `None` if the def-id
+refers to something outside of the current crate (since then it has no
+HIR node), but otherwise returns `Some(n)` where `n` is the node-id of
+the definition.
+
+Similarly, you can use `tcx.hir.find(n)` to lookup the node for a
+`NodeId`. This returns an `Option<Node<'tcx>>`, where `Node` is an enum
+defined in the map; by matching on this you can find out what sort of
+node the node-id referred to and also get a pointer to the data
+itself. Often, you know what sort of node `n` is -- e.g., if you know
+that `n` must be some HIR expression, you can do
+`tcx.hir.expect_expr(n)`, which will extract and return the
+`&hir::Expr`, panicking if `n` is not in fact an expression.
+
+Finally, you can use the HIR map to find the parents of nodes, via
+calls like `tcx.hir.get_parent_node(n)`.
+
+### HIR Bodies
+
+A **body** represents some kind of executable code, such as the body
+of a function/closure or the definition of a constant. Bodies are
+associated with an **owner**, which is typically some kind of item
+(e.g., a `fn()` or `const`), but could also be a closure expression
+(e.g., `|x, y| x + y`). You can use the HIR map to find the body
+associated with a given def-id (`maybe_body_owned_by()`) or to find
+the owner of a body (`body_owner_def_id()`).
use syntax::codemap::{self, respan, Spanned, CompilerDesugaringKind};
use syntax::std_inject;
use syntax::symbol::{Symbol, keywords};
+use syntax::tokenstream::{TokenStream, TokenTree, Delimited};
+use syntax::parse::token::{Token, DelimToken};
use syntax::util::small_vector::SmallVector;
use syntax::visit::{self, Visitor};
use syntax_pos::Span;
}
fn lower_attrs(&mut self, attrs: &Vec<Attribute>) -> hir::HirVec<Attribute> {
- attrs.clone().into()
+ attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into()
+ }
+
+ fn lower_attr(&mut self, attr: &Attribute) -> Attribute {
+ Attribute {
+ id: attr.id,
+ style: attr.style,
+ path: attr.path.clone(),
+ tokens: self.lower_token_stream(attr.tokens.clone()),
+ is_sugared_doc: attr.is_sugared_doc,
+ span: attr.span,
+ }
+ }
+
+ fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
+ tokens.into_trees().map(|tree| self.lower_token_tree(tree)).collect()
+ }
+
+ fn lower_token_tree(&mut self, tree: TokenTree) -> TokenTree {
+ match tree {
+ TokenTree::Token(span, token) => {
+ self.lower_token(token, span)
+ }
+ TokenTree::Delimited(span, delimited) => {
+ TokenTree::Delimited(span, Delimited {
+ delim: delimited.delim,
+ tts: self.lower_token_stream(delimited.tts.into()).into(),
+ })
+ }
+ }
+ }
+
+ fn lower_token(&mut self, token: Token, span: Span) -> TokenTree {
+ match token {
+ Token::Interpolated(_) => {}
+ other => return TokenTree::Token(span, other),
+ }
+
+ let tts = token.interpolated_to_tokenstream(&self.sess.parse_sess, span);
+ let tts = self.lower_token_stream(tts);
+ TokenTree::Delimited(span, Delimited {
+ delim: DelimToken::NoDelim,
+ tts: tts.into(),
+ })
}
fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {
let attrs = self.lower_attrs(&i.attrs);
if let ItemKind::MacroDef(ref def) = i.node {
if !def.legacy || i.attrs.iter().any(|attr| attr.path == "macro_export") {
+ let body = self.lower_token_stream(def.stream());
self.exported_macros.push(hir::MacroDef {
name,
vis,
attrs,
id: i.id,
span: i.span,
- body: def.stream(),
+ body,
legacy: def.legacy,
});
}
--- /dev/null
+The HIR map, accessible via `tcx.hir`, allows you to quickly navigate the
+HIR and convert between various forms of identifiers. See [the HIR README] for more information.
+
+[the HIR README]: ../README.md
/// plain old integers.
map: Vec<MapEntry<'hir>>,
- definitions: Definitions,
+ definitions: &'hir Definitions,
/// Bodies inlined from other crates are cached here.
inlined_bodies: RefCell<DefIdMap<&'hir Body>>,
}
#[inline]
- pub fn definitions(&self) -> &Definitions {
- &self.definitions
+ pub fn definitions(&self) -> &'hir Definitions {
+ self.definitions
}
pub fn def_key(&self, def_id: DefId) -> DefKey {
impl Named for ImplItem { fn name(&self) -> Name { self.name } }
pub fn map_crate<'hir>(forest: &'hir mut Forest,
- definitions: Definitions)
+ definitions: &'hir Definitions)
-> Map<'hir> {
let map = {
let mut collector = NodeCollector::root(&forest.krate,
pub type CrateConfig = HirVec<P<MetaItem>>;
+/// The top-level data structure that stores the entire contents of
+/// the crate currently being compiled.
+///
+/// For more details, see [the module-level README](README.md).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
pub struct Crate {
pub module: Mod,
pub node_id: NodeId,
}
-/// The body of a function or constant value.
+/// The body of a function, closure, or constant value. In the case of
+/// a function, the body contains not only the function body itself
+/// (which is an expression), but also the argument patterns, since
+/// those are something that the caller doesn't really care about.
+///
+/// # Examples
+///
+/// ```
+/// fn foo((x, y): (u32, u32)) -> u32 {
+/// x + y
+/// }
+/// ```
+///
+/// Here, the `Body` associated with `foo()` would contain:
+///
+/// - an `arguments` array containing the `(x, y)` pattern
+/// - a `value` containing the `x + y` expression (maybe wrapped in a block)
+/// - `is_generator` would be false
+///
+/// All bodies have an **owner**, which can be accessed via the HIR
+/// map using `body_owner_def_id()`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Body {
pub arguments: HirVec<Arg>,
use std::rc::Rc;
use syntax::codemap::CodeMap;
use syntax_pos::{BytePos, FileMap};
-use ty::TyCtxt;
#[derive(Clone)]
struct CacheEntry {
file_index: usize,
}
-pub struct CachingCodemapView<'tcx> {
- codemap: &'tcx CodeMap,
+pub struct CachingCodemapView<'cm> {
+ codemap: &'cm CodeMap,
line_cache: [CacheEntry; 3],
time_stamp: usize,
}
-impl<'gcx> CachingCodemapView<'gcx> {
- pub fn new<'a, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> CachingCodemapView<'gcx> {
- let codemap = tcx.sess.codemap();
+impl<'cm> CachingCodemapView<'cm> {
+ pub fn new(codemap: &'cm CodeMap) -> CachingCodemapView<'cm> {
let files = codemap.files();
let first_file = files[0].clone();
let entry = CacheEntry {
// except according to those terms.
use hir;
-use hir::def_id::DefId;
+use hir::def_id::{DefId, DefIndex};
use hir::map::DefPathHash;
+use hir::map::definitions::Definitions;
use ich::{self, CachingCodemapView};
+use middle::cstore::CrateStore;
use session::config::DebugInfoLevel::NoDebugInfo;
-use ty::TyCtxt;
-use util::nodemap::{NodeMap, ItemLocalMap};
+use ty::{TyCtxt, fast_reject};
+use session::Session;
+use std::cmp::Ord;
use std::hash as std_hash;
-use std::collections::{HashMap, HashSet, BTreeMap};
+use std::cell::RefCell;
+use std::collections::HashMap;
use syntax::ast;
use syntax::attr;
+use syntax::codemap::CodeMap;
use syntax::ext::hygiene::SyntaxContext;
use syntax::symbol::Symbol;
use syntax_pos::Span;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
- StableHasherResult};
+use rustc_data_structures::stable_hasher::{HashStable, StableHashingContextProvider,
+ StableHasher, StableHasherResult,
+ ToStableHashKey};
use rustc_data_structures::accumulate_vec::AccumulateVec;
+use rustc_data_structures::fx::FxHashSet;
+
+thread_local!(static IGNORED_ATTR_NAMES: RefCell<FxHashSet<Symbol>> =
+ RefCell::new(FxHashSet()));
/// This is the context state available during incr. comp. hashing. It contains
/// enough information to transform DefIds and HirIds into stable DefPaths (i.e.
/// a reference to the TyCtxt) and it holds a few caches for speeding up various
/// things (e.g. each DefId/DefPath is only hashed once).
-pub struct StableHashingContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- codemap: CachingCodemapView<'gcx>,
+pub struct StableHashingContext<'gcx> {
+ sess: &'gcx Session,
+ definitions: &'gcx Definitions,
+ cstore: &'gcx CrateStore,
+ body_resolver: BodyResolver<'gcx>,
hash_spans: bool,
hash_bodies: bool,
overflow_checks_enabled: bool,
node_id_hashing_mode: NodeIdHashingMode,
- // A sorted array of symbol keys for fast lookup.
- ignored_attr_names: Vec<Symbol>,
+
+ // Very often, we are hashing something that does not need the
+ // CachingCodemapView, so we initialize it lazily.
+ raw_codemap: &'gcx CodeMap,
+ caching_codemap: Option<CachingCodemapView<'gcx>>,
}
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum NodeIdHashingMode {
Ignore,
HashDefPath,
- HashTraitsInScope,
}
-impl<'a, 'gcx, 'tcx> StableHashingContext<'a, 'gcx, 'tcx> {
-
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
- let hash_spans_initial = tcx.sess.opts.debuginfo != NoDebugInfo;
- let check_overflow_initial = tcx.sess.overflow_checks();
-
- let mut ignored_attr_names: Vec<_> = ich::IGNORED_ATTRIBUTES
- .iter()
- .map(|&s| Symbol::intern(s))
- .collect();
+/// The BodyResolver allows to map a BodyId to the corresponding hir::Body.
+/// We could also just store a plain reference to the hir::Crate but we want
+/// to avoid that the crate is used to get untracked access to all of the HIR.
+#[derive(Clone, Copy)]
+struct BodyResolver<'gcx>(&'gcx hir::Crate);
+
+impl<'gcx> BodyResolver<'gcx> {
+ // Return a reference to the hir::Body with the given BodyId.
+ // DOES NOT DO ANY TRACKING, use carefully.
+ fn body(self, id: hir::BodyId) -> &'gcx hir::Body {
+ self.0.body(id)
+ }
+}
- ignored_attr_names.sort();
+impl<'gcx> StableHashingContext<'gcx> {
+ // The `krate` here is only used for mapping BodyIds to Bodies.
+ // Don't use it for anything else or you'll run the risk of
+ // leaking data out of the tracking system.
+ pub fn new(sess: &'gcx Session,
+ krate: &'gcx hir::Crate,
+ definitions: &'gcx Definitions,
+ cstore: &'gcx CrateStore)
+ -> Self {
+ let hash_spans_initial = sess.opts.debuginfo != NoDebugInfo;
+ let check_overflow_initial = sess.overflow_checks();
+
+ debug_assert!(ich::IGNORED_ATTRIBUTES.len() > 0);
+ IGNORED_ATTR_NAMES.with(|names| {
+ let mut names = names.borrow_mut();
+ if names.is_empty() {
+ names.extend(ich::IGNORED_ATTRIBUTES.iter()
+ .map(|&s| Symbol::intern(s)));
+ }
+ });
StableHashingContext {
- tcx,
- codemap: CachingCodemapView::new(tcx),
+ sess,
+ body_resolver: BodyResolver(krate),
+ definitions,
+ cstore,
+ caching_codemap: None,
+ raw_codemap: sess.codemap(),
hash_spans: hash_spans_initial,
hash_bodies: true,
overflow_checks_enabled: check_overflow_initial,
node_id_hashing_mode: NodeIdHashingMode::HashDefPath,
- ignored_attr_names,
}
}
+ #[inline]
+ pub fn sess(&self) -> &'gcx Session {
+ self.sess
+ }
+
pub fn force_span_hashing(mut self) -> Self {
self.hash_spans = true;
self
}
#[inline]
- pub fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
- self.tcx
+ pub fn def_path_hash(&self, def_id: DefId) -> DefPathHash {
+ if def_id.is_local() {
+ self.definitions.def_path_hash(def_id.index)
+ } else {
+ self.cstore.def_path_hash(def_id)
+ }
}
#[inline]
- pub fn def_path_hash(&mut self, def_id: DefId) -> DefPathHash {
- self.tcx.def_path_hash(def_id)
+ pub fn local_def_path_hash(&self, def_index: DefIndex) -> DefPathHash {
+ self.definitions.def_path_hash(def_index)
}
#[inline]
#[inline]
pub fn codemap(&mut self) -> &mut CachingCodemapView<'gcx> {
- &mut self.codemap
+ match self.caching_codemap {
+ Some(ref mut cm) => {
+ cm
+ }
+ ref mut none => {
+ *none = Some(CachingCodemapView::new(self.raw_codemap));
+ none.as_mut().unwrap()
+ }
+ }
}
#[inline]
pub fn is_ignored_attr(&self, name: Symbol) -> bool {
- self.ignored_attr_names.binary_search(&name).is_ok()
+ IGNORED_ATTR_NAMES.with(|names| {
+ names.borrow().contains(&name)
+ })
}
pub fn hash_hir_item_like<F: FnOnce(&mut Self)>(&mut self,
}
}
+impl<'a, 'gcx, 'lcx> StableHashingContextProvider for TyCtxt<'a, 'gcx, 'lcx> {
+ type ContextType = StableHashingContext<'gcx>;
+ fn create_stable_hashing_context(&self) -> Self::ContextType {
+ (*self).create_stable_hashing_context()
+ }
+}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::BodyId {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ if hcx.hash_bodies() {
+ hcx.body_resolver.body(*self).hash_stable(hcx, hasher);
+ }
+ }
+}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::NodeId {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::HirId {
+ #[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
- let hir_id = hcx.tcx.hir.node_to_hir_id(*self);
match hcx.node_id_hashing_mode {
NodeIdHashingMode::Ignore => {
- // Most NodeIds in the HIR can be ignored, but if there is a
- // corresponding entry in the `trait_map` we need to hash that.
- // Make sure we don't ignore too much by checking that there is
- // no entry in a debug_assert!().
- debug_assert!(hcx.tcx.in_scope_traits(hir_id).is_none());
+ // Don't do anything.
}
NodeIdHashingMode::HashDefPath => {
- hir_id.hash_stable(hcx, hasher);
+ let hir::HirId {
+ owner,
+ local_id,
+ } = *self;
+
+ hcx.local_def_path_hash(owner).hash_stable(hcx, hasher);
+ local_id.hash_stable(hcx, hasher);
}
- NodeIdHashingMode::HashTraitsInScope => {
- if let Some(traits) = hcx.tcx.in_scope_traits(hir_id) {
- // The ordering of the candidates is not fixed. So we hash
- // the def-ids and then sort them and hash the collection.
- let mut candidates: AccumulateVec<[_; 8]> =
- traits.iter()
- .map(|&hir::TraitCandidate { def_id, import_id: _ }| {
- hcx.def_path_hash(def_id)
- })
- .collect();
- if traits.len() > 1 {
- candidates.sort();
- }
- candidates.hash_stable(hcx, hasher);
- }
+ }
+ }
+}
+
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for hir::HirId {
+ type KeyType = (DefPathHash, hir::ItemLocalId);
+
+ #[inline]
+ fn to_stable_hash_key(&self,
+ hcx: &StableHashingContext<'gcx>)
+ -> (DefPathHash, hir::ItemLocalId) {
+ let def_path_hash = hcx.local_def_path_hash(self.owner);
+ (def_path_hash, self.local_id)
+ }
+}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::NodeId {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ match hcx.node_id_hashing_mode {
+ NodeIdHashingMode::Ignore => {
+ // Don't do anything.
+ }
+ NodeIdHashingMode::HashDefPath => {
+ hcx.definitions.node_to_hir_id(*self).hash_stable(hcx, hasher);
}
}
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Span {
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for ast::NodeId {
+ type KeyType = (DefPathHash, hir::ItemLocalId);
+
+ #[inline]
+ fn to_stable_hash_key(&self,
+ hcx: &StableHashingContext<'gcx>)
+ -> (DefPathHash, hir::ItemLocalId) {
+ hcx.definitions.node_to_hir_id(*self).to_stable_hash_key(hcx)
+ }
+}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for Span {
// Hash a span in a stable way. We can't directly hash the span's BytePos
// fields (that would be similar to hashing pointers, since those are just
// Also, hashing filenames is expensive so we avoid doing it twice when the
// span starts and ends in the same file, which is almost always the case.
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use syntax_pos::Pos;
}
}
-pub fn hash_stable_hashmap<'a, 'gcx, 'tcx, K, V, R, SK, F, W>(
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+pub fn hash_stable_trait_impls<'gcx, W, R>(
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>,
- map: &HashMap<K, V, R>,
- extract_stable_key: F)
- where K: Eq + std_hash::Hash,
- V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
+ blanket_impls: &Vec<DefId>,
+ non_blanket_impls: &HashMap<fast_reject::SimplifiedType, Vec<DefId>, R>)
+ where W: StableHasherResult,
R: std_hash::BuildHasher,
- SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
- F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
- W: StableHasherResult,
{
- let mut keys: Vec<_> = map.keys()
- .map(|k| (extract_stable_key(hcx, k), k))
- .collect();
- keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
- keys.len().hash_stable(hcx, hasher);
- for (stable_key, key) in keys {
- stable_key.hash_stable(hcx, hasher);
- map[key].hash_stable(hcx, hasher);
- }
-}
-
-pub fn hash_stable_hashset<'a, 'tcx, 'gcx, K, R, SK, F, W>(
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
- hasher: &mut StableHasher<W>,
- set: &HashSet<K, R>,
- extract_stable_key: F)
- where K: Eq + std_hash::Hash,
- R: std_hash::BuildHasher,
- SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
- F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
- W: StableHasherResult,
-{
- let mut keys: Vec<_> = set.iter()
- .map(|k| extract_stable_key(hcx, k))
- .collect();
- keys.sort_unstable();
- keys.hash_stable(hcx, hasher);
-}
+ {
+ let mut blanket_impls: AccumulateVec<[_; 8]> = blanket_impls
+ .iter()
+ .map(|&def_id| hcx.def_path_hash(def_id))
+ .collect();
-pub fn hash_stable_nodemap<'a, 'tcx, 'gcx, V, W>(
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
- hasher: &mut StableHasher<W>,
- map: &NodeMap<V>)
- where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
- W: StableHasherResult,
-{
- hash_stable_hashmap(hcx, hasher, map, |hcx, node_id| {
- hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
- });
-}
+ if blanket_impls.len() > 1 {
+ blanket_impls.sort_unstable();
+ }
-pub fn hash_stable_itemlocalmap<'a, 'tcx, 'gcx, V, W>(
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
- hasher: &mut StableHasher<W>,
- map: &ItemLocalMap<V>)
- where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
- W: StableHasherResult,
-{
- hash_stable_hashmap(hcx, hasher, map, |_, local_id| {
- *local_id
- });
-}
+ blanket_impls.hash_stable(hcx, hasher);
+ }
+ {
+ let mut keys: AccumulateVec<[_; 8]> =
+ non_blanket_impls.keys()
+ .map(|k| (k, k.map_def(|d| hcx.def_path_hash(d))))
+ .collect();
+ keys.sort_unstable_by(|&(_, ref k1), &(_, ref k2)| k1.cmp(k2));
+ keys.len().hash_stable(hcx, hasher);
+ for (key, ref stable_key) in keys {
+ stable_key.hash_stable(hcx, hasher);
+ let mut impls : AccumulateVec<[_; 8]> = non_blanket_impls[key]
+ .iter()
+ .map(|&impl_id| hcx.def_path_hash(impl_id))
+ .collect();
+
+ if impls.len() > 1 {
+ impls.sort_unstable();
+ }
-pub fn hash_stable_btreemap<'a, 'tcx, 'gcx, K, V, SK, F, W>(
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
- hasher: &mut StableHasher<W>,
- map: &BTreeMap<K, V>,
- extract_stable_key: F)
- where K: Eq + Ord,
- V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
- SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
- F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
- W: StableHasherResult,
-{
- let mut keys: Vec<_> = map.keys()
- .map(|k| (extract_stable_key(hcx, k), k))
- .collect();
- keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
- keys.len().hash_stable(hcx, hasher);
- for (stable_key, key) in keys {
- stable_key.hash_stable(hcx, hasher);
- map[key].hash_stable(hcx, hasher);
+ impls.hash_stable(hcx, hasher);
+ }
}
}
+
//! This module contains `HashStable` implementations for various data types
//! from rustc::middle::cstore in no particular order.
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};
+
use middle;
impl_stable_hash_for!(enum middle::cstore::DepKind {
RequireDynamic,
RequireStatic
});
+
+impl_stable_hash_for!(struct middle::cstore::ExternCrate {
+ def_id,
+ span,
+ direct,
+ path_len
+});
+
+impl_stable_hash_for!(struct middle::cstore::CrateSource {
+ dylib,
+ rlib,
+ rmeta
+});
+
+impl<HCX> HashStable<HCX> for middle::cstore::ExternBodyNestedBodies {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut HCX,
+ hasher: &mut StableHasher<W>) {
+ let middle::cstore::ExternBodyNestedBodies {
+ nested_bodies: _,
+ fingerprint,
+ } = *self;
+
+ fingerprint.hash_stable(hcx, hasher);
+ }
+}
+
+impl<'a, HCX> HashStable<HCX> for middle::cstore::ExternConstBody<'a> {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut HCX,
+ hasher: &mut StableHasher<W>) {
+ let middle::cstore::ExternConstBody {
+ body: _,
+ fingerprint,
+ } = *self;
+
+ fingerprint.hash_stable(hcx, hasher);
+ }
+}
//! types in no particular order.
use hir;
+use hir::map::DefPathHash;
use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
use ich::{StableHashingContext, NodeIdHashingMode};
+use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
+ StableHasher, StableHasherResult};
use std::mem;
-
use syntax::ast;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
- StableHasherResult};
-
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for DefId {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for DefId {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
hcx.def_path_hash(*self).hash_stable(hcx, hasher);
}
}
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for DefId {
+ type KeyType = DefPathHash;
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::HirId {
#[inline]
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
- hasher: &mut StableHasher<W>) {
- let hir::HirId {
- owner,
- local_id,
- } = *self;
-
- hcx.def_path_hash(DefId::local(owner)).hash_stable(hcx, hasher);
- local_id.hash_stable(hcx, hasher);
+ fn to_stable_hash_key(&self, hcx: &StableHashingContext<'gcx>) -> DefPathHash {
+ hcx.def_path_hash(*self)
}
}
-
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for CrateNum {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for CrateNum {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
hcx.def_path_hash(DefId {
krate: *self,
}
}
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for CrateNum {
+ type KeyType = DefPathHash;
+
+ #[inline]
+ fn to_stable_hash_key(&self, hcx: &StableHashingContext<'gcx>) -> DefPathHash {
+ let def_id = DefId { krate: *self, index: CRATE_DEF_INDEX };
+ def_id.to_stable_hash_key(hcx)
+ }
+}
+
impl_stable_hash_for!(tuple_struct hir::ItemLocalId { index });
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>>
+for hir::ItemLocalId {
+ type KeyType = hir::ItemLocalId;
+
+ #[inline]
+ fn to_stable_hash_key(&self,
+ _: &StableHashingContext<'gcx>)
+ -> hir::ItemLocalId {
+ *self
+ }
+}
+
// The following implementations of HashStable for ItemId, TraitItemId, and
// ImplItemId deserve special attention. Normally we do not hash NodeIds within
// the HIR, since they just signify a HIR nodes own path. But ItemId et al
// want to pick up on a reference changing its target, so we hash the NodeIds
// in "DefPath Mode".
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::ItemId {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::ItemId {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::ItemId {
id
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::TraitItemId {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::TraitItemId {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::TraitItemId {
node_id
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::ImplItemId {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::ImplItemId {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::ImplItemId {
node_id
span
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Ty {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Ty {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
- let node_id_hashing_mode = match self.node {
- hir::TySlice(..) |
- hir::TyArray(..) |
- hir::TyPtr(..) |
- hir::TyRptr(..) |
- hir::TyBareFn(..) |
- hir::TyNever |
- hir::TyTup(..) |
- hir::TyTraitObject(..) |
- hir::TyImplTrait(..) |
- hir::TyTypeof(..) |
- hir::TyErr |
- hir::TyInfer => {
- NodeIdHashingMode::Ignore
- }
- hir::TyPath(..) => {
- NodeIdHashingMode::HashTraitsInScope
- }
- };
-
hcx.while_hashing_hir_bodies(true, |hcx| {
let hir::Ty {
- id,
+ id: _,
ref node,
ref span,
} = *self;
- hcx.with_node_id_hashing_mode(node_id_hashing_mode, |hcx| {
- id.hash_stable(hcx, hasher);
- });
node.hash_stable(hcx, hasher);
span.hash_stable(hcx, hasher);
})
Return(t)
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::TraitRef {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::TraitRef {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::TraitRef {
ref path,
- ref_id,
+ // Don't hash the ref_id. It is tracked via the thing it is used to access
+ ref_id: _,
} = *self;
path.hash_stable(hcx, hasher);
- hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashTraitsInScope, |hcx| {
- ref_id.hash_stable(hcx, hasher);
- });
}
}
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Block {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Block {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::Block {
ref stmts,
ref expr,
- id,
+ id: _,
hir_id: _,
rules,
span,
}
expr.hash_stable(hcx, hasher);
- id.hash_stable(hcx, hasher);
rules.hash_stable(hcx, hasher);
span.hash_stable(hcx, hasher);
targeted_by_break.hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Pat {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Pat {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
- let node_id_hashing_mode = match self.node {
- hir::PatKind::Wild |
- hir::PatKind::Binding(..) |
- hir::PatKind::Tuple(..) |
- hir::PatKind::Box(..) |
- hir::PatKind::Ref(..) |
- hir::PatKind::Lit(..) |
- hir::PatKind::Range(..) |
- hir::PatKind::Slice(..) => {
- NodeIdHashingMode::Ignore
- }
- hir::PatKind::Path(..) |
- hir::PatKind::Struct(..) |
- hir::PatKind::TupleStruct(..) => {
- NodeIdHashingMode::HashTraitsInScope
- }
- };
-
let hir::Pat {
- id,
+ id: _,
hir_id: _,
ref node,
ref span
} = *self;
- hcx.with_node_id_hashing_mode(node_id_hashing_mode, |hcx| {
- id.hash_stable(hcx, hasher);
- });
+
node.hash_stable(hcx, hasher);
span.hash_stable(hcx, hasher);
}
UserProvided
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Expr {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Expr {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
hcx.while_hashing_hir_bodies(true, |hcx| {
let hir::Expr {
- id,
+ id: _,
hir_id: _,
ref span,
ref node,
ref attrs
} = *self;
- let (spans_always_on, node_id_hashing_mode) = match *node {
+ let spans_always_on = match *node {
hir::ExprBox(..) |
hir::ExprArray(..) |
hir::ExprCall(..) |
hir::ExprBreak(..) |
hir::ExprAgain(..) |
hir::ExprRet(..) |
- hir::ExprYield(..) |
+ hir::ExprYield(..) |
hir::ExprInlineAsm(..) |
hir::ExprRepeat(..) |
- hir::ExprTup(..) => {
+ hir::ExprTup(..) |
+ hir::ExprMethodCall(..) |
+ hir::ExprPath(..) |
+ hir::ExprStruct(..) |
+ hir::ExprField(..) => {
// For these we only hash the span when debuginfo is on.
- (false, NodeIdHashingMode::Ignore)
+ false
}
// For the following, spans might be significant because of
// panic messages indicating the source location.
hir::ExprBinary(op, ..) => {
- (hcx.binop_can_panic_at_runtime(op.node), NodeIdHashingMode::Ignore)
+ hcx.binop_can_panic_at_runtime(op.node)
}
hir::ExprUnary(op, _) => {
- (hcx.unop_can_panic_at_runtime(op), NodeIdHashingMode::Ignore)
+ hcx.unop_can_panic_at_runtime(op)
}
hir::ExprAssignOp(op, ..) => {
- (hcx.binop_can_panic_at_runtime(op.node), NodeIdHashingMode::Ignore)
+ hcx.binop_can_panic_at_runtime(op.node)
}
hir::ExprIndex(..) => {
- (true, NodeIdHashingMode::Ignore)
- }
- // For these we don't care about the span, but want to hash the
- // trait in scope
- hir::ExprMethodCall(..) |
- hir::ExprPath(..) |
- hir::ExprStruct(..) |
- hir::ExprField(..) => {
- (false, NodeIdHashingMode::HashTraitsInScope)
+ true
}
};
- hcx.with_node_id_hashing_mode(node_id_hashing_mode, |hcx| {
- id.hash_stable(hcx, hasher);
- });
-
if spans_always_on {
hcx.while_hashing_spans(true, |hcx| {
span.hash_stable(hcx, hasher);
ForLoop
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::MatchSource {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::MatchSource {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use hir::MatchSource;
Loop(loop_id_result)
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::Ident {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::Ident {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ast::Ident {
ref name,
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::TraitItem {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::TraitItem {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::TraitItem {
id,
Type(bounds, rhs)
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::ImplItem {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::ImplItem {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::ImplItem {
id,
Type(t)
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Visibility {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Visibility {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
// No fields to hash.
}
hir::Visibility::Restricted { ref path, id } => {
- hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashTraitsInScope, |hcx| {
+ hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
id.hash_stable(hcx, hasher);
});
path.hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Defaultness {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Defaultness {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
Negative
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Mod {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Mod {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::Mod {
inner,
Unit(id)
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Item {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Item {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
- let (node_id_hashing_mode, hash_spans) = match self.node {
+ let hash_spans = match self.node {
hir::ItemStatic(..) |
hir::ItemConst(..) |
hir::ItemFn(..) => {
- (NodeIdHashingMode::Ignore, hcx.hash_spans())
+ hcx.hash_spans()
}
- hir::ItemUse(..) => {
- (NodeIdHashingMode::HashTraitsInScope, false)
- }
-
+ hir::ItemUse(..) |
hir::ItemExternCrate(..) |
hir::ItemForeignMod(..) |
hir::ItemGlobalAsm(..) |
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemUnion(..) => {
- (NodeIdHashingMode::Ignore, false)
+ false
}
};
let hir::Item {
name,
ref attrs,
- id,
+ id: _,
hir_id: _,
ref node,
ref vis,
hcx.hash_hir_item_like(attrs, |hcx| {
hcx.while_hashing_spans(hash_spans, |hcx| {
- hcx.with_node_id_hashing_mode(node_id_hashing_mode, |hcx| {
- id.hash_stable(hcx, hasher);
- });
name.hash_stable(hcx, hasher);
attrs.hash_stable(hcx, hasher);
node.hash_stable(hcx, hasher);
defaultness
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for hir::AssociatedItemKind {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
hir_id
});
-impl_stable_hash_for!(struct hir::Body {
- arguments,
- value,
- is_generator
-});
-
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::BodyId {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Body {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
- if hcx.hash_bodies() {
- hcx.tcx().hir.body(*self).hash_stable(hcx, hasher);
- }
+ let hir::Body {
+ ref arguments,
+ ref value,
+ is_generator,
+ } = *self;
+
+ hcx.with_node_id_hashing_mode(NodeIdHashingMode::Ignore, |hcx| {
+ arguments.hash_stable(hcx, hasher);
+ value.hash_stable(hcx, hasher);
+ is_generator.hash_stable(hcx, hasher);
+ });
+ }
+}
+
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for hir::BodyId {
+ type KeyType = (DefPathHash, hir::ItemLocalId);
+
+ #[inline]
+ fn to_stable_hash_key(&self,
+ hcx: &StableHashingContext<'gcx>)
+ -> (DefPathHash, hir::ItemLocalId) {
+ let hir::BodyId { node_id } = *self;
+ node_id.to_stable_hash_key(hcx)
}
}
is_indirect
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::GlobalAsm {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::GlobalAsm {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::GlobalAsm {
asm,
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::InlineAsm {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::InlineAsm {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::InlineAsm {
asm,
NotConst
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for hir::def_id::DefIndex {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
- DefId::local(*self).hash_stable(hcx, hasher);
+ hcx.local_def_path_hash(*self).hash_stable(hcx, hasher);
+ }
+}
+
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>>
+for hir::def_id::DefIndex {
+ type KeyType = DefPathHash;
+
+ #[inline]
+ fn to_stable_hash_key(&self, hcx: &StableHashingContext<'gcx>) -> DefPathHash {
+ hcx.local_def_path_hash(*self)
}
}
span
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ::middle::lang_items::LangItem {
fn hash_stable<W: StableHasherResult>(&self,
- _: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ _: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
::std::hash::Hash::hash(self, hasher);
}
}
+
+impl_stable_hash_for!(struct ::middle::lang_items::LanguageItems {
+ items,
+ missing
+});
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
+for hir::TraitCandidate {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
+ let hir::TraitCandidate {
+ def_id,
+ import_id,
+ } = *self;
+
+ def_id.hash_stable(hcx, hasher);
+ import_id.hash_stable(hcx, hasher);
+ });
+ }
+}
+
+impl_stable_hash_for!(struct hir::Freevar {
+ def,
+ span
+});
impl_stable_hash_for!(struct mir::UpvarDecl { debug_name, by_ref });
impl_stable_hash_for!(struct mir::BasicBlockData<'tcx> { statements, terminator, is_cleanup });
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for mir::Terminator<'gcx> {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let mir::Terminator {
ref kind,
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Local {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Local {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use rustc_data_structures::indexed_vec::Idx;
self.index().hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::BasicBlock {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::BasicBlock {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use rustc_data_structures::indexed_vec::Idx;
self.index().hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Field {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Field {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use rustc_data_structures::indexed_vec::Idx;
self.index().hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for mir::VisibilityScope {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use rustc_data_structures::indexed_vec::Idx;
self.index().hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Promoted {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Promoted {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use rustc_data_structures::indexed_vec::Idx;
self.index().hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for mir::TerminatorKind<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for mir::AssertMessage<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
impl_stable_hash_for!(struct mir::Statement<'tcx> { source_info, kind });
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for mir::StatementKind<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx, T> HashStable<StableHashingContext<'gcx>>
for mir::ValidationOperand<'gcx, T>
- where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+ where T: HashStable<StableHashingContext<'gcx>>
{
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>)
{
self.lval.hash_stable(hcx, hasher);
impl_stable_hash_for!(enum mir::ValidationOp { Acquire, Release, Suspend(region_scope) });
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Lvalue<'gcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Lvalue<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
}
}
-impl<'a, 'gcx, 'tcx, B, V, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx, B, V, T> HashStable<StableHashingContext<'gcx>>
for mir::Projection<'gcx, B, V, T>
- where B: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
- V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
- T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+ where B: HashStable<StableHashingContext<'gcx>>,
+ V: HashStable<StableHashingContext<'gcx>>,
+ T: HashStable<StableHashingContext<'gcx>>
{
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let mir::Projection {
ref base,
}
}
-impl<'a, 'gcx, 'tcx, V, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx, V, T> HashStable<StableHashingContext<'gcx>>
for mir::ProjectionElem<'gcx, V, T>
- where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
- T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+ where V: HashStable<StableHashingContext<'gcx>>,
+ T: HashStable<StableHashingContext<'gcx>>
{
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
impl_stable_hash_for!(struct mir::VisibilityScopeData { span, parent_scope });
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Operand<'gcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Operand<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Rvalue<'gcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Rvalue<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
Unsize
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for mir::AggregateKind<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
impl_stable_hash_for!(struct mir::Constant<'tcx> { span, ty, literal });
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Literal<'gcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Literal<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains `HashStable` implementations for various data types
+//! that don't fit into any of the other impls_xxx modules.
+
+impl_stable_hash_for!(enum ::session::search_paths::PathKind {
+ Native,
+ Crate,
+ Dependency,
+ Framework,
+ ExternFlag,
+ All
+});
+
+impl_stable_hash_for!(enum ::rustc_back::PanicStrategy {
+ Abort,
+ Unwind
+});
use syntax::ast;
use syntax::parse::token;
+use syntax::symbol::InternedString;
use syntax::tokenstream;
-use syntax_pos::{Span, FileMap};
+use syntax_pos::FileMap;
use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
- StableHasherResult};
+use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
+ StableHasher, StableHasherResult};
use rustc_data_structures::accumulate_vec::AccumulateVec;
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
-for ::syntax::symbol::InternedString {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for InternedString {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let s: &str = &**self;
s.hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::Name {
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for InternedString {
+ type KeyType = InternedString;
+
+ #[inline]
+ fn to_stable_hash_key(&self,
+ _: &StableHashingContext<'gcx>)
+ -> InternedString {
+ self.clone()
+ }
+}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::Name {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
self.as_str().hash_stable(hcx, hasher);
}
}
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for ast::Name {
+ type KeyType = InternedString;
+
+ #[inline]
+ fn to_stable_hash_key(&self,
+ _: &StableHashingContext<'gcx>)
+ -> InternedString {
+ self.as_str()
+ }
+}
+
impl_stable_hash_for!(enum ::syntax::ast::AsmDialect {
Att,
Intel
rustc_const_unstable
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ::syntax::attr::StabilityLevel {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
impl_stable_hash_for!(enum ::syntax::ast::StrStyle { Cooked, Raw(pounds) });
impl_stable_hash_for!(enum ::syntax::ast::AttrStyle { Outer, Inner });
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for [ast::Attribute] {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for [ast::Attribute] {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
+ if self.len() == 0 {
+ self.len().hash_stable(hcx, hasher);
+ return
+ }
+
// Some attributes are always ignored during hashing.
let filtered: AccumulateVec<[&ast::Attribute; 8]> = self
.iter()
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::Attribute {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::Attribute {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
// Make sure that these have been filtered out.
debug_assert!(self.name().map(|name| !hcx.is_ignored_attr(name)).unwrap_or(true));
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for tokenstream::TokenTree {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
tokenstream::TokenTree::Token(span, ref token) => {
span.hash_stable(hcx, hasher);
- hash_token(token, hcx, hasher, span);
+ hash_token(token, hcx, hasher);
}
tokenstream::TokenTree::Delimited(span, ref delimited) => {
span.hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for tokenstream::TokenStream {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
for sub_tt in self.trees() {
sub_tt.hash_stable(hcx, hasher);
}
}
-fn hash_token<'a, 'gcx, 'tcx, W: StableHasherResult>(token: &token::Token,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
- hasher: &mut StableHasher<W>,
- error_reporting_span: Span) {
+fn hash_token<'gcx, W: StableHasherResult>(token: &token::Token,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
mem::discriminant(token).hash_stable(hcx, hasher);
match *token {
token::Token::Eq |
token::Token::Ident(ident) |
token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
- token::Token::Interpolated(ref non_terminal) => {
- // FIXME(mw): This could be implemented properly. It's just a
- // lot of work, since we would need to hash the AST
- // in a stable way, in addition to the HIR.
- // Since this is hardly used anywhere, just emit a
- // warning for now.
- if hcx.tcx().sess.opts.debugging_opts.incremental.is_some() {
- let msg = format!("Quasi-quoting might make incremental \
- compilation very inefficient: {:?}",
- non_terminal);
- hcx.tcx().sess.span_warn(error_reporting_span, &msg[..]);
- }
-
- std_hash::Hash::hash(non_terminal, hasher);
+ token::Token::Interpolated(_) => {
+ bug!("interpolated tokens should not be present in the HIR")
}
token::Token::DocComment(val) |
NameValue(lit)
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for FileMap {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for FileMap {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let FileMap {
ref name,
//! This module contains `HashStable` implementations for various data types
//! from rustc::ty in no particular order.
-use ich::StableHashingContext;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
- StableHasherResult};
+use ich::{StableHashingContext, NodeIdHashingMode};
+use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
+ StableHasher, StableHasherResult};
use std::hash as std_hash;
use std::mem;
use middle::region;
+use traits;
use ty;
-impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx, T> HashStable<StableHashingContext<'gcx>>
for &'gcx ty::Slice<T>
- where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> {
+ where T: HashStable<StableHashingContext<'gcx>> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
(&self[..]).hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ty::subst::Kind<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
self.as_type().hash_stable(hcx, hasher);
self.as_region().hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ty::RegionKind {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ty::adjustment::AutoBorrow<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ty::adjustment::Adjust<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
impl_stable_hash_for!(struct ty::adjustment::Adjustment<'tcx> { kind, target });
impl_stable_hash_for!(struct ty::adjustment::OverloadedDeref<'tcx> { region, mutbl });
-impl_stable_hash_for!(struct ty::UpvarId { var_id, closure_expr_id });
impl_stable_hash_for!(struct ty::UpvarBorrow<'tcx> { kind, region });
+impl_stable_hash_for!(struct ty::UpvarId { var_id, closure_expr_id });
+
impl_stable_hash_for!(enum ty::BorrowKind {
ImmBorrow,
UniqueImmBorrow,
MutBorrow
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ty::UpvarCapture<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
abi
});
-impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::Binder<T>
- where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx, T> HashStable<StableHashingContext<'gcx>> for ty::Binder<T>
+ where T: HashStable<StableHashingContext<'gcx>>
{
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ty::Binder(ref inner) = *self;
inner.hash_stable(hcx, hasher);
impl_stable_hash_for!(tuple_struct ty::EquatePredicate<'tcx> { t1, t2 });
impl_stable_hash_for!(struct ty::SubtypePredicate<'tcx> { a_is_expected, a, b });
-impl<'a, 'gcx, 'tcx, A, B> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx, A, B> HashStable<StableHashingContext<'gcx>>
for ty::OutlivesPredicate<A, B>
- where A: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
- B: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
+ where A: HashStable<StableHashingContext<'gcx>>,
+ B: HashStable<StableHashingContext<'gcx>>,
{
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ty::OutlivesPredicate(ref a, ref b) = *self;
a.hash_stable(hcx, hasher);
impl_stable_hash_for!(struct ty::ProjectionTy<'tcx> { substs, item_def_id });
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::Predicate<'gcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::Predicate<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::AdtFlags {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::AdtFlags {
fn hash_stable<W: StableHasherResult>(&self,
- _: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ _: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
std_hash::Hash::hash(self, hasher);
}
vis
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ::middle::const_val::ConstVal<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use middle::const_val::ConstVal::*;
use middle::const_val::ConstAggregate::*;
}
Function(def_id, substs) => {
def_id.hash_stable(hcx, hasher);
- substs.hash_stable(hcx, hasher);
+ hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
+ substs.hash_stable(hcx, hasher);
+ });
}
Aggregate(Struct(ref name_values)) => {
let mut values = name_values.to_vec();
val
});
+impl_stable_hash_for!(struct ::middle::const_val::ConstEvalErr<'tcx> {
+ span,
+ kind
+});
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
+for ::middle::const_val::ErrKind<'gcx> {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ use middle::const_val::ErrKind::*;
+
+ mem::discriminant(self).hash_stable(hcx, hasher);
+
+ match *self {
+ CannotCast |
+ MissingStructField |
+ NonConstPath |
+ ExpectedConstTuple |
+ ExpectedConstStruct |
+ IndexedNonVec |
+ IndexNotUsize |
+ MiscBinaryOp |
+ MiscCatchAll |
+ IndexOpFeatureGated |
+ TypeckError => {
+ // nothing to do
+ }
+ UnimplementedConstVal(s) => {
+ s.hash_stable(hcx, hasher);
+ }
+ IndexOutOfBounds { len, index } => {
+ len.hash_stable(hcx, hasher);
+ index.hash_stable(hcx, hasher);
+ }
+ Math(ref const_math_err) => {
+ const_math_err.hash_stable(hcx, hasher);
+ }
+ LayoutError(ref layout_error) => {
+ layout_error.hash_stable(hcx, hasher);
+ }
+ ErroneousReferencedConstant(ref const_val) => {
+ const_val.hash_stable(hcx, hasher);
+ }
+ }
+ }
+}
+
impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs });
impl_stable_hash_for!(struct ty::GeneratorInterior<'tcx> { witness });
Struct(index)
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::Generics {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::Generics {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ty::Generics {
parent,
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ty::RegionParameterDef {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ty::RegionParameterDef {
name,
pure_wrt_drop
});
-
-impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx, T> HashStable<StableHashingContext<'gcx>>
for ::middle::resolve_lifetime::Set1<T>
- where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+ where T: HashStable<StableHashingContext<'gcx>>
{
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use middle::resolve_lifetime::Set1;
FnPtrAddrCast
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
-for region::Scope
-{
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
- hasher: &mut StableHasher<W>) {
- mem::discriminant(self).hash_stable(hcx, hasher);
- match *self {
- region::Scope::Node(node_id) |
- region::Scope::Destruction(node_id) => {
- node_id.hash_stable(hcx, hasher);
- }
- region::Scope::CallSite(body_id) |
- region::Scope::Arguments(body_id) => {
- body_id.hash_stable(hcx, hasher);
- }
- region::Scope::Remainder(block_remainder) => {
- block_remainder.hash_stable(hcx, hasher);
- }
- }
+impl_stable_hash_for!(enum ::middle::region::Scope {
+ Node(local_id),
+ Destruction(local_id),
+ CallSite(local_id),
+ Arguments(local_id),
+ Remainder(block_remainder)
+});
+
+impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for region::Scope {
+ type KeyType = region::Scope;
+
+ #[inline]
+ fn to_stable_hash_key(&self, _: &StableHashingContext<'gcx>) -> region::Scope {
+ *self
}
}
BrEnv
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ty::TypeVariants<'gcx>
{
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
use ty::TypeVariants::*;
TyBool |
TyChar |
TyStr |
+ TyError |
TyNever => {
// Nothing more to hash.
}
TyParam(param_ty) => {
param_ty.hash_stable(hcx, hasher);
}
-
- TyError |
TyInfer(..) => {
- bug!("ty::TypeVariants::hash_stable() - Unexpected variant.")
+ bug!("ty::TypeVariants::hash_stable() - Unexpected variant {:?}.", *self)
}
}
}
mutbl
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ty::ExistentialPredicate<'gcx>
{
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
ty
});
-impl_stable_hash_for!(enum ty::fast_reject::SimplifiedType {
- BoolSimplifiedType,
- CharSimplifiedType,
- IntSimplifiedType(int_ty),
- UintSimplifiedType(int_ty),
- FloatSimplifiedType(float_ty),
- AdtSimplifiedType(def_id),
- StrSimplifiedType,
- ArraySimplifiedType,
- PtrSimplifiedType,
- NeverSimplifiedType,
- TupleSimplifiedType(size),
- TraitSimplifiedType(def_id),
- ClosureSimplifiedType(def_id),
- GeneratorSimplifiedType(def_id),
- AnonSimplifiedType(def_id),
- FunctionSimplifiedType(params),
- ParameterSimplifiedType
-});
-
impl_stable_hash_for!(struct ty::Instance<'tcx> {
def,
substs
});
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::InstanceDef<'gcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::InstanceDef<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
}
}
+// Manual impl so we can destructure and deliberately skip `def_id`.
+// The exhaustive destructuring makes adding a new `TraitDef` field a
+// compile error here, forcing a decision about whether to hash it.
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::TraitDef {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ let ty::TraitDef {
+ // We already have the def_path_hash below, no need to hash it twice
+ def_id: _,
+ unsafety,
+ paren_sugar,
+ has_default_impl,
+ def_path_hash,
+ } = *self;
+
+ unsafety.hash_stable(hcx, hasher);
+ paren_sugar.hash_stable(hcx, hasher);
+ has_default_impl.hash_stable(hcx, hasher);
+ def_path_hash.hash_stable(hcx, hasher);
+ }
+}
+
+impl_stable_hash_for!(struct ty::Destructor {
+ did
+});
+
+impl_stable_hash_for!(struct ty::DtorckConstraint<'tcx> {
+ outlives,
+ dtorck_types
+});
+
+
+// Manual impl so that `empty_variance` (a convenience default value, not
+// semantic data) can be excluded from the stable hash.
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::CrateVariancesMap {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ let ty::CrateVariancesMap {
+ ref dependencies,
+ ref variances,
+ // This is just an irrelevant helper value.
+ empty_variance: _,
+ } = *self;
+
+ dependencies.hash_stable(hcx, hasher);
+ variances.hash_stable(hcx, hasher);
+ }
+}
+
+impl_stable_hash_for!(struct ty::AssociatedItem {
+ def_id,
+ name,
+ kind,
+ vis,
+ defaultness,
+ container,
+ method_has_self_argument
+});
+
+impl_stable_hash_for!(enum ty::AssociatedKind {
+ Const,
+ Method,
+ Type
+});
+
+impl_stable_hash_for!(enum ty::AssociatedItemContainer {
+ TraitContainer(def_id),
+ ImplContainer(def_id)
+});
+
+
+// A `Steal<T>` hashes exactly like the `T` it wraps.
+// NOTE(review): `borrow()` presumably panics if the value was already
+// stolen — confirm against `ty::steal::Steal`.
+impl<'gcx, T> HashStable<StableHashingContext<'gcx>>
+for ty::steal::Steal<T>
+ where T: HashStable<StableHashingContext<'gcx>>
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ self.borrow().hash_stable(hcx, hasher);
+ }
+}
+
+impl_stable_hash_for!(struct ty::ParamEnv<'tcx> {
+ caller_bounds,
+ reveal
+});
+
+impl_stable_hash_for!(enum traits::Reveal {
+ UserFacing,
+ All
+});
+
+impl_stable_hash_for!(enum ::middle::privacy::AccessLevel {
+ Reachable,
+ Exported,
+ Public
+});
+
+// Manual impl: the map is keyed by node-ids, so we switch to `HashDefPath`
+// mode to make the hash independent of the numeric node-id assignment.
+impl<'gcx> HashStable<StableHashingContext<'gcx>>
+for ::middle::privacy::AccessLevels {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
+ let ::middle::privacy::AccessLevels {
+ ref map
+ } = *self;
+
+ map.hash_stable(hcx, hasher);
+ });
+ }
+}
+
+impl_stable_hash_for!(struct ty::CrateInherentImpls {
+ inherent_impls
+});
+
+impl_stable_hash_for!(enum ::session::CompileIncomplete {
+ Stopped,
+ Errored(error_reported)
+});
+
+impl_stable_hash_for!(struct ::util::common::ErrorReported {});
+
+impl_stable_hash_for!(tuple_struct ::middle::reachable::ReachableSet {
+ reachable_set
+});
pub use self::fingerprint::Fingerprint;
pub use self::caching_codemap_view::CachingCodemapView;
-pub use self::hcx::{StableHashingContext, NodeIdHashingMode, hash_stable_hashmap,
- hash_stable_hashset, hash_stable_nodemap,
- hash_stable_btreemap, hash_stable_itemlocalmap};
+pub use self::hcx::{StableHashingContext, NodeIdHashingMode,
+ hash_stable_trait_impls};
mod fingerprint;
mod caching_codemap_view;
mod hcx;
mod impls_cstore;
mod impls_hir;
mod impls_mir;
+mod impls_misc;
mod impls_ty;
mod impls_syntax;
// region at the right depth with the same index
(Some(rl::Region::EarlyBound(_, id)), ty::BrNamed(def_id, _)) => {
debug!("EarlyBound self.infcx.tcx.hir.local_def_id(id)={:?} \
- def_id={:?}",
- self.infcx.tcx.hir.local_def_id(id),
- def_id);
- if self.infcx.tcx.hir.local_def_id(id) == def_id {
+ def_id={:?}", id, def_id);
+ if id == def_id {
self.found_type = Some(arg);
return; // we can stop visiting now
}
(Some(rl::Region::LateBound(debruijn_index, id)), ty::BrNamed(def_id, _)) => {
debug!("FindNestedTypeVisitor::visit_ty: LateBound depth = {:?}",
debruijn_index.depth);
- debug!("self.infcx.tcx.hir.local_def_id(id)={:?}",
- self.infcx.tcx.hir.local_def_id(id));
+ debug!("self.infcx.tcx.hir.local_def_id(id)={:?}", id);
debug!("def_id={:?}", def_id);
- if debruijn_index.depth == self.depth &&
- self.infcx.tcx.hir.local_def_id(id) == def_id {
+ if debruijn_index.depth == self.depth && id == def_id {
self.found_type = Some(arg);
return; // we can stop visiting now
}
found_it: false,
bound_region: self.bound_region,
hir_map: self.hir_map,
+ depth: self.depth,
};
intravisit::walk_ty(subvisitor, arg); // call walk_ty; as visit_ty is empty,
// this will visit only outermost type
hir_map: &'a hir::map::Map<'gcx>,
found_it: bool,
bound_region: ty::BoundRegion,
+ depth: u32,
}
impl<'a, 'gcx, 'tcx> Visitor<'gcx> for TyPathVisitor<'a, 'gcx, 'tcx> {
}
fn visit_lifetime(&mut self, lifetime: &hir::Lifetime) {
- let br_index = match self.bound_region {
- ty::BrAnon(index) => index,
- _ => return,
- };
let hir_id = self.infcx.tcx.hir.node_to_hir_id(lifetime.id);
- match self.infcx.tcx.named_region(hir_id) {
+ match (self.infcx.tcx.named_region(hir_id), self.bound_region) {
// the lifetime of the TyPath!
- Some(rl::Region::LateBoundAnon(debruijn_index, anon_index)) => {
- if debruijn_index.depth == 1 && anon_index == br_index {
+ (Some(rl::Region::LateBoundAnon(debruijn_index, anon_index)), ty::BrAnon(br_index)) => {
+ if debruijn_index.depth == self.depth && anon_index == br_index {
+ self.found_it = true;
+ return;
+ }
+ }
+
+ (Some(rl::Region::EarlyBound(_, id)), ty::BrNamed(def_id, _)) => {
+ debug!("EarlyBound self.infcx.tcx.hir.local_def_id(id)={:?} \
+ def_id={:?}", id, def_id);
+ if id == def_id {
self.found_it = true;
+ return; // we can stop visiting now
}
}
- Some(rl::Region::Static) |
- Some(rl::Region::EarlyBound(_, _)) |
- Some(rl::Region::LateBound(_, _)) |
- Some(rl::Region::Free(_, _)) |
- None => {
+
+ (Some(rl::Region::LateBound(debruijn_index, id)), ty::BrNamed(def_id, _)) => {
+ debug!("FindNestedTypeVisitor::visit_ty: LateBound depth = {:?}",
+ debruijn_index.depth);
+ debug!("id={:?}", id);
+ debug!("def_id={:?}", def_id);
+ if debruijn_index.depth == self.depth && id == def_id {
+ self.found_it = true;
+ return; // we can stop visiting now
+ }
+ }
+
+ (Some(rl::Region::Static), _) |
+ (Some(rl::Region::EarlyBound(_, _)), _) |
+ (Some(rl::Region::LateBound(_, _)), _) |
+ (Some(rl::Region::LateBoundAnon(_, _)), _) |
+ (Some(rl::Region::Free(_, _)), _) |
+ (None, _) => {
debug!("no arg found");
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! The Rust compiler.
+//! The "main crate" of the Rust compiler. This crate contains common
+//! type definitions that are used by the other crates in the rustc
+//! "family". Some prominent examples (note that each of these modules
+//! has their own README with further details).
+//!
+//! - **HIR.** The "high-level (H) intermediate representation (IR)" is
+//! defined in the `hir` module.
+//! - **MIR.** The "mid-level (M) intermediate representation (IR)" is
+//! defined in the `mir` module. This module contains only the
+//! *definition* of the MIR; the passes that transform and operate
+//! on MIR are found in `librustc_mir` crate.
+//! - **Types.** The internal representation of types used in rustc is
+//! defined in the `ty` module. This includes the **type context**
+//! (or `tcx`), which is the central context during most of
+//! compilation, containing the interners and other things.
+//! - **Traits.** Trait resolution is implemented in the `traits` module.
+//! - **Type inference.** The type inference code can be found in the `infer` module;
+//! this code handles low-level equality and subtyping operations. The
+//! type check pass in the compiler is found in the `librustc_typeck` crate.
+//!
+//! For a deeper explanation of how the compiler works and is
+//! organized, see the README.md file in this directory.
//!
//! # Note
//!
#![recursion_limit="256"]
extern crate arena;
+#[macro_use] extern crate bitflags;
extern crate core;
extern crate fmt_macros;
extern crate getopts;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
extern crate syntax_pos;
-#[macro_use] #[no_link] extern crate rustc_bitflags;
extern crate jobserver;
extern crate serialize as rustc_serialize; // used by deriving
pub mod dependency_format;
pub mod effect;
pub mod entry;
+ pub mod exported_symbols;
pub mod free_region;
pub mod intrinsicck;
pub mod lang_items;
pub mod recursion_limit;
pub mod resolve_lifetime;
pub mod stability;
+ pub mod trans;
pub mod weak_lang_items;
}
use errors::DiagnosticBuilder;
use hir::HirId;
+use ich::StableHashingContext;
use lint::builtin;
use lint::context::CheckLintNameResult;
use lint::{self, Lint, LintId, Level, LintSource};
+use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
+ StableHasher, StableHasherResult};
use session::Session;
use syntax::ast;
use syntax::attr;
})
}
}
+
+// Manual impl: destructures both `LintLevelMap` and its `LintLevelSets`
+// exhaustively, and hashes the set list by position. Span hashing is
+// force-enabled — NOTE(review): presumably because the spans in lint
+// specs affect diagnostics output; confirm.
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for LintLevelMap {
+ #[inline]
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ let LintLevelMap {
+ ref sets,
+ ref id_to_set,
+ } = *self;
+
+ id_to_set.hash_stable(hcx, hasher);
+
+ let LintLevelSets {
+ ref list,
+ lint_cap,
+ } = *sets;
+
+ lint_cap.hash_stable(hcx, hasher);
+
+ hcx.while_hashing_spans(true, |hcx| {
+ list.len().hash_stable(hcx, hasher);
+
+ // We are working under the assumption here that the list of
+ // lint-sets is built in a deterministic order.
+ for lint_set in list {
+ ::std::mem::discriminant(lint_set).hash_stable(hcx, hasher);
+
+ match *lint_set {
+ LintSet::CommandLine { ref specs } => {
+ specs.hash_stable(hcx, hasher);
+ }
+ LintSet::Node { ref specs, parent } => {
+ specs.hash_stable(hcx, hasher);
+ parent.hash_stable(hcx, hasher);
+ }
+ }
+ }
+ })
+ }
+}
+
+// A `LintId` is hashed via its stable lint name string, not any runtime
+// identity, so the hash is reproducible across compiler invocations.
+impl<HCX> HashStable<HCX> for LintId {
+ #[inline]
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut HCX,
+ hasher: &mut StableHasher<W>) {
+ self.lint_name_raw().hash_stable(hcx, hasher);
+ }
+}
+
+// Likewise, the stable sort key for `LintId` is the lint's name.
+impl<HCX> ToStableHashKey<HCX> for LintId {
+ type KeyType = &'static str;
+
+ #[inline]
+ fn to_stable_hash_key(&self, _: &HCX) -> &'static str {
+ self.lint_name_raw()
+ }
+}
}
}
+ pub fn lint_name_raw(&self) -> &'static str {
+ self.lint.name
+ }
+
/// Get the name of the lint.
pub fn to_string(&self) -> String {
self.lint.name_lower()
Allow, Warn, Deny, Forbid
}
+impl_stable_hash_for!(enum self::Level {
+ Allow,
+ Warn,
+ Deny,
+ Forbid
+});
+
impl Level {
/// Convert a level to a lower-case string.
pub fn as_str(self) -> &'static str {
CommandLine(Symbol),
}
+impl_stable_hash_for!(enum self::LintSource {
+ Default,
+ Node(name, span),
+ CommandLine(text)
+});
+
pub type LevelSource = (Level, LintSource);
pub mod builtin;
#[macro_export]
macro_rules! impl_stable_hash_for {
(enum $enum_name:path { $( $variant:ident $( ( $($arg:ident),* ) )* ),* }) => {
- impl<'a, 'tcx, 'lcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx, 'lcx>> for $enum_name {
+ impl<'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'tcx>> for $enum_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
- __ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx, 'lcx>,
+ __ctx: &mut $crate::ich::StableHashingContext<'tcx>,
__hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
use $enum_name::*;
::std::mem::discriminant(self).hash_stable(__ctx, __hasher);
}
};
(struct $struct_name:path { $($field:ident),* }) => {
- impl<'a, 'tcx, 'lcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx, 'lcx>> for $struct_name {
+ impl<'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'tcx>> for $struct_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
- __ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx, 'lcx>,
+ __ctx: &mut $crate::ich::StableHashingContext<'tcx>,
__hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
let $struct_name {
$(ref $field),*
}
};
(tuple_struct $struct_name:path { $($field:ident),* }) => {
- impl<'a, 'tcx, 'lcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx, 'lcx>> for $struct_name {
+ impl<'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'tcx>> for $struct_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
- __ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx, 'lcx>,
+ __ctx: &mut $crate::ich::StableHashingContext<'tcx>,
__hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
let $struct_name (
$(ref $field),*
macro_rules! impl_stable_hash_for_spanned {
($T:path) => (
- impl<'a, 'tcx, 'lcx> HashStable<StableHashingContext<'a, 'tcx, 'lcx>> for ::syntax::codemap::Spanned<$T>
+ impl<'tcx> HashStable<StableHashingContext<'tcx>> for ::syntax::codemap::Spanned<$T>
{
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'tcx, 'lcx>,
+ hcx: &mut StableHashingContext<'tcx>,
hasher: &mut StableHasher<W>) {
self.node.hash_stable(hcx, hasher);
self.span.hash_stable(hcx, hasher);
//! are *mostly* used as a part of that interface, but these should
//! probably get a better home if someone can find one.
+use hir;
use hir::def;
use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
use hir::map as hir_map;
use util::nodemap::NodeSet;
use std::any::Any;
+use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use owning_ref::ErasedBoxRef;
pub kind: NativeLibraryKind,
pub name: Symbol,
pub cfg: Option<ast::MetaItem>,
- pub foreign_items: Vec<DefIndex>,
+ pub foreign_items: Vec<DefId>,
}
pub enum LoadedMacro {
-> Result<ErasedBoxRef<[u8]>, String>;
}
+/// A constant's HIR body originating from another crate, paired with the
+/// stable-hash fingerprint that was computed for it at export time.
+#[derive(Clone)]
+pub struct ExternConstBody<'tcx> {
+ pub body: &'tcx hir::Body,
+
+ // It would require a lot of infrastructure to enable stable-hashing Bodies
+ // from other crates, so we hash on export and just store the fingerprint
+ // with them.
+ pub fingerprint: ich::Fingerprint,
+}
+
+/// Nested HIR bodies imported from another crate, again carrying the
+/// fingerprint computed when they were exported.
+#[derive(Clone)]
+pub struct ExternBodyNestedBodies {
+ pub nested_bodies: Rc<BTreeMap<hir::BodyId, hir::Body>>,
+
+ // It would require a lot of infrastructure to enable stable-hashing Bodies
+ // from other crates, so we hash on export and just store the fingerprint
+ // with them.
+ pub fingerprint: ich::Fingerprint,
+}
+
/// A store of Rust crates, through with their metadata
/// can be accessed.
///
// In order to get this left-to-right dependency ordering, we perform a
// topological sort of all crates putting the leaves at the right-most
// positions.
-pub fn used_crates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)> {
+pub fn used_crates(tcx: TyCtxt, prefer: LinkagePreference)
+ -> Vec<(CrateNum, LibSource)>
+{
let mut libs = tcx.crates()
.iter()
.cloned()
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// The SymbolExportLevel of a symbol specifies from which kinds of crates
+/// the symbol will be exported. `C` symbols will be exported from any
+/// kind of crate, including cdylibs which export very few things.
+/// `Rust` will only be exported if the crate produced is a Rust
+/// dylib.
+#[derive(Eq, PartialEq, Debug, Copy, Clone)]
+pub enum SymbolExportLevel {
+ C,
+ Rust,
+}
+
+impl_stable_hash_for!(enum self::SymbolExportLevel {
+ C,
+ Rust
+});
+
+impl SymbolExportLevel {
+ // Returns true if a symbol at `self`'s level is exported when the
+ // crate's export threshold is `threshold`: a `Rust` threshold exports
+ // everything, a `C` threshold exports only `C`-level symbols.
+ pub fn is_below_threshold(self, threshold: SymbolExportLevel) -> bool {
+ if threshold == SymbolExportLevel::Rust {
+ // We export everything from Rust dylibs
+ true
+ } else {
+ self == SymbolExportLevel::C
+ }
+ }
+}
pub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LanguageItems {
let mut collector = LanguageItemCollector::new(tcx);
for &cnum in tcx.crates().iter() {
- for &(index, item_index) in tcx.defined_lang_items(cnum).iter() {
- let def_id = DefId { krate: cnum, index: index };
+ for &(def_id, item_index) in tcx.defined_lang_items(cnum).iter() {
collector.collect_item(item_index, def_id);
}
}
} else {
false
};
- let is_extern = attr::contains_extern_indicator(&self.tcx.sess.diagnostic(),
- &item.attrs);
+ let def_id = self.tcx.hir.local_def_id(item.id);
+ let is_extern = self.tcx.contains_extern_indicator(def_id);
if reachable || is_extern {
self.reachable_symbols.insert(search_item);
}
}
}
-pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Rc<NodeSet> {
- tcx.reachable_set(LOCAL_CRATE)
-}
+// We introduce a new-type here, so we can have a specialized HashStable
+// implementation for it.
+/// The set of locally reachable symbols (node-ids), shared via `Rc`.
+#[derive(Clone)]
+pub struct ReachableSet(pub Rc<NodeSet>);
+
-fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> Rc<NodeSet> {
+fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> ReachableSet {
debug_assert!(crate_num == LOCAL_CRATE);
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
reachable_context.propagate();
// Return the set of reachable symbols.
- Rc::new(reachable_context.reachable_symbols)
+ ReachableSet(Rc::new(reachable_context.reachable_symbols))
}
pub fn provide(providers: &mut Providers) {
//! Most of the documentation on regions can be found in
//! `middle/infer/region_inference/README.md`
+use ich::{StableHashingContext, NodeIdHashingMode};
use util::nodemap::{FxHashMap, FxHashSet};
use ty;
use hir::intravisit::{self, Visitor, NestedVisitorMap};
use hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local};
use mir::transform::MirSource;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
+ StableHasherResult};
/// Scope represents a statically-describable scope that can be
/// used to bound the lifetime/region for values.
..*providers
};
}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ScopeTree {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ let ScopeTree {
+ root_body,
+ root_parent,
+ ref parent_map,
+ ref var_map,
+ ref destruction_scopes,
+ ref rvalue_scopes,
+ ref closure_tree,
+ ref yield_in_scope,
+ } = *self;
+
+ hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
+ root_body.hash_stable(hcx, hasher);
+ root_parent.hash_stable(hcx, hasher);
+ });
+
+ parent_map.hash_stable(hcx, hasher);
+ var_map.hash_stable(hcx, hasher);
+ destruction_scopes.hash_stable(hcx, hasher);
+ rvalue_scopes.hash_stable(hcx, hasher);
+ closure_tree.hash_stable(hcx, hasher);
+ yield_in_scope.hash_stable(hcx, hasher);
+ }
+}
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub enum Region {
Static,
- EarlyBound(/* index */ u32, /* lifetime decl */ ast::NodeId),
- LateBound(ty::DebruijnIndex, /* lifetime decl */ ast::NodeId),
+ EarlyBound(/* index */ u32, /* lifetime decl */ DefId),
+ LateBound(ty::DebruijnIndex, /* lifetime decl */ DefId),
LateBoundAnon(ty::DebruijnIndex, /* anon index */ u32),
- Free(DefId, /* lifetime decl */ ast::NodeId),
+ Free(DefId, /* lifetime decl */ DefId),
}
impl Region {
- fn early(index: &mut u32, def: &hir::LifetimeDef) -> (ast::Name, Region) {
+ fn early(hir_map: &Map, index: &mut u32, def: &hir::LifetimeDef) -> (ast::Name, Region) {
let i = *index;
*index += 1;
- (def.lifetime.name, Region::EarlyBound(i, def.lifetime.id))
+ let def_id = hir_map.local_def_id(def.lifetime.id);
+ (def.lifetime.name, Region::EarlyBound(i, def_id))
}
- fn late(def: &hir::LifetimeDef) -> (ast::Name, Region) {
+ fn late(hir_map: &Map, def: &hir::LifetimeDef) -> (ast::Name, Region) {
let depth = ty::DebruijnIndex::new(1);
- (def.lifetime.name, Region::LateBound(depth, def.lifetime.id))
+ let def_id = hir_map.local_def_id(def.lifetime.id);
+ (def.lifetime.name, Region::LateBound(depth, def_id))
}
fn late_anon(index: &Cell<u32>) -> Region {
Region::LateBoundAnon(depth, i)
}
- fn id(&self) -> Option<ast::NodeId> {
+ fn id(&self) -> Option<DefId> {
match *self {
Region::Static |
Region::LateBoundAnon(..) => None,
0
};
let lifetimes = generics.lifetimes.iter().map(|def| {
- Region::early(&mut index, def)
+ Region::early(self.hir_map, &mut index, def)
}).collect();
let scope = Scope::Binder {
lifetimes,
match ty.node {
hir::TyBareFn(ref c) => {
let scope = Scope::Binder {
- lifetimes: c.lifetimes.iter().map(Region::late).collect(),
+ lifetimes: c.lifetimes.iter().map(|def| {
+ Region::late(self.hir_map, def)
+ }).collect(),
s: self.scope
};
self.with(scope, |old_scope, this| {
if !bound_lifetimes.is_empty() {
self.trait_ref_hack = true;
let scope = Scope::Binder {
- lifetimes: bound_lifetimes.iter().map(Region::late).collect(),
+ lifetimes: bound_lifetimes.iter().map(|def| {
+ Region::late(self.hir_map, def)
+ }).collect(),
s: self.scope
};
let result = self.with(scope, |old_scope, this| {
"nested quantification of lifetimes");
}
let scope = Scope::Binder {
- lifetimes: trait_ref.bound_lifetimes.iter().map(Region::late).collect(),
+ lifetimes: trait_ref.bound_lifetimes.iter().map(|def| {
+ Region::late(self.hir_map, def)
+ }).collect(),
s: self.scope
};
self.with(scope, |old_scope, this| {
Scope::Binder { ref lifetimes, s } => {
// FIXME (#24278): non-hygienic comparison
if let Some(def) = lifetimes.get(&label) {
+ let node_id = hir_map.as_local_node_id(def.id().unwrap())
+ .unwrap();
+
signal_shadowing_problem(
sess,
label,
- original_lifetime(hir_map.span(def.id().unwrap())),
+ original_lifetime(hir_map.span(node_id)),
shadower_label(label_span));
return;
}
generics.lifetimes.iter().enumerate().find(|&(_, def)| {
def.lifetime.name == name
}).map_or(Set1::Many, |(i, def)| {
- Set1::One(Region::EarlyBound(i as u32, def.lifetime.id))
+ let def_id = hir_map.local_def_id(def.lifetime.id);
+ Set1::One(Region::EarlyBound(i as u32, def_id))
})
}
}
let lifetimes = generics.lifetimes.iter().map(|def| {
if self.map.late_bound.contains(&def.lifetime.id) {
- Region::late(def)
+ Region::late(self.hir_map, def)
} else {
- Region::early(&mut index, def)
+ Region::early(self.hir_map, &mut index, def)
}
}).collect();
Scope::Binder { ref lifetimes, s } => {
if let Some(&def) = lifetimes.get(&lifetime.name) {
+ let node_id = self.hir_map
+ .as_local_node_id(def.id().unwrap())
+ .unwrap();
+
signal_shadowing_problem(
self.sess,
lifetime.name,
- original_lifetime(self.hir_map.span(def.id().unwrap())),
+ original_lifetime(self.hir_map.span(node_id)),
shadower_lifetime(&lifetime));
return;
}
origin: Option<HirId>,
}
+impl_stable_hash_for!(struct self::DeprecationEntry {
+ attr,
+ origin
+});
+
impl DeprecationEntry {
fn local(attr: Deprecation, id: HirId) -> DeprecationEntry {
DeprecationEntry {
active_features: FxHashSet<Symbol>,
}
+impl_stable_hash_for!(struct self::Index<'tcx> {
+ stab_map,
+ depr_map,
+ staged_api,
+ active_features
+});
+
// A private tree-walker for producing an Index.
struct Annotator<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use syntax::ast::NodeId;
+use syntax::symbol::InternedString;
+use ty::Instance;
+use util::nodemap::FxHashMap;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult,
+ StableHasher};
+use ich::{Fingerprint, StableHashingContext, NodeIdHashingMode};
+
+/// A single unit of translation: a monomorphized function instance, a
+/// static, or a chunk of global assembly (the latter two identified by
+/// the `NodeId` of their item).
+#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
+pub enum TransItem<'tcx> {
+ Fn(Instance<'tcx>),
+ Static(NodeId),
+ GlobalAsm(NodeId),
+}
+
+// Manual impl: statics/global-asm are keyed by node-id, which is hashed
+// in `HashDefPath` mode so the result is stable across node-id renumbering.
+impl<'tcx> HashStable<StableHashingContext<'tcx>> for TransItem<'tcx> {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'tcx>,
+ hasher: &mut StableHasher<W>) {
+ ::std::mem::discriminant(self).hash_stable(hcx, hasher);
+
+ match *self {
+ TransItem::Fn(ref instance) => {
+ instance.hash_stable(hcx, hasher);
+ }
+ TransItem::Static(node_id) |
+ TransItem::GlobalAsm(node_id) => {
+ hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
+ node_id.hash_stable(hcx, hasher);
+ })
+ }
+ }
+ }
+}
+
+/// A codegen unit: a named collection of `TransItem`s together with the
+/// linkage and visibility each item should be emitted with.
+pub struct CodegenUnit<'tcx> {
+ /// A name for this CGU. Incremental compilation requires that
+ /// name be unique amongst **all** crates. Therefore, it should
+ /// contain something unique to this crate (e.g., a module path)
+ /// as well as the crate name and disambiguator.
+ name: InternedString,
+ items: FxHashMap<TransItem<'tcx>, (Linkage, Visibility)>,
+}
+
+// NOTE(review): these variants presumably mirror LLVM's linkage kinds —
+// confirm against the LLVM bindings that consume them.
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub enum Linkage {
+ External,
+ AvailableExternally,
+ LinkOnceAny,
+ LinkOnceODR,
+ WeakAny,
+ WeakODR,
+ Appending,
+ Internal,
+ Private,
+ ExternalWeak,
+ Common,
+}
+
+impl_stable_hash_for!(enum self::Linkage {
+ External,
+ AvailableExternally,
+ LinkOnceAny,
+ LinkOnceODR,
+ WeakAny,
+ WeakODR,
+ Appending,
+ Internal,
+ Private,
+ ExternalWeak,
+ Common
+});
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub enum Visibility {
+ Default,
+ Hidden,
+ Protected,
+}
+
+impl_stable_hash_for!(enum self::Visibility {
+ Default,
+ Hidden,
+ Protected
+});
+
+impl<'tcx> CodegenUnit<'tcx> {
+ // Creates an empty codegen unit with the given (globally unique) name.
+ pub fn new(name: InternedString) -> CodegenUnit<'tcx> {
+ CodegenUnit {
+ name: name,
+ items: FxHashMap(),
+ }
+ }
+
+ pub fn name(&self) -> &InternedString {
+ &self.name
+ }
+
+ pub fn set_name(&mut self, name: InternedString) {
+ self.name = name;
+ }
+
+ pub fn items(&self) -> &FxHashMap<TransItem<'tcx>, (Linkage, Visibility)> {
+ &self.items
+ }
+
+ pub fn items_mut(&mut self)
+ -> &mut FxHashMap<TransItem<'tcx>, (Linkage, Visibility)>
+ {
+ &mut self.items
+ }
+}
+
+// Manual impl: `items` is a hash map whose iteration order is not
+// deterministic, so each entry is hashed to a `Fingerprint` individually
+// and the fingerprints are sorted before being fed into the hasher.
+impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'tcx>,
+ hasher: &mut StableHasher<W>) {
+ let CodegenUnit {
+ ref items,
+ name,
+ } = *self;
+
+ name.hash_stable(hcx, hasher);
+
+ let mut items: Vec<(Fingerprint, _)> = items.iter().map(|(trans_item, &attrs)| {
+ let mut hasher = StableHasher::new();
+ trans_item.hash_stable(hcx, &mut hasher);
+ let trans_item_fingerprint = hasher.finish();
+ (trans_item_fingerprint, attrs)
+ }).collect();
+
+ // Sorting by fingerprint gives a canonical order independent of the
+ // map's internal layout.
+ items.sort_unstable_by_key(|i| i.0);
+ items.hash_stable(hcx, hasher);
+ }
+}
+
+/// Counters collected during translation (glue functions, closures,
+/// LLVM instructions emitted, per-function instruction counts, ...).
+#[derive(Clone, Default)]
+pub struct Stats {
+ pub n_glues_created: usize,
+ pub n_null_glues: usize,
+ pub n_real_glues: usize,
+ pub n_fns: usize,
+ pub n_inlines: usize,
+ pub n_closures: usize,
+ pub n_llvm_insns: usize,
+ pub llvm_insns: FxHashMap<String, usize>,
+ // (ident, llvm-instructions)
+ pub fn_stats: Vec<(String, usize)>,
+}
+
+impl_stable_hash_for!(struct self::Stats {
+ n_glues_created,
+ n_null_glues,
+ n_real_glues,
+ n_fns,
+ n_inlines,
+ n_closures,
+ n_llvm_insns,
+ llvm_insns,
+ fn_stats
+});
+
+impl Stats {
+ // Merges `stats` into `self`: scalar counters are summed, per-opcode
+ // counts are added entry-wise, and per-function stats are appended.
+ pub fn extend(&mut self, stats: Stats) {
+ self.n_glues_created += stats.n_glues_created;
+ self.n_null_glues += stats.n_null_glues;
+ self.n_real_glues += stats.n_real_glues;
+ self.n_fns += stats.n_fns;
+ self.n_inlines += stats.n_inlines;
+ self.n_closures += stats.n_closures;
+ self.n_llvm_insns += stats.n_llvm_insns;
+
+ for (k, v) in stats.llvm_insns {
+ *self.llvm_insns.entry(k).or_insert(0) += v;
+ }
+ self.fn_stats.extend(stats.fn_stats);
+ }
+}
+
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Cache {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for Cache {
fn hash_stable<W: StableHasherResult>(&self,
- _: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ _: &mut StableHashingContext<'gcx>,
_: &mut StableHasher<W>) {
// do nothing
}
use session::{early_error, early_warn, Session};
use session::search_paths::SearchPaths;
+use ich::StableHashingContext;
use rustc_back::{LinkerFlavor, PanicStrategy, RelroLevel};
use rustc_back::target::Target;
+use rustc_data_structures::stable_hasher::ToStableHashKey;
use lint;
use middle::cstore;
DepInfo,
}
+impl_stable_hash_for!(enum self::OutputType {
+ Bitcode,
+ Assembly,
+ LlvmAssembly,
+ Mir,
+ Metadata,
+ Object,
+ Exe,
+ DepInfo
+});
+
+impl<'tcx> ToStableHashKey<StableHashingContext<'tcx>> for OutputType {
+ type KeyType = OutputType;
+ #[inline]
+ fn to_stable_hash_key(&self, _: &StableHashingContext<'tcx>) -> Self::KeyType {
+ *self
+ }
+}
+
impl OutputType {
fn is_compatible_with_codegen_units_and_single_output_file(&self) -> bool {
match *self {
#[derive(Clone, Hash)]
pub struct OutputTypes(BTreeMap<OutputType, Option<PathBuf>>);
+impl_stable_hash_for!(tuple_struct self::OutputTypes {
+ map
+});
+
impl OutputTypes {
pub fn new(entries: &[(OutputType, Option<PathBuf>)]) -> OutputTypes {
OutputTypes(BTreeMap::from_iter(entries.iter()
pub outputs: OutputTypes,
}
+impl_stable_hash_for!(struct self::OutputFilenames {
+ out_directory,
+ out_filestem,
+ single_output_file,
+ extra,
+ outputs
+});
+
/// Codegen unit names generated by the numbered naming scheme will contain this
/// marker right before the index of the codegen unit.
pub const NUMBERED_CODEGEN_UNIT_MARKER: &'static str = ".cgu-";
debug_assertions: Option<bool> = (None, parse_opt_bool, [TRACKED],
"explicitly enable the cfg(debug_assertions) directive"),
inline_threshold: Option<usize> = (None, parse_opt_uint, [TRACKED],
- "set the inlining threshold for"),
+ "set the threshold for inlining a function (default: 225)"),
panic: Option<PanicStrategy> = (None, parse_panic_strategy,
[TRACKED], "panic strategy to compile crate with"),
}
linker_flavor: Option<LinkerFlavor> = (None, parse_linker_flavor, [UNTRACKED],
"Linker flavor"),
fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED],
- "Set the optimization fuel quota for a crate."),
+ "set the optimization fuel quota for a crate"),
print_fuel: Option<String> = (None, parse_opt_string, [TRACKED],
- "Make Rustc print the total optimization fuel used by a crate."),
+ "make Rustc print the total optimization fuel used by a crate"),
remap_path_prefix_from: Vec<String> = (vec![], parse_string_push, [TRACKED],
"add a source pattern to the file path remapping config"),
remap_path_prefix_to: Vec<String> = (vec![], parse_string_push, [TRACKED],
use super::OverlapError;
use hir::def_id::DefId;
+use ich::{self, StableHashingContext};
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
+ StableHasherResult};
use traits;
use ty::{self, TyCtxt, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType};
current_source: Some(Node::Impl(start_from_impl)),
}
}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for Children {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ let Children {
+ ref nonblanket_impls,
+ ref blanket_impls,
+ } = *self;
+
+ ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, nonblanket_impls);
+ }
+}
+
+impl_stable_hash_for!(struct self::Graph {
+ parent,
+ children
+});
--- /dev/null
+# Types and the Type Context
+
+The `ty` module defines how the Rust compiler represents types
+internally. It also defines the *typing context* (`tcx` or `TyCtxt`),
+which is the central data structure in the compiler.
+
+## The tcx and how it uses lifetimes
+
+The `tcx` ("typing context") is the central data structure in the
+compiler. It is the context that you use to perform all manner of
+queries. The struct `TyCtxt` defines a reference to this shared context:
+
+```rust
+tcx: TyCtxt<'a, 'gcx, 'tcx>
+// -- ---- ----
+// | | |
+// | | innermost arena lifetime (if any)
+// | "global arena" lifetime
+// lifetime of this reference
+```
+
+As you can see, the `TyCtxt` type takes three lifetime parameters.
+These lifetimes are perhaps the most complex thing to understand about
+the tcx. During Rust compilation, we allocate most of our memory in
+**arenas**, which are basically pools of memory that get freed all at
+once. When you see a reference with a lifetime like `'tcx` or `'gcx`,
+you know that it refers to arena-allocated data (or data that lives as
+long as the arenas, anyhow).
+
+We use two distinct levels of arenas. The outer level is the "global
+arena". This arena lasts for the entire compilation: so anything you
+allocate in there is only freed once compilation is basically over
+(actually, when we shift to executing LLVM).
+
+To reduce peak memory usage, when we do type inference, we also use an
+inner level of arena. These arenas get thrown away once type inference
+is over. This is done because type inference generates a lot of
+"throw-away" types that are not particularly interesting after type
+inference completes, so keeping around those allocations would be
+wasteful.
+
+Often, we wish to write code that explicitly asserts that it is not
+taking place during inference. In that case, there is no "local"
+arena, and all the types that you can access are allocated in the
+global arena. To express this, the idea is to use the same lifetime
+for the `'gcx` and `'tcx` parameters of `TyCtxt`. Just to be a touch
+confusing, we tend to use the name `'tcx` in such contexts. Here is an
+example:
+
+```rust
+fn not_in_inference<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+ // ---- ----
+ // Using the same lifetime here asserts
+ // that the innermost arena accessible through
+ // this reference *is* the global arena.
+}
+```
+
+In contrast, if we want to write code that can be usable during type inference, then you
+need to declare a distinct `'gcx` and `'tcx` lifetime parameter:
+
+```rust
+fn maybe_in_inference<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId) {
+ // ---- ----
+ // Using different lifetimes here means that
+ // the innermost arena *may* be distinct
+ // from the global arena (but doesn't have to be).
+}
+```
+
+### Allocating and working with types
+
+Rust types are represented using the `Ty<'tcx>` defined in the `ty`
+module (not to be confused with the `Ty` struct from [the HIR]). This
+is in fact a simple type alias for a reference with `'tcx` lifetime:
+
+```rust
+pub type Ty<'tcx> = &'tcx TyS<'tcx>;
+```
+
+[the HIR]: ../hir/README.md
+
+You can basically ignore the `TyS` struct -- you will basically never
+access it explicitly. We always pass it by reference using the
+`Ty<'tcx>` alias -- the only exception I think is to define inherent
+methods on types. Instances of `TyS` are only ever allocated in one of
+the rustc arenas (never e.g. on the stack).
+
+One common operation on types is to **match** and see what kinds of
+types they are. This is done by doing `match ty.sty`, sort of like this:
+
+```rust
+fn test_type<'tcx>(ty: Ty<'tcx>) {
+ match ty.sty {
+ ty::TyArray(elem_ty, len) => { ... }
+ ...
+ }
+}
+```
+
+The `sty` field (the origin of this name is unclear to me; perhaps
+structural type?) is of type `TypeVariants<'tcx>`, which is an enum
+defining all of the different kinds of types in the compiler.
+
+> NB: inspecting the `sty` field on types during type inference can be
+> risky, as there may be inference variables and other things to
+> consider, or sometimes types are not yet known that will become
+> known later.
+
+To allocate a new type, you can use the various `mk_` methods defined
+on the `tcx`. These have names that correspond mostly to the various kinds
+of type variants. For example:
+
+```rust
+let array_ty = tcx.mk_array(elem_ty, len * 2);
+```
+
+These methods all return a `Ty<'tcx>` -- note that the lifetime you
+get back is the lifetime of the innermost arena that this `tcx` has
+access to. In fact, types are always canonicalized and interned (so we
+never allocate exactly the same type twice) and are always allocated
+in the outermost arena where they can be (so, if they do not contain
+any inference variables or other "temporary" types, they will be
+allocated in the global arena). However, the lifetime `'tcx` is always
+a safe approximation, so that is what you get back.
+
+> NB. Because types are interned, it is possible to compare them for
+> equality efficiently using `==` -- however, this is almost never what
+> you want to do unless you happen to be hashing and looking for
+> duplicates. This is because often in Rust there are multiple ways to
+> represent the same type, particularly once inference is involved. If
+> you are going to be testing for type equality, you probably need to
+> start looking into the inference code to do it right.
+
+You can also find various common types in the tcx itself by accessing
+`tcx.types.bool`, `tcx.types.char`, etc (see `CommonTypes` for more).
+
+### Beyond types: Other kinds of arena-allocated data structures
+
+In addition to types, there are a number of other arena-allocated data
+structures that you can allocate, and which are found in this
+module. Here are a few examples:
+
+- `Substs`, allocated with `mk_substs` -- this will intern a slice of types, often used to
+ specify the values to be substituted for generics (e.g., `HashMap<i32, u32>`
+  would be represented as a slice `&'tcx [tcx.types.i32, tcx.types.u32]`).
+- `TraitRef`, typically passed by value -- a **trait reference**
+ consists of a reference to a trait along with its various type
+ parameters (including `Self`), like `i32: Display` (here, the def-id
+ would reference the `Display` trait, and the substs would contain
+ `i32`).
+- `Predicate` defines something the trait system has to prove (see `traits` module).
+
+### Import conventions
+
+Although there is no hard and fast rule, the `ty` module tends to be used like so:
+
+```rust
+use ty::{self, Ty, TyCtxt};
+```
+
+In particular, since they are so common, the `Ty` and `TyCtxt` types
+are imported directly. Other types are often referenced with an
+explicit `ty::` prefix (e.g., `ty::TraitRef<'tcx>`). But some modules
+choose to import a larger or smaller set of names explicitly.
use dep_graph::DepGraph;
use errors::DiagnosticBuilder;
use session::Session;
+use session::config::OutputFilenames;
use middle;
use hir::{TraitCandidate, HirId, ItemLocalId};
use hir::def::{Def, Export};
use hir::map as hir_map;
use hir::map::DefPathHash;
use lint::{self, Lint};
-use ich::{self, StableHashingContext, NodeIdHashingMode};
+use ich::{StableHashingContext, NodeIdHashingMode};
use middle::const_val::ConstVal;
use middle::cstore::{CrateStore, LinkMeta, EncodedMetadataHashes};
use middle::cstore::EncodedMetadata;
use util::nodemap::{NodeMap, NodeSet, DefIdSet, ItemLocalMap};
use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
- StableHasherResult};
+use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
+ StableHasher, StableHasherResult};
use arena::{TypedArena, DroplessArena};
use rustc_const_math::{ConstInt, ConstUsize};
use std::ops::Deref;
use std::iter;
use std::rc::Rc;
+use std::sync::mpsc;
+use std::sync::Arc;
use syntax::abi;
use syntax::ast::{self, Name, NodeId};
use syntax::attr;
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for TypeckTables<'gcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for TypeckTables<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ty::TypeckTables {
local_id_root,
} = *self;
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
- ich::hash_stable_itemlocalmap(hcx, hasher, type_dependent_defs);
- ich::hash_stable_itemlocalmap(hcx, hasher, node_types);
- ich::hash_stable_itemlocalmap(hcx, hasher, node_substs);
- ich::hash_stable_itemlocalmap(hcx, hasher, adjustments);
- ich::hash_stable_itemlocalmap(hcx, hasher, pat_binding_modes);
- ich::hash_stable_hashmap(hcx, hasher, upvar_capture_map, |hcx, up_var_id| {
+ type_dependent_defs.hash_stable(hcx, hasher);
+ node_types.hash_stable(hcx, hasher);
+ node_substs.hash_stable(hcx, hasher);
+ adjustments.hash_stable(hcx, hasher);
+ pat_binding_modes.hash_stable(hcx, hasher);
+ hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
let ty::UpvarId {
var_id,
closure_expr_id
krate: local_id_root.krate,
index: closure_expr_id,
};
- ((hcx.def_path_hash(var_owner_def_id), var_id.local_id),
+ (hcx.def_path_hash(var_owner_def_id),
+ var_id.local_id,
hcx.def_path_hash(closure_def_id))
});
- ich::hash_stable_itemlocalmap(hcx, hasher, closure_tys);
- ich::hash_stable_itemlocalmap(hcx, hasher, closure_kinds);
- ich::hash_stable_itemlocalmap(hcx, hasher, liberated_fn_sigs);
- ich::hash_stable_itemlocalmap(hcx, hasher, fru_field_types);
- ich::hash_stable_itemlocalmap(hcx, hasher, cast_kinds);
- ich::hash_stable_itemlocalmap(hcx, hasher, generator_sigs);
- ich::hash_stable_itemlocalmap(hcx, hasher, generator_interiors);
-
- ich::hash_stable_hashset(hcx, hasher, used_trait_imports, |hcx, def_id| {
- hcx.def_path_hash(*def_id)
- });
-
+ closure_tys.hash_stable(hcx, hasher);
+ closure_kinds.hash_stable(hcx, hasher);
+ liberated_fn_sigs.hash_stable(hcx, hasher);
+ fru_field_types.hash_stable(hcx, hasher);
+ cast_kinds.hash_stable(hcx, hasher);
+ generator_sigs.hash_stable(hcx, hasher);
+ generator_interiors.hash_stable(hcx, hasher);
+ used_trait_imports.hash_stable(hcx, hasher);
tainted_by_errors.hash_stable(hcx, hasher);
free_region_map.hash_stable(hcx, hasher);
})
}
}
-/// The data structure to keep track of all the information that typechecker
-/// generates so that so that it can be reused and doesn't have to be redone
-/// later on.
+/// The central data structure of the compiler. It stores references
+/// to the various **arenas** and also houses the results of the
+/// various **compiler queries** that have been performed. See [the
+/// README](README.md) for more details.
#[derive(Copy, Clone)]
pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
gcx: &'a GlobalCtxt<'gcx>,
/// error reporting, and so is lazily initialized and generally
/// shouldn't taint the common path (hence the RefCell).
pub all_traits: RefCell<Option<Vec<DefId>>>,
+
+ /// A general purpose channel to throw data out the back towards LLVM worker
+ /// threads.
+ ///
+ /// This is intended to only get used during the trans phase of the compiler
+ /// when satisfying the query for a particular codegen unit. Internally in
+ /// the query it'll send data along this channel to get processed later.
+ pub tx_to_llvm_workers: mpsc::Sender<Box<Any + Send>>,
+
+ output_filenames: Arc<OutputFilenames>,
}
impl<'tcx> GlobalCtxt<'tcx> {
named_region_map: resolve_lifetime::NamedRegionMap,
hir: hir_map::Map<'tcx>,
crate_name: &str,
+ tx: mpsc::Sender<Box<Any + Send>>,
+ output_filenames: &OutputFilenames,
f: F) -> R
where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
{
None
};
+ // FIXME(mw): Each of the Vecs in the trait_map should be brought into
+ // a deterministic order here. Otherwise we might end up with
+ // unnecessarily unstable incr. comp. hashes.
let mut trait_map = FxHashMap();
for (k, v) in resolutions.trait_map {
let hir_id = hir.node_to_hir_id(k);
derive_macros: RefCell::new(NodeMap()),
stability_interner: RefCell::new(FxHashSet()),
all_traits: RefCell::new(None),
+ tx_to_llvm_workers: tx,
+ output_filenames: Arc::new(output_filenames.clone()),
}, f)
}
}
pub fn lang_items(self) -> Rc<middle::lang_items::LanguageItems> {
- // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
- // graph here to ignore the fact that `get_lang_items` below depends on
- // the entire crate. For now this'll prevent false positives of
- // recompiling too much when anything changes.
- //
- // Once red/green incremental compilation lands we should be able to
- // remove this because while the crate changes often the lint level map
- // will change rarely.
- self.dep_graph.with_ignore(|| {
- self.get_lang_items(LOCAL_CRATE)
- })
+ self.get_lang_items(LOCAL_CRATE)
}
pub fn stability(self) -> Rc<stability::Index<'tcx>> {
pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Rc<Any> {
self.cstore.crate_data_as_rc_any(cnum)
}
+
+ pub fn create_stable_hashing_context(self) -> StableHashingContext<'gcx> {
+ let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());
+
+ StableHashingContext::new(self.sess,
+ krate,
+ self.hir.definitions(),
+ self.cstore)
+ }
}
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
};
providers.get_lang_items = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
- Rc::new(middle::lang_items::collect(tcx))
+ // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
+ // graph here to ignore the fact that `get_lang_items` below depends on
+ // the entire crate. For now this'll prevent false positives of
+ // recompiling too much when anything changes.
+ //
+ // Once red/green incremental compilation lands we should be able to
+ // remove this because while the crate changes often the lint level map
+ // will change rarely.
+ tcx.dep_graph.with_ignore(|| Rc::new(middle::lang_items::collect(tcx)))
};
providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
providers.maybe_unused_trait_import = |tcx, id| {
assert_eq!(cnum, LOCAL_CRATE);
Rc::new(tcx.cstore.postorder_cnums_untracked())
};
+ providers.output_filenames = |tcx, cnum| {
+ assert_eq!(cnum, LOCAL_CRATE);
+ tcx.output_filenames.clone()
+ };
}
// except according to those terms.
use hir::def_id::DefId;
-use ty::{self, Ty, TyCtxt};
+use ich::StableHashingContext;
+use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
+ HashStable};
+use std::fmt::Debug;
+use std::hash::Hash;
+use std::mem;
use syntax::ast;
+use ty::{self, Ty, TyCtxt};
-use self::SimplifiedType::*;
+use self::SimplifiedTypeGen::*;
-/// See `simplify_type
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum SimplifiedType {
+pub type SimplifiedType = SimplifiedTypeGen<DefId>;
+
+/// See `simplify_type`
+///
+/// Note that we keep this type generic over the type of identifier it uses
+/// because we sometimes need to use SimplifiedTypeGen values as stable sorting
+/// keys (in which case we use a DefPathHash as id-type) but in the general case
+/// the non-stable but fast to construct DefId-version is the better choice.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum SimplifiedTypeGen<D>
+ where D: Copy + Debug + Ord + Eq + Hash
+{
BoolSimplifiedType,
CharSimplifiedType,
IntSimplifiedType(ast::IntTy),
UintSimplifiedType(ast::UintTy),
FloatSimplifiedType(ast::FloatTy),
- AdtSimplifiedType(DefId),
+ AdtSimplifiedType(D),
StrSimplifiedType,
ArraySimplifiedType,
PtrSimplifiedType,
NeverSimplifiedType,
TupleSimplifiedType(usize),
- TraitSimplifiedType(DefId),
- ClosureSimplifiedType(DefId),
- GeneratorSimplifiedType(DefId),
- AnonSimplifiedType(DefId),
+ TraitSimplifiedType(D),
+ ClosureSimplifiedType(D),
+ GeneratorSimplifiedType(D),
+ AnonSimplifiedType(D),
FunctionSimplifiedType(usize),
ParameterSimplifiedType,
}
ty::TyInfer(_) | ty::TyError => None,
}
}
+
+impl<D: Copy + Debug + Ord + Eq + Hash> SimplifiedTypeGen<D> {
+ pub fn map_def<U, F>(self, map: F) -> SimplifiedTypeGen<U>
+ where F: Fn(D) -> U,
+ U: Copy + Debug + Ord + Eq + Hash,
+ {
+ match self {
+ BoolSimplifiedType => BoolSimplifiedType,
+ CharSimplifiedType => CharSimplifiedType,
+ IntSimplifiedType(t) => IntSimplifiedType(t),
+ UintSimplifiedType(t) => UintSimplifiedType(t),
+ FloatSimplifiedType(t) => FloatSimplifiedType(t),
+ AdtSimplifiedType(d) => AdtSimplifiedType(map(d)),
+ StrSimplifiedType => StrSimplifiedType,
+ ArraySimplifiedType => ArraySimplifiedType,
+ PtrSimplifiedType => PtrSimplifiedType,
+ NeverSimplifiedType => NeverSimplifiedType,
+ TupleSimplifiedType(n) => TupleSimplifiedType(n),
+ TraitSimplifiedType(d) => TraitSimplifiedType(map(d)),
+ ClosureSimplifiedType(d) => ClosureSimplifiedType(map(d)),
+ GeneratorSimplifiedType(d) => GeneratorSimplifiedType(map(d)),
+ AnonSimplifiedType(d) => AnonSimplifiedType(map(d)),
+ FunctionSimplifiedType(n) => FunctionSimplifiedType(n),
+ ParameterSimplifiedType => ParameterSimplifiedType,
+ }
+ }
+}
+
+impl<'gcx, D> HashStable<StableHashingContext<'gcx>> for SimplifiedTypeGen<D>
+ where D: Copy + Debug + Ord + Eq + Hash +
+ HashStable<StableHashingContext<'gcx>>,
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ mem::discriminant(self).hash_stable(hcx, hasher);
+ match *self {
+ BoolSimplifiedType |
+ CharSimplifiedType |
+ StrSimplifiedType |
+ ArraySimplifiedType |
+ PtrSimplifiedType |
+ NeverSimplifiedType |
+ ParameterSimplifiedType => {
+ // nothing to do
+ }
+ IntSimplifiedType(t) => t.hash_stable(hcx, hasher),
+ UintSimplifiedType(t) => t.hash_stable(hcx, hasher),
+ FloatSimplifiedType(t) => t.hash_stable(hcx, hasher),
+ AdtSimplifiedType(d) => d.hash_stable(hcx, hasher),
+ TupleSimplifiedType(n) => n.hash_stable(hcx, hasher),
+ TraitSimplifiedType(d) => d.hash_stable(hcx, hasher),
+ ClosureSimplifiedType(d) => d.hash_stable(hcx, hasher),
+ GeneratorSimplifiedType(d) => d.hash_stable(hcx, hasher),
+ AnonSimplifiedType(d) => d.hash_stable(hcx, hasher),
+ FunctionSimplifiedType(n) => n.hash_stable(hcx, hasher),
+ }
+ }
+}
use std::fmt;
use std::i64;
use std::iter;
+use std::mem;
use std::ops::Deref;
+use ich::StableHashingContext;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
+ StableHasherResult};
+
/// Parsed [Data layout](http://llvm.org/docs/LangRef.html#data-layout)
/// for a target, which contains everything needed to compute layouts.
pub struct TargetDataLayout {
cx.layout_of(cx.normalize_projections(self.field_type(cx, i)))
}
}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for Layout
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ use ty::layout::Layout::*;
+ mem::discriminant(self).hash_stable(hcx, hasher);
+
+ match *self {
+ Scalar { value, non_zero } => {
+ value.hash_stable(hcx, hasher);
+ non_zero.hash_stable(hcx, hasher);
+ }
+ Vector { element, count } => {
+ element.hash_stable(hcx, hasher);
+ count.hash_stable(hcx, hasher);
+ }
+ Array { sized, align, primitive_align, element_size, count } => {
+ sized.hash_stable(hcx, hasher);
+ align.hash_stable(hcx, hasher);
+ primitive_align.hash_stable(hcx, hasher);
+ element_size.hash_stable(hcx, hasher);
+ count.hash_stable(hcx, hasher);
+ }
+ FatPointer { ref metadata, non_zero } => {
+ metadata.hash_stable(hcx, hasher);
+ non_zero.hash_stable(hcx, hasher);
+ }
+ CEnum { discr, signed, non_zero, min, max } => {
+ discr.hash_stable(hcx, hasher);
+ signed.hash_stable(hcx, hasher);
+ non_zero.hash_stable(hcx, hasher);
+ min.hash_stable(hcx, hasher);
+ max.hash_stable(hcx, hasher);
+ }
+ Univariant { ref variant, non_zero } => {
+ variant.hash_stable(hcx, hasher);
+ non_zero.hash_stable(hcx, hasher);
+ }
+ UntaggedUnion { ref variants } => {
+ variants.hash_stable(hcx, hasher);
+ }
+ General { discr, ref variants, size, align, primitive_align } => {
+ discr.hash_stable(hcx, hasher);
+ variants.hash_stable(hcx, hasher);
+ size.hash_stable(hcx, hasher);
+ align.hash_stable(hcx, hasher);
+ primitive_align.hash_stable(hcx, hasher);
+ }
+ RawNullablePointer { nndiscr, ref value } => {
+ nndiscr.hash_stable(hcx, hasher);
+ value.hash_stable(hcx, hasher);
+ }
+ StructWrappedNullablePointer {
+ nndiscr,
+ ref nonnull,
+ ref discrfield,
+ ref discrfield_source
+ } => {
+ nndiscr.hash_stable(hcx, hasher);
+ nonnull.hash_stable(hcx, hasher);
+ discrfield.hash_stable(hcx, hasher);
+ discrfield_source.hash_stable(hcx, hasher);
+ }
+ }
+ }
+}
+
+impl_stable_hash_for!(enum ::ty::layout::Integer {
+ I1,
+ I8,
+ I16,
+ I32,
+ I64,
+ I128
+});
+
+impl_stable_hash_for!(enum ::ty::layout::Primitive {
+ Int(integer),
+ F32,
+ F64,
+ Pointer
+});
+
+impl_stable_hash_for!(struct ::ty::layout::Align {
+ abi,
+ pref
+});
+
+impl_stable_hash_for!(struct ::ty::layout::Size {
+ raw
+});
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for LayoutError<'gcx>
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ use ty::layout::LayoutError::*;
+ mem::discriminant(self).hash_stable(hcx, hasher);
+
+ match *self {
+ Unknown(t) |
+ SizeOverflow(t) => t.hash_stable(hcx, hasher)
+ }
+ }
+}
+
+impl_stable_hash_for!(struct ::ty::layout::Struct {
+ align,
+ primitive_align,
+ packed,
+ sized,
+ offsets,
+ memory_index,
+ min_size
+});
+
+impl_stable_hash_for!(struct ::ty::layout::Union {
+ align,
+ primitive_align,
+ min_size,
+ packed
+});
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use dep_graph::{DepConstructor, DepNode, DepNodeIndex};
-use errors::{Diagnostic, DiagnosticBuilder};
-use hir::def_id::{CrateNum, DefId, LOCAL_CRATE, DefIndex};
-use hir::def::{Def, Export};
-use hir::{self, TraitCandidate, ItemLocalId};
-use hir::svh::Svh;
-use lint;
-use middle::const_val;
-use middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary};
-use middle::cstore::{NativeLibraryKind, DepKind, CrateSource};
-use middle::privacy::AccessLevels;
-use middle::region;
-use middle::resolve_lifetime::{Region, ObjectLifetimeDefault};
-use middle::stability::{self, DeprecationEntry};
-use middle::lang_items::{LanguageItems, LangItem};
-use mir;
-use mir::transform::{MirSuite, MirPassIndex};
-use session::CompileResult;
-use traits::specialization_graph;
-use ty::{self, CrateInherentImpls, Ty, TyCtxt};
-use ty::layout::{Layout, LayoutError};
-use ty::item_path;
-use ty::steal::Steal;
-use ty::subst::Substs;
-use ty::fast_reject::SimplifiedType;
-use util::nodemap::{DefIdSet, NodeSet, DefIdMap};
-use util::common::{profq_msg, ProfileQueriesMsg};
-
-use rustc_data_structures::indexed_set::IdxSetBuf;
-use rustc_back::PanicStrategy;
-use rustc_data_structures::indexed_vec::IndexVec;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use std::cell::{RefCell, RefMut, Cell};
-use std::fmt::Debug;
-use std::hash::Hash;
-use std::marker::PhantomData;
-use std::mem;
-use std::collections::BTreeMap;
-use std::ops::Deref;
-use std::rc::Rc;
-use syntax_pos::{Span, DUMMY_SP};
-use syntax::attr;
-use syntax::ast;
-use syntax::symbol::Symbol;
-
-pub trait Key: Clone + Hash + Eq + Debug {
- fn map_crate(&self) -> CrateNum;
- fn default_span(&self, tcx: TyCtxt) -> Span;
-}
-
-impl<'tcx> Key for ty::InstanceDef<'tcx> {
- fn map_crate(&self) -> CrateNum {
- LOCAL_CRATE
- }
-
- fn default_span(&self, tcx: TyCtxt) -> Span {
- tcx.def_span(self.def_id())
- }
-}
-
-impl<'tcx> Key for ty::Instance<'tcx> {
- fn map_crate(&self) -> CrateNum {
- LOCAL_CRATE
- }
-
- fn default_span(&self, tcx: TyCtxt) -> Span {
- tcx.def_span(self.def_id())
- }
-}
-
-impl Key for CrateNum {
- fn map_crate(&self) -> CrateNum {
- *self
- }
- fn default_span(&self, _: TyCtxt) -> Span {
- DUMMY_SP
- }
-}
-
-impl Key for DefIndex {
- fn map_crate(&self) -> CrateNum {
- LOCAL_CRATE
- }
- fn default_span(&self, _tcx: TyCtxt) -> Span {
- DUMMY_SP
- }
-}
-
-impl Key for DefId {
- fn map_crate(&self) -> CrateNum {
- self.krate
- }
- fn default_span(&self, tcx: TyCtxt) -> Span {
- tcx.def_span(*self)
- }
-}
-
-impl Key for (DefId, DefId) {
- fn map_crate(&self) -> CrateNum {
- self.0.krate
- }
- fn default_span(&self, tcx: TyCtxt) -> Span {
- self.1.default_span(tcx)
- }
-}
-
-impl Key for (CrateNum, DefId) {
- fn map_crate(&self) -> CrateNum {
- self.0
- }
- fn default_span(&self, tcx: TyCtxt) -> Span {
- self.1.default_span(tcx)
- }
-}
-
-impl Key for (DefId, SimplifiedType) {
- fn map_crate(&self) -> CrateNum {
- self.0.krate
- }
- fn default_span(&self, tcx: TyCtxt) -> Span {
- self.0.default_span(tcx)
- }
-}
-
-impl<'tcx> Key for (DefId, &'tcx Substs<'tcx>) {
- fn map_crate(&self) -> CrateNum {
- self.0.krate
- }
- fn default_span(&self, tcx: TyCtxt) -> Span {
- self.0.default_span(tcx)
- }
-}
-
-impl Key for (MirSuite, DefId) {
- fn map_crate(&self) -> CrateNum {
- self.1.map_crate()
- }
- fn default_span(&self, tcx: TyCtxt) -> Span {
- self.1.default_span(tcx)
- }
-}
-
-impl Key for (MirSuite, MirPassIndex, DefId) {
- fn map_crate(&self) -> CrateNum {
- self.2.map_crate()
- }
- fn default_span(&self, tcx: TyCtxt) -> Span {
- self.2.default_span(tcx)
- }
-}
-
-impl<'tcx> Key for Ty<'tcx> {
- fn map_crate(&self) -> CrateNum {
- LOCAL_CRATE
- }
- fn default_span(&self, _: TyCtxt) -> Span {
- DUMMY_SP
- }
-}
-
-impl<'tcx, T: Key> Key for ty::ParamEnvAnd<'tcx, T> {
- fn map_crate(&self) -> CrateNum {
- self.value.map_crate()
- }
- fn default_span(&self, tcx: TyCtxt) -> Span {
- self.value.default_span(tcx)
- }
-}
-
-trait Value<'tcx>: Sized {
- fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self;
-}
-
-impl<'tcx, T> Value<'tcx> for T {
- default fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> T {
- tcx.sess.abort_if_errors();
- bug!("Value::from_cycle_error called without errors");
- }
-}
-
-impl<'tcx, T: Default> Value<'tcx> for T {
- default fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> T {
- T::default()
- }
-}
-
-impl<'tcx> Value<'tcx> for Ty<'tcx> {
- fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
- tcx.types.err
- }
-}
-
-impl<'tcx> Value<'tcx> for ty::DtorckConstraint<'tcx> {
- fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
- Self::empty()
- }
-}
-
-impl<'tcx> Value<'tcx> for ty::SymbolName {
- fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
- ty::SymbolName { name: Symbol::intern("<error>").as_str() }
- }
-}
-
-struct QueryMap<D: QueryDescription> {
- phantom: PhantomData<D>,
- map: FxHashMap<D::Key, QueryValue<D::Value>>,
-}
-
-struct QueryValue<T> {
- value: T,
- index: DepNodeIndex,
- diagnostics: Option<Box<QueryDiagnostics>>,
-}
-
-struct QueryDiagnostics {
- diagnostics: Vec<Diagnostic>,
- emitted_diagnostics: Cell<bool>,
-}
-
-impl<M: QueryDescription> QueryMap<M> {
- fn new() -> QueryMap<M> {
- QueryMap {
- phantom: PhantomData,
- map: FxHashMap(),
- }
- }
-}
-
-struct CycleError<'a, 'tcx: 'a> {
- span: Span,
- cycle: RefMut<'a, [(Span, Query<'tcx>)]>,
-}
-
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- fn report_cycle(self, CycleError { span, cycle }: CycleError)
- -> DiagnosticBuilder<'a>
- {
- // Subtle: release the refcell lock before invoking `describe()`
- // below by dropping `cycle`.
- let stack = cycle.to_vec();
- mem::drop(cycle);
-
- assert!(!stack.is_empty());
-
- // Disable naming impls with types in this path, since that
- // sometimes cycles itself, leading to extra cycle errors.
- // (And cycle errors around impls tend to occur during the
- // collect/coherence phases anyhow.)
- item_path::with_forced_impl_filename_line(|| {
- let mut err =
- struct_span_err!(self.sess, span, E0391,
- "unsupported cyclic reference between types/traits detected");
- err.span_label(span, "cyclic reference");
-
- err.span_note(stack[0].0, &format!("the cycle begins when {}...",
- stack[0].1.describe(self)));
-
- for &(span, ref query) in &stack[1..] {
- err.span_note(span, &format!("...which then requires {}...",
- query.describe(self)));
- }
-
- err.note(&format!("...which then again requires {}, completing the cycle.",
- stack[0].1.describe(self)));
-
- return err
- })
- }
-
- fn cycle_check<F, R>(self, span: Span, query: Query<'gcx>, compute: F)
- -> Result<R, CycleError<'a, 'gcx>>
- where F: FnOnce() -> R
- {
- {
- let mut stack = self.maps.query_stack.borrow_mut();
- if let Some((i, _)) = stack.iter().enumerate().rev()
- .find(|&(_, &(_, ref q))| *q == query) {
- return Err(CycleError {
- span,
- cycle: RefMut::map(stack, |stack| &mut stack[i..])
- });
- }
- stack.push((span, query));
- }
-
- let result = compute();
-
- self.maps.query_stack.borrow_mut().pop();
-
- Ok(result)
- }
-}
-
-pub trait QueryConfig {
- type Key: Eq + Hash + Clone;
- type Value;
-}
-
-trait QueryDescription: QueryConfig {
- fn describe(tcx: TyCtxt, key: Self::Key) -> String;
-}
-
-impl<M: QueryConfig<Key=DefId>> QueryDescription for M {
- default fn describe(tcx: TyCtxt, def_id: DefId) -> String {
- format!("processing `{}`", tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_copy_raw<'tcx> {
- fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
- format!("computing whether `{}` is `Copy`", env.value)
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_sized_raw<'tcx> {
- fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
- format!("computing whether `{}` is `Sized`", env.value)
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_freeze_raw<'tcx> {
- fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
- format!("computing whether `{}` is freeze", env.value)
- }
-}
-
-impl<'tcx> QueryDescription for queries::needs_drop_raw<'tcx> {
- fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
- format!("computing whether `{}` needs drop", env.value)
- }
-}
-
-impl<'tcx> QueryDescription for queries::layout_raw<'tcx> {
- fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
- format!("computing layout of `{}`", env.value)
- }
-}
-
-impl<'tcx> QueryDescription for queries::super_predicates_of<'tcx> {
- fn describe(tcx: TyCtxt, def_id: DefId) -> String {
- format!("computing the supertraits of `{}`",
- tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::type_param_predicates<'tcx> {
- fn describe(tcx: TyCtxt, (_, def_id): (DefId, DefId)) -> String {
- let id = tcx.hir.as_local_node_id(def_id).unwrap();
- format!("computing the bounds for type parameter `{}`",
- tcx.hir.ty_param_name(id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::coherent_trait<'tcx> {
- fn describe(tcx: TyCtxt, (_, def_id): (CrateNum, DefId)) -> String {
- format!("coherence checking all impls of trait `{}`",
- tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::crate_inherent_impls<'tcx> {
- fn describe(_: TyCtxt, k: CrateNum) -> String {
- format!("all inherent impls defined in crate `{:?}`", k)
- }
-}
-
-impl<'tcx> QueryDescription for queries::crate_inherent_impls_overlap_check<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- format!("check for overlap between inherent impls defined in this crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::crate_variances<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("computing the variances for items in this crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::mir_shims<'tcx> {
- fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String {
- format!("generating MIR shim for `{}`",
- tcx.item_path_str(def.def_id()))
- }
-}
-
-impl<'tcx> QueryDescription for queries::privacy_access_levels<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- format!("privacy access levels")
- }
-}
-
-impl<'tcx> QueryDescription for queries::typeck_item_bodies<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- format!("type-checking all item bodies")
- }
-}
-
-impl<'tcx> QueryDescription for queries::reachable_set<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- format!("reachability")
- }
-}
-
-impl<'tcx> QueryDescription for queries::const_eval<'tcx> {
- fn describe(tcx: TyCtxt, key: ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>) -> String {
- format!("const-evaluating `{}`", tcx.item_path_str(key.value.0))
- }
-}
-
-impl<'tcx> QueryDescription for queries::mir_keys<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- format!("getting a list of all mir_keys")
- }
-}
-
-impl<'tcx> QueryDescription for queries::symbol_name<'tcx> {
- fn describe(_tcx: TyCtxt, instance: ty::Instance<'tcx>) -> String {
- format!("computing the symbol for `{}`", instance)
- }
-}
-
-impl<'tcx> QueryDescription for queries::describe_def<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("describe_def")
- }
-}
-
-impl<'tcx> QueryDescription for queries::def_span<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("def_span")
- }
-}
-
-
-impl<'tcx> QueryDescription for queries::lookup_stability<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("stability")
- }
-}
-
-impl<'tcx> QueryDescription for queries::lookup_deprecation_entry<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("deprecation")
- }
-}
-
-impl<'tcx> QueryDescription for queries::item_attrs<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("item_attrs")
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_exported_symbol<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("is_exported_symbol")
- }
-}
-
-impl<'tcx> QueryDescription for queries::fn_arg_names<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("fn_arg_names")
- }
-}
-
-impl<'tcx> QueryDescription for queries::impl_parent<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("impl_parent")
- }
-}
-
-impl<'tcx> QueryDescription for queries::trait_of_item<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("trait_of_item")
- }
-}
-
-impl<'tcx> QueryDescription for queries::item_body_nested_bodies<'tcx> {
- fn describe(tcx: TyCtxt, def_id: DefId) -> String {
- format!("nested item bodies of `{}`", tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::const_is_rvalue_promotable_to_static<'tcx> {
- fn describe(tcx: TyCtxt, def_id: DefId) -> String {
- format!("const checking if rvalue is promotable to static `{}`",
- tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_mir_available<'tcx> {
- fn describe(tcx: TyCtxt, def_id: DefId) -> String {
- format!("checking if item is mir available: `{}`",
- tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> {
- fn describe(tcx: TyCtxt, def_id: DefId) -> String {
- format!("trait impls of `{}`", tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> {
- fn describe(tcx: TyCtxt, def_id: DefId) -> String {
- format!("determine object safety of trait `{}`", tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_const_fn<'tcx> {
- fn describe(tcx: TyCtxt, def_id: DefId) -> String {
- format!("checking if item is const fn: `{}`", tcx.item_path_str(def_id))
- }
-}
-
-impl<'tcx> QueryDescription for queries::dylib_dependency_formats<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- "dylib dependency formats of crate".to_string()
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_panic_runtime<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- "checking if the crate is_panic_runtime".to_string()
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_compiler_builtins<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- "checking if the crate is_compiler_builtins".to_string()
- }
-}
-
-impl<'tcx> QueryDescription for queries::has_global_allocator<'tcx> {
- fn describe(_: TyCtxt, _: CrateNum) -> String {
- "checking if the crate has_global_allocator".to_string()
- }
-}
-
-impl<'tcx> QueryDescription for queries::extern_crate<'tcx> {
- fn describe(_: TyCtxt, _: DefId) -> String {
- "getting crate's ExternCrateData".to_string()
- }
-}
-
-impl<'tcx> QueryDescription for queries::lint_levels<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("computing the lint levels for items in this crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::specializes<'tcx> {
- fn describe(_tcx: TyCtxt, _: (DefId, DefId)) -> String {
- format!("computing whether impls specialize one another")
- }
-}
-
-impl<'tcx> QueryDescription for queries::in_scope_traits_map<'tcx> {
- fn describe(_tcx: TyCtxt, _: DefIndex) -> String {
- format!("traits in scope at a block")
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_no_builtins<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("test whether a crate has #![no_builtins]")
- }
-}
-
-impl<'tcx> QueryDescription for queries::panic_strategy<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("query a crate's configured panic strategy")
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_profiler_runtime<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("query a crate is #![profiler_runtime]")
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_sanitizer_runtime<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("query a crate is #![sanitizer_runtime]")
- }
-}
-
-impl<'tcx> QueryDescription for queries::exported_symbols<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up the exported symbols of a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::native_libraries<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up the native libraries of a linked crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::plugin_registrar_fn<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up the plugin registrar for a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::derive_registrar_fn<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up the derive registrar for a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::crate_disambiguator<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up the disambiguator a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::crate_hash<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up the hash a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::original_crate_name<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up the original name a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::implementations_of_trait<'tcx> {
- fn describe(_tcx: TyCtxt, _: (CrateNum, DefId)) -> String {
- format!("looking up implementations of a trait in a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::all_trait_implementations<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up all (?) trait implementations")
- }
-}
-
-impl<'tcx> QueryDescription for queries::link_args<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up link arguments for a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::named_region_map<'tcx> {
- fn describe(_tcx: TyCtxt, _: DefIndex) -> String {
- format!("looking up a named region")
- }
-}
-
-impl<'tcx> QueryDescription for queries::is_late_bound_map<'tcx> {
- fn describe(_tcx: TyCtxt, _: DefIndex) -> String {
- format!("testing if a region is late boudn")
- }
-}
-
-impl<'tcx> QueryDescription for queries::object_lifetime_defaults_map<'tcx> {
- fn describe(_tcx: TyCtxt, _: DefIndex) -> String {
- format!("looking up lifetime defaults for a region")
- }
-}
-
-impl<'tcx> QueryDescription for queries::dep_kind<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("fetching what a dependency looks like")
- }
-}
-
-impl<'tcx> QueryDescription for queries::crate_name<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("fetching what a crate is named")
- }
-}
-
-impl<'tcx> QueryDescription for queries::get_lang_items<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("calculating the lang items map")
- }
-}
-
-impl<'tcx> QueryDescription for queries::defined_lang_items<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("calculating the lang items defined in a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::missing_lang_items<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("calculating the missing lang items in a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::visible_parent_map<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("calculating the visible parent map")
- }
-}
-
-impl<'tcx> QueryDescription for queries::missing_extern_crate_item<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("seeing if we're missing an `extern crate` item for this crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::used_crate_source<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking at the source for a crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::postorder_cnums<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("generating a postorder list of CrateNums")
- }
-}
-
-impl<'tcx> QueryDescription for queries::maybe_unused_extern_crates<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("looking up all possibly unused extern crates")
- }
-}
-
-impl<'tcx> QueryDescription for queries::stability_index<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("calculating the stability index for the local crate")
- }
-}
-
-impl<'tcx> QueryDescription for queries::all_crate_nums<'tcx> {
- fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
- format!("fetching all foreign CrateNum instances")
- }
-}
-
-// If enabled, send a message to the profile-queries thread
-macro_rules! profq_msg {
- ($tcx:expr, $msg:expr) => {
- if cfg!(debug_assertions) {
- if $tcx.sess.profile_queries() {
- profq_msg($msg)
- }
- }
- }
-}
-
-// If enabled, format a key using its debug string, which can be
-// expensive to compute (in terms of time).
-macro_rules! profq_key {
- ($tcx:expr, $key:expr) => {
- if cfg!(debug_assertions) {
- if $tcx.sess.profile_queries_and_keys() {
- Some(format!("{:?}", $key))
- } else { None }
- } else { None }
- }
-}
-
-macro_rules! define_maps {
- (<$tcx:tt>
- $($(#[$attr:meta])*
- [$($modifiers:tt)*] fn $name:ident: $node:ident($K:ty) -> $V:ty,)*) => {
- define_map_struct! {
- tcx: $tcx,
- input: ($(([$($modifiers)*] [$($attr)*] [$name]))*)
- }
-
- impl<$tcx> Maps<$tcx> {
- pub fn new(providers: IndexVec<CrateNum, Providers<$tcx>>)
- -> Self {
- Maps {
- providers,
- query_stack: RefCell::new(vec![]),
- $($name: RefCell::new(QueryMap::new())),*
- }
- }
- }
-
- #[allow(bad_style)]
- #[derive(Copy, Clone, Debug, PartialEq, Eq)]
- pub enum Query<$tcx> {
- $($(#[$attr])* $name($K)),*
- }
-
- #[allow(bad_style)]
- #[derive(Clone, Debug, PartialEq, Eq)]
- pub enum QueryMsg {
- $($name(Option<String>)),*
- }
-
- impl<$tcx> Query<$tcx> {
- pub fn describe(&self, tcx: TyCtxt) -> String {
- let (r, name) = match *self {
- $(Query::$name(key) => {
- (queries::$name::describe(tcx, key), stringify!($name))
- })*
- };
- if tcx.sess.verbose() {
- format!("{} [{}]", r, name)
- } else {
- r
- }
- }
- }
-
- pub mod queries {
- use std::marker::PhantomData;
-
- $(#[allow(bad_style)]
- pub struct $name<$tcx> {
- data: PhantomData<&$tcx ()>
- })*
- }
-
- $(impl<$tcx> QueryConfig for queries::$name<$tcx> {
- type Key = $K;
- type Value = $V;
- }
-
- impl<'a, $tcx, 'lcx> queries::$name<$tcx> {
- #[allow(unused)]
- fn to_dep_node(tcx: TyCtxt<'a, $tcx, 'lcx>, key: &$K) -> DepNode {
- use dep_graph::DepConstructor::*;
-
- DepNode::new(tcx, $node(*key))
- }
-
- fn try_get_with<F, R>(tcx: TyCtxt<'a, $tcx, 'lcx>,
- mut span: Span,
- key: $K,
- f: F)
- -> Result<R, CycleError<'a, $tcx>>
- where F: FnOnce(&$V) -> R
- {
- debug!("ty::queries::{}::try_get_with(key={:?}, span={:?})",
- stringify!($name),
- key,
- span);
-
- profq_msg!(tcx,
- ProfileQueriesMsg::QueryBegin(
- span.clone(),
- QueryMsg::$name(profq_key!(tcx, key))
- )
- );
-
- if let Some(value) = tcx.maps.$name.borrow().map.get(&key) {
- if let Some(ref d) = value.diagnostics {
- if !d.emitted_diagnostics.get() {
- d.emitted_diagnostics.set(true);
- let handle = tcx.sess.diagnostic();
- for diagnostic in d.diagnostics.iter() {
- DiagnosticBuilder::new_diagnostic(handle, diagnostic.clone())
- .emit();
- }
- }
- }
- profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
- tcx.dep_graph.read_index(value.index);
- return Ok(f(&value.value));
- }
- // else, we are going to run the provider:
- profq_msg!(tcx, ProfileQueriesMsg::ProviderBegin);
-
- // FIXME(eddyb) Get more valid Span's on queries.
- // def_span guard is necessary to prevent a recursive loop,
- // default_span calls def_span query internally.
- if span == DUMMY_SP && stringify!($name) != "def_span" {
- span = key.default_span(tcx)
- }
-
- let res = tcx.cycle_check(span, Query::$name(key), || {
- let dep_node = Self::to_dep_node(tcx, &key);
-
- tcx.sess.diagnostic().track_diagnostics(|| {
- if dep_node.kind.is_anon() {
- tcx.dep_graph.with_anon_task(dep_node.kind, || {
- let provider = tcx.maps.providers[key.map_crate()].$name;
- provider(tcx.global_tcx(), key)
- })
- } else {
- fn run_provider<'a, 'tcx, 'lcx>(tcx: TyCtxt<'a, 'tcx, 'lcx>,
- key: $K)
- -> $V {
- let provider = tcx.maps.providers[key.map_crate()].$name;
- provider(tcx.global_tcx(), key)
- }
-
- tcx.dep_graph.with_task(dep_node, tcx, key, run_provider)
- }
- })
- })?;
- profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd);
- let ((result, dep_node_index), diagnostics) = res;
-
- tcx.dep_graph.read_index(dep_node_index);
-
- let value = QueryValue {
- value: result,
- index: dep_node_index,
- diagnostics: if diagnostics.len() == 0 {
- None
- } else {
- Some(Box::new(QueryDiagnostics {
- diagnostics,
- emitted_diagnostics: Cell::new(true),
- }))
- },
- };
-
- Ok(f(&tcx.maps
- .$name
- .borrow_mut()
- .map
- .entry(key)
- .or_insert(value)
- .value))
- }
-
- pub fn try_get(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K)
- -> Result<$V, DiagnosticBuilder<'a>> {
- match Self::try_get_with(tcx, span, key, Clone::clone) {
- Ok(e) => Ok(e),
- Err(e) => Err(tcx.report_cycle(e)),
- }
- }
-
- pub fn force(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K) {
- // Ignore dependencies, since we not reading the computed value
- let _task = tcx.dep_graph.in_ignore();
-
- match Self::try_get_with(tcx, span, key, |_| ()) {
- Ok(()) => {}
- Err(e) => tcx.report_cycle(e).emit(),
- }
- }
- })*
-
- #[derive(Copy, Clone)]
- pub struct TyCtxtAt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
- pub span: Span,
- }
-
- impl<'a, 'gcx, 'tcx> Deref for TyCtxtAt<'a, 'gcx, 'tcx> {
- type Target = TyCtxt<'a, 'gcx, 'tcx>;
- fn deref(&self) -> &Self::Target {
- &self.tcx
- }
- }
-
- impl<'a, $tcx, 'lcx> TyCtxt<'a, $tcx, 'lcx> {
- /// Return a transparent wrapper for `TyCtxt` which uses
- /// `span` as the location of queries performed through it.
- pub fn at(self, span: Span) -> TyCtxtAt<'a, $tcx, 'lcx> {
- TyCtxtAt {
- tcx: self,
- span
- }
- }
-
- $($(#[$attr])*
- pub fn $name(self, key: $K) -> $V {
- self.at(DUMMY_SP).$name(key)
- })*
- }
-
- impl<'a, $tcx, 'lcx> TyCtxtAt<'a, $tcx, 'lcx> {
- $($(#[$attr])*
- pub fn $name(self, key: $K) -> $V {
- queries::$name::try_get(self.tcx, self.span, key).unwrap_or_else(|mut e| {
- e.emit();
- Value::from_cycle_error(self.global_tcx())
- })
- })*
- }
-
- define_provider_struct! {
- tcx: $tcx,
- input: ($(([$($modifiers)*] [$name] [$K] [$V]))*),
- output: ()
- }
-
- impl<$tcx> Copy for Providers<$tcx> {}
- impl<$tcx> Clone for Providers<$tcx> {
- fn clone(&self) -> Self { *self }
- }
- }
-}
-
-macro_rules! define_map_struct {
- // Initial state
- (tcx: $tcx:tt,
- input: $input:tt) => {
- define_map_struct! {
- tcx: $tcx,
- input: $input,
- output: ()
- }
- };
-
- // Final output
- (tcx: $tcx:tt,
- input: (),
- output: ($($output:tt)*)) => {
- pub struct Maps<$tcx> {
- providers: IndexVec<CrateNum, Providers<$tcx>>,
- query_stack: RefCell<Vec<(Span, Query<$tcx>)>>,
- $($output)*
- }
- };
-
- // Field recognized and ready to shift into the output
- (tcx: $tcx:tt,
- ready: ([$($pub:tt)*] [$($attr:tt)*] [$name:ident]),
- input: $input:tt,
- output: ($($output:tt)*)) => {
- define_map_struct! {
- tcx: $tcx,
- input: $input,
- output: ($($output)*
- $(#[$attr])* $($pub)* $name: RefCell<QueryMap<queries::$name<$tcx>>>,)
- }
- };
-
- // No modifiers left? This is a private item.
- (tcx: $tcx:tt,
- input: (([] $attrs:tt $name:tt) $($input:tt)*),
- output: $output:tt) => {
- define_map_struct! {
- tcx: $tcx,
- ready: ([] $attrs $name),
- input: ($($input)*),
- output: $output
- }
- };
-
- // Skip other modifiers
- (tcx: $tcx:tt,
- input: (([$other_modifier:tt $($modifiers:tt)*] $($fields:tt)*) $($input:tt)*),
- output: $output:tt) => {
- define_map_struct! {
- tcx: $tcx,
- input: (([$($modifiers)*] $($fields)*) $($input)*),
- output: $output
- }
- };
-}
-
-macro_rules! define_provider_struct {
- // Initial state:
- (tcx: $tcx:tt, input: $input:tt) => {
- define_provider_struct! {
- tcx: $tcx,
- input: $input,
- output: ()
- }
- };
-
- // Final state:
- (tcx: $tcx:tt,
- input: (),
- output: ($(([$name:ident] [$K:ty] [$R:ty]))*)) => {
- pub struct Providers<$tcx> {
- $(pub $name: for<'a> fn(TyCtxt<'a, $tcx, $tcx>, $K) -> $R,)*
- }
-
- impl<$tcx> Default for Providers<$tcx> {
- fn default() -> Self {
- $(fn $name<'a, $tcx>(_: TyCtxt<'a, $tcx, $tcx>, key: $K) -> $R {
- bug!("tcx.maps.{}({:?}) unsupported by its crate",
- stringify!($name), key);
- })*
- Providers { $($name),* }
- }
- }
- };
-
- // Something ready to shift:
- (tcx: $tcx:tt,
- ready: ($name:tt $K:tt $V:tt),
- input: $input:tt,
- output: ($($output:tt)*)) => {
- define_provider_struct! {
- tcx: $tcx,
- input: $input,
- output: ($($output)* ($name $K $V))
- }
- };
-
- // Regular queries produce a `V` only.
- (tcx: $tcx:tt,
- input: (([] $name:tt $K:tt $V:tt) $($input:tt)*),
- output: $output:tt) => {
- define_provider_struct! {
- tcx: $tcx,
- ready: ($name $K $V),
- input: ($($input)*),
- output: $output
- }
- };
-
- // Skip modifiers.
- (tcx: $tcx:tt,
- input: (([$other_modifier:tt $($modifiers:tt)*] $($fields:tt)*) $($input:tt)*),
- output: $output:tt) => {
- define_provider_struct! {
- tcx: $tcx,
- input: (([$($modifiers)*] $($fields)*) $($input)*),
- output: $output
- }
- };
-}
-
-// Each of these maps also corresponds to a method on a
-// `Provider` trait for requesting a value of that type,
-// and a method on `Maps` itself for doing that in a
-// a way that memoizes and does dep-graph tracking,
-// wrapping around the actual chain of providers that
-// the driver creates (using several `rustc_*` crates).
-define_maps! { <'tcx>
- /// Records the type of every item.
- [] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>,
-
- /// Maps from the def-id of an item (trait/struct/enum/fn) to its
- /// associated generics and predicates.
- [] fn generics_of: GenericsOfItem(DefId) -> &'tcx ty::Generics,
- [] fn predicates_of: PredicatesOfItem(DefId) -> ty::GenericPredicates<'tcx>,
-
- /// Maps from the def-id of a trait to the list of
- /// super-predicates. This is a subset of the full list of
- /// predicates. We store these in a separate map because we must
- /// evaluate them even during type conversion, often before the
- /// full predicates are available (note that supertraits have
- /// additional acyclicity requirements).
- [] fn super_predicates_of: SuperPredicatesOfItem(DefId) -> ty::GenericPredicates<'tcx>,
-
- /// To avoid cycles within the predicates of a single item we compute
- /// per-type-parameter predicates for resolving `T::AssocTy`.
- [] fn type_param_predicates: type_param_predicates((DefId, DefId))
- -> ty::GenericPredicates<'tcx>,
-
- [] fn trait_def: TraitDefOfItem(DefId) -> &'tcx ty::TraitDef,
- [] fn adt_def: AdtDefOfItem(DefId) -> &'tcx ty::AdtDef,
- [] fn adt_destructor: AdtDestructor(DefId) -> Option<ty::Destructor>,
- [] fn adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>],
- [] fn adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>,
-
- /// True if this is a const fn
- [] fn is_const_fn: IsConstFn(DefId) -> bool,
-
- /// True if this is a foreign item (i.e., linked via `extern { ... }`).
- [] fn is_foreign_item: IsForeignItem(DefId) -> bool,
-
- /// True if this is a default impl (aka impl Foo for ..)
- [] fn is_default_impl: IsDefaultImpl(DefId) -> bool,
-
- /// Get a map with the variance of every item; use `item_variance`
- /// instead.
- [] fn crate_variances: crate_variances(CrateNum) -> Rc<ty::CrateVariancesMap>,
-
- /// Maps from def-id of a type or region parameter to its
- /// (inferred) variance.
- [] fn variances_of: ItemVariances(DefId) -> Rc<Vec<ty::Variance>>,
-
- /// Maps from an impl/trait def-id to a list of the def-ids of its items
- [] fn associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc<Vec<DefId>>,
-
- /// Maps from a trait item to the trait item "descriptor"
- [] fn associated_item: AssociatedItems(DefId) -> ty::AssociatedItem,
-
- [] fn impl_trait_ref: ImplTraitRef(DefId) -> Option<ty::TraitRef<'tcx>>,
- [] fn impl_polarity: ImplPolarity(DefId) -> hir::ImplPolarity,
-
- /// Maps a DefId of a type to a list of its inherent impls.
- /// Contains implementations of methods that are inherent to a type.
- /// Methods in these implementations don't need to be exported.
- [] fn inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
-
- /// Set of all the def-ids in this crate that have MIR associated with
- /// them. This includes all the body owners, but also things like struct
- /// constructors.
- [] fn mir_keys: mir_keys(CrateNum) -> Rc<DefIdSet>,
-
- /// Maps DefId's that have an associated Mir to the result
- /// of the MIR qualify_consts pass. The actual meaning of
- /// the value isn't known except to the pass itself.
- [] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Rc<IdxSetBuf<mir::Local>>),
-
- /// Fetch the MIR for a given def-id up till the point where it is
- /// ready for const evaluation.
- ///
- /// See the README for the `mir` module for details.
- [] fn mir_const: MirConst(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
-
- [] fn mir_validated: MirValidated(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
-
- /// MIR after our optimization passes have run. This is MIR that is ready
- /// for trans. This is also the only query that can fetch non-local MIR, at present.
- [] fn optimized_mir: MirOptimized(DefId) -> &'tcx mir::Mir<'tcx>,
-
- /// Type of each closure. The def ID is the ID of the
- /// expression defining the closure.
- [] fn closure_kind: ClosureKind(DefId) -> ty::ClosureKind,
-
- /// The signature of functions and closures.
- [] fn fn_sig: FnSignature(DefId) -> ty::PolyFnSig<'tcx>,
-
- /// Records the signature of each generator. The def ID is the ID of the
- /// expression defining the closure.
- [] fn generator_sig: GenSignature(DefId) -> Option<ty::PolyGenSig<'tcx>>,
-
- /// Caches CoerceUnsized kinds for impls on custom types.
- [] fn coerce_unsized_info: CoerceUnsizedInfo(DefId)
- -> ty::adjustment::CoerceUnsizedInfo,
-
- [] fn typeck_item_bodies: typeck_item_bodies_dep_node(CrateNum) -> CompileResult,
-
- [] fn typeck_tables_of: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
-
- [] fn has_typeck_tables: HasTypeckTables(DefId) -> bool,
-
- [] fn coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
-
- [] fn borrowck: BorrowCheck(DefId) -> (),
- // FIXME: shouldn't this return a `Result<(), BorrowckErrors>` instead?
- [] fn mir_borrowck: MirBorrowCheck(DefId) -> (),
-
- /// Gets a complete map from all types to their inherent impls.
- /// Not meant to be used directly outside of coherence.
- /// (Defined only for LOCAL_CRATE)
- [] fn crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
-
- /// Checks all types in the krate for overlap in their inherent impls. Reports errors.
- /// Not meant to be used directly outside of coherence.
- /// (Defined only for LOCAL_CRATE)
- [] fn crate_inherent_impls_overlap_check: inherent_impls_overlap_check_dep_node(CrateNum) -> (),
-
- /// Results of evaluating const items or constants embedded in
- /// other items (such as enum variant explicit discriminants).
- [] fn const_eval: const_eval_dep_node(ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>)
- -> const_val::EvalResult<'tcx>,
-
- /// Performs the privacy check and computes "access levels".
- [] fn privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
-
- [] fn reachable_set: reachability_dep_node(CrateNum) -> Rc<NodeSet>,
-
- /// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body;
- /// in the case of closures, this will be redirected to the enclosing function.
- [] fn region_scope_tree: RegionScopeTree(DefId) -> Rc<region::ScopeTree>,
-
- [] fn mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx mir::Mir<'tcx>,
-
- [] fn def_symbol_name: SymbolName(DefId) -> ty::SymbolName,
- [] fn symbol_name: symbol_name_dep_node(ty::Instance<'tcx>) -> ty::SymbolName,
-
- [] fn describe_def: DescribeDef(DefId) -> Option<Def>,
- [] fn def_span: DefSpan(DefId) -> Span,
- [] fn lookup_stability: LookupStability(DefId) -> Option<&'tcx attr::Stability>,
- [] fn lookup_deprecation_entry: LookupDeprecationEntry(DefId) -> Option<DeprecationEntry>,
- [] fn item_attrs: ItemAttrs(DefId) -> Rc<[ast::Attribute]>,
- [] fn fn_arg_names: FnArgNames(DefId) -> Vec<ast::Name>,
- [] fn impl_parent: ImplParent(DefId) -> Option<DefId>,
- [] fn trait_of_item: TraitOfItem(DefId) -> Option<DefId>,
- [] fn is_exported_symbol: IsExportedSymbol(DefId) -> bool,
- [] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId)
- -> Rc<BTreeMap<hir::BodyId, hir::Body>>,
- [] fn const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
- [] fn is_mir_available: IsMirAvailable(DefId) -> bool,
-
- [] fn trait_impls_of: TraitImpls(DefId) -> Rc<ty::trait_def::TraitImpls>,
- [] fn specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
- [] fn is_object_safe: ObjectSafety(DefId) -> bool,
-
- // Get the ParameterEnvironment for a given item; this environment
- // will be in "user-facing" mode, meaning that it is suitabe for
- // type-checking etc, and it does not normalize specializable
- // associated types. This is almost always what you want,
- // unless you are doing MIR optimizations, in which case you
- // might want to use `reveal_all()` method to change modes.
- [] fn param_env: ParamEnv(DefId) -> ty::ParamEnv<'tcx>,
-
- // Trait selection queries. These are best used by invoking `ty.moves_by_default()`,
- // `ty.is_copy()`, etc, since that will prune the environment where possible.
- [] fn is_copy_raw: is_copy_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
- [] fn is_sized_raw: is_sized_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
- [] fn is_freeze_raw: is_freeze_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
- [] fn needs_drop_raw: needs_drop_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
- [] fn layout_raw: layout_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
- -> Result<&'tcx Layout, LayoutError<'tcx>>,
-
- [] fn dylib_dependency_formats: DylibDepFormats(CrateNum)
- -> Rc<Vec<(CrateNum, LinkagePreference)>>,
-
- [] fn is_panic_runtime: IsPanicRuntime(CrateNum) -> bool,
- [] fn is_compiler_builtins: IsCompilerBuiltins(CrateNum) -> bool,
- [] fn has_global_allocator: HasGlobalAllocator(CrateNum) -> bool,
- [] fn is_sanitizer_runtime: IsSanitizerRuntime(CrateNum) -> bool,
- [] fn is_profiler_runtime: IsProfilerRuntime(CrateNum) -> bool,
- [] fn panic_strategy: GetPanicStrategy(CrateNum) -> PanicStrategy,
- [] fn is_no_builtins: IsNoBuiltins(CrateNum) -> bool,
-
- [] fn extern_crate: ExternCrate(DefId) -> Rc<Option<ExternCrate>>,
-
- [] fn specializes: specializes_node((DefId, DefId)) -> bool,
- [] fn in_scope_traits_map: InScopeTraits(DefIndex)
- -> Option<Rc<FxHashMap<ItemLocalId, Rc<Vec<TraitCandidate>>>>>,
- [] fn module_exports: ModuleExports(DefId) -> Option<Rc<Vec<Export>>>,
- [] fn lint_levels: lint_levels_node(CrateNum) -> Rc<lint::LintLevelMap>,
-
- [] fn impl_defaultness: ImplDefaultness(DefId) -> hir::Defaultness,
- [] fn exported_symbols: ExportedSymbols(CrateNum) -> Rc<Vec<DefId>>,
- [] fn native_libraries: NativeLibraries(CrateNum) -> Rc<Vec<NativeLibrary>>,
- [] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option<DefId>,
- [] fn derive_registrar_fn: DeriveRegistrarFn(CrateNum) -> Option<DefId>,
- [] fn crate_disambiguator: CrateDisambiguator(CrateNum) -> Symbol,
- [] fn crate_hash: CrateHash(CrateNum) -> Svh,
- [] fn original_crate_name: OriginalCrateName(CrateNum) -> Symbol,
-
- [] fn implementations_of_trait: implementations_of_trait_node((CrateNum, DefId))
- -> Rc<Vec<DefId>>,
- [] fn all_trait_implementations: AllTraitImplementations(CrateNum)
- -> Rc<Vec<DefId>>,
-
- [] fn is_dllimport_foreign_item: IsDllimportForeignItem(DefId) -> bool,
- [] fn is_statically_included_foreign_item: IsStaticallyIncludedForeignItem(DefId) -> bool,
- [] fn native_library_kind: NativeLibraryKind(DefId)
- -> Option<NativeLibraryKind>,
- [] fn link_args: link_args_node(CrateNum) -> Rc<Vec<String>>,
-
- [] fn named_region_map: NamedRegion(DefIndex) ->
- Option<Rc<FxHashMap<ItemLocalId, Region>>>,
- [] fn is_late_bound_map: IsLateBound(DefIndex) ->
- Option<Rc<FxHashSet<ItemLocalId>>>,
- [] fn object_lifetime_defaults_map: ObjectLifetimeDefaults(DefIndex)
- -> Option<Rc<FxHashMap<ItemLocalId, Rc<Vec<ObjectLifetimeDefault>>>>>,
-
- [] fn visibility: Visibility(DefId) -> ty::Visibility,
- [] fn dep_kind: DepKind(CrateNum) -> DepKind,
- [] fn crate_name: CrateName(CrateNum) -> Symbol,
- [] fn item_children: ItemChildren(DefId) -> Rc<Vec<Export>>,
- [] fn extern_mod_stmt_cnum: ExternModStmtCnum(DefId) -> Option<CrateNum>,
-
- [] fn get_lang_items: get_lang_items_node(CrateNum) -> Rc<LanguageItems>,
- [] fn defined_lang_items: DefinedLangItems(CrateNum) -> Rc<Vec<(DefIndex, usize)>>,
- [] fn missing_lang_items: MissingLangItems(CrateNum) -> Rc<Vec<LangItem>>,
- [] fn extern_const_body: ExternConstBody(DefId) -> &'tcx hir::Body,
- [] fn visible_parent_map: visible_parent_map_node(CrateNum)
- -> Rc<DefIdMap<DefId>>,
- [] fn missing_extern_crate_item: MissingExternCrateItem(CrateNum) -> bool,
- [] fn used_crate_source: UsedCrateSource(CrateNum) -> Rc<CrateSource>,
- [] fn postorder_cnums: postorder_cnums_node(CrateNum) -> Rc<Vec<CrateNum>>,
-
- [] fn freevars: Freevars(DefId) -> Option<Rc<Vec<hir::Freevar>>>,
- [] fn maybe_unused_trait_import: MaybeUnusedTraitImport(DefId) -> bool,
- [] fn maybe_unused_extern_crates: maybe_unused_extern_crates_node(CrateNum)
- -> Rc<Vec<(DefId, Span)>>,
-
- [] fn stability_index: stability_index_node(CrateNum) -> Rc<stability::Index<'tcx>>,
- [] fn all_crate_nums: all_crate_nums_node(CrateNum) -> Rc<Vec<CrateNum>>,
-}
-
-fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> {
- DepConstructor::TypeParamPredicates {
- item_id,
- param_id
- }
-}
-
-fn coherent_trait_dep_node<'tcx>((_, def_id): (CrateNum, DefId)) -> DepConstructor<'tcx> {
- DepConstructor::CoherenceCheckTrait(def_id)
-}
-
-fn crate_inherent_impls_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::Coherence
-}
-
-fn inherent_impls_overlap_check_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::CoherenceInherentImplOverlapCheck
-}
-
-fn reachability_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::Reachability
-}
-
-fn mir_shim_dep_node<'tcx>(instance_def: ty::InstanceDef<'tcx>) -> DepConstructor<'tcx> {
- DepConstructor::MirShim {
- instance_def
- }
-}
-
-fn symbol_name_dep_node<'tcx>(instance: ty::Instance<'tcx>) -> DepConstructor<'tcx> {
- DepConstructor::InstanceSymbolName { instance }
-}
-
-fn typeck_item_bodies_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::TypeckBodiesKrate
-}
-
-fn const_eval_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>)
- -> DepConstructor<'tcx> {
- DepConstructor::ConstEval
-}
-
-fn mir_keys<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::MirKeys
-}
-
-fn crate_variances<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::CrateVariances
-}
-
-fn is_copy_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
- DepConstructor::IsCopy
-}
-
-fn is_sized_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
- DepConstructor::IsSized
-}
-
-fn is_freeze_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
- DepConstructor::IsFreeze
-}
-
-fn needs_drop_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
- DepConstructor::NeedsDrop
-}
-
-fn layout_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
- DepConstructor::Layout
-}
-
-fn lint_levels_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::LintLevels
-}
-
-fn specializes_node<'tcx>((a, b): (DefId, DefId)) -> DepConstructor<'tcx> {
- DepConstructor::Specializes { impl1: a, impl2: b }
-}
-
-fn implementations_of_trait_node<'tcx>((krate, trait_id): (CrateNum, DefId))
- -> DepConstructor<'tcx>
-{
- DepConstructor::ImplementationsOfTrait { krate, trait_id }
-}
-
-fn link_args_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::LinkArgs
-}
-
-fn get_lang_items_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::GetLangItems
-}
-
-fn visible_parent_map_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::VisibleParentMap
-}
-
-fn postorder_cnums_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::PostorderCnums
-}
-
-fn maybe_unused_extern_crates_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::MaybeUnusedExternCrates
-}
-
-fn stability_index_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::StabilityIndex
-}
-
-fn all_crate_nums_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
- DepConstructor::AllCrateNums
-}
--- /dev/null
+# The Rust Compiler Query System
+
+The Compiler Query System is the key to our new demand-driven
+organization. The idea is pretty simple. You have various queries
+that compute things about the input -- for example, there is a query
+called `type_of(def_id)` that, given the def-id of some item, will
+compute the type of that item and return it to you.
+
+Query execution is **memoized** -- so the first time you invoke a
+query, it will go do the computation, but the next time, the result is
+returned from a hashtable. Moreover, query execution fits nicely into
+**incremental computation**; the idea is roughly that, when you do a
+query, the result **may** be returned to you by loading stored data
+from disk (but that's a separate topic we won't discuss further here).
+
+The overall vision is that, eventually, the entire compiler
+control-flow will be query driven. There will effectively be one
+top-level query ("compile") that will run compilation on a crate; this
+will in turn demand information about that crate, starting from the
+*end*. For example:
+
+- This "compile" query might demand to get a list of codegen-units
+ (i.e., modules that need to be compiled by LLVM).
+- But computing the list of codegen-units would invoke some subquery
+ that returns the list of all modules defined in the Rust source.
+- That query in turn would invoke something asking for the HIR.
+- This keeps going further and further back until we wind up doing the
+ actual parsing.
+
+However, that vision is not fully realized. Still, big chunks of the
+compiler (for example, generating MIR) work exactly like this.
+
+### Invoking queries
+
+To invoke a query is simple. The tcx ("type context") offers a method
+for each defined query. So, for example, to invoke the `type_of`
+query, you would just do this:
+
+```rust
+let ty = tcx.type_of(some_def_id);
+```
+
+### Cycles between queries
+
+Currently, cycles during query execution should always result in a
+compilation error. Typically, they arise because of illegal programs
+that contain cyclic references they shouldn't (though sometimes they
+arise because of compiler bugs, in which case we need to factor our
+queries in a more fine-grained fashion to avoid them).
+
+However, it is nonetheless often useful to *recover* from a cycle
+(after reporting an error, say) and try to soldier on, so as to give a
+better user experience. In order to recover from a cycle, you don't
+get to use the nice method-call-style syntax. Instead, you invoke
+using the `try_get` method, which looks roughly like this:
+
+```rust
+use ty::maps::queries;
+...
+match queries::type_of::try_get(tcx, DUMMY_SP, self.did) {
+ Ok(result) => {
+ // no cycle occurred! You can use `result`
+ }
+ Err(err) => {
+ // A cycle occurred! The error value `err` is a `DiagnosticBuilder`,
+ // meaning essentially an "in-progress", not-yet-reported error message.
+ // See below for more details on what to do here.
+ }
+}
+```
+
+So, if you get back an `Err` from `try_get`, then a cycle *did* occur. This means that
+you must ensure that a compiler error message is reported. You can do that in two ways:
+
+The simplest is to invoke `err.emit()`. This will emit the cycle error to the user.
+
+However, often cycles happen because of an illegal program, and you
+know at that point that an error either already has been reported or
+will be reported due to this cycle by some other bit of code. In that
+case, you can invoke `err.cancel()` to not emit any error. It is
+traditional to then invoke:
+
+```
+tcx.sess.delay_span_bug(some_span, "some message")
+```
+
+`delay_span_bug()` is a helper that says: we expect a compilation
+error to have happened or to happen in the future; so, if compilation
+ultimately succeeds, make an ICE with the message `"some
+message"`. This is basically just a precaution in case you are wrong.
+
+### How the compiler executes a query
+
+So you may be wondering what happens when you invoke a query
+method. The answer is that, for each query, the compiler maintains a
+cache -- if your query has already been executed, then, the answer is
+simple: we clone the return value out of the cache and return it
+(therefore, you should try to ensure that the return types of queries
+are cheaply cloneable; insert an `Rc` if necessary).
+
+#### Providers
+
+If, however, the query is *not* in the cache, then the compiler will
+try to find a suitable **provider**. A provider is a function that has
+been defined and linked into the compiler somewhere that contains the
+code to compute the result of the query.
+
+**Providers are defined per-crate.** The compiler maintains,
+internally, a table of providers for every crate, at least
+conceptually. Right now, there are really two sets: the providers for
+queries about the **local crate** (that is, the one being compiled)
+and providers for queries about **external crates** (that is,
+dependencies of the local crate). Note that what determines the crate
+that a query is targeting is not the *kind* of query, but the *key*.
+For example, when you invoke `tcx.type_of(def_id)`, that could be a
+local query or an external query, depending on what crate the `def_id`
+is referring to (see the `self::keys::Key` trait for more information
+on how that works).
+
+Providers always have the same signature:
+
+```rust
+fn provider<'cx, 'tcx>(tcx: TyCtxt<'cx, 'tcx, 'tcx>,
+ key: QUERY_KEY)
+ -> QUERY_RESULT
+{
+ ...
+}
+```
+
+Providers take two arguments: the `tcx` and the query key. Note also
+that they take the *global* tcx (i.e., they use the `'tcx` lifetime
+twice), rather than taking a tcx with some active inference context.
+They return the result of the query.
+
+#### How providers are setup
+
+When the tcx is created, it is given the providers by its creator using
+the `Providers` struct. This struct is generated by the macros here, but it
+is basically a big list of function pointers:
+
+```rust
+struct Providers {
+ type_of: for<'cx, 'tcx> fn(TyCtxt<'cx, 'tcx, 'tcx>, DefId) -> Ty<'tcx>,
+ ...
+}
+```
+
+At present, we have one copy of the struct for local crates, and one
+for external crates, though the plan is that we may eventually have
+one per crate.
+
+These `Provider` structs are ultimately created and populated by
+`librustc_driver`, but it does this by distributing the work
+throughout the other `rustc_*` crates. This is done by invoking
+various `provide` functions. These functions tend to look something
+like this:
+
+```rust
+pub fn provide(providers: &mut Providers) {
+ *providers = Providers {
+ type_of,
+ ..*providers
+ };
+}
+```
+
+That is, they take an `&mut Providers` and mutate it in place. Usually
+we use the formulation above just because it looks nice, but you could
+as well do `providers.type_of = type_of`, which would be equivalent.
+(Here, `type_of` would be a top-level function, defined as we saw
+before.) So, if we wanted to add a provider for some other query,
+let's call it `fubar`, into the crate above, we might modify the `provide()`
+function like so:
+
+```rust
+pub fn provide(providers: &mut Providers) {
+ *providers = Providers {
+ type_of,
+ fubar,
+ ..*providers
+ };
+}
+
+fn fubar<'cx, 'tcx>(tcx: TyCtxt<'cx, 'tcx, 'tcx>, key: DefId) -> Fubar<'tcx> { .. }
+```
+
+NB. Most of the `rustc_*` crates only provide **local
+providers**. Almost all **extern providers** wind up going through the
+`rustc_metadata` crate, which loads the information from the crate
+metadata. But in some cases there are crates that provide queries for
+*both* local and external crates, in which case they define both a
+`provide` and a `provide_extern` function that `rustc_driver` can
+invoke.
+
+### Adding a new kind of query
+
+So suppose you want to add a new kind of query, how do you do so?
+Well, defining a query takes place in two steps:
+
+1. first, you have to specify the query name and arguments; and then,
+2. you have to supply query providers where needed.
+
+To specify the query name and arguments, you simply add an entry
+to the big macro invocation in `mod.rs`. This will probably have changed
+by the time you read this README, but at present it looks something
+like:
+
+```
+define_maps! { <'tcx>
+ /// Records the type of every item.
+ [] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>,
+
+ ...
+}
+```
+
+Each line of the macro defines one query. The name is broken up like this:
+
+```
+[] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>,
+^^ ^^^^^^^ ^^^^^^^^^^ ^^^^^ ^^^^^^^^
+| | | | |
+| | | | result type of query
+| | | query key type
+| | dep-node constructor
+| name of query
+query flags
+```
+
+Let's go over them one by one:
+
+- **Query flags:** these are largely unused right now, but the intention
+ is that we'll be able to customize various aspects of how the query is
+ processed.
+- **Name of query:** the name of the query method
+ (`tcx.type_of(..)`). Also used as the name of a struct
+ (`ty::maps::queries::type_of`) that will be generated to represent
+ this query.
+- **Dep-node constructor:** indicates the constructor function that
+ connects this query to incremental compilation. Typically, this is a
+ `DepNode` variant, which can be added by modifying the
+ `define_dep_nodes!` macro invocation in
+ `librustc/dep_graph/dep_node.rs`.
+ - However, sometimes we use a custom function, in which case the
+ name will be in snake case and the function will be defined at the
+ bottom of the file. This is typically used when the query key is
+ not a def-id, or just not the type that the dep-node expects.
+- **Query key type:** the type of the argument to this query.
+ This type must implement the `ty::maps::keys::Key` trait, which
+ defines (for example) how to map it to a crate, and so forth.
+- **Result type of query:** the type produced by this query. This type
+ should (a) not use `RefCell` or other interior mutability and (b) be
+ cheaply cloneable. Interning or using `Rc` or `Arc` is recommended for
+ non-trivial data types.
+ - The one exception to those rules is the `ty::steal::Steal` type,
+ which is used to cheaply modify MIR in place. See the definition
+ of `Steal` for more details. New uses of `Steal` should **not** be
+ added without alerting `@rust-lang/compiler`.
+
+So, to add a query:
+
+- Add an entry to `define_maps!` using the format above.
+- Possibly add a corresponding entry to the dep-node macro.
+- Link the provider by modifying the appropriate `provide` method;
+ or add a new one if needed and ensure that `rustc_driver` is invoking it.
+
+#### Query structs and descriptions
+
+For each kind, the `define_maps` macro will generate a "query struct"
+named after the query. This struct is a kind of a place-holder
+describing the query. Each such struct implements the
+`self::config::QueryConfig` trait, which has associated types for the
+key/value of that particular query. Basically the code generated looks something
+like this:
+
+```rust
+// Dummy struct representing a particular kind of query:
+pub struct type_of<'tcx> { phantom: PhantomData<&'tcx ()> }
+
+impl<'tcx> QueryConfig for type_of<'tcx> {
+ type Key = DefId;
+ type Value = Ty<'tcx>;
+}
+```
+
+There is an additional trait that you may wish to implement called
+`self::config::QueryDescription`. This trait is used during cycle
+errors to give a "human readable" name for the query, so that we can
+summarize what was happening when the cycle occurred. Implementing
+this trait is optional if the query key is `DefId`, but if you *don't*
+implement it, you get a pretty generic error ("processing `foo`...").
+You can put new impls into the `config` module. They look something like this:
+
+```rust
+impl<'tcx> QueryDescription for queries::type_of<'tcx> {
+ fn describe(tcx: TyCtxt, key: DefId) -> String {
+ format!("computing the type of `{}`", tcx.item_path_str(key))
+ }
+}
+```
+
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use hir::def_id::{CrateNum, DefId, DefIndex};
+use ty::{self, Ty, TyCtxt};
+use ty::maps::queries;
+use ty::subst::Substs;
+
+use std::hash::Hash;
+use syntax_pos::symbol::InternedString;
+
+/// Query configuration and description traits.
+
+pub trait QueryConfig {
+ type Key: Eq + Hash + Clone;
+ type Value;
+}
+
+pub(super) trait QueryDescription: QueryConfig {
+ fn describe(tcx: TyCtxt, key: Self::Key) -> String;
+}
+
+impl<M: QueryConfig<Key=DefId>> QueryDescription for M {
+ default fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("processing `{}`", tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_copy_raw<'tcx> {
+ fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
+ format!("computing whether `{}` is `Copy`", env.value)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_sized_raw<'tcx> {
+ fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
+ format!("computing whether `{}` is `Sized`", env.value)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_freeze_raw<'tcx> {
+ fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
+ format!("computing whether `{}` is freeze", env.value)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::needs_drop_raw<'tcx> {
+ fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
+ format!("computing whether `{}` needs drop", env.value)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::layout_raw<'tcx> {
+ fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
+ format!("computing layout of `{}`", env.value)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::super_predicates_of<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("computing the supertraits of `{}`",
+ tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::type_param_predicates<'tcx> {
+ fn describe(tcx: TyCtxt, (_, def_id): (DefId, DefId)) -> String {
+ let id = tcx.hir.as_local_node_id(def_id).unwrap();
+ format!("computing the bounds for type parameter `{}`",
+ tcx.hir.ty_param_name(id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::coherent_trait<'tcx> {
+ fn describe(tcx: TyCtxt, (_, def_id): (CrateNum, DefId)) -> String {
+ format!("coherence checking all impls of trait `{}`",
+ tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::crate_inherent_impls<'tcx> {
+ fn describe(_: TyCtxt, k: CrateNum) -> String {
+ format!("all inherent impls defined in crate `{:?}`", k)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::crate_inherent_impls_overlap_check<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ format!("check for overlap between inherent impls defined in this crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::crate_variances<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("computing the variances for items in this crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::mir_shims<'tcx> {
+ fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String {
+ format!("generating MIR shim for `{}`",
+ tcx.item_path_str(def.def_id()))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::privacy_access_levels<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ format!("privacy access levels")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::typeck_item_bodies<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ format!("type-checking all item bodies")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::reachable_set<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ format!("reachability")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::const_eval<'tcx> {
+ fn describe(tcx: TyCtxt, key: ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>) -> String {
+ format!("const-evaluating `{}`", tcx.item_path_str(key.value.0))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::mir_keys<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ format!("getting a list of all mir_keys")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::symbol_name<'tcx> {
+ fn describe(_tcx: TyCtxt, instance: ty::Instance<'tcx>) -> String {
+ format!("computing the symbol for `{}`", instance)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::describe_def<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("describe_def")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::def_span<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("def_span")
+ }
+}
+
+
+impl<'tcx> QueryDescription for queries::lookup_stability<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("stability")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::lookup_deprecation_entry<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("deprecation")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::item_attrs<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("item_attrs")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_exported_symbol<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("is_exported_symbol")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::fn_arg_names<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("fn_arg_names")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::impl_parent<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("impl_parent")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::trait_of_item<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("trait_of_item")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::item_body_nested_bodies<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("nested item bodies of `{}`", tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::const_is_rvalue_promotable_to_static<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("const checking if rvalue is promotable to static `{}`",
+ tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_mir_available<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("checking if item is mir available: `{}`",
+ tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("trait impls of `{}`", tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("determine object safety of trait `{}`", tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_const_fn<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("checking if item is const fn: `{}`", tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::dylib_dependency_formats<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ "dylib dependency formats of crate".to_string()
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_panic_runtime<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ "checking if the crate is_panic_runtime".to_string()
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_compiler_builtins<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ "checking if the crate is_compiler_builtins".to_string()
+ }
+}
+
+impl<'tcx> QueryDescription for queries::has_global_allocator<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ "checking if the crate has_global_allocator".to_string()
+ }
+}
+
+impl<'tcx> QueryDescription for queries::extern_crate<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ "getting crate's ExternCrateData".to_string()
+ }
+}
+
+impl<'tcx> QueryDescription for queries::lint_levels<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("computing the lint levels for items in this crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::specializes<'tcx> {
+ fn describe(_tcx: TyCtxt, _: (DefId, DefId)) -> String {
+ format!("computing whether impls specialize one another")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::in_scope_traits_map<'tcx> {
+ fn describe(_tcx: TyCtxt, _: DefIndex) -> String {
+ format!("traits in scope at a block")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_no_builtins<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("test whether a crate has #![no_builtins]")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::panic_strategy<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("query a crate's configured panic strategy")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_profiler_runtime<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("query a crate is #![profiler_runtime]")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_sanitizer_runtime<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("query a crate is #![sanitizer_runtime]")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::exported_symbol_ids<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up the exported symbols of a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::native_libraries<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up the native libraries of a linked crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::plugin_registrar_fn<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up the plugin registrar for a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::derive_registrar_fn<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up the derive registrar for a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::crate_disambiguator<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up the disambiguator a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::crate_hash<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up the hash a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::original_crate_name<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up the original name a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::implementations_of_trait<'tcx> {
+ fn describe(_tcx: TyCtxt, _: (CrateNum, DefId)) -> String {
+ format!("looking up implementations of a trait in a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::all_trait_implementations<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up all (?) trait implementations")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::link_args<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up link arguments for a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::named_region_map<'tcx> {
+ fn describe(_tcx: TyCtxt, _: DefIndex) -> String {
+ format!("looking up a named region")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_late_bound_map<'tcx> {
+ fn describe(_tcx: TyCtxt, _: DefIndex) -> String {
+ format!("testing if a region is late boudn")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::object_lifetime_defaults_map<'tcx> {
+ fn describe(_tcx: TyCtxt, _: DefIndex) -> String {
+ format!("looking up lifetime defaults for a region")
+ }
+}
+
+// Human-readable descriptions for each query, surfaced in cycle-error
+// notes via `Query::describe`. `_tcx` and the key are ignored because
+// the description is identical for every key of the query.
+impl<'tcx> QueryDescription for queries::dep_kind<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("fetching what a dependency looks like")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::crate_name<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("fetching what a crate is named")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::get_lang_items<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("calculating the lang items map")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::defined_lang_items<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("calculating the lang items defined in a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::missing_lang_items<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("calculating the missing lang items in a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::visible_parent_map<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("calculating the visible parent map")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::missing_extern_crate_item<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("seeing if we're missing an `extern crate` item for this crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::used_crate_source<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking at the source for a crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::postorder_cnums<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("generating a postorder list of CrateNums")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::maybe_unused_extern_crates<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up all possibly unused extern crates")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::stability_index<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("calculating the stability index for the local crate")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::all_crate_nums<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("fetching all foreign CrateNum instances")
+ }
+}
+
+// The remaining descriptions simply echo the query name; they have not
+// yet been given a prose description.
+impl<'tcx> QueryDescription for queries::exported_symbols<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("exported_symbols")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::collect_and_partition_translation_items<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("collect_and_partition_translation_items")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::codegen_unit<'tcx> {
+ fn describe(_tcx: TyCtxt, _: InternedString) -> String {
+ format!("codegen_unit")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::compile_codegen_unit<'tcx> {
+ fn describe(_tcx: TyCtxt, _: InternedString) -> String {
+ format!("compile_codegen_unit")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::output_filenames<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("output_filenames")
+ }
+}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Defines the set of legal keys that can be used in queries.
+
+use hir::def_id::{CrateNum, DefId, LOCAL_CRATE, DefIndex};
+use mir::transform::{MirSuite, MirPassIndex};
+use ty::{self, Ty, TyCtxt};
+use ty::subst::Substs;
+use ty::fast_reject::SimplifiedType;
+
+use std::fmt::Debug;
+use std::hash::Hash;
+use syntax_pos::{Span, DUMMY_SP};
+use syntax_pos::symbol::InternedString;
+
+/// The `Key` trait controls what types can legally be used as the key
+/// for a query.
+pub trait Key: Clone + Hash + Eq + Debug {
+ /// Given an instance of this key, what crate is it referring to?
+ /// This is used to find the provider.
+ fn map_crate(&self) -> CrateNum;
+
+ /// In the event that a cycle occurs, if no explicit span has been
+ /// given for a query with key `self`, what span should we use?
+ fn default_span(&self, tcx: TyCtxt) -> Span;
+}
+
+// Instance keys route to the local crate's provider; their default span
+// is the span of the def-id they wrap.
+impl<'tcx> Key for ty::InstanceDef<'tcx> {
+ fn map_crate(&self) -> CrateNum {
+ LOCAL_CRATE
+ }
+
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ tcx.def_span(self.def_id())
+ }
+}
+
+impl<'tcx> Key for ty::Instance<'tcx> {
+ fn map_crate(&self) -> CrateNum {
+ LOCAL_CRATE
+ }
+
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ tcx.def_span(self.def_id())
+ }
+}
+
+// A `CrateNum` key names the crate it maps to directly.
+impl Key for CrateNum {
+ fn map_crate(&self) -> CrateNum {
+ *self
+ }
+ fn default_span(&self, _: TyCtxt) -> Span {
+ DUMMY_SP
+ }
+}
+
+// A bare `DefIndex` carries no crate, so it is interpreted relative to
+// the local crate; it also carries no usable span.
+impl Key for DefIndex {
+ fn map_crate(&self) -> CrateNum {
+ LOCAL_CRATE
+ }
+ fn default_span(&self, _tcx: TyCtxt) -> Span {
+ DUMMY_SP
+ }
+}
+
+impl Key for DefId {
+ fn map_crate(&self) -> CrateNum {
+ self.krate
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ tcx.def_span(*self)
+ }
+}
+
+// Tuple keys: by convention the crate comes from the first component
+// and the span from whichever component carries the most useful
+// location information.
+impl Key for (DefId, DefId) {
+ fn map_crate(&self) -> CrateNum {
+ self.0.krate
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.1.default_span(tcx)
+ }
+}
+
+impl Key for (CrateNum, DefId) {
+ fn map_crate(&self) -> CrateNum {
+ self.0
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.1.default_span(tcx)
+ }
+}
+
+impl Key for (DefId, SimplifiedType) {
+ fn map_crate(&self) -> CrateNum {
+ self.0.krate
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.0.default_span(tcx)
+ }
+}
+
+impl<'tcx> Key for (DefId, &'tcx Substs<'tcx>) {
+ fn map_crate(&self) -> CrateNum {
+ self.0.krate
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.0.default_span(tcx)
+ }
+}
+
+// MIR-pass keys delegate entirely to their embedded `DefId`.
+impl Key for (MirSuite, DefId) {
+ fn map_crate(&self) -> CrateNum {
+ self.1.map_crate()
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.1.default_span(tcx)
+ }
+}
+
+impl Key for (MirSuite, MirPassIndex, DefId) {
+ fn map_crate(&self) -> CrateNum {
+ self.2.map_crate()
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.2.default_span(tcx)
+ }
+}
+
+// A type has no inherent crate or span of its own.
+impl<'tcx> Key for Ty<'tcx> {
+ fn map_crate(&self) -> CrateNum {
+ LOCAL_CRATE
+ }
+ fn default_span(&self, _: TyCtxt) -> Span {
+ DUMMY_SP
+ }
+}
+
+// A parameter environment wrapper delegates to the wrapped value.
+impl<'tcx, T: Key> Key for ty::ParamEnvAnd<'tcx, T> {
+ fn map_crate(&self) -> CrateNum {
+ self.value.map_crate()
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.value.default_span(tcx)
+ }
+}
+
+// Used for codegen-unit names (see `codegen_unit`/`compile_codegen_unit`).
+impl Key for InternedString {
+ fn map_crate(&self) -> CrateNum {
+ LOCAL_CRATE
+ }
+ fn default_span(&self, _tcx: TyCtxt) -> Span {
+ DUMMY_SP
+ }
+}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use dep_graph::{DepConstructor, DepNode};
+use errors::DiagnosticBuilder;
+use hir::def_id::{CrateNum, DefId, DefIndex};
+use hir::def::{Def, Export};
+use hir::{self, TraitCandidate, ItemLocalId};
+use hir::svh::Svh;
+use lint;
+use middle::const_val;
+use middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary,
+ ExternBodyNestedBodies};
+use middle::cstore::{NativeLibraryKind, DepKind, CrateSource, ExternConstBody};
+use middle::privacy::AccessLevels;
+use middle::reachable::ReachableSet;
+use middle::region;
+use middle::resolve_lifetime::{Region, ObjectLifetimeDefault};
+use middle::stability::{self, DeprecationEntry};
+use middle::lang_items::{LanguageItems, LangItem};
+use middle::exported_symbols::SymbolExportLevel;
+use middle::trans::{CodegenUnit, Stats};
+use mir;
+use session::CompileResult;
+use session::config::OutputFilenames;
+use traits::specialization_graph;
+use ty::{self, CrateInherentImpls, Ty, TyCtxt};
+use ty::layout::{Layout, LayoutError};
+use ty::steal::Steal;
+use ty::subst::Substs;
+use util::nodemap::{DefIdSet, DefIdMap};
+use util::common::{profq_msg, ProfileQueriesMsg};
+
+use rustc_data_structures::indexed_set::IdxSetBuf;
+use rustc_back::PanicStrategy;
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use std::cell::{RefCell, Cell};
+
+use std::ops::Deref;
+use std::rc::Rc;
+use std::sync::Arc;
+use syntax_pos::{Span, DUMMY_SP};
+use syntax_pos::symbol::InternedString;
+use syntax::attr;
+use syntax::ast;
+use syntax::symbol::Symbol;
+
+#[macro_use]
+mod plumbing;
+use self::plumbing::*;
+
+mod keys;
+pub use self::keys::Key;
+
+mod values;
+use self::values::Value;
+
+mod config;
+pub use self::config::QueryConfig;
+use self::config::QueryDescription;
+
+// Each of these maps also corresponds to a method on a
+// `Provider` trait for requesting a value of that type,
+// and a method on `Maps` itself for doing that in a
+// way that memoizes and does dep-graph tracking,
+// wrapping around the actual chain of providers that
+// the driver creates (using several `rustc_*` crates).
+//
+// Entry syntax: `[modifiers] fn name: DepNodeVariantOrShim(KeyType) -> ValueType`.
+// When the middle identifier is lower-case it names one of the shim
+// functions below that builds the `DepConstructor` for the query.
+define_maps! { <'tcx>
+ /// Records the type of every item.
+ [] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>,
+
+ /// Maps from the def-id of an item (trait/struct/enum/fn) to its
+ /// associated generics and predicates.
+ [] fn generics_of: GenericsOfItem(DefId) -> &'tcx ty::Generics,
+ [] fn predicates_of: PredicatesOfItem(DefId) -> ty::GenericPredicates<'tcx>,
+
+ /// Maps from the def-id of a trait to the list of
+ /// super-predicates. This is a subset of the full list of
+ /// predicates. We store these in a separate map because we must
+ /// evaluate them even during type conversion, often before the
+ /// full predicates are available (note that supertraits have
+ /// additional acyclicity requirements).
+ [] fn super_predicates_of: SuperPredicatesOfItem(DefId) -> ty::GenericPredicates<'tcx>,
+
+ /// To avoid cycles within the predicates of a single item we compute
+ /// per-type-parameter predicates for resolving `T::AssocTy`.
+ [] fn type_param_predicates: type_param_predicates((DefId, DefId))
+ -> ty::GenericPredicates<'tcx>,
+
+ [] fn trait_def: TraitDefOfItem(DefId) -> &'tcx ty::TraitDef,
+ [] fn adt_def: AdtDefOfItem(DefId) -> &'tcx ty::AdtDef,
+ [] fn adt_destructor: AdtDestructor(DefId) -> Option<ty::Destructor>,
+ [] fn adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>],
+ [] fn adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>,
+
+ /// True if this is a const fn
+ [] fn is_const_fn: IsConstFn(DefId) -> bool,
+
+ /// True if this is a foreign item (i.e., linked via `extern { ... }`).
+ [] fn is_foreign_item: IsForeignItem(DefId) -> bool,
+
+ /// True if this is a default impl (aka impl Foo for ..)
+ [] fn is_default_impl: IsDefaultImpl(DefId) -> bool,
+
+ /// Get a map with the variance of every item; use `item_variance`
+ /// instead.
+ [] fn crate_variances: crate_variances(CrateNum) -> Rc<ty::CrateVariancesMap>,
+
+ /// Maps from def-id of a type or region parameter to its
+ /// (inferred) variance.
+ [] fn variances_of: ItemVariances(DefId) -> Rc<Vec<ty::Variance>>,
+
+ /// Maps from an impl/trait def-id to a list of the def-ids of its items
+ [] fn associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc<Vec<DefId>>,
+
+ /// Maps from a trait item to the trait item "descriptor"
+ [] fn associated_item: AssociatedItems(DefId) -> ty::AssociatedItem,
+
+ [] fn impl_trait_ref: ImplTraitRef(DefId) -> Option<ty::TraitRef<'tcx>>,
+ [] fn impl_polarity: ImplPolarity(DefId) -> hir::ImplPolarity,
+
+ /// Maps a DefId of a type to a list of its inherent impls.
+ /// Contains implementations of methods that are inherent to a type.
+ /// Methods in these implementations don't need to be exported.
+ [] fn inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
+
+ /// Set of all the def-ids in this crate that have MIR associated with
+ /// them. This includes all the body owners, but also things like struct
+ /// constructors.
+ [] fn mir_keys: mir_keys(CrateNum) -> Rc<DefIdSet>,
+
+ /// Maps DefId's that have an associated Mir to the result
+ /// of the MIR qualify_consts pass. The actual meaning of
+ /// the value isn't known except to the pass itself.
+ [] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Rc<IdxSetBuf<mir::Local>>),
+
+ /// Fetch the MIR for a given def-id up till the point where it is
+ /// ready for const evaluation.
+ ///
+ /// See the README for the `mir` module for details.
+ [] fn mir_const: MirConst(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
+
+ [] fn mir_validated: MirValidated(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
+
+ /// MIR after our optimization passes have run. This is MIR that is ready
+ /// for trans. This is also the only query that can fetch non-local MIR, at present.
+ [] fn optimized_mir: MirOptimized(DefId) -> &'tcx mir::Mir<'tcx>,
+
+ /// Type of each closure. The def ID is the ID of the
+ /// expression defining the closure.
+ [] fn closure_kind: ClosureKind(DefId) -> ty::ClosureKind,
+
+ /// The signature of functions and closures.
+ [] fn fn_sig: FnSignature(DefId) -> ty::PolyFnSig<'tcx>,
+
+ /// Records the signature of each generator. The def ID is the ID of the
+ /// expression defining the closure.
+ [] fn generator_sig: GenSignature(DefId) -> Option<ty::PolyGenSig<'tcx>>,
+
+ /// Caches CoerceUnsized kinds for impls on custom types.
+ [] fn coerce_unsized_info: CoerceUnsizedInfo(DefId)
+ -> ty::adjustment::CoerceUnsizedInfo,
+
+ [] fn typeck_item_bodies: typeck_item_bodies_dep_node(CrateNum) -> CompileResult,
+
+ [] fn typeck_tables_of: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
+
+ [] fn has_typeck_tables: HasTypeckTables(DefId) -> bool,
+
+ [] fn coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
+
+ [] fn borrowck: BorrowCheck(DefId) -> (),
+ // FIXME: shouldn't this return a `Result<(), BorrowckErrors>` instead?
+ [] fn mir_borrowck: MirBorrowCheck(DefId) -> (),
+
+ /// Gets a complete map from all types to their inherent impls.
+ /// Not meant to be used directly outside of coherence.
+ /// (Defined only for LOCAL_CRATE)
+ [] fn crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
+
+ /// Checks all types in the krate for overlap in their inherent impls. Reports errors.
+ /// Not meant to be used directly outside of coherence.
+ /// (Defined only for LOCAL_CRATE)
+ [] fn crate_inherent_impls_overlap_check: inherent_impls_overlap_check_dep_node(CrateNum) -> (),
+
+ /// Results of evaluating const items or constants embedded in
+ /// other items (such as enum variant explicit discriminants).
+ [] fn const_eval: const_eval_dep_node(ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>)
+ -> const_val::EvalResult<'tcx>,
+
+ /// Performs the privacy check and computes "access levels".
+ [] fn privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
+
+ [] fn reachable_set: reachability_dep_node(CrateNum) -> ReachableSet,
+
+ /// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body;
+ /// in the case of closures, this will be redirected to the enclosing function.
+ [] fn region_scope_tree: RegionScopeTree(DefId) -> Rc<region::ScopeTree>,
+
+ [] fn mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx mir::Mir<'tcx>,
+
+ [] fn def_symbol_name: SymbolName(DefId) -> ty::SymbolName,
+ [] fn symbol_name: symbol_name_dep_node(ty::Instance<'tcx>) -> ty::SymbolName,
+
+ [] fn describe_def: DescribeDef(DefId) -> Option<Def>,
+ [] fn def_span: DefSpan(DefId) -> Span,
+ [] fn lookup_stability: LookupStability(DefId) -> Option<&'tcx attr::Stability>,
+ [] fn lookup_deprecation_entry: LookupDeprecationEntry(DefId) -> Option<DeprecationEntry>,
+ [] fn item_attrs: ItemAttrs(DefId) -> Rc<[ast::Attribute]>,
+ [] fn fn_arg_names: FnArgNames(DefId) -> Vec<ast::Name>,
+ [] fn impl_parent: ImplParent(DefId) -> Option<DefId>,
+ [] fn trait_of_item: TraitOfItem(DefId) -> Option<DefId>,
+ [] fn is_exported_symbol: IsExportedSymbol(DefId) -> bool,
+ [] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> ExternBodyNestedBodies,
+ [] fn const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
+ [] fn is_mir_available: IsMirAvailable(DefId) -> bool,
+
+ [] fn trait_impls_of: TraitImpls(DefId) -> Rc<ty::trait_def::TraitImpls>,
+ [] fn specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
+ [] fn is_object_safe: ObjectSafety(DefId) -> bool,
+
+ // Get the ParameterEnvironment for a given item; this environment
+ // will be in "user-facing" mode, meaning that it is suitable for
+ // type-checking etc, and it does not normalize specializable
+ // associated types. This is almost always what you want,
+ // unless you are doing MIR optimizations, in which case you
+ // might want to use `reveal_all()` method to change modes.
+ [] fn param_env: ParamEnv(DefId) -> ty::ParamEnv<'tcx>,
+
+ // Trait selection queries. These are best used by invoking `ty.moves_by_default()`,
+ // `ty.is_copy()`, etc, since that will prune the environment where possible.
+ [] fn is_copy_raw: is_copy_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
+ [] fn is_sized_raw: is_sized_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
+ [] fn is_freeze_raw: is_freeze_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
+ [] fn needs_drop_raw: needs_drop_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
+ [] fn layout_raw: layout_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
+ -> Result<&'tcx Layout, LayoutError<'tcx>>,
+
+ [] fn dylib_dependency_formats: DylibDepFormats(CrateNum)
+ -> Rc<Vec<(CrateNum, LinkagePreference)>>,
+
+ // Per-crate properties read from crate metadata.
+ [] fn is_panic_runtime: IsPanicRuntime(CrateNum) -> bool,
+ [] fn is_compiler_builtins: IsCompilerBuiltins(CrateNum) -> bool,
+ [] fn has_global_allocator: HasGlobalAllocator(CrateNum) -> bool,
+ [] fn is_sanitizer_runtime: IsSanitizerRuntime(CrateNum) -> bool,
+ [] fn is_profiler_runtime: IsProfilerRuntime(CrateNum) -> bool,
+ [] fn panic_strategy: GetPanicStrategy(CrateNum) -> PanicStrategy,
+ [] fn is_no_builtins: IsNoBuiltins(CrateNum) -> bool,
+
+ [] fn extern_crate: ExternCrate(DefId) -> Rc<Option<ExternCrate>>,
+
+ [] fn specializes: specializes_node((DefId, DefId)) -> bool,
+ [] fn in_scope_traits_map: InScopeTraits(DefIndex)
+ -> Option<Rc<FxHashMap<ItemLocalId, Rc<Vec<TraitCandidate>>>>>,
+ [] fn module_exports: ModuleExports(DefId) -> Option<Rc<Vec<Export>>>,
+ [] fn lint_levels: lint_levels_node(CrateNum) -> Rc<lint::LintLevelMap>,
+
+ [] fn impl_defaultness: ImplDefaultness(DefId) -> hir::Defaultness,
+ [] fn exported_symbol_ids: ExportedSymbolIds(CrateNum) -> Rc<DefIdSet>,
+ [] fn native_libraries: NativeLibraries(CrateNum) -> Rc<Vec<NativeLibrary>>,
+ [] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option<DefId>,
+ [] fn derive_registrar_fn: DeriveRegistrarFn(CrateNum) -> Option<DefId>,
+ [] fn crate_disambiguator: CrateDisambiguator(CrateNum) -> Symbol,
+ [] fn crate_hash: CrateHash(CrateNum) -> Svh,
+ [] fn original_crate_name: OriginalCrateName(CrateNum) -> Symbol,
+
+ [] fn implementations_of_trait: implementations_of_trait_node((CrateNum, DefId))
+ -> Rc<Vec<DefId>>,
+ [] fn all_trait_implementations: AllTraitImplementations(CrateNum)
+ -> Rc<Vec<DefId>>,
+
+ [] fn is_dllimport_foreign_item: IsDllimportForeignItem(DefId) -> bool,
+ [] fn is_statically_included_foreign_item: IsStaticallyIncludedForeignItem(DefId) -> bool,
+ [] fn native_library_kind: NativeLibraryKind(DefId)
+ -> Option<NativeLibraryKind>,
+ [] fn link_args: link_args_node(CrateNum) -> Rc<Vec<String>>,
+
+ // Lifetime-resolution maps, keyed by the owner's `DefIndex`.
+ [] fn named_region_map: NamedRegion(DefIndex) ->
+ Option<Rc<FxHashMap<ItemLocalId, Region>>>,
+ [] fn is_late_bound_map: IsLateBound(DefIndex) ->
+ Option<Rc<FxHashSet<ItemLocalId>>>,
+ [] fn object_lifetime_defaults_map: ObjectLifetimeDefaults(DefIndex)
+ -> Option<Rc<FxHashMap<ItemLocalId, Rc<Vec<ObjectLifetimeDefault>>>>>,
+
+ [] fn visibility: Visibility(DefId) -> ty::Visibility,
+ [] fn dep_kind: DepKind(CrateNum) -> DepKind,
+ [] fn crate_name: CrateName(CrateNum) -> Symbol,
+ [] fn item_children: ItemChildren(DefId) -> Rc<Vec<Export>>,
+ [] fn extern_mod_stmt_cnum: ExternModStmtCnum(DefId) -> Option<CrateNum>,
+
+ [] fn get_lang_items: get_lang_items_node(CrateNum) -> Rc<LanguageItems>,
+ [] fn defined_lang_items: DefinedLangItems(CrateNum) -> Rc<Vec<(DefId, usize)>>,
+ [] fn missing_lang_items: MissingLangItems(CrateNum) -> Rc<Vec<LangItem>>,
+ [] fn extern_const_body: ExternConstBody(DefId) -> ExternConstBody<'tcx>,
+ [] fn visible_parent_map: visible_parent_map_node(CrateNum)
+ -> Rc<DefIdMap<DefId>>,
+ [] fn missing_extern_crate_item: MissingExternCrateItem(CrateNum) -> bool,
+ [] fn used_crate_source: UsedCrateSource(CrateNum) -> Rc<CrateSource>,
+ [] fn postorder_cnums: postorder_cnums_node(CrateNum) -> Rc<Vec<CrateNum>>,
+
+ [] fn freevars: Freevars(DefId) -> Option<Rc<Vec<hir::Freevar>>>,
+ [] fn maybe_unused_trait_import: MaybeUnusedTraitImport(DefId) -> bool,
+ [] fn maybe_unused_extern_crates: maybe_unused_extern_crates_node(CrateNum)
+ -> Rc<Vec<(DefId, Span)>>,
+
+ [] fn stability_index: stability_index_node(CrateNum) -> Rc<stability::Index<'tcx>>,
+ [] fn all_crate_nums: all_crate_nums_node(CrateNum) -> Rc<Vec<CrateNum>>,
+
+ /// Symbol export list for a crate: (symbol name, def-id if known, export level).
+ [] fn exported_symbols: ExportedSymbols(CrateNum)
+ -> Arc<Vec<(String, Option<DefId>, SymbolExportLevel)>>,
+ [] fn collect_and_partition_translation_items:
+ collect_and_partition_translation_items_node(CrateNum)
+ -> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>),
+ [] fn export_name: ExportName(DefId) -> Option<Symbol>,
+ [] fn contains_extern_indicator: ContainsExternIndicator(DefId) -> bool,
+ [] fn is_translated_function: IsTranslatedFunction(DefId) -> bool,
+ [] fn codegen_unit: CodegenUnit(InternedString) -> Arc<CodegenUnit<'tcx>>,
+ [] fn compile_codegen_unit: CompileCodegenUnit(InternedString) -> Stats,
+ [] fn output_filenames: output_filenames_node(CrateNum)
+ -> Arc<OutputFilenames>,
+}
+
+//////////////////////////////////////////////////////////////////////
+// These functions are little shims used to find the dep-node for a
+// given query when there is not a *direct* mapping:
+//
+// Each returns the `DepConstructor` that the generated `to_dep_node`
+// (see the `define_maps!` plumbing) turns into a `DepNode`. Shims that
+// ignore their argument map every key of the query to the same
+// crate-wide dep-node.
+
+fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> {
+ DepConstructor::TypeParamPredicates {
+ item_id,
+ param_id
+ }
+}
+
+fn coherent_trait_dep_node<'tcx>((_, def_id): (CrateNum, DefId)) -> DepConstructor<'tcx> {
+ DepConstructor::CoherenceCheckTrait(def_id)
+}
+
+fn crate_inherent_impls_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::Coherence
+}
+
+fn inherent_impls_overlap_check_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::CoherenceInherentImplOverlapCheck
+}
+
+fn reachability_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::Reachability
+}
+
+fn mir_shim_dep_node<'tcx>(instance_def: ty::InstanceDef<'tcx>) -> DepConstructor<'tcx> {
+ DepConstructor::MirShim {
+ instance_def
+ }
+}
+
+fn symbol_name_dep_node<'tcx>(instance: ty::Instance<'tcx>) -> DepConstructor<'tcx> {
+ DepConstructor::InstanceSymbolName { instance }
+}
+
+fn typeck_item_bodies_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::TypeckBodiesKrate
+}
+
+fn const_eval_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>)
+ -> DepConstructor<'tcx> {
+ DepConstructor::ConstEval
+}
+
+fn mir_keys<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::MirKeys
+}
+
+fn crate_variances<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::CrateVariances
+}
+
+fn is_copy_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
+ DepConstructor::IsCopy
+}
+
+fn is_sized_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
+ DepConstructor::IsSized
+}
+
+fn is_freeze_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
+ DepConstructor::IsFreeze
+}
+
+fn needs_drop_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
+ DepConstructor::NeedsDrop
+}
+
+fn layout_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'tcx> {
+ DepConstructor::Layout
+}
+
+fn lint_levels_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::LintLevels
+}
+
+fn specializes_node<'tcx>((a, b): (DefId, DefId)) -> DepConstructor<'tcx> {
+ DepConstructor::Specializes { impl1: a, impl2: b }
+}
+
+fn implementations_of_trait_node<'tcx>((krate, trait_id): (CrateNum, DefId))
+ -> DepConstructor<'tcx>
+{
+ DepConstructor::ImplementationsOfTrait { krate, trait_id }
+}
+
+fn link_args_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::LinkArgs
+}
+
+fn get_lang_items_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::GetLangItems
+}
+
+fn visible_parent_map_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::VisibleParentMap
+}
+
+fn postorder_cnums_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::PostorderCnums
+}
+
+fn maybe_unused_extern_crates_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::MaybeUnusedExternCrates
+}
+
+fn stability_index_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::StabilityIndex
+}
+
+fn all_crate_nums_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::AllCrateNums
+}
+
+fn collect_and_partition_translation_items_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::CollectAndPartitionTranslationItems
+}
+
+fn output_filenames_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::OutputFilenames
+}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! The implementation of the query system itself. Defines the macros
+//! that generate the actual methods on tcx which find and execute the
+//! provider, manage the caches, and so forth.
+
+use dep_graph::{DepNodeIndex};
+use errors::{Diagnostic, DiagnosticBuilder};
+use ty::{TyCtxt};
+use ty::maps::Query; // NB: actually generated by the macros in this file
+use ty::maps::config::QueryDescription;
+use ty::item_path;
+
+use rustc_data_structures::fx::{FxHashMap};
+use std::cell::{RefMut, Cell};
+use std::marker::PhantomData;
+use std::mem;
+use syntax_pos::Span;
+
+/// The memoization cache for a single query `D`: maps each key to the
+/// cached value plus its dep-graph bookkeeping.
+pub(super) struct QueryMap<D: QueryDescription> {
+ phantom: PhantomData<D>,
+ pub(super) map: FxHashMap<D::Key, QueryValue<D::Value>>,
+}
+
+/// A cached query result.
+pub(super) struct QueryValue<T> {
+ // The computed value itself.
+ pub(super) value: T,
+ // Dep-graph node recorded for the computation; re-read on cache hits.
+ pub(super) index: DepNodeIndex,
+ // Diagnostics captured while computing the value, if any (boxed to
+ // keep the common empty case small).
+ pub(super) diagnostics: Option<Box<QueryDiagnostics>>,
+}
+
+/// Diagnostics emitted during a query's computation, replayed on later
+/// cache hits (see `try_get_with` in `define_maps!`).
+pub(super) struct QueryDiagnostics {
+ pub(super) diagnostics: Vec<Diagnostic>,
+ // Set once the stored diagnostics have been emitted, so a cache hit
+ // replays them at most once.
+ pub(super) emitted_diagnostics: Cell<bool>,
+}
+
+impl<M: QueryDescription> QueryMap<M> {
+ /// Creates an empty cache.
+ pub(super) fn new() -> QueryMap<M> {
+ QueryMap {
+ phantom: PhantomData,
+ map: FxHashMap(),
+ }
+ }
+}
+
+/// A cyclic query dependency: `span` is where the cycle was re-entered,
+/// `cycle` borrows the tail of the in-flight query stack that forms the
+/// cycle (still holding the `RefCell` borrow — see `report_cycle`).
+pub(super) struct CycleError<'a, 'tcx: 'a> {
+ span: Span,
+ cycle: RefMut<'a, [(Span, Query<'tcx>)]>,
+}
+
+impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+ /// Builds the E0391 "cyclic reference" diagnostic from a `CycleError`,
+ /// walking the cycle and describing each query in it.
+ pub(super) fn report_cycle(self, CycleError { span, cycle }: CycleError)
+ -> DiagnosticBuilder<'a>
+ {
+ // Subtle: release the refcell lock before invoking `describe()`
+ // below by dropping `cycle`.
+ let stack = cycle.to_vec();
+ mem::drop(cycle);
+
+ assert!(!stack.is_empty());
+
+ // Disable naming impls with types in this path, since that
+ // sometimes cycles itself, leading to extra cycle errors.
+ // (And cycle errors around impls tend to occur during the
+ // collect/coherence phases anyhow.)
+ item_path::with_forced_impl_filename_line(|| {
+ let mut err =
+ struct_span_err!(self.sess, span, E0391,
+ "unsupported cyclic reference between types/traits detected");
+ err.span_label(span, "cyclic reference");
+
+ err.span_note(stack[0].0, &format!("the cycle begins when {}...",
+ stack[0].1.describe(self)));
+
+ for &(span, ref query) in &stack[1..] {
+ err.span_note(span, &format!("...which then requires {}...",
+ query.describe(self)));
+ }
+
+ err.note(&format!("...which then again requires {}, completing the cycle.",
+ stack[0].1.describe(self)));
+
+ return err
+ })
+ }
+
+ /// Runs `compute` with `(span, query)` pushed on the query stack,
+ /// returning a `CycleError` (borrowing the offending stack tail)
+ /// if `query` is already in flight.
+ pub(super) fn cycle_check<F, R>(self, span: Span, query: Query<'gcx>, compute: F)
+ -> Result<R, CycleError<'a, 'gcx>>
+ where F: FnOnce() -> R
+ {
+ {
+ let mut stack = self.maps.query_stack.borrow_mut();
+ if let Some((i, _)) = stack.iter().enumerate().rev()
+ .find(|&(_, &(_, ref q))| *q == query) {
+ return Err(CycleError {
+ span,
+ cycle: RefMut::map(stack, |stack| &mut stack[i..])
+ });
+ }
+ stack.push((span, query));
+ }
+
+ let result = compute();
+
+ self.maps.query_stack.borrow_mut().pop();
+
+ Ok(result)
+ }
+}
+
+// If enabled, send a message to the profile-queries thread
+macro_rules! profq_msg {
+ ($tcx:expr, $msg:expr) => {
+ if cfg!(debug_assertions) {
+ if $tcx.sess.profile_queries() {
+ profq_msg($msg)
+ }
+ }
+ }
+}
+
+// If enabled, format a key using its debug string, which can be
+// expensive to compute (in terms of time).
+macro_rules! profq_key {
+ ($tcx:expr, $key:expr) => {
+ if cfg!(debug_assertions) {
+ if $tcx.sess.profile_queries_and_keys() {
+ Some(format!("{:?}", $key))
+ } else { None }
+ } else { None }
+ }
+}
+
+// For each query listed in `maps/mod.rs`, `define_maps!` generates:
+// - the `Maps` struct (via `define_map_struct!`) with one `QueryMap`
+//   cache per query plus the shared query stack,
+// - the `Query` / `QueryMsg` enums with one variant per query,
+// - a `queries::$name` marker type carrying the caching, dep-graph and
+//   cycle-detection logic (`try_get_with`, `try_get`, `force`),
+// - `TyCtxt::$name` / `TyCtxtAt::$name` convenience accessors, and
+// - the `Providers` struct (via `define_provider_struct!`).
+macro_rules! define_maps {
+ (<$tcx:tt>
+ $($(#[$attr:meta])*
+ [$($modifiers:tt)*] fn $name:ident: $node:ident($K:ty) -> $V:ty,)*) => {
+ define_map_struct! {
+ tcx: $tcx,
+ input: ($(([$($modifiers)*] [$($attr)*] [$name]))*)
+ }
+
+ impl<$tcx> Maps<$tcx> {
+ pub fn new(providers: IndexVec<CrateNum, Providers<$tcx>>)
+ -> Self {
+ Maps {
+ providers,
+ query_stack: RefCell::new(vec![]),
+ $($name: RefCell::new(QueryMap::new())),*
+ }
+ }
+ }
+
+ #[allow(bad_style)]
+ #[derive(Copy, Clone, Debug, PartialEq, Eq)]
+ pub enum Query<$tcx> {
+ $($(#[$attr])* $name($K)),*
+ }
+
+ // Message form used by the profiling macros; carries the debug
+ // string of the key (if key profiling is enabled).
+ #[allow(bad_style)]
+ #[derive(Clone, Debug, PartialEq, Eq)]
+ pub enum QueryMsg {
+ $($name(Option<String>)),*
+ }
+
+ impl<$tcx> Query<$tcx> {
+ // Delegates to the per-query `QueryDescription::describe`,
+ // appending the query name in verbose mode.
+ pub fn describe(&self, tcx: TyCtxt) -> String {
+ let (r, name) = match *self {
+ $(Query::$name(key) => {
+ (queries::$name::describe(tcx, key), stringify!($name))
+ })*
+ };
+ if tcx.sess.verbose() {
+ format!("{} [{}]", r, name)
+ } else {
+ r
+ }
+ }
+ }
+
+ // One uninhabited marker type per query; the query logic hangs
+ // off inherent impls of these types.
+ pub mod queries {
+ use std::marker::PhantomData;
+
+ $(#[allow(bad_style)]
+ pub struct $name<$tcx> {
+ data: PhantomData<&$tcx ()>
+ })*
+ }
+
+ $(impl<$tcx> QueryConfig for queries::$name<$tcx> {
+ type Key = $K;
+ type Value = $V;
+ }
+
+ impl<'a, $tcx, 'lcx> queries::$name<$tcx> {
+ #[allow(unused)]
+ fn to_dep_node(tcx: TyCtxt<'a, $tcx, 'lcx>, key: &$K) -> DepNode {
+ use dep_graph::DepConstructor::*;
+
+ DepNode::new(tcx, $node(*key))
+ }
+
+ // The heart of the query system: check the cache, otherwise
+ // run the provider under cycle detection and dep-graph
+ // tracking, cache the result, and pass it to `f`.
+ fn try_get_with<F, R>(tcx: TyCtxt<'a, $tcx, 'lcx>,
+ mut span: Span,
+ key: $K,
+ f: F)
+ -> Result<R, CycleError<'a, $tcx>>
+ where F: FnOnce(&$V) -> R
+ {
+ debug!("ty::queries::{}::try_get_with(key={:?}, span={:?})",
+ stringify!($name),
+ key,
+ span);
+
+ profq_msg!(tcx,
+ ProfileQueriesMsg::QueryBegin(
+ span.clone(),
+ QueryMsg::$name(profq_key!(tcx, key))
+ )
+ );
+
+ if let Some(value) = tcx.maps.$name.borrow().map.get(&key) {
+ // Cache hit: replay any stored diagnostics (once), then
+ // record the dep-graph read and return the cached value.
+ if let Some(ref d) = value.diagnostics {
+ if !d.emitted_diagnostics.get() {
+ d.emitted_diagnostics.set(true);
+ let handle = tcx.sess.diagnostic();
+ for diagnostic in d.diagnostics.iter() {
+ DiagnosticBuilder::new_diagnostic(handle, diagnostic.clone())
+ .emit();
+ }
+ }
+ }
+ profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
+ tcx.dep_graph.read_index(value.index);
+ return Ok(f(&value.value));
+ }
+ // else, we are going to run the provider:
+ profq_msg!(tcx, ProfileQueriesMsg::ProviderBegin);
+
+ // FIXME(eddyb) Get more valid Span's on queries.
+ // def_span guard is necessary to prevent a recursive loop,
+ // default_span calls def_span query internally.
+ if span == DUMMY_SP && stringify!($name) != "def_span" {
+ span = key.default_span(tcx)
+ }
+
+ let dep_node = Self::to_dep_node(tcx, &key);
+ let res = tcx.cycle_check(span, Query::$name(key), || {
+ tcx.sess.diagnostic().track_diagnostics(|| {
+ if dep_node.kind.is_anon() {
+ tcx.dep_graph.with_anon_task(dep_node.kind, || {
+ let provider = tcx.maps.providers[key.map_crate()].$name;
+ provider(tcx.global_tcx(), key)
+ })
+ } else {
+ fn run_provider<'a, 'tcx, 'lcx>(tcx: TyCtxt<'a, 'tcx, 'lcx>,
+ key: $K)
+ -> $V {
+ let provider = tcx.maps.providers[key.map_crate()].$name;
+ provider(tcx.global_tcx(), key)
+ }
+
+ tcx.dep_graph.with_task(dep_node, tcx, key, run_provider)
+ }
+ })
+ })?;
+ profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd);
+ let ((result, dep_node_index), diagnostics) = res;
+
+ tcx.dep_graph.read_index(dep_node_index);
+
+ let value = QueryValue {
+ value: result,
+ index: dep_node_index,
+ diagnostics: if diagnostics.len() == 0 {
+ None
+ } else {
+ Some(Box::new(QueryDiagnostics {
+ diagnostics,
+ emitted_diagnostics: Cell::new(true),
+ }))
+ },
+ };
+
+ Ok(f(&tcx.maps
+ .$name
+ .borrow_mut()
+ .map
+ .entry(key)
+ .or_insert(value)
+ .value))
+ }
+
+ // Like `try_get_with`, but clones the value out and converts
+ // a cycle into a ready-to-emit diagnostic.
+ pub fn try_get(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K)
+ -> Result<$V, DiagnosticBuilder<'a>> {
+ match Self::try_get_with(tcx, span, key, Clone::clone) {
+ Ok(e) => Ok(e),
+ Err(e) => Err(tcx.report_cycle(e)),
+ }
+ }
+
+ // Force-evaluate the query for its side effects, discarding
+ // the value and emitting any cycle error immediately.
+ pub fn force(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K) {
+ // Ignore dependencies, since we are not reading the computed value
+ let _task = tcx.dep_graph.in_ignore();
+
+ match Self::try_get_with(tcx, span, key, |_| ()) {
+ Ok(()) => {}
+ Err(e) => tcx.report_cycle(e).emit(),
+ }
+ }
+ })*
+
+ #[derive(Copy, Clone)]
+ pub struct TyCtxtAt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+ pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ pub span: Span,
+ }
+
+ impl<'a, 'gcx, 'tcx> Deref for TyCtxtAt<'a, 'gcx, 'tcx> {
+ type Target = TyCtxt<'a, 'gcx, 'tcx>;
+ fn deref(&self) -> &Self::Target {
+ &self.tcx
+ }
+ }
+
+ impl<'a, $tcx, 'lcx> TyCtxt<'a, $tcx, 'lcx> {
+ /// Return a transparent wrapper for `TyCtxt` which uses
+ /// `span` as the location of queries performed through it.
+ pub fn at(self, span: Span) -> TyCtxtAt<'a, $tcx, 'lcx> {
+ TyCtxtAt {
+ tcx: self,
+ span
+ }
+ }
+
+ $($(#[$attr])*
+ pub fn $name(self, key: $K) -> $V {
+ self.at(DUMMY_SP).$name(key)
+ })*
+ }
+
+ impl<'a, $tcx, 'lcx> TyCtxtAt<'a, $tcx, 'lcx> {
+ // On a cycle, emit the error and substitute the query's
+ // `Value::from_cycle_error` sentinel so compilation can go on.
+ $($(#[$attr])*
+ pub fn $name(self, key: $K) -> $V {
+ queries::$name::try_get(self.tcx, self.span, key).unwrap_or_else(|mut e| {
+ e.emit();
+ Value::from_cycle_error(self.global_tcx())
+ })
+ })*
+ }
+
+ define_provider_struct! {
+ tcx: $tcx,
+ input: ($(([$($modifiers)*] [$name] [$K] [$V]))*),
+ output: ()
+ }
+
+ impl<$tcx> Copy for Providers<$tcx> {}
+ impl<$tcx> Clone for Providers<$tcx> {
+ fn clone(&self) -> Self { *self }
+ }
+ }
+}
+
+macro_rules! define_map_struct {
+ // Initial state
+ (tcx: $tcx:tt,
+ input: $input:tt) => {
+ define_map_struct! {
+ tcx: $tcx,
+ input: $input,
+ output: ()
+ }
+ };
+
+ // Final output
+ (tcx: $tcx:tt,
+ input: (),
+ output: ($($output:tt)*)) => {
+ pub struct Maps<$tcx> {
+ providers: IndexVec<CrateNum, Providers<$tcx>>,
+ query_stack: RefCell<Vec<(Span, Query<$tcx>)>>,
+ $($output)*
+ }
+ };
+
+ // Field recognized and ready to shift into the output
+ (tcx: $tcx:tt,
+ ready: ([$($pub:tt)*] [$($attr:tt)*] [$name:ident]),
+ input: $input:tt,
+ output: ($($output:tt)*)) => {
+ define_map_struct! {
+ tcx: $tcx,
+ input: $input,
+ output: ($($output)*
+ $(#[$attr])* $($pub)* $name: RefCell<QueryMap<queries::$name<$tcx>>>,)
+ }
+ };
+
+ // No modifiers left? This is a private item.
+ (tcx: $tcx:tt,
+ input: (([] $attrs:tt $name:tt) $($input:tt)*),
+ output: $output:tt) => {
+ define_map_struct! {
+ tcx: $tcx,
+ ready: ([] $attrs $name),
+ input: ($($input)*),
+ output: $output
+ }
+ };
+
+ // Skip other modifiers
+ (tcx: $tcx:tt,
+ input: (([$other_modifier:tt $($modifiers:tt)*] $($fields:tt)*) $($input:tt)*),
+ output: $output:tt) => {
+ define_map_struct! {
+ tcx: $tcx,
+ input: (([$($modifiers)*] $($fields)*) $($input)*),
+ output: $output
+ }
+ };
+}
+
+macro_rules! define_provider_struct {
+ // Initial state:
+ (tcx: $tcx:tt, input: $input:tt) => {
+ define_provider_struct! {
+ tcx: $tcx,
+ input: $input,
+ output: ()
+ }
+ };
+
+ // Final state:
+ (tcx: $tcx:tt,
+ input: (),
+ output: ($(([$name:ident] [$K:ty] [$R:ty]))*)) => {
+ pub struct Providers<$tcx> {
+ $(pub $name: for<'a> fn(TyCtxt<'a, $tcx, $tcx>, $K) -> $R,)*
+ }
+
+ impl<$tcx> Default for Providers<$tcx> {
+ fn default() -> Self {
+ $(fn $name<'a, $tcx>(_: TyCtxt<'a, $tcx, $tcx>, key: $K) -> $R {
+ bug!("tcx.maps.{}({:?}) unsupported by its crate",
+ stringify!($name), key);
+ })*
+ Providers { $($name),* }
+ }
+ }
+ };
+
+ // Something ready to shift:
+ (tcx: $tcx:tt,
+ ready: ($name:tt $K:tt $V:tt),
+ input: $input:tt,
+ output: ($($output:tt)*)) => {
+ define_provider_struct! {
+ tcx: $tcx,
+ input: $input,
+ output: ($($output)* ($name $K $V))
+ }
+ };
+
+ // Regular queries produce a `V` only.
+ (tcx: $tcx:tt,
+ input: (([] $name:tt $K:tt $V:tt) $($input:tt)*),
+ output: $output:tt) => {
+ define_provider_struct! {
+ tcx: $tcx,
+ ready: ($name $K $V),
+ input: ($($input)*),
+ output: $output
+ }
+ };
+
+ // Skip modifiers.
+ (tcx: $tcx:tt,
+ input: (([$other_modifier:tt $($modifiers:tt)*] $($fields:tt)*) $($input:tt)*),
+ output: $output:tt) => {
+ define_provider_struct! {
+ tcx: $tcx,
+ input: (([$($modifiers)*] $($fields)*) $($input)*),
+ output: $output
+ }
+ };
+}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use ty::{self, Ty, TyCtxt};
+
+use syntax::symbol::Symbol;
+
+pub(super) trait Value<'tcx>: Sized {
+ fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self;
+}
+
+impl<'tcx, T> Value<'tcx> for T {
+ default fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> T {
+ tcx.sess.abort_if_errors();
+ bug!("Value::from_cycle_error called without errors");
+ }
+}
+
+impl<'tcx, T: Default> Value<'tcx> for T {
+ default fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> T {
+ T::default()
+ }
+}
+
+impl<'tcx> Value<'tcx> for Ty<'tcx> {
+ fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
+ tcx.types.err
+ }
+}
+
+impl<'tcx> Value<'tcx> for ty::DtorckConstraint<'tcx> {
+ fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
+ Self::empty()
+ }
+}
+
+impl<'tcx> Value<'tcx> for ty::SymbolName {
+ fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
+ ty::SymbolName { name: Symbol::intern("<error>").as_str() }
+ }
+}
+
#[derive(Clone)]
pub struct CrateAnalysis {
pub access_levels: Rc<AccessLevels>,
- pub reachable: Rc<NodeSet>,
pub name: String,
pub glob_map: Option<hir::GlobMap>,
}
// check whether the type has various kinds of types in it without
// recursing over the type itself.
bitflags! {
- flags TypeFlags: u32 {
- const HAS_PARAMS = 1 << 0,
- const HAS_SELF = 1 << 1,
- const HAS_TY_INFER = 1 << 2,
- const HAS_RE_INFER = 1 << 3,
- const HAS_RE_SKOL = 1 << 4,
- const HAS_RE_EARLY_BOUND = 1 << 5,
- const HAS_FREE_REGIONS = 1 << 6,
- const HAS_TY_ERR = 1 << 7,
- const HAS_PROJECTION = 1 << 8,
+ pub struct TypeFlags: u32 {
+ const HAS_PARAMS = 1 << 0;
+ const HAS_SELF = 1 << 1;
+ const HAS_TY_INFER = 1 << 2;
+ const HAS_RE_INFER = 1 << 3;
+ const HAS_RE_SKOL = 1 << 4;
+ const HAS_RE_EARLY_BOUND = 1 << 5;
+ const HAS_FREE_REGIONS = 1 << 6;
+ const HAS_TY_ERR = 1 << 7;
+ const HAS_PROJECTION = 1 << 8;
// FIXME: Rename this to the actual property since it's used for generators too
- const HAS_TY_CLOSURE = 1 << 9,
+ const HAS_TY_CLOSURE = 1 << 9;
// true if there are "names" of types and regions and so forth
// that are local to a particular fn
- const HAS_LOCAL_NAMES = 1 << 10,
+ const HAS_LOCAL_NAMES = 1 << 10;
// Present if the type belongs in a local type context.
// Only set for TyInfer other than Fresh.
- const KEEP_IN_LOCAL_TCX = 1 << 11,
+ const KEEP_IN_LOCAL_TCX = 1 << 11;
// Is there a projection that does not involve a bound region?
// Currently we can't normalize projections w/ bound regions.
- const HAS_NORMALIZABLE_PROJECTION = 1 << 12,
+ const HAS_NORMALIZABLE_PROJECTION = 1 << 12;
const NEEDS_SUBST = TypeFlags::HAS_PARAMS.bits |
TypeFlags::HAS_SELF.bits |
- TypeFlags::HAS_RE_EARLY_BOUND.bits,
+ TypeFlags::HAS_RE_EARLY_BOUND.bits;
// Flags representing the nominal content of a type,
// computed by FlagsComputation. If you add a new nominal
TypeFlags::HAS_PROJECTION.bits |
TypeFlags::HAS_TY_CLOSURE.bits |
TypeFlags::HAS_LOCAL_NAMES.bits |
- TypeFlags::KEEP_IN_LOCAL_TCX.bits,
+ TypeFlags::KEEP_IN_LOCAL_TCX.bits;
}
}
}
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::TyS<'gcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::TyS<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ty::TyS {
ref sty,
}
bitflags! {
- flags AdtFlags: u32 {
- const NO_ADT_FLAGS = 0,
- const IS_ENUM = 1 << 0,
- const IS_PHANTOM_DATA = 1 << 1,
- const IS_FUNDAMENTAL = 1 << 2,
- const IS_UNION = 1 << 3,
- const IS_BOX = 1 << 4,
+ pub struct AdtFlags: u32 {
+ const NO_ADT_FLAGS = 0;
+ const IS_ENUM = 1 << 0;
+ const IS_PHANTOM_DATA = 1 << 1;
+ const IS_FUNDAMENTAL = 1 << 2;
+ const IS_UNION = 1 << 3;
+ const IS_BOX = 1 << 4;
}
}
impl<'tcx> serialize::UseSpecializedDecodable for &'tcx AdtDef {}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for AdtDef {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for AdtDef {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ty::AdtDef {
did,
bitflags! {
#[derive(RustcEncodable, RustcDecodable, Default)]
- flags ReprFlags: u8 {
- const IS_C = 1 << 0,
- const IS_PACKED = 1 << 1,
- const IS_SIMD = 1 << 2,
+ pub struct ReprFlags: u8 {
+ const IS_C = 1 << 0;
+ const IS_PACKED = 1 << 1;
+ const IS_SIMD = 1 << 2;
// Internal only for now. If true, don't reorder fields.
- const IS_LINEAR = 1 << 3,
+ const IS_LINEAR = 1 << 3;
// Any of these flags being set prevent field reordering optimisation.
const IS_UNOPTIMISABLE = ReprFlags::IS_C.bits |
ReprFlags::IS_PACKED.bits |
ReprFlags::IS_SIMD.bits |
- ReprFlags::IS_LINEAR.bits,
+ ReprFlags::IS_LINEAR.bits;
}
}
pub name: InternedString
}
+impl_stable_hash_for!(struct self::SymbolName {
+ name
+});
+
impl Deref for SymbolName {
type Target = str;
use ty::{Slice, TyS};
use ty::subst::Kind;
-use std::fmt;
use std::iter;
use std::cmp::Ordering;
use syntax::abi;
}
}
-impl fmt::Debug for TypeFlags {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "{:x}", self.bits)
- }
-}
-
/// Represents the projection of an associated type. In explicit UFCS
/// form this would be written `<T as Trait<..>>::N`.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
use hir;
use hir::def_id::DefId;
use hir::map::DefPathHash;
+use ich::{self, StableHashingContext};
use traits::specialization_graph;
use ty::fast_reject;
use ty::fold::TypeFoldable;
use ty::{Ty, TyCtxt};
use rustc_data_structures::fx::FxHashMap;
-
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
+ StableHasherResult};
use std::rc::Rc;
/// A trait's definition with type information.
non_blanket_impls: non_blanket_impls,
})
}
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for TraitImpls {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ let TraitImpls {
+ ref blanket_impls,
+ ref non_blanket_impls,
+ } = *self;
+
+ ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, non_blanket_impls);
+ }
+}
use hir::def_id::{DefId, LOCAL_CRATE};
use hir::map::DefPathData;
-use ich::{StableHashingContext, NodeIdHashingMode};
+use ich::NodeIdHashingMode;
use middle::const_val::ConstVal;
use traits::{self, Reveal};
use ty::{self, Ty, TyCtxt, TypeFoldable};
/// context it's calculated within. This is used by the `type_id` intrinsic.
pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
let mut hasher = StableHasher::new();
- let mut hcx = StableHashingContext::new(self);
+ let mut hcx = self.create_stable_hashing_context();
// We want the type_id be independent of the types free regions, so we
// erase them. The erase_regions() call will also anonymize bound
path = "lib.rs"
[dependencies]
-rustc_bitflags = { path = "../librustc_bitflags" }
+bitflags = "1.0"
+rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
#![cfg_attr(not(stage0), feature(const_min_value))]
#![cfg_attr(not(stage0), feature(const_max_value))]
+// See librustc_cratesio_shim/Cargo.toml for a comment explaining this.
+#[allow(unused_extern_crates)]
+extern crate rustc_cratesio_shim;
+
#[macro_use]
-extern crate rustc_bitflags;
+extern crate bitflags;
use std::cmp::Ordering;
use std::fmt;
use std::ops::{Neg, Add, Sub, Mul, Div, Rem};
-use std::ops::{AddAssign, SubAssign, MulAssign, DivAssign, RemAssign, BitOrAssign};
+use std::ops::{AddAssign, SubAssign, MulAssign, DivAssign, RemAssign};
use std::str::FromStr;
bitflags! {
///
/// UNDERFLOW or OVERFLOW are always returned or-ed with INEXACT.
#[must_use]
- #[derive(Debug)]
- flags Status: u8 {
- const OK = 0x00,
- const INVALID_OP = 0x01,
- const DIV_BY_ZERO = 0x02,
- const OVERFLOW = 0x04,
- const UNDERFLOW = 0x08,
- const INEXACT = 0x10
- }
-}
-
-impl BitOrAssign for Status {
- fn bitor_assign(&mut self, rhs: Self) {
- *self = *self | rhs;
+ pub struct Status: u8 {
+ const OK = 0x00;
+ const INVALID_OP = 0x01;
+ const DIV_BY_ZERO = 0x02;
+ const OVERFLOW = 0x04;
+ const UNDERFLOW = 0x08;
+ const INEXACT = 0x10;
}
}
--- /dev/null
+NB: This crate is part of the Rust compiler. For an overview of the
+compiler as a whole, see
+[the README.md file found in `librustc`](../librustc/README.md).
+
+`librustc_back` contains some very low-level details that are
+specific to different LLVM targets and so forth.
+++ /dev/null
-[package]
-authors = ["The Rust Project Developers"]
-name = "rustc_bitflags"
-version = "0.0.0"
-
-[lib]
-name = "rustc_bitflags"
-path = "lib.rs"
-doctest = false
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-
-#![no_std]
-#![deny(warnings)]
-
-//! A typesafe bitmask flag generator.
-
-#[cfg(test)]
-#[macro_use]
-extern crate std;
-
-/// The `bitflags!` macro generates a `struct` that holds a set of C-style
-/// bitmask flags. It is useful for creating typesafe wrappers for C APIs.
-///
-/// The flags should only be defined for integer types, otherwise unexpected
-/// type errors may occur at compile time.
-///
-/// # Examples
-///
-/// ```{.rust}
-/// #![feature(rustc_private)]
-/// #[macro_use] extern crate rustc_bitflags;
-///
-/// bitflags! {
-/// flags Flags: u32 {
-/// const FLAG_A = 0b00000001,
-/// const FLAG_B = 0b00000010,
-/// const FLAG_C = 0b00000100,
-/// const FLAG_ABC = Flags::FLAG_A.bits
-/// | Flags::FLAG_B.bits
-/// | Flags::FLAG_C.bits,
-/// }
-/// }
-///
-/// fn main() {
-/// let e1 = Flags::FLAG_A | Flags::FLAG_C;
-/// let e2 = Flags::FLAG_B | Flags::FLAG_C;
-/// assert!((e1 | e2) == Flags::FLAG_ABC); // union
-/// assert!((e1 & e2) == Flags::FLAG_C); // intersection
-/// assert!((e1 - e2) == Flags::FLAG_A); // set difference
-/// assert!(!e2 == Flags::FLAG_A); // set complement
-/// }
-/// ```
-///
-/// The generated `struct`s can also be extended with type and trait implementations:
-///
-/// ```{.rust}
-/// #![feature(rustc_private)]
-/// #[macro_use] extern crate rustc_bitflags;
-///
-/// use std::fmt;
-///
-/// bitflags! {
-/// flags Flags: u32 {
-/// const FLAG_A = 0b00000001,
-/// const FLAG_B = 0b00000010,
-/// }
-/// }
-///
-/// impl Flags {
-/// pub fn clear(&mut self) {
-/// self.bits = 0; // The `bits` field can be accessed from within the
-/// // same module where the `bitflags!` macro was invoked.
-/// }
-/// }
-///
-/// impl fmt::Debug for Flags {
-/// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-/// write!(f, "hi!")
-/// }
-/// }
-///
-/// fn main() {
-/// let mut flags = Flags::FLAG_A | Flags::FLAG_B;
-/// flags.clear();
-/// assert!(flags.is_empty());
-/// assert_eq!(format!("{:?}", flags), "hi!");
-/// }
-/// ```
-///
-/// # Attributes
-///
-/// Attributes can be attached to the generated `struct` by placing them
-/// before the `flags` keyword.
-///
-/// # Derived traits
-///
-/// The `PartialEq` and `Clone` traits are automatically derived for the `struct` using
-/// the `deriving` attribute. Additional traits can be derived by providing an
-/// explicit `deriving` attribute on `flags`.
-///
-/// # Operators
-///
-/// The following operator traits are implemented for the generated `struct`:
-///
-/// - `BitOr`: union
-/// - `BitAnd`: intersection
-/// - `BitXor`: toggle
-/// - `Sub`: set difference
-/// - `Not`: set complement
-///
-/// # Methods
-///
-/// The following methods are defined for the generated `struct`:
-///
-/// - `empty`: an empty set of flags
-/// - `all`: the set of all flags
-/// - `bits`: the raw value of the flags currently stored
-/// - `from_bits`: convert from underlying bit representation, unless that
-/// representation contains bits that do not correspond to a flag
-/// - `from_bits_truncate`: convert from underlying bit representation, dropping
-/// any bits that do not correspond to flags
-/// - `is_empty`: `true` if no flags are currently stored
-/// - `is_all`: `true` if all flags are currently set
-/// - `intersects`: `true` if there are flags common to both `self` and `other`
-/// - `contains`: `true` all of the flags in `other` are contained within `self`
-/// - `insert`: inserts the specified flags in-place
-/// - `remove`: removes the specified flags in-place
-/// - `toggle`: the specified flags will be inserted if not present, and removed
-/// if they are.
-#[macro_export]
-macro_rules! bitflags {
- ($(#[$attr:meta])* flags $BitFlags:ident: $T:ty {
- $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+
- }) => {
- #[derive(Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)]
- $(#[$attr])*
- pub struct $BitFlags {
- bits: $T,
- }
-
- impl $BitFlags {
- $($(#[$Flag_attr])* pub const $Flag: $BitFlags = $BitFlags { bits: $value };)+
-
- /// Returns an empty set of flags.
- #[inline]
- pub fn empty() -> $BitFlags {
- $BitFlags { bits: 0 }
- }
-
- /// Returns the set containing all flags.
- #[inline]
- pub fn all() -> $BitFlags {
- $BitFlags { bits: $($value)|+ }
- }
-
- /// Returns the raw value of the flags currently stored.
- #[inline]
- pub fn bits(&self) -> $T {
- self.bits
- }
-
- /// Convert from underlying bit representation, unless that
- /// representation contains bits that do not correspond to a flag.
- #[inline]
- pub fn from_bits(bits: $T) -> ::std::option::Option<$BitFlags> {
- if (bits & !$BitFlags::all().bits()) != 0 {
- ::std::option::Option::None
- } else {
- ::std::option::Option::Some($BitFlags { bits: bits })
- }
- }
-
- /// Convert from underlying bit representation, dropping any bits
- /// that do not correspond to flags.
- #[inline]
- pub fn from_bits_truncate(bits: $T) -> $BitFlags {
- $BitFlags { bits: bits } & $BitFlags::all()
- }
-
- /// Returns `true` if no flags are currently stored.
- #[inline]
- pub fn is_empty(&self) -> bool {
- *self == $BitFlags::empty()
- }
-
- /// Returns `true` if all flags are currently set.
- #[inline]
- pub fn is_all(&self) -> bool {
- *self == $BitFlags::all()
- }
-
- /// Returns `true` if there are flags common to both `self` and `other`.
- #[inline]
- pub fn intersects(&self, other: $BitFlags) -> bool {
- !(*self & other).is_empty()
- }
-
- /// Returns `true` if all of the flags in `other` are contained within `self`.
- #[inline]
- pub fn contains(&self, other: $BitFlags) -> bool {
- (*self & other) == other
- }
-
- /// Inserts the specified flags in-place.
- #[inline]
- pub fn insert(&mut self, other: $BitFlags) {
- self.bits |= other.bits;
- }
-
- /// Removes the specified flags in-place.
- #[inline]
- pub fn remove(&mut self, other: $BitFlags) {
- self.bits &= !other.bits;
- }
-
- /// Toggles the specified flags in-place.
- #[inline]
- pub fn toggle(&mut self, other: $BitFlags) {
- self.bits ^= other.bits;
- }
- }
-
- impl ::std::ops::BitOr for $BitFlags {
- type Output = $BitFlags;
-
- /// Returns the union of the two sets of flags.
- #[inline]
- fn bitor(self, other: $BitFlags) -> $BitFlags {
- $BitFlags { bits: self.bits | other.bits }
- }
- }
-
- impl ::std::ops::BitXor for $BitFlags {
- type Output = $BitFlags;
-
- /// Returns the left flags, but with all the right flags toggled.
- #[inline]
- fn bitxor(self, other: $BitFlags) -> $BitFlags {
- $BitFlags { bits: self.bits ^ other.bits }
- }
- }
-
- impl ::std::ops::BitAnd for $BitFlags {
- type Output = $BitFlags;
-
- /// Returns the intersection between the two sets of flags.
- #[inline]
- fn bitand(self, other: $BitFlags) -> $BitFlags {
- $BitFlags { bits: self.bits & other.bits }
- }
- }
-
- impl ::std::ops::Sub for $BitFlags {
- type Output = $BitFlags;
-
- /// Returns the set difference of the two sets of flags.
- #[inline]
- fn sub(self, other: $BitFlags) -> $BitFlags {
- $BitFlags { bits: self.bits & !other.bits }
- }
- }
-
- impl ::std::ops::Not for $BitFlags {
- type Output = $BitFlags;
-
- /// Returns the complement of this set of flags.
- #[inline]
- fn not(self) -> $BitFlags {
- $BitFlags { bits: !self.bits } & $BitFlags::all()
- }
- }
- };
- ($(#[$attr:meta])* flags $BitFlags:ident: $T:ty {
- $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+,
- }) => {
- bitflags! {
- $(#[$attr])*
- flags $BitFlags: $T {
- $($(#[$Flag_attr])* const $Flag = $value),+
- }
- }
- };
-}
-
-#[cfg(test)]
-#[allow(non_upper_case_globals)]
-mod tests {
- use std::hash::{Hash, Hasher};
- use std::collections::hash_map::DefaultHasher;
- use std::option::Option::{None, Some};
-
- bitflags! {
- #[doc = "> The first principle is that you must not fool yourself — and"]
- #[doc = "> you are the easiest person to fool."]
- #[doc = "> "]
- #[doc = "> - Richard Feynman"]
- flags Flags: u32 {
- const FlagA = 0b00000001,
- #[doc = "<pcwalton> macros are way better at generating code than trans is"]
- const FlagB = 0b00000010,
- const FlagC = 0b00000100,
- #[doc = "* cmr bed"]
- #[doc = "* strcat table"]
- #[doc = "<strcat> wait what?"]
- const FlagABC = Flags::FlagA.bits
- | Flags::FlagB.bits
- | Flags::FlagC.bits,
- }
- }
-
- bitflags! {
- flags AnotherSetOfFlags: i8 {
- const AnotherFlag = -1,
- }
- }
-
- #[test]
- fn test_bits() {
- assert_eq!(Flags::empty().bits(), 0b00000000);
- assert_eq!(Flags::FlagA.bits(), 0b00000001);
- assert_eq!(Flags::FlagABC.bits(), 0b00000111);
-
- assert_eq!(AnotherSetOfFlags::empty().bits(), 0b00);
- assert_eq!(AnotherSetOfFlags::AnotherFlag.bits(), !0);
- }
-
- #[test]
- fn test_from_bits() {
- assert!(Flags::from_bits(0) == Some(Flags::empty()));
- assert!(Flags::from_bits(0b1) == Some(Flags::FlagA));
- assert!(Flags::from_bits(0b10) == Some(Flags::FlagB));
- assert!(Flags::from_bits(0b11) == Some(Flags::FlagA | Flags::FlagB));
- assert!(Flags::from_bits(0b1000) == None);
-
- assert!(AnotherSetOfFlags::from_bits(!0) == Some(AnotherSetOfFlags::AnotherFlag));
- }
-
- #[test]
- fn test_from_bits_truncate() {
- assert!(Flags::from_bits_truncate(0) == Flags::empty());
- assert!(Flags::from_bits_truncate(0b1) == Flags::FlagA);
- assert!(Flags::from_bits_truncate(0b10) == Flags::FlagB);
- assert!(Flags::from_bits_truncate(0b11) == (Flags::FlagA | Flags::FlagB));
- assert!(Flags::from_bits_truncate(0b1000) == Flags::empty());
- assert!(Flags::from_bits_truncate(0b1001) == Flags::FlagA);
-
- assert!(AnotherSetOfFlags::from_bits_truncate(0) == AnotherSetOfFlags::empty());
- }
-
- #[test]
- fn test_is_empty() {
- assert!(Flags::empty().is_empty());
- assert!(!Flags::FlagA.is_empty());
- assert!(!Flags::FlagABC.is_empty());
-
- assert!(!AnotherSetOfFlags::AnotherFlag.is_empty());
- }
-
- #[test]
- fn test_is_all() {
- assert!(Flags::all().is_all());
- assert!(!Flags::FlagA.is_all());
- assert!(Flags::FlagABC.is_all());
-
- assert!(AnotherSetOfFlags::AnotherFlag.is_all());
- }
-
- #[test]
- fn test_two_empties_do_not_intersect() {
- let e1 = Flags::empty();
- let e2 = Flags::empty();
- assert!(!e1.intersects(e2));
-
- assert!(AnotherSetOfFlags::AnotherFlag.intersects(AnotherSetOfFlags::AnotherFlag));
- }
-
- #[test]
- fn test_empty_does_not_intersect_with_full() {
- let e1 = Flags::empty();
- let e2 = Flags::FlagABC;
- assert!(!e1.intersects(e2));
- }
-
- #[test]
- fn test_disjoint_intersects() {
- let e1 = Flags::FlagA;
- let e2 = Flags::FlagB;
- assert!(!e1.intersects(e2));
- }
-
- #[test]
- fn test_overlapping_intersects() {
- let e1 = Flags::FlagA;
- let e2 = Flags::FlagA | Flags::FlagB;
- assert!(e1.intersects(e2));
- }
-
- #[test]
- fn test_contains() {
- let e1 = Flags::FlagA;
- let e2 = Flags::FlagA | Flags::FlagB;
- assert!(!e1.contains(e2));
- assert!(e2.contains(e1));
- assert!(Flags::FlagABC.contains(e2));
-
- assert!(AnotherSetOfFlags::AnotherFlag.contains(AnotherSetOfFlags::AnotherFlag));
- }
-
- #[test]
- fn test_insert() {
- let mut e1 = Flags::FlagA;
- let e2 = Flags::FlagA | Flags::FlagB;
- e1.insert(e2);
- assert!(e1 == e2);
-
- let mut e3 = AnotherSetOfFlags::empty();
- e3.insert(AnotherSetOfFlags::AnotherFlag);
- assert!(e3 == AnotherSetOfFlags::AnotherFlag);
- }
-
- #[test]
- fn test_remove() {
- let mut e1 = Flags::FlagA | Flags::FlagB;
- let e2 = Flags::FlagA | Flags::FlagC;
- e1.remove(e2);
- assert!(e1 == Flags::FlagB);
-
- let mut e3 = AnotherSetOfFlags::AnotherFlag;
- e3.remove(AnotherSetOfFlags::AnotherFlag);
- assert!(e3 == AnotherSetOfFlags::empty());
- }
-
- #[test]
- fn test_operators() {
- let e1 = Flags::FlagA | Flags::FlagC;
- let e2 = Flags::FlagB | Flags::FlagC;
- assert!((e1 | e2) == Flags::FlagABC); // union
- assert!((e1 & e2) == Flags::FlagC); // intersection
- assert!((e1 - e2) == Flags::FlagA); // set difference
- assert!(!e2 == Flags::FlagA); // set complement
- assert!(e1 ^ e2 == Flags::FlagA | Flags::FlagB); // toggle
- let mut e3 = e1;
- e3.toggle(e2);
- assert!(e3 == Flags::FlagA | Flags::FlagB);
-
- let mut m4 = AnotherSetOfFlags::empty();
- m4.toggle(AnotherSetOfFlags::empty());
- assert!(m4 == AnotherSetOfFlags::empty());
- }
-
- #[test]
- fn test_lt() {
- let mut a = Flags::empty();
- let mut b = Flags::empty();
-
- assert!(!(a < b) && !(b < a));
- b = Flags::FlagB;
- assert!(a < b);
- a = Flags::FlagC;
- assert!(!(a < b) && b < a);
- b = Flags::FlagC | Flags::FlagB;
- assert!(a < b);
- }
-
- #[test]
- fn test_ord() {
- let mut a = Flags::empty();
- let mut b = Flags::empty();
-
- assert!(a <= b && a >= b);
- a = Flags::FlagA;
- assert!(a > b && a >= b);
- assert!(b < a && b <= a);
- b = Flags::FlagB;
- assert!(b > a && b >= a);
- assert!(a < b && a <= b);
- }
-
- #[test]
- fn test_hash() {
- let mut x = Flags::empty();
- let mut y = Flags::empty();
- assert!(hash(&x) == hash(&y));
- x = Flags::all();
- y = Flags::FlagABC;
- assert!(hash(&x) == hash(&y));
- }
-
- fn hash<T: Hash>(t: &T) -> u64 {
- let mut s = DefaultHasher::new();
- t.hash(&mut s);
- s.finish()
- }
-}
use std::cmp::Ordering;
use rustc_const_math::*;
-
macro_rules! signal {
($e:expr, $exn:expr) => {
return Err(ConstEvalErr { span: $e.span, kind: $exn })
}
} else {
if tcx.is_const_fn(def_id) {
- tcx.extern_const_body(def_id)
+ tcx.extern_const_body(def_id).body
} else {
signal!(e, TypeckError)
}
tcx.mir_const_qualif(def_id);
tcx.hir.body(tcx.hir.body_owned_by(id))
} else {
- tcx.extern_const_body(def_id)
+ tcx.extern_const_body(def_id).body
};
ConstContext::new(tcx, key.param_env.and(substs), tables).eval(&body.value)
}
let body = if let Some(id) = self.tcx.hir.as_local_node_id(def_id) {
self.tcx.hir.body(self.tcx.hir.body_owned_by(id))
} else {
- self.tcx.extern_const_body(def_id)
+ self.tcx.extern_const_body(def_id).body
};
let pat = self.lower_const_expr(&body.value, pat_id, span);
self.tables = old_tables;
--- /dev/null
+# This crate exists to allow rustc to link certain crates from crates.io into
+# the distribution. This doesn't work normally because:
+#
+# - Cargo always builds dependencies as rlibs:
+# https://github.com/rust-lang/cargo/issues/629
+# - rustc wants to avoid multiple definitions of the same symbol, so it refuses
+# to link multiple dylibs containing the same rlib
+# - multiple dylibs depend on the same crates.io crates
+#
+# This solution works by including all the conflicting rlibs in a single dylib,
+# which is then linked into all dylibs that depend on these crates.io crates.
+# The result is that each rlib only appears once, and things work!
+
+[package]
+authors = ["The Rust Project Developers"]
+name = "rustc_cratesio_shim"
+version = "0.0.0"
+
+[lib]
+crate-type = ["dylib"]
+
+[dependencies]
+bitflags = "1.0"
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// See Cargo.toml for a comment explaining this crate.
+#![allow(unused_extern_crates)]
+
+extern crate bitflags;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::hash::{Hash, Hasher};
+use std::hash::{Hash, Hasher, BuildHasher};
use std::marker::PhantomData;
use std::mem;
use blake2b::Blake2bHasher;
}
+/// Something that can provide a stable hashing context.
+pub trait StableHashingContextProvider {
+ type ContextType;
+ fn create_stable_hashing_context(&self) -> Self::ContextType;
+}
+
+impl<'a, T: StableHashingContextProvider> StableHashingContextProvider for &'a T {
+ type ContextType = T::ContextType;
+
+ fn create_stable_hashing_context(&self) -> Self::ContextType {
+ (**self).create_stable_hashing_context()
+ }
+}
+
+impl<'a, T: StableHashingContextProvider> StableHashingContextProvider for &'a mut T {
+ type ContextType = T::ContextType;
+
+ fn create_stable_hashing_context(&self) -> Self::ContextType {
+ (**self).create_stable_hashing_context()
+ }
+}
+
/// Something that implements `HashStable<CTX>` can be hashed in a way that is
/// stable across multiple compilation sessions.
pub trait HashStable<CTX> {
hasher: &mut StableHasher<W>);
}
+/// Implement this for types that can be turned into stable keys like, for
+/// example, for DefId that can be converted to a DefPathHash. This is used for
+/// bringing maps into a predictable order before hashing them.
+pub trait ToStableHashKey<HCX> {
+ type KeyType: Ord + Clone + Sized + HashStable<HCX>;
+ fn to_stable_hash_key(&self, hcx: &HCX) -> Self::KeyType;
+}
+
// Implement HashStable by just calling `Hash::hash()`. This works fine for
// self-contained values that don't depend on the hashing context `CTX`.
macro_rules! impl_stable_hash_via_hash {
fn hash_stable<W: StableHasherResult>(&self,
ctx: &mut CTX,
hasher: &mut StableHasher<W>) {
- self.0.hash_stable(ctx, hasher);
+ let (ref _0,) = *self;
+ _0.hash_stable(ctx, hasher);
}
}
fn hash_stable<W: StableHasherResult>(&self,
ctx: &mut CTX,
hasher: &mut StableHasher<W>) {
- self.0.hash_stable(ctx, hasher);
- self.1.hash_stable(ctx, hasher);
+ let (ref _0, ref _1) = *self;
+ _0.hash_stable(ctx, hasher);
+ _1.hash_stable(ctx, hasher);
+ }
+}
+
+impl<T1, T2, T3, CTX> HashStable<CTX> for (T1, T2, T3)
+ where T1: HashStable<CTX>,
+ T2: HashStable<CTX>,
+ T3: HashStable<CTX>,
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ ctx: &mut CTX,
+ hasher: &mut StableHasher<W>) {
+ let (ref _0, ref _1, ref _2) = *self;
+ _0.hash_stable(ctx, hasher);
+ _1.hash_stable(ctx, hasher);
+ _2.hash_stable(ctx, hasher);
}
}
}
}
-impl<T: HashStable<CTX>, CTX> HashStable<CTX> for Box<T> {
+impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for Box<T> {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
ctx: &mut CTX,
}
}
-impl<T: HashStable<CTX>, CTX> HashStable<CTX> for ::std::rc::Rc<T> {
+impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for ::std::rc::Rc<T> {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
ctx: &mut CTX,
}
}
-impl<T: HashStable<CTX>, CTX> HashStable<CTX> for ::std::sync::Arc<T> {
+impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for ::std::sync::Arc<T> {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
ctx: &mut CTX,
}
}
+// A String is its own stable key: its value already orders and hashes
+// deterministically, so the hashing context is not needed.
+impl<HCX> ToStableHashKey<HCX> for String {
+ type KeyType = String;
+ #[inline]
+ fn to_stable_hash_key(&self, _: &HCX) -> Self::KeyType {
+ self.clone()
+ }
+}
+
impl<CTX> HashStable<CTX> for bool {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
}
}
+// Stable hashing for `Result`: the discriminant is hashed first so that
+// `Ok(x)` and `Err(x)` with equal payloads never collide, then the payload
+// of whichever variant is present.
+impl<T1, T2, CTX> HashStable<CTX> for Result<T1, T2>
+ where T1: HashStable<CTX>,
+ T2: HashStable<CTX>,
+{
+ #[inline]
+ fn hash_stable<W: StableHasherResult>(&self,
+ ctx: &mut CTX,
+ hasher: &mut StableHasher<W>) {
+ mem::discriminant(self).hash_stable(ctx, hasher);
+ match *self {
+ Ok(ref x) => x.hash_stable(ctx, hasher),
+ Err(ref x) => x.hash_stable(ctx, hasher),
+ }
+ }
+}
+
impl<'a, T, CTX> HashStable<CTX> for &'a T
- where T: HashStable<CTX>
+ where T: HashStable<CTX> + ?Sized
{
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
}
}
-impl<K, V, CTX> HashStable<CTX> for ::std::collections::BTreeMap<K, V>
- where K: Ord + HashStable<CTX>,
- V: HashStable<CTX>,
+impl<I: ::indexed_vec::Idx, T, CTX> HashStable<CTX> for ::indexed_vec::IndexVec<I, T>
+ where T: HashStable<CTX>,
{
fn hash_stable<W: StableHasherResult>(&self,
ctx: &mut CTX,
hasher: &mut StableHasher<W>) {
self.len().hash_stable(ctx, hasher);
- for (k, v) in self {
- k.hash_stable(ctx, hasher);
+ for v in &self.raw {
v.hash_stable(ctx, hasher);
}
}
}
-impl<T, CTX> HashStable<CTX> for ::std::collections::BTreeSet<T>
- where T: Ord + HashStable<CTX>,
+
+impl<I: ::indexed_vec::Idx, CTX> HashStable<CTX> for ::indexed_set::IdxSetBuf<I>
{
fn hash_stable<W: StableHasherResult>(&self,
ctx: &mut CTX,
hasher: &mut StableHasher<W>) {
- self.len().hash_stable(ctx, hasher);
- for v in self {
- v.hash_stable(ctx, hasher);
- }
+ self.words().hash_stable(ctx, hasher);
}
}
-impl<I: ::indexed_vec::Idx, T, CTX> HashStable<CTX> for ::indexed_vec::IndexVec<I, T>
- where T: HashStable<CTX>,
+impl_stable_hash_via_hash!(::std::path::Path);
+impl_stable_hash_via_hash!(::std::path::PathBuf);
+
+// HashMap iteration order depends on the hash builder `R`, so it cannot be
+// hashed directly. Entries are instead mapped to stable keys and sorted
+// before hashing (see `hash_stable_hashmap` below).
+impl<K, V, R, HCX> HashStable<HCX> for ::std::collections::HashMap<K, V, R>
+ where K: ToStableHashKey<HCX> + Eq + Hash,
+ V: HashStable<HCX>,
+ R: BuildHasher,
{
+ #[inline]
 fn hash_stable<W: StableHasherResult>(&self,
- ctx: &mut CTX,
+ hcx: &mut HCX,
 hasher: &mut StableHasher<W>) {
- self.len().hash_stable(ctx, hasher);
- for v in &self.raw {
- v.hash_stable(ctx, hasher);
- }
+ hash_stable_hashmap(hcx, hasher, self, ToStableHashKey::to_stable_hash_key);
}
}
+
+// HashSet iteration order is likewise nondeterministic: collect each
+// element's stable key, sort, and hash the sorted Vec. An unstable sort is
+// fine here because equal keys hash identically anyway.
+impl<K, R, HCX> HashStable<HCX> for ::std::collections::HashSet<K, R>
+ where K: ToStableHashKey<HCX> + Eq + Hash,
+ R: BuildHasher,
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut HCX,
+ hasher: &mut StableHasher<W>) {
+ let mut keys: Vec<_> = self.iter()
+ .map(|k| k.to_stable_hash_key(hcx))
+ .collect();
+ keys.sort_unstable();
+ keys.hash_stable(hcx, hasher);
+ }
+}
+
+// Although a BTreeMap iterates in `K`'s own `Ord` order, the entries are
+// re-sorted by their *stable* key here, whose ordering need not match
+// `K: Ord` — only the stable-key order is guaranteed reproducible.
+impl<K, V, HCX> HashStable<HCX> for ::std::collections::BTreeMap<K, V>
+ where K: ToStableHashKey<HCX>,
+ V: HashStable<HCX>,
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut HCX,
+ hasher: &mut StableHasher<W>) {
+ let mut entries: Vec<_> = self.iter()
+ .map(|(k, v)| (k.to_stable_hash_key(hcx), v))
+ .collect();
+ entries.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2));
+ entries.hash_stable(hcx, hasher);
+ }
+}
+
+// Same scheme as BTreeMap above: convert every element to its stable key
+// and sort by that key, rather than trusting `K`'s own iteration order.
+impl<K, HCX> HashStable<HCX> for ::std::collections::BTreeSet<K>
+ where K: ToStableHashKey<HCX>,
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut HCX,
+ hasher: &mut StableHasher<W>) {
+ let mut keys: Vec<_> = self.iter()
+ .map(|k| k.to_stable_hash_key(hcx))
+ .collect();
+ keys.sort_unstable();
+ keys.hash_stable(hcx, hasher);
+ }
+}
+
+/// Hash a HashMap in an order-independent way: map each key to a stable
+/// key via `to_stable_hash_key`, sort the (stable_key, value) pairs by
+/// that key, then hash the sorted vector. The sort key type `SK` must be
+/// `Ord` (for sorting) and `HashStable` (so the pairs themselves hash
+/// stably). `to_stable_hash_key` is taken as a parameter so callers can
+/// supply either the `ToStableHashKey` impl or an ad-hoc closure.
+pub fn hash_stable_hashmap<HCX, K, V, R, SK, F, W>(
+ hcx: &mut HCX,
+ hasher: &mut StableHasher<W>,
+ map: &::std::collections::HashMap<K, V, R>,
+ to_stable_hash_key: F)
+ where K: Eq + Hash,
+ V: HashStable<HCX>,
+ R: BuildHasher,
+ SK: HashStable<HCX> + Ord + Clone,
+ F: Fn(&K, &HCX) -> SK,
+ W: StableHasherResult,
+{
+ let mut entries: Vec<_> = map.iter()
+ .map(|(k, v)| (to_stable_hash_key(k, hcx), v))
+ .collect();
+ entries.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2));
+ entries.hash_stable(hcx, hasher);
+}
+
--- /dev/null
+NB: This crate is part of the Rust compiler. For an overview of the
+compiler as a whole, see
+[the README.md file found in `librustc`](../librustc/README.md).
+
+The `driver` crate is effectively the "main" function for the Rust
+compiler. It orchestrates the compilation process and "knits together"
+the code from the other crates within rustc. This crate itself does
+not contain any of the "main logic" of the compiler (though it does
+have some code related to pretty printing or other minor compiler
+options).
+
+
use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas};
use rustc::traits;
use rustc::util::common::{ErrorReported, time};
-use rustc::util::nodemap::NodeSet;
use rustc_allocator as allocator;
use rustc_borrowck as borrowck;
use rustc_incremental::{self, IncrementalHashesMap};
use serialize::json;
+use std::any::Any;
use std::env;
use std::ffi::{OsString, OsStr};
use std::fs;
use std::iter;
use std::path::{Path, PathBuf};
use std::rc::Rc;
+use std::sync::mpsc;
use syntax::{ast, diagnostics, visit};
use syntax::attr;
use syntax::ext::base::ExtCtxt;
// Construct the HIR map
let hir_map = time(sess.time_passes(),
"indexing hir",
- || hir_map::map_crate(&mut hir_forest, defs));
+ || hir_map::map_crate(&mut hir_forest, &defs));
{
let _ignore = hir_map.dep_graph.in_ignore();
&resolutions,
&expanded_crate,
&hir_map.krate(),
+ &outputs,
&crate_name),
Ok(()));
}
&arena,
&arenas,
&crate_name,
- |tcx, analysis, incremental_hashes_map, result| {
+ &outputs,
+ |tcx, analysis, incremental_hashes_map, rx, result| {
{
// Eventually, we will want to track plugins.
let _ignore = tcx.dep_graph.in_ignore();
tcx.print_debug_stats();
}
- let trans = phase_4_translate_to_llvm(tcx, analysis, incremental_hashes_map,
- &outputs);
+ let trans = phase_4_translate_to_llvm(tcx,
+ incremental_hashes_map,
+ rx);
if log_enabled!(::log::LogLevel::Info) {
println!("Post-trans");
}
}
- Ok((outputs, trans, tcx.dep_graph.clone()))
+ Ok((outputs.clone(), trans, tcx.dep_graph.clone()))
})??
};
resolutions: &'a Resolutions,
krate: &'a ast::Crate,
hir_crate: &'a hir::Crate,
+ output_filenames: &'a OutputFilenames,
crate_name: &'a str)
-> Self {
CompileState {
resolutions: Some(resolutions),
expanded_crate: Some(krate),
hir_crate: Some(hir_crate),
+ output_filenames: Some(output_filenames),
out_file: out_file.as_ref().map(|s| &**s),
..CompileState::empty(input, session, out_dir)
}
defs: resolver.definitions,
analysis: ty::CrateAnalysis {
access_levels: Rc::new(AccessLevels::default()),
- reachable: Rc::new(NodeSet()),
name: crate_name.to_string(),
glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None },
},
arena: &'tcx DroplessArena,
arenas: &'tcx GlobalArenas<'tcx>,
name: &str,
+ output_filenames: &OutputFilenames,
f: F)
-> Result<R, CompileIncomplete>
where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
ty::CrateAnalysis,
IncrementalHashesMap,
+ mpsc::Receiver<Box<Any + Send>>,
CompileResult) -> R
{
macro_rules! try_with_f {
- ($e: expr, ($t: expr, $a: expr, $h: expr)) => {
+ ($e: expr, ($($t:tt)*)) => {
match $e {
Ok(x) => x,
Err(x) => {
- f($t, $a, $h, Err(x));
+ f($($t)*, Err(x));
return Err(x);
}
}
mir::provide(&mut local_providers);
reachable::provide(&mut local_providers);
rustc_privacy::provide(&mut local_providers);
- trans::provide(&mut local_providers);
+ trans::provide_local(&mut local_providers);
typeck::provide(&mut local_providers);
ty::provide(&mut local_providers);
traits::provide(&mut local_providers);
let mut extern_providers = ty::maps::Providers::default();
cstore::provide(&mut extern_providers);
- trans::provide(&mut extern_providers);
+ trans::provide_extern(&mut extern_providers);
ty::provide_extern(&mut extern_providers);
traits::provide_extern(&mut extern_providers);
// FIXME(eddyb) get rid of this once we replace const_eval with miri.
passes.push_pass(MIR_OPTIMIZED, mir::transform::add_call_guards::CriticalCallEdges);
passes.push_pass(MIR_OPTIMIZED, mir::transform::dump_mir::Marker("PreTrans"));
+ let (tx, rx) = mpsc::channel();
+
TyCtxt::create_and_enter(sess,
cstore,
local_providers,
named_region_map,
hir_map,
name,
+ tx,
+ output_filenames,
|tcx| {
let incremental_hashes_map =
time(time_passes,
|| stability::check_unstable_api_usage(tcx));
// passes are timed inside typeck
- try_with_f!(typeck::check_crate(tcx), (tcx, analysis, incremental_hashes_map));
+ try_with_f!(typeck::check_crate(tcx),
+ (tcx, analysis, incremental_hashes_map, rx));
time(time_passes,
"const checking",
// lint warnings and so on -- kindck used to do this abort, but
// kindck is gone now). -nmatsakis
if sess.err_count() > 0 {
- return Ok(f(tcx, analysis, incremental_hashes_map, sess.compile_status()));
+ return Ok(f(tcx, analysis, incremental_hashes_map, rx, sess.compile_status()));
}
- analysis.reachable =
- time(time_passes,
- "reachability checking",
- || reachable::find_reachable(tcx));
-
time(time_passes, "death checking", || middle::dead::check_crate(tcx));
time(time_passes, "unused lib feature checking", || {
time(time_passes, "lint checking", || lint::check_crate(tcx));
- return Ok(f(tcx, analysis, incremental_hashes_map, tcx.sess.compile_status()));
+ return Ok(f(tcx, analysis, incremental_hashes_map, rx, tcx.sess.compile_status()));
})
}
/// Run the translation phase to LLVM, after which the AST and analysis can
/// be discarded.
pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- analysis: ty::CrateAnalysis,
incremental_hashes_map: IncrementalHashesMap,
- output_filenames: &OutputFilenames)
+ rx: mpsc::Receiver<Box<Any + Send>>)
-> write::OngoingCrateTranslation {
let time_passes = tcx.sess.time_passes();
|| ::rustc::middle::dependency_format::calculate(tcx));
let translation =
- time(time_passes,
- "translation",
- move || trans::trans_crate(tcx, analysis, incremental_hashes_map, output_filenames));
+ time(time_passes, "translation", move || {
+ trans::trans_crate(tcx, incremental_hashes_map, rx)
+ });
if tcx.sess.profile_queries() {
profile::dump("profile_queries".to_string())
ppm,
state.arena.unwrap(),
state.arenas.unwrap(),
+ state.output_filenames.unwrap(),
opt_uii.clone(),
state.out_file);
};
use rustc::dep_graph::DepGraph;
use rustc::middle::cstore::CrateStore;
use rustc::session::Session;
-use rustc::session::config::Input;
+use rustc::session::config::{Input, OutputFilenames};
use rustc_borrowck as borrowck;
use rustc_borrowck::graphviz as borrowck_dot;
resolutions: &Resolutions,
arena: &'tcx DroplessArena,
arenas: &'tcx GlobalArenas<'tcx>,
+ output_filenames: &OutputFilenames,
id: &str,
f: F)
-> A
arena,
arenas,
id,
- |tcx, _, _, _| {
+ output_filenames,
+ |tcx, _, _, _, _| {
let empty_tables = ty::TypeckTables::empty(None);
let annotation = TypedAnnotation {
tcx,
ppm: PpMode,
arena: &'tcx DroplessArena,
arenas: &'tcx GlobalArenas<'tcx>,
+ output_filenames: &OutputFilenames,
opt_uii: Option<UserIdentifiedItem>,
ofile: Option<&Path>) {
let dep_graph = DepGraph::new(false);
crate_name,
arena,
arenas,
+ output_filenames,
ppm,
opt_uii,
ofile);
resolutions,
arena,
arenas,
+ output_filenames,
crate_name,
move |annotation, krate| {
debug!("pretty printing source code {:?}", s);
resolutions,
arena,
arenas,
+ output_filenames,
crate_name,
move |annotation, _| {
debug!("pretty printing source code {:?}", s);
crate_name: &str,
arena: &'tcx DroplessArena,
arenas: &'tcx GlobalArenas<'tcx>,
+ output_filenames: &OutputFilenames,
ppm: PpMode,
uii: Option<UserIdentifiedItem>,
ofile: Option<&Path>) {
arena,
arenas,
crate_name,
- |tcx, _, _, _| {
+ output_filenames,
+ |tcx, _, _, _, _| {
match ppm {
PpmMir | PpmMirCFG => {
if let Some(nodeid) = nodeid {
//! # Standalone Tests for the Inference Module
+use std::path::PathBuf;
+use std::sync::mpsc;
+
use driver;
use rustc_lint;
use rustc_resolve::MakeGlobMap;
use rustc::hir::map as hir_map;
use rustc::mir::transform::Passes;
use rustc::session::{self, config};
+use rustc::session::config::{OutputFilenames, OutputTypes};
use std::rc::Rc;
use syntax::ast;
use syntax::abi::Abi;
let arena = DroplessArena::new();
let arenas = ty::GlobalArenas::new();
- let hir_map = hir_map::map_crate(&mut hir_forest, defs);
+ let hir_map = hir_map::map_crate(&mut hir_forest, &defs);
// run just enough stuff to build a tcx:
let named_region_map = resolve_lifetime::krate(&sess, &*cstore, &hir_map);
+ let (tx, _rx) = mpsc::channel();
+ let outputs = OutputFilenames {
+ out_directory: PathBuf::new(),
+ out_filestem: String::new(),
+ single_output_file: None,
+ extra: String::new(),
+ outputs: OutputTypes::new(&[]),
+ };
TyCtxt::create_and_enter(&sess,
&*cstore,
ty::maps::Providers::default(),
named_region_map.unwrap(),
hir_map,
"test_crate",
+ tx,
+ &outputs,
|tcx| {
tcx.infer_ctxt().enter(|infcx| {
let mut region_scope_tree = region::ScopeTree::default();
use std::hash::Hash;
use rustc::dep_graph::{DepNode, DepKind};
use rustc::hir;
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
use rustc::hir::map::DefPathHash;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ich::{Fingerprint, StableHashingContext};
}
struct ComputeItemHashesVisitor<'a, 'tcx: 'a> {
- hcx: StableHashingContext<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ hcx: StableHashingContext<'tcx>,
hashes: IncrementalHashesMap,
}
impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
fn compute_and_store_ich_for_item_like<T>(&mut self,
- dep_node: DepNode,
+ def_index: DefIndex,
hash_bodies: bool,
item_like: T)
- where T: HashStable<StableHashingContext<'a, 'tcx, 'tcx>>
+ where T: HashStable<StableHashingContext<'tcx>>
{
- if !hash_bodies && !self.hcx.tcx().sess.opts.build_dep_graph() {
+ if !hash_bodies && !self.tcx.sess.opts.build_dep_graph() {
// If we just need the hashes in order to compute the SVH, we don't
// need have two hashes per item. Just the one containing also the
// item's body is sufficient.
return
}
+ let def_path_hash = self.hcx.local_def_path_hash(def_index);
+
let mut hasher = IchHasher::new();
self.hcx.while_hashing_hir_bodies(hash_bodies, |hcx| {
item_like.hash_stable(hcx, &mut hasher);
let bytes_hashed = hasher.bytes_hashed();
let item_hash = hasher.finish();
+ let dep_node = if hash_bodies {
+ def_path_hash.to_dep_node(DepKind::HirBody)
+ } else {
+ def_path_hash.to_dep_node(DepKind::Hir)
+ };
debug!("calculate_def_hash: dep_node={:?} hash={:?}", dep_node, item_hash);
self.hashes.insert(dep_node, item_hash);
- let tcx = self.hcx.tcx();
let bytes_hashed =
- tcx.sess.perf_stats.incr_comp_bytes_hashed.get() +
- bytes_hashed;
- tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
+ self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() + bytes_hashed;
+ self.tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
+
+ if hash_bodies {
+ let in_scope_traits_map = self.tcx.in_scope_traits_map(def_index);
+ let mut hasher = IchHasher::new();
+ in_scope_traits_map.hash_stable(&mut self.hcx, &mut hasher);
+ let dep_node = def_path_hash.to_dep_node(DepKind::InScopeTraits);
+ self.hashes.insert(dep_node, hasher.finish());
+ }
}
fn compute_crate_hash(&mut self) {
- let tcx = self.hcx.tcx();
- let krate = tcx.hir.krate();
+ let krate = self.tcx.hir.krate();
let mut crate_state = IchHasher::new();
- let crate_disambiguator = tcx.sess.local_crate_disambiguator();
+ let crate_disambiguator = self.tcx.sess.local_crate_disambiguator();
"crate_disambiguator".hash(&mut crate_state);
crate_disambiguator.as_str().len().hash(&mut crate_state);
crate_disambiguator.as_str().hash(&mut crate_state);
// This `match` determines what kinds of nodes
// go into the SVH:
match item_dep_node.kind {
+ DepKind::InScopeTraits |
DepKind::Hir |
DepKind::HirBody => {
// We want to incoporate these into the
body_ids: _,
} = *krate;
- let def_path_hash = self.hcx.tcx().hir.definitions().def_path_hash(CRATE_DEF_INDEX);
- self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
+ self.compute_and_store_ich_for_item_like(CRATE_DEF_INDEX,
false,
(module, (span, attrs)));
- self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
+ self.compute_and_store_ich_for_item_like(CRATE_DEF_INDEX,
true,
(module, (span, attrs)));
}
fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate)
{
- let tcx = self.hcx.tcx();
+ let tcx = self.tcx;
let mut impls: Vec<(DefPathHash, Fingerprint)> = krate
.trait_impls
impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> {
fn visit_item(&mut self, item: &'tcx hir::Item) {
- let def_id = self.hcx.tcx().hir.local_def_id(item.id);
- let def_path_hash = self.hcx.tcx().def_path_hash(def_id);
- self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
+ let def_index = self.tcx.hir.local_def_id(item.id).index;
+ self.compute_and_store_ich_for_item_like(def_index,
false,
item);
- self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
+ self.compute_and_store_ich_for_item_like(def_index,
true,
item);
}
fn visit_trait_item(&mut self, item: &'tcx hir::TraitItem) {
- let def_id = self.hcx.tcx().hir.local_def_id(item.id);
- let def_path_hash = self.hcx.tcx().def_path_hash(def_id);
- self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
+ let def_index = self.tcx.hir.local_def_id(item.id).index;
+ self.compute_and_store_ich_for_item_like(def_index,
false,
item);
- self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
+ self.compute_and_store_ich_for_item_like(def_index,
true,
item);
}
fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) {
- let def_id = self.hcx.tcx().hir.local_def_id(item.id);
- let def_path_hash = self.hcx.tcx().def_path_hash(def_id);
- self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
+ let def_index = self.tcx.hir.local_def_id(item.id).index;
+ self.compute_and_store_ich_for_item_like(def_index,
false,
item);
- self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
+ self.compute_and_store_ich_for_item_like(def_index,
true,
item);
}
let krate = tcx.hir.krate();
let mut visitor = ComputeItemHashesVisitor {
- hcx: StableHashingContext::new(tcx),
+ tcx,
+ hcx: tcx.create_stable_hashing_context(),
hashes: IncrementalHashesMap::new(),
};
krate.visit_all_item_likes(&mut visitor);
for macro_def in krate.exported_macros.iter() {
- let def_id = tcx.hir.local_def_id(macro_def.id);
- let def_path_hash = tcx.def_path_hash(def_id);
- visitor.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
+ let def_index = tcx.hir.local_def_id(macro_def.id).index;
+ visitor.compute_and_store_ich_for_item_like(def_index,
false,
macro_def);
- visitor.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
+ visitor.compute_and_store_ich_for_item_like(def_index,
true,
macro_def);
}
match dep_node.kind {
DepKind::Krate |
DepKind::Hir |
+ DepKind::InScopeTraits |
DepKind::HirBody =>
true,
DepKind::MetaData => {
}
// HIR nodes (which always come from our crate) are an input:
+ DepKind::InScopeTraits |
DepKind::Hir |
DepKind::HirBody => {
Some(self.incremental_hashes_map[dep_node])
match dep_node.kind {
DepKind::Hir |
DepKind::HirBody |
+ DepKind::InScopeTraits |
DepKind::MetaData => {
dep_node.extract_def_id(tcx).is_some()
}
static-libstdcpp = []
[dependencies]
-rustc_bitflags = { path = "../librustc_bitflags" }
+bitflags = "1.0"
+rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
[build-dependencies]
build_helper = { path = "../build_helper" }
// These values **must** match with LLVMRustDIFlags!!
bitflags! {
#[repr(C)]
- #[derive(Debug, Default)]
- flags DIFlags: ::libc::uint32_t {
- const FlagZero = 0,
- const FlagPrivate = 1,
- const FlagProtected = 2,
- const FlagPublic = 3,
- const FlagFwdDecl = (1 << 2),
- const FlagAppleBlock = (1 << 3),
- const FlagBlockByrefStruct = (1 << 4),
- const FlagVirtual = (1 << 5),
- const FlagArtificial = (1 << 6),
- const FlagExplicit = (1 << 7),
- const FlagPrototyped = (1 << 8),
- const FlagObjcClassComplete = (1 << 9),
- const FlagObjectPointer = (1 << 10),
- const FlagVector = (1 << 11),
- const FlagStaticMember = (1 << 12),
- const FlagLValueReference = (1 << 13),
- const FlagRValueReference = (1 << 14),
- const FlagMainSubprogram = (1 << 21),
+ #[derive(Default)]
+ pub struct DIFlags: ::libc::uint32_t {
+ const FlagZero = 0;
+ const FlagPrivate = 1;
+ const FlagProtected = 2;
+ const FlagPublic = 3;
+ const FlagFwdDecl = (1 << 2);
+ const FlagAppleBlock = (1 << 3);
+ const FlagBlockByrefStruct = (1 << 4);
+ const FlagVirtual = (1 << 5);
+ const FlagArtificial = (1 << 6);
+ const FlagExplicit = (1 << 7);
+ const FlagPrototyped = (1 << 8);
+ const FlagObjcClassComplete = (1 << 9);
+ const FlagObjectPointer = (1 << 10);
+ const FlagVector = (1 << 11);
+ const FlagStaticMember = (1 << 12);
+ const FlagLValueReference = (1 << 13);
+ const FlagRValueReference = (1 << 14);
+ const FlagMainSubprogram = (1 << 21);
}
}
}
#![feature(link_args)]
#![feature(static_nobundle)]
-extern crate libc;
+// See librustc_cratesio_shim/Cargo.toml for a comment explaining this.
+#[allow(unused_extern_crates)]
+extern crate rustc_cratesio_shim;
+
#[macro_use]
-#[no_link]
-extern crate rustc_bitflags;
+extern crate bitflags;
+extern crate libc;
pub use self::IntPredicate::*;
pub use self::RealPredicate::*;
use rustc::hir;
use rustc::ty::{self, TyCtxt};
+use rustc::ich::Fingerprint;
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+
#[derive(RustcEncodable, RustcDecodable)]
pub struct Ast<'tcx> {
pub body: Lazy<hir::Body>,
pub tables: Lazy<ty::TypeckTables<'tcx>>,
pub nested_bodies: LazySeq<hir::Body>,
pub rvalue_promotable_to_static: bool,
+ pub stable_bodies_hash: Fingerprint,
}
impl_stable_hash_for!(struct Ast<'tcx> {
body,
tables,
nested_bodies,
- rvalue_promotable_to_static
+ rvalue_promotable_to_static,
+ stable_bodies_hash
});
impl<'a, 'b, 'tcx> IsolatedEncoder<'a, 'b, 'tcx> {
pub fn encode_body(&mut self, body_id: hir::BodyId) -> Lazy<Ast<'tcx>> {
let body = self.tcx.hir.body(body_id);
- let lazy_body = self.lazy(body);
+ // In order to avoid having to hash hir::Bodies from extern crates, we
+ // hash them here, during export, and store the hash with metadata.
+ let stable_bodies_hash = {
+ let mut hcx = self.tcx.create_stable_hashing_context();
+ let mut hasher = StableHasher::new();
+
+ hcx.while_hashing_hir_bodies(true, |hcx| {
+ hcx.while_hashing_spans(false, |hcx| {
+ body.hash_stable(hcx, &mut hasher);
+ });
+ });
+
+ hasher.finish()
+ };
+
+ let lazy_body = self.lazy(body);
let tables = self.tcx.body_tables(body_id);
let lazy_tables = self.lazy(tables);
tables: lazy_tables,
nested_bodies: lazy_nested_bodies,
rvalue_promotable_to_static,
+ stable_bodies_hash,
})
}
}
.decode(&cmeta)
.filter(|lib| relevant_lib(self.sess, lib) &&
lib.kind == cstore::NativeLibraryKind::NativeUnknown)
- .flat_map(|lib| lib.foreign_items.into_iter())
+ .flat_map(|lib| {
+ assert!(lib.foreign_items.iter().all(|def_id| def_id.krate == cnum));
+ lib.foreign_items.into_iter().map(|def_id| def_id.index)
+ })
.collect();
cmeta.dllimport_foreign_items = dllimports;
is_exported_symbol => {
cdata.exported_symbols.contains(&def_id.index)
}
- item_body_nested_bodies => { Rc::new(cdata.item_body_nested_bodies(def_id.index)) }
+ item_body_nested_bodies => { cdata.item_body_nested_bodies(def_id.index) }
const_is_rvalue_promotable_to_static => {
cdata.const_is_rvalue_promotable_to_static(def_id.index)
}
extern_crate => { Rc::new(cdata.extern_crate.get()) }
is_no_builtins => { cdata.is_no_builtins() }
impl_defaultness => { cdata.get_impl_defaultness(def_id.index) }
- exported_symbols => { Rc::new(cdata.get_exported_symbols()) }
+ exported_symbol_ids => { Rc::new(cdata.get_exported_symbols()) }
native_libraries => { Rc::new(cdata.get_native_libraries()) }
plugin_registrar_fn => {
cdata.root.plugin_registrar_fn.map(|index| {
tcx.native_libraries(id.krate)
.iter()
.filter(|lib| native_libs::relevant_lib(&tcx.sess, lib))
- .find(|l| l.foreign_items.contains(&id.index))
+ .find(|l| l.foreign_items.contains(&id))
.map(|l| l.kind)
},
native_libraries: |tcx, cnum| {
use rustc::hir;
use rustc::middle::const_val::ByteArray;
-use rustc::middle::cstore::LinkagePreference;
+use rustc::middle::cstore::{LinkagePreference, ExternConstBody,
+ ExternBodyNestedBodies};
use rustc::hir::def::{self, Def, CtorKind};
use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
+use rustc::ich::Fingerprint;
use rustc::middle::lang_items;
use rustc::session::Session;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::Substs;
+use rustc::util::nodemap::DefIdSet;
use rustc::mir::Mir;
}
/// Iterates over the language items in the given crate.
- pub fn get_lang_items(&self) -> Vec<(DefIndex, usize)> {
+ pub fn get_lang_items(&self) -> Vec<(DefId, usize)> {
self.root
.lang_items
.decode(self)
+ .map(|(def_index, index)| (self.local_def_id(def_index), index))
.collect()
}
pub fn extern_const_body(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
id: DefIndex)
- -> &'tcx hir::Body {
+ -> ExternConstBody<'tcx> {
assert!(!self.is_proc_macro(id));
let ast = self.entry(id).ast.unwrap();
let def_id = self.local_def_id(id);
- let body = ast.decode((self, tcx)).body.decode((self, tcx));
- tcx.hir.intern_inlined_body(def_id, body)
+ let ast = ast.decode((self, tcx));
+ let body = ast.body.decode((self, tcx));
+ ExternConstBody {
+ body: tcx.hir.intern_inlined_body(def_id, body),
+ fingerprint: ast.stable_bodies_hash,
+ }
}
pub fn item_body_tables(&self,
tcx.alloc_tables(ast.tables.decode((self, tcx)))
}
- pub fn item_body_nested_bodies(&self, id: DefIndex) -> BTreeMap<hir::BodyId, hir::Body> {
- self.entry(id).ast.into_iter().flat_map(|ast| {
- ast.decode(self).nested_bodies.decode(self).map(|body| (body.id(), body))
- }).collect()
+ pub fn item_body_nested_bodies(&self, id: DefIndex) -> ExternBodyNestedBodies {
+ if let Some(ref ast) = self.entry(id).ast {
+ let ast = ast.decode(self);
+ let nested_bodies: BTreeMap<_, _> = ast.nested_bodies
+ .decode(self)
+ .map(|body| (body.id(), body))
+ .collect();
+ ExternBodyNestedBodies {
+ nested_bodies: Rc::new(nested_bodies),
+ fingerprint: ast.stable_bodies_hash,
+ }
+ } else {
+ ExternBodyNestedBodies {
+ nested_bodies: Rc::new(BTreeMap::new()),
+ fingerprint: Fingerprint::zero(),
+ }
+ }
}
pub fn const_is_rvalue_promotable_to_static(&self, id: DefIndex) -> bool {
arg_names.decode(self).collect()
}
- pub fn get_exported_symbols(&self) -> Vec<DefId> {
+ pub fn get_exported_symbols(&self) -> DefIdSet {
self.exported_symbols
.iter()
.map(|&index| self.local_def_id(index))
pub struct IsolatedEncoder<'a, 'b: 'a, 'tcx: 'b> {
pub tcx: TyCtxt<'b, 'tcx, 'tcx>,
ecx: &'a mut EncodeContext<'b, 'tcx>,
- hcx: Option<(StableHashingContext<'b, 'tcx, 'tcx>, StableHasher<Fingerprint>)>,
+ hcx: Option<(StableHashingContext<'tcx>, StableHasher<Fingerprint>)>,
}
impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
// Except when -Zquery-dep-graph is specified because we don't
// want to mess up our tests.
let hcx = if tcx.sess.opts.debugging_opts.query_dep_graph {
- StableHashingContext::new(tcx)
+ tcx.create_stable_hashing_context()
} else {
- StableHashingContext::new(tcx).force_span_hashing()
+ tcx.create_stable_hashing_context().force_span_hashing()
};
Some((hcx, StableHasher::new()))
}
pub fn lazy<T>(&mut self, value: &T) -> Lazy<T>
- where T: Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
+ where T: Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
value.hash_stable(hcx, hasher);
pub fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
where I: IntoIterator<Item = T>,
- T: Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
+ T: Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
let iter = iter.into_iter();
pub fn lazy_seq_ref<'x, I, T>(&mut self, iter: I) -> LazySeq<T>
where I: IntoIterator<Item = &'x T>,
- T: 'x + Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
+ T: 'x + Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
let iter = iter.into_iter();
}
pub fn lazy_seq_from_slice<T>(&mut self, slice: &[T]) -> LazySeq<T>
- where T: Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
+ where T: Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
slice.hash_stable(hcx, hasher);
}
pub fn lazy_seq_ref_from_slice<T>(&mut self, slice: &[&T]) -> LazySeq<T>
- where T: Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
+ where T: Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
slice.hash_stable(hcx, hasher);
list[0].meta_item().unwrap().clone()
});
let foreign_items = fm.items.iter()
- .map(|it| self.tcx.hir.local_def_id(it.id).index)
+ .map(|it| self.tcx.hir.local_def_id(it.id))
.collect();
let lib = NativeLibrary {
name: n,
pub impls: LazySeq<DefIndex>,
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for TraitImpls {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for TraitImpls {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let TraitImpls {
trait_id: (krate, def_index),
AssociatedConst(AssociatedContainer, u8),
}
-impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for EntryKind<'tcx> {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for EntryKind<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
+ hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
crate-type = ["dylib"]
[dependencies]
+bitflags = "1.0"
graphviz = { path = "../libgraphviz" }
log = "0.3"
rustc = { path = "../librustc" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
-rustc_bitflags = { path = "../librustc_bitflags" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
#![feature(collection_placement)]
#![feature(nonzero)]
+#[macro_use]
+extern crate bitflags;
#[macro_use] extern crate log;
extern crate graphviz as dot;
#[macro_use]
extern crate rustc_data_structures;
extern crate rustc_errors;
#[macro_use]
-#[no_link]
-extern crate rustc_bitflags;
-#[macro_use]
extern crate syntax;
extern crate syntax_pos;
extern crate rustc_const_math;
use super::promote_consts::{self, Candidate, TempState};
bitflags! {
- flags Qualif: u8 {
+ struct Qualif: u8 {
// Constant containing interior mutability (UnsafeCell).
- const MUTABLE_INTERIOR = 1 << 0,
+ const MUTABLE_INTERIOR = 1 << 0;
// Constant containing an ADT that implements Drop.
- const NEEDS_DROP = 1 << 1,
+ const NEEDS_DROP = 1 << 1;
// Function argument.
- const FN_ARGUMENT = 1 << 2,
+ const FN_ARGUMENT = 1 << 2;
// Static lvalue or move from a static.
- const STATIC = 1 << 3,
+ const STATIC = 1 << 3;
// Reference to a static.
- const STATIC_REF = 1 << 4,
+ const STATIC_REF = 1 << 4;
// Not constant at all - non-`const fn` calls, asm!,
// pointer comparisons, ptr-to-int casts, etc.
- const NOT_CONST = 1 << 5,
+ const NOT_CONST = 1 << 5;
// Refers to temporaries which cannot be promoted as
// promote_consts decided they weren't simple enough.
- const NOT_PROMOTABLE = 1 << 6,
+ const NOT_PROMOTABLE = 1 << 6;
// Borrows of temporaries can be promoted only
// if they have none of the above qualifications.
- const NEVER_PROMOTE = 0b111_1111,
+ const NEVER_PROMOTE = 0b111_1111;
// Const items can only have MUTABLE_INTERIOR
// and NOT_PROMOTABLE without producing an error.
const CONST_ERROR = !Qualif::MUTABLE_INTERIOR.bits &
- !Qualif::NOT_PROMOTABLE.bits
+ !Qualif::NOT_PROMOTABLE.bits;
}
}
test = false
[dependencies]
+bitflags = "1.0"
num_cpus = "1.0"
flate2 = "0.2"
jobserver = "0.1.5"
rustc = { path = "../librustc" }
rustc_allocator = { path = "../librustc_allocator" }
rustc_back = { path = "../librustc_back" }
-rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
-See [librustc/README.md](../librustc/README.md).
+NB: This crate is part of the Rust compiler. For an overview of the
+compiler as a whole, see
+[the README.md file found in `librustc`](../librustc/README.md).
+
+The `trans` crate contains the code to convert from MIR into LLVM IR,
+and then from LLVM IR into machine code. In general it contains code
+that runs towards the end of the compilation process.
mod attr_impl {
// The subset of llvm::Attribute needed for arguments, packed into a bitfield.
bitflags! {
- #[derive(Default, Debug)]
- flags ArgAttribute : u16 {
- const ByVal = 1 << 0,
- const NoAlias = 1 << 1,
- const NoCapture = 1 << 2,
- const NonNull = 1 << 3,
- const ReadOnly = 1 << 4,
- const SExt = 1 << 5,
- const StructRet = 1 << 6,
- const ZExt = 1 << 7,
- const InReg = 1 << 8,
+ #[derive(Default)]
+ pub struct ArgAttribute: u16 {
+ const ByVal = 1 << 0;
+ const NoAlias = 1 << 1;
+ const NoCapture = 1 << 2;
+ const NonNull = 1 << 3;
+ const ReadOnly = 1 << 4;
+ const SExt = 1 << 5;
+ const StructRet = 1 << 6;
+ const ZExt = 1 << 7;
+ const InReg = 1 << 8;
}
}
}
impl<'a, 'tcx> FnType<'tcx> {
pub fn of_instance(ccx: &CrateContext<'a, 'tcx>, instance: &ty::Instance<'tcx>)
-> Self {
- let fn_ty = instance_ty(ccx.shared(), &instance);
+ let fn_ty = instance_ty(ccx.tcx(), &instance);
let sig = ty_fn_sig(ccx, fn_ty);
let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&sig);
Self::new(ccx, sig, &[])
use std::io::{self, BufWriter};
use std::path::{Path, PathBuf};
-use context::SharedCrateContext;
-
use back::archive;
use back::command::Command;
-use back::symbol_export::ExportedSymbols;
-use rustc::middle::dependency_format::Linkage;
+use back::symbol_export;
use rustc::hir::def_id::{LOCAL_CRATE, CrateNum};
-use rustc_back::LinkerFlavor;
+use rustc::middle::dependency_format::Linkage;
use rustc::session::Session;
use rustc::session::config::{self, CrateType, OptLevel, DebugInfoLevel};
+use rustc::ty::TyCtxt;
+use rustc_back::LinkerFlavor;
use serialize::{json, Encoder};
/// For all the linkers we support, and information they might
exports: HashMap<CrateType, Vec<String>>,
}
-impl<'a, 'tcx> LinkerInfo {
- pub fn new(scx: &SharedCrateContext<'a, 'tcx>,
- exports: &ExportedSymbols) -> LinkerInfo {
+impl LinkerInfo {
+ pub fn new(tcx: TyCtxt) -> LinkerInfo {
LinkerInfo {
- exports: scx.sess().crate_types.borrow().iter().map(|&c| {
- (c, exported_symbols(scx, exports, c))
+ exports: tcx.sess.crate_types.borrow().iter().map(|&c| {
+ (c, exported_symbols(tcx, c))
}).collect(),
}
}
- pub fn to_linker(&'a self,
- cmd: Command,
- sess: &'a Session) -> Box<Linker+'a> {
+ pub fn to_linker<'a>(&'a self,
+ cmd: Command,
+ sess: &'a Session) -> Box<Linker+'a> {
match sess.linker_flavor() {
LinkerFlavor::Msvc => {
Box::new(MsvcLinker {
}
}
-fn exported_symbols(scx: &SharedCrateContext,
- exported_symbols: &ExportedSymbols,
- crate_type: CrateType)
- -> Vec<String> {
+fn exported_symbols(tcx: TyCtxt, crate_type: CrateType) -> Vec<String> {
let mut symbols = Vec::new();
- exported_symbols.for_each_exported_symbol(LOCAL_CRATE, |name, _, _| {
- symbols.push(name.to_owned());
- });
- let formats = scx.sess().dependency_formats.borrow();
+ let export_threshold = symbol_export::threshold(tcx);
+ for &(ref name, _, level) in tcx.exported_symbols(LOCAL_CRATE).iter() {
+ if level.is_below_threshold(export_threshold) {
+ symbols.push(name.clone());
+ }
+ }
+
+ let formats = tcx.sess.dependency_formats.borrow();
let deps = formats[&crate_type].iter();
for (index, dep_format) in deps.enumerate() {
// For each dependency that we are linking to statically ...
if *dep_format == Linkage::Static {
// ... we add its symbol list to our export list.
- exported_symbols.for_each_exported_symbol(cnum, |name, _, _| {
- symbols.push(name.to_owned());
- })
+ for &(ref name, _, level) in tcx.exported_symbols(cnum).iter() {
+ if level.is_below_threshold(export_threshold) {
+ symbols.push(name.clone());
+ }
+ }
}
}
use llvm;
use llvm::archive_ro::ArchiveRO;
use llvm::{ModuleRef, TargetMachineRef, True, False};
+use rustc::middle::exported_symbols::SymbolExportLevel;
use rustc::util::common::time;
use rustc::util::common::path2cstr;
use rustc::hir::def_id::LOCAL_CRATE;
let export_threshold =
symbol_export::crates_export_threshold(&cgcx.crate_types);
- let symbol_filter = &|&(ref name, _, level): &(String, _, _)| {
- if symbol_export::is_below_threshold(level, export_threshold) {
+ let symbol_filter = &|&(ref name, _, level): &(String, _, SymbolExportLevel)| {
+ if level.is_below_threshold(export_threshold) {
let mut bytes = Vec::with_capacity(name.len() + 1);
bytes.extend(name.bytes());
Some(CString::new(bytes).unwrap())
}
};
- let mut symbol_white_list: Vec<CString> = cgcx.exported_symbols
- .exported_symbols(LOCAL_CRATE)
+ let mut symbol_white_list: Vec<CString> = cgcx.exported_symbols[&LOCAL_CRATE]
.iter()
.filter_map(symbol_filter)
.collect();
// module that we've got.
for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() {
symbol_white_list.extend(
- cgcx.exported_symbols.exported_symbols(cnum)
- .iter()
- .filter_map(symbol_filter));
+ cgcx.exported_symbols[&cnum]
+ .iter()
+ .filter_map(symbol_filter));
let archive = ArchiveRO::open(&path).expect("wanted an rlib");
let bytecodes = archive.iter().filter_map(|child| {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::rc::Rc;
+use std::sync::Arc;
+
+use base;
use monomorphize::Instance;
-use rustc::util::nodemap::{FxHashMap, NodeSet};
-use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE, INVALID_CRATE, CRATE_DEF_INDEX};
+use rustc::hir::def_id::CrateNum;
+use rustc::hir::def_id::{DefId, LOCAL_CRATE};
+use rustc::middle::exported_symbols::SymbolExportLevel;
use rustc::session::config;
use rustc::ty::TyCtxt;
+use rustc::ty::maps::Providers;
+use rustc::util::nodemap::FxHashMap;
use rustc_allocator::ALLOCATOR_METHODS;
-use syntax::attr;
-
-/// The SymbolExportLevel of a symbols specifies from which kinds of crates
-/// the symbol will be exported. `C` symbols will be exported from any
-/// kind of crate, including cdylibs which export very few things.
-/// `Rust` will only be exported if the crate produced is a Rust
-/// dylib.
-#[derive(Eq, PartialEq, Debug, Copy, Clone)]
-pub enum SymbolExportLevel {
- C,
- Rust,
+
+pub type ExportedSymbols = FxHashMap<
+ CrateNum,
+ Arc<Vec<(String, Option<DefId>, SymbolExportLevel)>>,
+>;
+
+pub fn threshold(tcx: TyCtxt) -> SymbolExportLevel {
+ crates_export_threshold(&tcx.sess.crate_types.borrow())
}
-/// The set of symbols exported from each crate in the crate graph.
-#[derive(Debug)]
-pub struct ExportedSymbols {
- pub export_threshold: SymbolExportLevel,
- exports: FxHashMap<CrateNum, Vec<(String, DefId, SymbolExportLevel)>>,
- local_exports: NodeSet,
+pub fn metadata_symbol_name(tcx: TyCtxt) -> String {
+ format!("rust_metadata_{}_{}",
+ tcx.crate_name(LOCAL_CRATE),
+ tcx.crate_disambiguator(LOCAL_CRATE))
}
-impl ExportedSymbols {
- pub fn empty() -> ExportedSymbols {
- ExportedSymbols {
- export_threshold: SymbolExportLevel::C,
- exports: FxHashMap(),
- local_exports: NodeSet(),
- }
+fn crate_export_threshold(crate_type: config::CrateType) -> SymbolExportLevel {
+ match crate_type {
+ config::CrateTypeExecutable |
+ config::CrateTypeStaticlib |
+ config::CrateTypeProcMacro |
+ config::CrateTypeCdylib => SymbolExportLevel::C,
+ config::CrateTypeRlib |
+ config::CrateTypeDylib => SymbolExportLevel::Rust,
}
+}
- pub fn compute<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- local_exported_symbols: &NodeSet)
- -> ExportedSymbols {
- let export_threshold = crates_export_threshold(&tcx.sess.crate_types.borrow());
+pub fn crates_export_threshold(crate_types: &[config::CrateType])
+ -> SymbolExportLevel {
+ if crate_types.iter().any(|&crate_type| {
+ crate_export_threshold(crate_type) == SymbolExportLevel::Rust
+ }) {
+ SymbolExportLevel::Rust
+ } else {
+ SymbolExportLevel::C
+ }
+}
+
+pub fn provide_local(providers: &mut Providers) {
+ providers.exported_symbol_ids = |tcx, cnum| {
+ let export_threshold = threshold(tcx);
+ Rc::new(tcx.exported_symbols(cnum)
+ .iter()
+ .filter_map(|&(_, id, level)| {
+ id.and_then(|id| {
+ if level.is_below_threshold(export_threshold) {
+ Some(id)
+ } else {
+ None
+ }
+ })
+ })
+ .collect())
+ };
+
+ providers.is_exported_symbol = |tcx, id| {
+ // FIXME(#42293) needs red/green to not break a bunch of incremental
+ // tests
+ tcx.dep_graph.with_ignore(|| {
+ tcx.exported_symbol_ids(id.krate).contains(&id)
+ })
+ };
+
+ providers.exported_symbols = |tcx, cnum| {
+ assert_eq!(cnum, LOCAL_CRATE);
+ let local_exported_symbols = base::find_exported_symbols(tcx);
let mut local_crate: Vec<_> = local_exported_symbols
.iter()
let name = tcx.symbol_name(Instance::mono(tcx, def_id));
let export_level = export_level(tcx, def_id);
debug!("EXPORTED SYMBOL (local): {} ({:?})", name, export_level);
- (str::to_owned(&name), def_id, export_level)
+ (str::to_owned(&name), Some(def_id), export_level)
})
.collect();
- let mut local_exports = local_crate
- .iter()
- .filter_map(|&(_, def_id, level)| {
- if is_below_threshold(level, export_threshold) {
- tcx.hir.as_local_node_id(def_id)
- } else {
- None
- }
- })
- .collect::<NodeSet>();
-
- const INVALID_DEF_ID: DefId = DefId {
- krate: INVALID_CRATE,
- index: CRATE_DEF_INDEX,
- };
-
if let Some(_) = *tcx.sess.entry_fn.borrow() {
local_crate.push(("main".to_string(),
- INVALID_DEF_ID,
+ None,
SymbolExportLevel::C));
}
if tcx.sess.allocator_kind.get().is_some() {
for method in ALLOCATOR_METHODS {
local_crate.push((format!("__rust_{}", method.name),
- INVALID_DEF_ID,
+ None,
SymbolExportLevel::Rust));
}
}
let idx = def_id.index;
let disambiguator = tcx.sess.local_crate_disambiguator();
let registrar = tcx.sess.generate_derive_registrar_symbol(disambiguator, idx);
- local_crate.push((registrar, def_id, SymbolExportLevel::C));
- local_exports.insert(id);
+ local_crate.push((registrar, Some(def_id), SymbolExportLevel::C));
}
if tcx.sess.crate_types.borrow().contains(&config::CrateTypeDylib) {
local_crate.push((metadata_symbol_name(tcx),
- INVALID_DEF_ID,
+ None,
SymbolExportLevel::Rust));
}
+ Arc::new(local_crate)
+ };
+}
- let mut exports = FxHashMap();
- exports.insert(LOCAL_CRATE, local_crate);
-
- for &cnum in tcx.crates().iter() {
- debug_assert!(cnum != LOCAL_CRATE);
-
- // If this crate is a plugin and/or a custom derive crate, then
- // we're not even going to link those in so we skip those crates.
- if tcx.plugin_registrar_fn(cnum).is_some() ||
- tcx.derive_registrar_fn(cnum).is_some() {
- continue;
- }
-
- // Check to see if this crate is a "special runtime crate". These
- // crates, implementation details of the standard library, typically
- // have a bunch of `pub extern` and `#[no_mangle]` functions as the
- // ABI between them. We don't want their symbols to have a `C`
- // export level, however, as they're just implementation details.
- // Down below we'll hardwire all of the symbols to the `Rust` export
- // level instead.
- let special_runtime_crate =
- tcx.is_panic_runtime(cnum) || tcx.is_compiler_builtins(cnum);
-
- let crate_exports = tcx
- .exported_symbols(cnum)
- .iter()
- .map(|&def_id| {
- let name = tcx.symbol_name(Instance::mono(tcx, def_id));
- let export_level = if special_runtime_crate {
- // We can probably do better here by just ensuring that
- // it has hidden visibility rather than public
- // visibility, as this is primarily here to ensure it's
- // not stripped during LTO.
- //
- // In general though we won't link right if these
- // symbols are stripped, and LTO currently strips them.
- if &*name == "rust_eh_personality" ||
- &*name == "rust_eh_register_frames" ||
- &*name == "rust_eh_unregister_frames" {
- SymbolExportLevel::C
- } else {
- SymbolExportLevel::Rust
- }
- } else {
- export_level(tcx, def_id)
- };
- debug!("EXPORTED SYMBOL (re-export): {} ({:?})", name, export_level);
- (str::to_owned(&name), def_id, export_level)
- })
- .collect();
-
- exports.insert(cnum, crate_exports);
- }
-
- return ExportedSymbols {
- export_threshold,
- exports,
- local_exports,
- };
-
- fn export_level(tcx: TyCtxt,
- sym_def_id: DefId)
- -> SymbolExportLevel {
- let attrs = tcx.get_attrs(sym_def_id);
- if attr::contains_extern_indicator(tcx.sess.diagnostic(), &attrs) {
- SymbolExportLevel::C
- } else {
- SymbolExportLevel::Rust
- }
- }
- }
-
- pub fn local_exports(&self) -> &NodeSet {
- &self.local_exports
- }
-
- pub fn exported_symbols(&self,
- cnum: CrateNum)
- -> &[(String, DefId, SymbolExportLevel)] {
- match self.exports.get(&cnum) {
- Some(exports) => exports,
- None => &[]
+pub fn provide_extern(providers: &mut Providers) {
+ providers.exported_symbols = |tcx, cnum| {
+ // If this crate is a plugin and/or a custom derive crate, then
+ // we're not even going to link those in so we skip those crates.
+ if tcx.plugin_registrar_fn(cnum).is_some() ||
+ tcx.derive_registrar_fn(cnum).is_some() {
+ return Arc::new(Vec::new())
}
- }
- pub fn for_each_exported_symbol<F>(&self,
- cnum: CrateNum,
- mut f: F)
- where F: FnMut(&str, DefId, SymbolExportLevel)
- {
- for &(ref name, def_id, export_level) in self.exported_symbols(cnum) {
- if is_below_threshold(export_level, self.export_threshold) {
- f(&name, def_id, export_level)
- }
- }
- }
-}
-
-pub fn metadata_symbol_name(tcx: TyCtxt) -> String {
- format!("rust_metadata_{}_{}",
- tcx.crate_name(LOCAL_CRATE),
- tcx.crate_disambiguator(LOCAL_CRATE))
-}
+ // Check to see if this crate is a "special runtime crate". These
+ // crates, implementation details of the standard library, typically
+ // have a bunch of `pub extern` and `#[no_mangle]` functions as the
+ // ABI between them. We don't want their symbols to have a `C`
+ // export level, however, as they're just implementation details.
+ // Down below we'll hardwire all of the symbols to the `Rust` export
+ // level instead.
+ let special_runtime_crate =
+ tcx.is_panic_runtime(cnum) || tcx.is_compiler_builtins(cnum);
+
+ let crate_exports = tcx
+ .exported_symbol_ids(cnum)
+ .iter()
+ .map(|&def_id| {
+ let name = tcx.symbol_name(Instance::mono(tcx, def_id));
+ let export_level = if special_runtime_crate {
+ // We can probably do better here by just ensuring that
+ // it has hidden visibility rather than public
+ // visibility, as this is primarily here to ensure it's
+ // not stripped during LTO.
+ //
+ // In general though we won't link right if these
+ // symbols are stripped, and LTO currently strips them.
+ if &*name == "rust_eh_personality" ||
+ &*name == "rust_eh_register_frames" ||
+ &*name == "rust_eh_unregister_frames" {
+ SymbolExportLevel::C
+ } else {
+ SymbolExportLevel::Rust
+ }
+ } else {
+ export_level(tcx, def_id)
+ };
+ debug!("EXPORTED SYMBOL (re-export): {} ({:?})", name, export_level);
+ (str::to_owned(&name), Some(def_id), export_level)
+ })
+ .collect();
-pub fn crate_export_threshold(crate_type: config::CrateType)
- -> SymbolExportLevel {
- match crate_type {
- config::CrateTypeExecutable |
- config::CrateTypeStaticlib |
- config::CrateTypeProcMacro |
- config::CrateTypeCdylib => SymbolExportLevel::C,
- config::CrateTypeRlib |
- config::CrateTypeDylib => SymbolExportLevel::Rust,
- }
+ Arc::new(crate_exports)
+ };
}
-pub fn crates_export_threshold(crate_types: &[config::CrateType])
- -> SymbolExportLevel {
- if crate_types.iter().any(|&crate_type| {
- crate_export_threshold(crate_type) == SymbolExportLevel::Rust
- }) {
- SymbolExportLevel::Rust
- } else {
+fn export_level(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {
+ if tcx.contains_extern_indicator(sym_def_id) {
SymbolExportLevel::C
- }
-}
-
-pub fn is_below_threshold(level: SymbolExportLevel,
- threshold: SymbolExportLevel)
- -> bool {
- if threshold == SymbolExportLevel::Rust {
- // We export everything from Rust dylibs
- true
} else {
- level == SymbolExportLevel::C
+ SymbolExportLevel::Rust
}
}
*providers = Providers {
def_symbol_name,
symbol_name,
+
+ export_name: |tcx, id| {
+ tcx.get_attrs(id).iter().fold(None, |ia, attr| {
+ if attr.check_name("export_name") {
+ if let s @ Some(_) = attr.value_str() {
+ s
+ } else {
+ struct_span_err!(tcx.sess, attr.span, E0558,
+ "export_name attribute has invalid format")
+ .span_label(attr.span, "did you mean #[export_name=\"*\"]?")
+ .emit();
+ None
+ }
+ } else {
+ ia
+ }
+ })
+ },
+
+ contains_extern_indicator: |tcx, id| {
+ attr::contains_name(&tcx.get_attrs(id), "no_mangle") ||
+ tcx.export_name(id).is_some()
+ },
+
..*providers
};
}
return tcx.item_name(def_id).to_string();
}
- if let Some(name) = attr::find_export_name_attr(tcx.sess.diagnostic(), &attrs) {
+ if let Some(name) = tcx.export_name(def_id) {
// Use provided name
return name.to_string();
}
use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses,
AllPasses, Sanitizer};
use rustc::session::Session;
+use rustc::util::nodemap::FxHashMap;
use time_graph::{self, TimeGraph};
use llvm;
use llvm::{ModuleRef, TargetMachineRef, PassManagerRef, DiagnosticInfoRef};
use llvm::SMDiagnosticRef;
use {CrateTranslation, ModuleSource, ModuleTranslation, CompiledModule, ModuleKind};
use CrateInfo;
-use rustc::hir::def_id::CrateNum;
+use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
+use rustc::ty::TyCtxt;
use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry};
use rustc::util::fs::{link_or_copy, rename_or_copy_remove};
use errors::{self, Handler, Level, DiagnosticBuilder, FatalError};
use errors::emitter::{Emitter};
+use syntax::attr;
use syntax::ext::hygiene::Mark;
use syntax_pos::MultiSpan;
use syntax_pos::symbol::Symbol;
use jobserver::{Client, Acquired};
use rustc_demangle;
+use std::any::Any;
use std::ffi::CString;
use std::fmt;
use std::fs;
/// Module-specific configuration for `optimize_and_codegen`.
pub struct ModuleConfig {
- /// LLVM TargetMachine to use for codegen.
- tm: TargetMachineRef,
/// Names of additional optimization passes to run.
passes: Vec<String>,
/// Some(level) to optimize at a certain level, or None to run
obj_is_bitcode: bool,
}
-unsafe impl Send for ModuleConfig { }
-
impl ModuleConfig {
- fn new(sess: &Session, passes: Vec<String>) -> ModuleConfig {
+ fn new(passes: Vec<String>) -> ModuleConfig {
ModuleConfig {
- tm: create_target_machine(sess),
passes,
opt_level: None,
opt_size: None,
self.merge_functions = sess.opts.optimize == config::OptLevel::Default ||
sess.opts.optimize == config::OptLevel::Aggressive;
}
-
- fn clone(&self, sess: &Session) -> ModuleConfig {
- ModuleConfig {
- tm: create_target_machine(sess),
- passes: self.passes.clone(),
- opt_level: self.opt_level,
- opt_size: self.opt_size,
-
- emit_no_opt_bc: self.emit_no_opt_bc,
- emit_bc: self.emit_bc,
- emit_lto_bc: self.emit_lto_bc,
- emit_ir: self.emit_ir,
- emit_asm: self.emit_asm,
- emit_obj: self.emit_obj,
- obj_is_bitcode: self.obj_is_bitcode,
-
- no_verify: self.no_verify,
- no_prepopulate_passes: self.no_prepopulate_passes,
- no_builtins: self.no_builtins,
- time_passes: self.time_passes,
- vectorize_loop: self.vectorize_loop,
- vectorize_slp: self.vectorize_slp,
- merge_functions: self.merge_functions,
- inline_threshold: self.inline_threshold,
- }
- }
-}
-
-impl Drop for ModuleConfig {
- fn drop(&mut self) {
- unsafe {
- llvm::LLVMRustDisposeTargetMachine(self.tm);
- }
- }
}
/// Additional resources used by optimize_and_codegen (not module specific)
pub opts: Arc<config::Options>,
pub crate_types: Vec<config::CrateType>,
pub each_linked_rlib_for_lto: Vec<(CrateNum, PathBuf)>,
+ output_filenames: Arc<OutputFilenames>,
+ regular_module_config: Arc<ModuleConfig>,
+ metadata_module_config: Arc<ModuleConfig>,
+ allocator_module_config: Arc<ModuleConfig>,
+
// Handler to use for diagnostics produced during codegen.
pub diag_emitter: SharedEmitter,
// LLVM passes added by plugins.
// compiling incrementally
pub incr_comp_session_dir: Option<PathBuf>,
// Channel back to the main control thread to send messages to
- coordinator_send: Sender<Message>,
+ coordinator_send: Sender<Box<Any + Send>>,
// A reference to the TimeGraph so we can register timings. None means that
// measuring is disabled.
time_graph: Option<TimeGraph>,
fn create_diag_handler(&self) -> Handler {
Handler::with_emitter(true, false, Box::new(self.diag_emitter.clone()))
}
+
+ fn config(&self, kind: ModuleKind) -> &ModuleConfig {
+ match kind {
+ ModuleKind::Regular => &self.regular_module_config,
+ ModuleKind::Metadata => &self.metadata_module_config,
+ ModuleKind::Allocator => &self.allocator_module_config,
+ }
+ }
}
struct HandlerFreeVars<'a> {
unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
diag_handler: &Handler,
mtrans: ModuleTranslation,
- config: ModuleConfig,
- output_names: OutputFilenames)
+ tm: TargetMachineRef,
+ config: &ModuleConfig)
-> Result<CompiledModule, FatalError>
{
let (llmod, llcx) = match mtrans.source {
}
};
- let tm = config.tm;
-
let fv = HandlerFreeVars {
cgcx,
diag_handler,
let module_name = Some(&module_name[..]);
if config.emit_no_opt_bc {
- let out = output_names.temp_path_ext("no-opt.bc", module_name);
+ let out = cgcx.output_filenames.temp_path_ext("no-opt.bc", module_name);
let out = path2cstr(&out);
llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
}
if cgcx.lto {
time(cgcx.time_passes, "all lto passes", || {
let temp_no_opt_bc_filename =
- output_names.temp_path_ext("no-opt.lto.bc", module_name);
+ cgcx.output_filenames.temp_path_ext("no-opt.lto.bc", module_name);
lto::run(cgcx,
diag_handler,
llmod,
&temp_no_opt_bc_filename)
})?;
if config.emit_lto_bc {
- let out = output_names.temp_path_ext("lto.bc", module_name);
+ let out = cgcx.output_filenames.temp_path_ext("lto.bc", module_name);
let out = path2cstr(&out);
llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
}
let write_obj = config.emit_obj && !config.obj_is_bitcode;
let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode;
- let bc_out = output_names.temp_path(OutputType::Bitcode, module_name);
- let obj_out = output_names.temp_path(OutputType::Object, module_name);
+ let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
+ let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name);
if write_bc {
let bc_out_c = path2cstr(&bc_out);
time(config.time_passes, &format!("codegen passes [{}]", module_name.unwrap()),
|| -> Result<(), FatalError> {
if config.emit_ir {
- let out = output_names.temp_path(OutputType::LlvmAssembly, module_name);
+ let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
let out = path2cstr(&out);
extern "C" fn demangle_callback(input_ptr: *const c_char,
}
if config.emit_asm {
- let path = output_names.temp_path(OutputType::Assembly, module_name);
+ let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
// We can't use the same module for asm and binary output, because that triggers
// various errors like invalid IR or broken binaries, so we might have to clone the
sess.opts.output_types.contains_key(&OutputType::Exe)
}
-pub fn start_async_translation(sess: &Session,
- crate_output: &OutputFilenames,
+pub fn start_async_translation(tcx: TyCtxt,
time_graph: Option<TimeGraph>,
- crate_name: Symbol,
link: LinkMeta,
metadata: EncodedMetadata,
- exported_symbols: Arc<ExportedSymbols>,
- no_builtins: bool,
- windows_subsystem: Option<String>,
- linker_info: LinkerInfo,
- crate_info: CrateInfo,
- no_integrated_as: bool)
+ coordinator_receive: Receiver<Box<Any + Send>>)
-> OngoingCrateTranslation {
+ let sess = tcx.sess;
+ let crate_output = tcx.output_filenames(LOCAL_CRATE);
+ let crate_name = tcx.crate_name(LOCAL_CRATE);
+ let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins");
+ let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs,
+ "windows_subsystem");
+ let windows_subsystem = subsystem.map(|subsystem| {
+ if subsystem != "windows" && subsystem != "console" {
+ tcx.sess.fatal(&format!("invalid windows subsystem `{}`, only \
+ `windows` and `console` are allowed",
+ subsystem));
+ }
+ subsystem.to_string()
+ });
+
+ let no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
+ (tcx.sess.target.target.options.no_integrated_as &&
+ (crate_output.outputs.contains_key(&OutputType::Object) ||
+ crate_output.outputs.contains_key(&OutputType::Exe)));
+ let linker_info = LinkerInfo::new(tcx);
+ let crate_info = CrateInfo::new(tcx);
+
let output_types_override = if no_integrated_as {
OutputTypes::new(&[(OutputType::Assembly, None)])
} else {
};
// Figure out what we actually need to build.
- let mut modules_config = ModuleConfig::new(sess, sess.opts.cg.passes.clone());
- let mut metadata_config = ModuleConfig::new(sess, vec![]);
- let mut allocator_config = ModuleConfig::new(sess, vec![]);
+ let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone());
+ let mut metadata_config = ModuleConfig::new(vec![]);
+ let mut allocator_config = ModuleConfig::new(vec![]);
if let Some(ref sanitizer) = sess.opts.debugging_opts.sanitizer {
match *sanitizer {
let (shared_emitter, shared_emitter_main) = SharedEmitter::new();
let (trans_worker_send, trans_worker_receive) = channel();
- let (coordinator_send, coordinator_receive) = channel();
- let coordinator_thread = start_executing_work(sess,
+ let coordinator_thread = start_executing_work(tcx,
&crate_info,
shared_emitter,
trans_worker_send,
- coordinator_send.clone(),
coordinator_receive,
client,
time_graph.clone(),
- exported_symbols.clone());
+ Arc::new(modules_config),
+ Arc::new(metadata_config),
+ Arc::new(allocator_config));
+
OngoingCrateTranslation {
crate_name,
link,
no_integrated_as,
crate_info,
- regular_module_config: modules_config,
- metadata_module_config: metadata_config,
- allocator_module_config: allocator_config,
-
time_graph,
- output_filenames: crate_output.clone(),
- coordinator_send,
+ coordinator_send: tcx.tx_to_llvm_workers.clone(),
trans_worker_receive,
shared_emitter_main,
- future: coordinator_thread
+ future: coordinator_thread,
+ output_filenames: tcx.output_filenames(LOCAL_CRATE),
}
}
struct WorkItem {
mtrans: ModuleTranslation,
- config: ModuleConfig,
- output_names: OutputFilenames
+ tm: TargetMachine,
}
impl fmt::Debug for WorkItem {
}
}
-fn build_work_item(mtrans: ModuleTranslation,
- config: ModuleConfig,
- output_names: OutputFilenames)
- -> WorkItem
-{
- WorkItem {
- mtrans,
- config,
- output_names,
+struct TargetMachine(TargetMachineRef);
+
+unsafe impl Send for TargetMachine {}
+
+impl Drop for TargetMachine {
+ fn drop(&mut self) {
+ unsafe {
+ llvm::LLVMRustDisposeTargetMachine(self.0);
+ }
}
}
{
let diag_handler = cgcx.create_diag_handler();
let module_name = work_item.mtrans.name.clone();
+ let config = cgcx.config(work_item.mtrans.kind);
let pre_existing = match work_item.mtrans.source {
ModuleSource::Translated(_) => None,
.unwrap();
let name = &work_item.mtrans.name;
for (kind, saved_file) in wp.saved_files {
- let obj_out = work_item.output_names.temp_path(kind, Some(name));
+ let obj_out = cgcx.output_filenames.temp_path(kind, Some(name));
let source_file = in_incr_comp_dir(&incr_comp_session_dir,
&saved_file);
debug!("copying pre-existing module `{}` from {:?} to {}",
kind: ModuleKind::Regular,
pre_existing: true,
symbol_name_hash: work_item.mtrans.symbol_name_hash,
- emit_bc: work_item.config.emit_bc,
- emit_obj: work_item.config.emit_obj,
+ emit_bc: config.emit_bc,
+ emit_obj: config.emit_obj,
})
} else {
debug!("llvm-optimizing {:?}", module_name);
optimize_and_codegen(cgcx,
&diag_handler,
work_item.mtrans,
- work_item.config,
- work_item.output_names)
+ work_item.tm.0,
+ config)
}
}
}
TranslationDone {
llvm_work_item: WorkItem,
cost: u64,
- is_last: bool,
},
+ TranslationComplete,
TranslateItem,
}
LLVMing,
}
-fn start_executing_work(sess: &Session,
+fn start_executing_work(tcx: TyCtxt,
crate_info: &CrateInfo,
shared_emitter: SharedEmitter,
trans_worker_send: Sender<Message>,
- coordinator_send: Sender<Message>,
- coordinator_receive: Receiver<Message>,
+ coordinator_receive: Receiver<Box<Any + Send>>,
jobserver: Client,
time_graph: Option<TimeGraph>,
- exported_symbols: Arc<ExportedSymbols>)
+ modules_config: Arc<ModuleConfig>,
+ metadata_config: Arc<ModuleConfig>,
+ allocator_config: Arc<ModuleConfig>)
-> thread::JoinHandle<CompiledModules> {
+ let coordinator_send = tcx.tx_to_llvm_workers.clone();
+ let mut exported_symbols = FxHashMap();
+ exported_symbols.insert(LOCAL_CRATE, tcx.exported_symbols(LOCAL_CRATE));
+ for &cnum in tcx.crates().iter() {
+ exported_symbols.insert(cnum, tcx.exported_symbols(cnum));
+ }
+ let exported_symbols = Arc::new(exported_symbols);
+ let sess = tcx.sess;
+
// First up, convert our jobserver into a helper thread so we can use normal
// mpsc channels to manage our messages and such. Once we've got the helper
// thread then request `n-1` tokens because all of our work items are ready
// tokens on `rx` above which will get managed in the main loop below.
let coordinator_send2 = coordinator_send.clone();
let helper = jobserver.into_helper_thread(move |token| {
- drop(coordinator_send2.send(Message::Token(token)));
+ drop(coordinator_send2.send(Box::new(Message::Token(token))));
}).expect("failed to spawn helper thread");
let mut each_linked_rlib_for_lto = Vec::new();
coordinator_send,
diag_emitter: shared_emitter.clone(),
time_graph,
+ output_filenames: tcx.output_filenames(LOCAL_CRATE),
+ regular_module_config: modules_config,
+ metadata_module_config: metadata_config,
+ allocator_module_config: allocator_config,
};
// This is the "main loop" of parallel work happening for parallel codegen.
let mut translation_done = false;
// This is the queue of LLVM work items that still need processing.
- let mut work_items = Vec::new();
+ let mut work_items = Vec::<(WorkItem, u64)>::new();
// This are the Jobserver Tokens we currently hold. Does not include
// the implicit Token the compiler process owns no matter what.
worker: get_worker_id(&mut free_worker_ids),
.. cgcx.clone()
};
- maybe_start_llvm_timer(&item, &mut llvm_start_time);
+ maybe_start_llvm_timer(cgcx.config(item.mtrans.kind),
+ &mut llvm_start_time);
main_thread_worker_state = MainThreadWorkerState::LLVMing;
spawn_work(cgcx, item);
}
worker: get_worker_id(&mut free_worker_ids),
.. cgcx.clone()
};
- maybe_start_llvm_timer(&item, &mut llvm_start_time);
+ maybe_start_llvm_timer(cgcx.config(item.mtrans.kind),
+ &mut llvm_start_time);
main_thread_worker_state = MainThreadWorkerState::LLVMing;
spawn_work(cgcx, item);
} else {
while work_items.len() > 0 && running < tokens.len() {
let (item, _) = work_items.pop().unwrap();
- maybe_start_llvm_timer(&item, &mut llvm_start_time);
+ maybe_start_llvm_timer(cgcx.config(item.mtrans.kind),
+ &mut llvm_start_time);
let cgcx = CodegenContext {
worker: get_worker_id(&mut free_worker_ids),
// Relinquish accidentally acquired extra tokens
tokens.truncate(running);
- match coordinator_receive.recv().unwrap() {
+ let msg = coordinator_receive.recv().unwrap();
+ match *msg.downcast::<Message>().ok().unwrap() {
// Save the token locally and the next turn of the loop will use
// this to spawn a new unit of work, or it may get dropped
// immediately if we have no more work to spawn.
}
}
- Message::TranslationDone { llvm_work_item, cost, is_last } => {
+ Message::TranslationDone { llvm_work_item, cost } => {
// We keep the queue sorted by estimated processing cost,
// so that more expensive items are processed earlier. This
// is good for throughput as it gives the main thread more
};
work_items.insert(insertion_index, (llvm_work_item, cost));
- if is_last {
- // If this is the last, don't request a token because
- // the trans worker thread will be free to handle this
- // immediately.
- translation_done = true;
- } else {
- helper.request_token();
- }
+ helper.request_token();
+ assert_eq!(main_thread_worker_state,
+ MainThreadWorkerState::Translating);
+ main_thread_worker_state = MainThreadWorkerState::Idle;
+ }
+ Message::TranslationComplete => {
+ translation_done = true;
assert_eq!(main_thread_worker_state,
MainThreadWorkerState::Translating);
main_thread_worker_state = MainThreadWorkerState::Idle;
items_in_queue >= max_workers.saturating_sub(workers_running / 2)
}
- fn maybe_start_llvm_timer(work_item: &WorkItem,
+ fn maybe_start_llvm_timer(config: &ModuleConfig,
llvm_start_time: &mut Option<Instant>) {
// We keep track of the -Ztime-passes output manually,
// since the closure-based interface does not fit well here.
- if work_item.config.time_passes {
+ if config.time_passes {
if llvm_start_time.is_none() {
*llvm_start_time = Some(Instant::now());
}
// Set up a destructor which will fire off a message that we're done as
// we exit.
struct Bomb {
- coordinator_send: Sender<Message>,
+ coordinator_send: Sender<Box<Any + Send>>,
result: Option<CompiledModule>,
worker_id: usize,
}
None => Err(())
};
- drop(self.coordinator_send.send(Message::Done {
+ drop(self.coordinator_send.send(Box::new(Message::Done {
result,
worker_id: self.worker_id,
- }));
+ })));
}
}
linker_info: LinkerInfo,
no_integrated_as: bool,
crate_info: CrateInfo,
-
- output_filenames: OutputFilenames,
- regular_module_config: ModuleConfig,
- metadata_module_config: ModuleConfig,
- allocator_module_config: ModuleConfig,
-
time_graph: Option<TimeGraph>,
- coordinator_send: Sender<Message>,
+ coordinator_send: Sender<Box<Any + Send>>,
trans_worker_receive: Receiver<Message>,
shared_emitter_main: SharedEmitterMain,
future: thread::JoinHandle<CompiledModules>,
+ output_filenames: Arc<OutputFilenames>,
}
impl OngoingCrateTranslation {
trans
}
- pub fn submit_translated_module_to_llvm(&self,
- sess: &Session,
- mtrans: ModuleTranslation,
- cost: u64,
- is_last: bool) {
- let module_config = match mtrans.kind {
- ModuleKind::Regular => self.regular_module_config.clone(sess),
- ModuleKind::Metadata => self.metadata_module_config.clone(sess),
- ModuleKind::Allocator => self.allocator_module_config.clone(sess),
- };
-
- let llvm_work_item = build_work_item(mtrans,
- module_config,
- self.output_filenames.clone());
-
- drop(self.coordinator_send.send(Message::TranslationDone {
- llvm_work_item,
- cost,
- is_last
- }));
- }
-
pub fn submit_pre_translated_module_to_llvm(&self,
- sess: &Session,
- mtrans: ModuleTranslation,
- is_last: bool) {
+ tcx: TyCtxt,
+ mtrans: ModuleTranslation) {
self.wait_for_signal_to_translate_item();
- self.check_for_errors(sess);
+ self.check_for_errors(tcx.sess);
// These are generally cheap and won't through off scheduling.
let cost = 0;
- self.submit_translated_module_to_llvm(sess, mtrans, cost, is_last);
+ submit_translated_module_to_llvm(tcx, mtrans, cost);
+ }
+
+ pub fn translation_finished(&self, tcx: TyCtxt) {
+ self.wait_for_signal_to_translate_item();
+ self.check_for_errors(tcx.sess);
+ drop(self.coordinator_send.send(Box::new(Message::TranslationComplete)));
}
pub fn check_for_errors(&self, sess: &Session) {
}
}
}
+
+pub fn submit_translated_module_to_llvm(tcx: TyCtxt,
+ mtrans: ModuleTranslation,
+ cost: u64) {
+ let llvm_work_item = WorkItem {
+ mtrans,
+ tm: TargetMachine(create_target_machine(tcx.sess)),
+ };
+ drop(tcx.tx_to_llvm_workers.send(Box::new(Message::TranslationDone {
+ llvm_work_item,
+ cost,
+ })));
+}
use super::ModuleTranslation;
use super::ModuleKind;
-use assert_module_sources;
+use assert_module_sources::{self, Disposition};
use back::link;
-use back::linker::LinkerInfo;
-use back::symbol_export::{self, ExportedSymbols};
+use back::symbol_export;
use back::write::{self, OngoingCrateTranslation};
-use llvm::{ContextRef, Linkage, ModuleRef, ValueRef, Vector, get_param};
+use llvm::{ContextRef, ModuleRef, ValueRef, Vector, get_param};
use llvm;
use metadata;
-use rustc::hir::def_id::LOCAL_CRATE;
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::middle::lang_items::StartFnLangItem;
+use rustc::middle::trans::{Linkage, Visibility, Stats};
use rustc::middle::cstore::{EncodedMetadata, EncodedMetadataHashes};
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::dep_graph::AssertDepGraphSafe;
+use rustc::ty::maps::Providers;
use rustc::middle::cstore::{self, LinkMeta, LinkagePreference};
use rustc::hir::map as hir_map;
use rustc::util::common::{time, print_time_passes_entry};
-use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType};
+use rustc::session::config::{self, NoDebugInfo};
use rustc::session::Session;
use rustc_incremental::{self, IncrementalHashesMap};
use abi;
use common::{type_is_zero_size, val_ty};
use common;
use consts;
-use context::{self, LocalCrateContext, SharedCrateContext, Stats};
+use context::{self, LocalCrateContext, SharedCrateContext};
use debuginfo;
use declare;
use machine;
use meth;
use mir;
use monomorphize::{self, Instance};
-use partitioning::{self, PartitioningStrategy, CodegenUnit};
+use partitioning::{self, PartitioningStrategy, CodegenUnit, CodegenUnitExt};
use symbol_names_test;
use time_graph;
-use trans_item::{TransItem, DefPathBasedNames};
+use trans_item::{TransItem, TransItemExt, DefPathBasedNames};
use type_::Type;
use type_of;
use value::Value;
-use rustc::util::nodemap::{NodeSet, FxHashMap, FxHashSet};
+use rustc::util::nodemap::{NodeSet, FxHashMap, FxHashSet, DefIdSet};
use CrateInfo;
use libc::c_uint;
+use std::any::Any;
+use std::cell::RefCell;
use std::ffi::{CStr, CString};
use std::str;
use std::sync::Arc;
use std::time::{Instant, Duration};
use std::i32;
+use std::sync::mpsc;
use syntax_pos::Span;
+use syntax_pos::symbol::InternedString;
use syntax::attr;
use rustc::hir;
use syntax::ast;
impl<'a, 'tcx> StatRecorder<'a, 'tcx> {
pub fn new(ccx: &'a CrateContext<'a, 'tcx>, name: String) -> StatRecorder<'a, 'tcx> {
- let istart = ccx.stats().n_llvm_insns.get();
+ let istart = ccx.stats().borrow().n_llvm_insns;
StatRecorder {
ccx,
name: Some(name),
impl<'a, 'tcx> Drop for StatRecorder<'a, 'tcx> {
fn drop(&mut self) {
if self.ccx.sess().trans_stats() {
- let iend = self.ccx.stats().n_llvm_insns.get();
- self.ccx.stats().fn_stats.borrow_mut()
- .push((self.name.take().unwrap(), iend - self.istart));
- self.ccx.stats().n_fns.set(self.ccx.stats().n_fns.get() + 1);
+ let mut stats = self.ccx.stats().borrow_mut();
+ let iend = stats.n_llvm_insns;
+ stats.fn_stats.push((self.name.take().unwrap(), iend - self.istart));
+ stats.n_fns += 1;
// Reset LLVM insn count to avoid compound costs.
- self.ccx.stats().n_llvm_insns.set(self.istart);
+ stats.n_llvm_insns = self.istart;
}
}
}
// release builds.
info!("trans_instance({})", instance);
- let fn_ty = common::instance_ty(ccx.shared(), &instance);
+ let fn_ty = common::instance_ty(ccx.tcx(), &instance);
let sig = common::ty_fn_sig(ccx, fn_ty);
let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&sig);
None => bug!("Instance `{:?}` not already declared", instance)
};
- ccx.stats().n_closures.set(ccx.stats().n_closures.get() + 1);
+ ccx.stats().borrow_mut().n_closures += 1;
// The `uwtable` attribute according to LLVM is:
//
mir::trans_mir(ccx, lldecl, &mir, instance, sig);
}
-pub fn llvm_linkage_by_name(name: &str) -> Option<Linkage> {
+pub fn linkage_by_name(name: &str) -> Option<Linkage> {
+ use rustc::middle::trans::Linkage::*;
+
// Use the names from src/llvm/docs/LangRef.rst here. Most types are only
// applicable to variable declarations and may not really make sense for
// Rust code in the first place but whitelist them anyway and trust that
// ghost, dllimport, dllexport and linkonce_odr_autohide are not supported
// and don't have to be, LLVM treats them as no-ops.
match name {
- "appending" => Some(llvm::Linkage::AppendingLinkage),
- "available_externally" => Some(llvm::Linkage::AvailableExternallyLinkage),
- "common" => Some(llvm::Linkage::CommonLinkage),
- "extern_weak" => Some(llvm::Linkage::ExternalWeakLinkage),
- "external" => Some(llvm::Linkage::ExternalLinkage),
- "internal" => Some(llvm::Linkage::InternalLinkage),
- "linkonce" => Some(llvm::Linkage::LinkOnceAnyLinkage),
- "linkonce_odr" => Some(llvm::Linkage::LinkOnceODRLinkage),
- "private" => Some(llvm::Linkage::PrivateLinkage),
- "weak" => Some(llvm::Linkage::WeakAnyLinkage),
- "weak_odr" => Some(llvm::Linkage::WeakODRLinkage),
+ "appending" => Some(Appending),
+ "available_externally" => Some(AvailableExternally),
+ "common" => Some(Common),
+ "extern_weak" => Some(ExternalWeak),
+ "external" => Some(External),
+ "internal" => Some(Internal),
+ "linkonce" => Some(LinkOnceAny),
+ "linkonce_odr" => Some(LinkOnceODR),
+ "private" => Some(Private),
+ "weak" => Some(WeakAny),
+ "weak_odr" => Some(WeakODR),
_ => None,
}
}
///
/// This list is later used by linkers to determine the set of symbols needed to
/// be exposed from a dynamic library and it's also encoded into the metadata.
-pub fn find_exported_symbols(tcx: TyCtxt, reachable: &NodeSet) -> NodeSet {
- reachable.iter().cloned().filter(|&id| {
+pub fn find_exported_symbols(tcx: TyCtxt) -> NodeSet {
+ tcx.reachable_set(LOCAL_CRATE).0.iter().cloned().filter(|&id| {
// Next, we want to ignore some FFI functions that are not exposed from
// this crate. Reachable FFI functions can be lumped into two
// categories:
}
pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- analysis: ty::CrateAnalysis,
incremental_hashes_map: IncrementalHashesMap,
- output_filenames: &OutputFilenames)
+ rx: mpsc::Receiver<Box<Any + Send>>)
-> OngoingCrateTranslation {
check_for_rustc_errors_attr(tcx);
- // Be careful with this krate: obviously it gives access to the
- // entire contents of the krate. So if you push any subtasks of
- // `TransCrate`, you need to be careful to register "reads" of the
- // particular items that will be processed.
- let krate = tcx.hir.krate();
- let ty::CrateAnalysis { reachable, .. } = analysis;
- let check_overflow = tcx.sess.overflow_checks();
let link_meta = link::build_link_meta(&incremental_hashes_map);
- let exported_symbol_node_ids = find_exported_symbols(tcx, &reachable);
+ let exported_symbol_node_ids = find_exported_symbols(tcx);
- let shared_ccx = SharedCrateContext::new(tcx,
- check_overflow,
- output_filenames);
+ let shared_ccx = SharedCrateContext::new(tcx);
// Translate the metadata.
let (metadata_llcx, metadata_llmod, metadata, metadata_incr_hashes) =
time(tcx.sess.time_passes(), "write metadata", || {
kind: ModuleKind::Metadata,
};
- let no_builtins = attr::contains_name(&krate.attrs, "no_builtins");
let time_graph = if tcx.sess.opts.debugging_opts.trans_time_graph {
Some(time_graph::TimeGraph::new())
} else {
None
};
- let crate_info = CrateInfo::new(tcx);
// Skip crate items and just output metadata in -Z no-trans mode.
if tcx.sess.opts.debugging_opts.no_trans ||
!tcx.sess.opts.output_types.should_trans() {
- let empty_exported_symbols = ExportedSymbols::empty();
- let linker_info = LinkerInfo::new(&shared_ccx, &empty_exported_symbols);
let ongoing_translation = write::start_async_translation(
- tcx.sess,
- output_filenames,
+ tcx,
time_graph.clone(),
- tcx.crate_name(LOCAL_CRATE),
link_meta,
metadata,
- Arc::new(empty_exported_symbols),
- no_builtins,
- None,
- linker_info,
- crate_info,
- false);
+ rx);
- ongoing_translation.submit_pre_translated_module_to_llvm(tcx.sess, metadata_module, true);
+ ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module);
+ ongoing_translation.translation_finished(tcx);
assert_and_save_dep_graph(tcx,
incremental_hashes_map,
return ongoing_translation;
}
- let exported_symbols = Arc::new(ExportedSymbols::compute(tcx,
- &exported_symbol_node_ids));
-
// Run the translation item collector and partition the collected items into
// codegen units.
- let (translation_items, codegen_units) =
- collect_and_partition_translation_items(&shared_ccx, &exported_symbols);
+ let codegen_units =
+ shared_ccx.tcx().collect_and_partition_translation_items(LOCAL_CRATE).1;
+ let codegen_units = (*codegen_units).clone();
assert!(codegen_units.len() <= 1 || !tcx.sess.lto());
- let linker_info = LinkerInfo::new(&shared_ccx, &exported_symbols);
- let subsystem = attr::first_attr_value_str_by_name(&krate.attrs,
- "windows_subsystem");
- let windows_subsystem = subsystem.map(|subsystem| {
- if subsystem != "windows" && subsystem != "console" {
- tcx.sess.fatal(&format!("invalid windows subsystem `{}`, only \
- `windows` and `console` are allowed",
- subsystem));
- }
- subsystem.to_string()
- });
-
- let no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
- (tcx.sess.target.target.options.no_integrated_as &&
- (output_filenames.outputs.contains_key(&OutputType::Object) ||
- output_filenames.outputs.contains_key(&OutputType::Exe)));
-
let ongoing_translation = write::start_async_translation(
- tcx.sess,
- output_filenames,
+ tcx,
time_graph.clone(),
- tcx.crate_name(LOCAL_CRATE),
link_meta,
metadata,
- exported_symbols.clone(),
- no_builtins,
- windows_subsystem,
- linker_info,
- crate_info,
- no_integrated_as);
+ rx);
// Translate an allocator shim, if any
//
};
if let Some(allocator_module) = allocator_module {
- ongoing_translation.submit_pre_translated_module_to_llvm(tcx.sess, allocator_module, false);
+ ongoing_translation.submit_pre_translated_module_to_llvm(tcx, allocator_module);
}
- let codegen_unit_count = codegen_units.len();
- ongoing_translation.submit_pre_translated_module_to_llvm(tcx.sess,
- metadata_module,
- codegen_unit_count == 0);
-
- let translation_items = Arc::new(translation_items);
-
- let mut all_stats = Stats::default();
- let mut module_dispositions = tcx.sess.opts.incremental.as_ref().map(|_| Vec::new());
+ ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module);
// We sort the codegen units by size. This way we can schedule work for LLVM
// a bit more efficiently. Note that "size" is defined rather crudely at the
};
let mut total_trans_time = Duration::new(0, 0);
+ let mut all_stats = Stats::default();
- for (cgu_index, cgu) in codegen_units.into_iter().enumerate() {
+ for cgu in codegen_units.into_iter() {
ongoing_translation.wait_for_signal_to_translate_item();
ongoing_translation.check_for_errors(tcx.sess);
+ let _timing_guard = time_graph
+ .as_ref()
+ .map(|time_graph| time_graph.start(write::TRANS_WORKER_TIMELINE,
+ write::TRANS_WORK_PACKAGE_KIND));
let start_time = Instant::now();
-
- let module = {
- let _timing_guard = time_graph
- .as_ref()
- .map(|time_graph| time_graph.start(write::TRANS_WORKER_TIMELINE,
- write::TRANS_WORK_PACKAGE_KIND));
- let dep_node = cgu.work_product_dep_node();
- let ((stats, module), _) =
- tcx.dep_graph.with_task(dep_node,
- AssertDepGraphSafe(&shared_ccx),
- AssertDepGraphSafe((cgu,
- translation_items.clone(),
- exported_symbols.clone())),
- module_translation);
- all_stats.extend(stats);
-
- if let Some(ref mut module_dispositions) = module_dispositions {
- module_dispositions.push(module.disposition());
- }
-
- module
- };
-
- let time_to_translate = Instant::now().duration_since(start_time);
-
- // We assume that the cost to run LLVM on a CGU is proportional to
- // the time we needed for translating it.
- let cost = time_to_translate.as_secs() * 1_000_000_000 +
- time_to_translate.subsec_nanos() as u64;
-
- total_trans_time += time_to_translate;
-
- let is_last_cgu = (cgu_index + 1) == codegen_unit_count;
-
- ongoing_translation.submit_translated_module_to_llvm(tcx.sess,
- module,
- cost,
- is_last_cgu);
+ all_stats.extend(tcx.compile_codegen_unit(*cgu.name()));
+ total_trans_time += start_time.elapsed();
ongoing_translation.check_for_errors(tcx.sess);
}
+ ongoing_translation.translation_finished(tcx);
+
// Since the main thread is sometimes blocked during trans, we keep track
// -Ztime-passes output manually.
print_time_passes_entry(tcx.sess.time_passes(),
"translate to LLVM IR",
total_trans_time);
- if let Some(module_dispositions) = module_dispositions {
- assert_module_sources::assert_module_sources(tcx, &module_dispositions);
- }
-
- fn module_translation<'a, 'tcx>(
- scx: AssertDepGraphSafe<&SharedCrateContext<'a, 'tcx>>,
- args: AssertDepGraphSafe<(CodegenUnit<'tcx>,
- Arc<FxHashSet<TransItem<'tcx>>>,
- Arc<ExportedSymbols>)>)
- -> (Stats, ModuleTranslation)
- {
- // FIXME(#40304): We ought to be using the id as a key and some queries, I think.
- let AssertDepGraphSafe(scx) = scx;
- let AssertDepGraphSafe((cgu, crate_trans_items, exported_symbols)) = args;
-
- let cgu_name = String::from(cgu.name());
- let cgu_id = cgu.work_product_id();
- let symbol_name_hash = cgu.compute_symbol_name_hash(scx);
-
- // Check whether there is a previous work-product we can
- // re-use. Not only must the file exist, and the inputs not
- // be dirty, but the hash of the symbols we will generate must
- // be the same.
- let previous_work_product =
- scx.dep_graph().previous_work_product(&cgu_id).and_then(|work_product| {
- if work_product.input_hash == symbol_name_hash {
- debug!("trans_reuse_previous_work_products: reusing {:?}", work_product);
- Some(work_product)
- } else {
- if scx.sess().opts.debugging_opts.incremental_info {
- eprintln!("incremental: CGU `{}` invalidated because of \
- changed partitioning hash.",
- cgu.name());
- }
- debug!("trans_reuse_previous_work_products: \
- not reusing {:?} because hash changed to {:?}",
- work_product, symbol_name_hash);
- None
- }
- });
-
- if let Some(buf) = previous_work_product {
- // Don't need to translate this module.
- let module = ModuleTranslation {
- name: cgu_name,
- symbol_name_hash,
- source: ModuleSource::Preexisting(buf.clone()),
- kind: ModuleKind::Regular,
- };
- return (Stats::default(), module);
- }
-
- // Instantiate translation items without filling out definitions yet...
- let lcx = LocalCrateContext::new(scx, cgu, crate_trans_items, exported_symbols);
- let module = {
- let ccx = CrateContext::new(scx, &lcx);
- let trans_items = ccx.codegen_unit()
- .items_in_deterministic_order(ccx.tcx());
- for &(trans_item, (linkage, visibility)) in &trans_items {
- trans_item.predefine(&ccx, linkage, visibility);
- }
-
- // ... and now that we have everything pre-defined, fill out those definitions.
- for &(trans_item, _) in &trans_items {
- trans_item.define(&ccx);
- }
-
- // If this codegen unit contains the main function, also create the
- // wrapper here
- maybe_create_entry_wrapper(&ccx);
-
- // Run replace-all-uses-with for statics that need it
- for &(old_g, new_g) in ccx.statics_to_rauw().borrow().iter() {
- unsafe {
- let bitcast = llvm::LLVMConstPointerCast(new_g, llvm::LLVMTypeOf(old_g));
- llvm::LLVMReplaceAllUsesWith(old_g, bitcast);
- llvm::LLVMDeleteGlobal(old_g);
- }
- }
-
- // Create the llvm.used variable
- // This variable has type [N x i8*] and is stored in the llvm.metadata section
- if !ccx.used_statics().borrow().is_empty() {
- let name = CString::new("llvm.used").unwrap();
- let section = CString::new("llvm.metadata").unwrap();
- let array = C_array(Type::i8(&ccx).ptr_to(), &*ccx.used_statics().borrow());
-
- unsafe {
- let g = llvm::LLVMAddGlobal(ccx.llmod(),
- val_ty(array).to_ref(),
- name.as_ptr());
- llvm::LLVMSetInitializer(g, array);
- llvm::LLVMRustSetLinkage(g, llvm::Linkage::AppendingLinkage);
- llvm::LLVMSetSection(g, section.as_ptr());
- }
- }
-
- // Finalize debuginfo
- if ccx.sess().opts.debuginfo != NoDebugInfo {
- debuginfo::finalize(&ccx);
- }
-
- let llvm_module = ModuleLlvm {
- llcx: ccx.llcx(),
- llmod: ccx.llmod(),
- };
-
- // In LTO mode we inject the allocator shim into the existing
- // module.
- if ccx.sess().lto() {
- if let Some(kind) = ccx.sess().allocator_kind.get() {
- time(ccx.sess().time_passes(), "write allocator module", || {
- unsafe {
- allocator::trans(ccx.tcx(), &llvm_module, kind);
- }
- });
- }
- }
-
- // Adjust exported symbols for MSVC dllimport
- if ccx.sess().target.target.options.is_like_msvc &&
- ccx.sess().crate_types.borrow().iter().any(|ct| *ct == config::CrateTypeRlib) {
- create_imps(ccx.sess(), &llvm_module);
- }
-
- ModuleTranslation {
- name: cgu_name,
- symbol_name_hash,
- source: ModuleSource::Translated(llvm_module),
- kind: ModuleKind::Regular,
- }
- };
-
- (lcx.into_stats(), module)
+ if tcx.sess.opts.incremental.is_some() {
+ DISPOSITIONS.with(|d| {
+ assert_module_sources::assert_module_sources(tcx, &d.borrow());
+ });
}
symbol_names_test::report_symbol_names(tcx);
if shared_ccx.sess().trans_stats() {
println!("--- trans stats ---");
- println!("n_glues_created: {}", all_stats.n_glues_created.get());
- println!("n_null_glues: {}", all_stats.n_null_glues.get());
- println!("n_real_glues: {}", all_stats.n_real_glues.get());
+ println!("n_glues_created: {}", all_stats.n_glues_created);
+ println!("n_null_glues: {}", all_stats.n_null_glues);
+ println!("n_real_glues: {}", all_stats.n_real_glues);
- println!("n_fns: {}", all_stats.n_fns.get());
- println!("n_inlines: {}", all_stats.n_inlines.get());
- println!("n_closures: {}", all_stats.n_closures.get());
+ println!("n_fns: {}", all_stats.n_fns);
+ println!("n_inlines: {}", all_stats.n_inlines);
+ println!("n_closures: {}", all_stats.n_closures);
println!("fn stats:");
- all_stats.fn_stats.borrow_mut().sort_by(|&(_, insns_a), &(_, insns_b)| {
- insns_b.cmp(&insns_a)
- });
- for tuple in all_stats.fn_stats.borrow().iter() {
- match *tuple {
- (ref name, insns) => {
- println!("{} insns, {}", insns, *name);
- }
- }
+ all_stats.fn_stats.sort_by_key(|&(_, insns)| insns);
+ for &(ref name, insns) in all_stats.fn_stats.iter() {
+ println!("{} insns, {}", insns, *name);
}
}
if shared_ccx.sess().count_llvm_insns() {
- for (k, v) in all_stats.llvm_insns.borrow().iter() {
+ for (k, v) in all_stats.llvm_insns.iter() {
println!("{:7} {}", *v, *k);
}
}
ongoing_translation
}
+// FIXME(#42293) hopefully once red/green is enabled we're testing everything
+// via a method that doesn't require this!
+thread_local!(static DISPOSITIONS: RefCell<Vec<(String, Disposition)>> = Default::default());
+
fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: IncrementalHashesMap,
metadata_incr_hashes: EncodedMetadataHashes,
}
}
-fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- exported_symbols: &ExportedSymbols)
- -> (FxHashSet<TransItem<'tcx>>,
- Vec<CodegenUnit<'tcx>>) {
- let time_passes = scx.sess().time_passes();
+fn collect_and_partition_translation_items<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ cnum: CrateNum,
+) -> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>)
+{
+ assert_eq!(cnum, LOCAL_CRATE);
+ let time_passes = tcx.sess.time_passes();
- let collection_mode = match scx.sess().opts.debugging_opts.print_trans_items {
+ let collection_mode = match tcx.sess.opts.debugging_opts.print_trans_items {
Some(ref s) => {
let mode_string = s.to_lowercase();
let mode_string = mode_string.trim();
let message = format!("Unknown codegen-item collection mode '{}'. \
Falling back to 'lazy' mode.",
mode_string);
- scx.sess().warn(&message);
+ tcx.sess.warn(&message);
}
TransItemCollectionMode::Lazy
let (items, inlining_map) =
time(time_passes, "translation item collection", || {
- collector::collect_crate_translation_items(&scx,
- exported_symbols,
- collection_mode)
+ collector::collect_crate_translation_items(tcx, collection_mode)
});
- assert_symbols_are_distinct(scx.tcx(), items.iter());
+ assert_symbols_are_distinct(tcx, items.iter());
- let strategy = if scx.sess().opts.debugging_opts.incremental.is_some() {
+ let strategy = if tcx.sess.opts.debugging_opts.incremental.is_some() {
PartitioningStrategy::PerModule
} else {
- PartitioningStrategy::FixedUnitCount(scx.sess().opts.cg.codegen_units)
+ PartitioningStrategy::FixedUnitCount(tcx.sess.opts.cg.codegen_units)
};
let codegen_units = time(time_passes, "codegen unit partitioning", || {
- partitioning::partition(scx,
+ partitioning::partition(tcx,
items.iter().cloned(),
strategy,
- &inlining_map,
- exported_symbols)
+ &inlining_map)
+ .into_iter()
+ .map(Arc::new)
+ .collect::<Vec<_>>()
});
- assert!(scx.tcx().sess.opts.cg.codegen_units == codegen_units.len() ||
- scx.tcx().sess.opts.debugging_opts.incremental.is_some());
+ assert!(tcx.sess.opts.cg.codegen_units == codegen_units.len() ||
+ tcx.sess.opts.debugging_opts.incremental.is_some());
- let translation_items: FxHashSet<TransItem<'tcx>> = items.iter().cloned().collect();
+ let translation_items: DefIdSet = items.iter().filter_map(|trans_item| {
+ match *trans_item {
+ TransItem::Fn(ref instance) => Some(instance.def_id()),
+ _ => None,
+ }
+ }).collect();
- if scx.sess().opts.debugging_opts.print_trans_items.is_some() {
+ if tcx.sess.opts.debugging_opts.print_trans_items.is_some() {
let mut item_to_cgus = FxHashMap();
for cgu in &codegen_units {
let mut item_keys: Vec<_> = items
.iter()
.map(|i| {
- let mut output = i.to_string(scx.tcx());
+ let mut output = i.to_string(tcx);
output.push_str(" @@");
let mut empty = Vec::new();
let cgus = item_to_cgus.get_mut(i).unwrap_or(&mut empty);
output.push_str(&cgu_name);
let linkage_abbrev = match linkage {
- llvm::Linkage::ExternalLinkage => "External",
- llvm::Linkage::AvailableExternallyLinkage => "Available",
- llvm::Linkage::LinkOnceAnyLinkage => "OnceAny",
- llvm::Linkage::LinkOnceODRLinkage => "OnceODR",
- llvm::Linkage::WeakAnyLinkage => "WeakAny",
- llvm::Linkage::WeakODRLinkage => "WeakODR",
- llvm::Linkage::AppendingLinkage => "Appending",
- llvm::Linkage::InternalLinkage => "Internal",
- llvm::Linkage::PrivateLinkage => "Private",
- llvm::Linkage::ExternalWeakLinkage => "ExternalWeak",
- llvm::Linkage::CommonLinkage => "Common",
+ Linkage::External => "External",
+ Linkage::AvailableExternally => "Available",
+ Linkage::LinkOnceAny => "OnceAny",
+ Linkage::LinkOnceODR => "OnceODR",
+ Linkage::WeakAny => "WeakAny",
+ Linkage::WeakODR => "WeakODR",
+ Linkage::Appending => "Appending",
+ Linkage::Internal => "Internal",
+ Linkage::Private => "Private",
+ Linkage::ExternalWeak => "ExternalWeak",
+ Linkage::Common => "Common",
};
output.push_str("[");
}
}
- (translation_items, codegen_units)
+ (Arc::new(translation_items), Arc::new(codegen_units))
}
impl CrateInfo {
- pub fn new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CrateInfo {
+ pub fn new(tcx: TyCtxt) -> CrateInfo {
let mut info = CrateInfo {
panic_runtime: None,
compiler_builtins: None,
return info
}
}
+
+fn is_translated_function(tcx: TyCtxt, id: DefId) -> bool {
+ // FIXME(#42293) needs red/green tracking to avoid failing a bunch of
+ // existing tests
+ tcx.dep_graph.with_ignore(|| {
+ let (all_trans_items, _) =
+ tcx.collect_and_partition_translation_items(LOCAL_CRATE);
+ all_trans_items.contains(&id)
+ })
+}
+
+fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ cgu: InternedString) -> Stats {
+ // FIXME(#42293) needs red/green tracking to avoid failing a bunch of
+ // existing tests
+ let cgu = tcx.dep_graph.with_ignore(|| {
+ tcx.codegen_unit(cgu)
+ });
+
+ let start_time = Instant::now();
+ let dep_node = cgu.work_product_dep_node();
+ let ((stats, module), _) =
+ tcx.dep_graph.with_task(dep_node,
+ tcx,
+ cgu,
+ module_translation);
+ let time_to_translate = start_time.elapsed();
+
+ if tcx.sess.opts.incremental.is_some() {
+ DISPOSITIONS.with(|d| {
+ d.borrow_mut().push(module.disposition());
+ });
+ }
+
+ // We assume that the cost to run LLVM on a CGU is proportional to
+ // the time we needed for translating it.
+ let cost = time_to_translate.as_secs() * 1_000_000_000 +
+ time_to_translate.subsec_nanos() as u64;
+
+ write::submit_translated_module_to_llvm(tcx,
+ module,
+ cost);
+ return stats;
+
+ fn module_translation<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ cgu: Arc<CodegenUnit<'tcx>>)
+ -> (Stats, ModuleTranslation)
+ {
+ let cgu_name = cgu.name().to_string();
+ let cgu_id = cgu.work_product_id();
+ let symbol_name_hash = cgu.compute_symbol_name_hash(tcx);
+
+ // Check whether there is a previous work-product we can
+ // re-use. Not only must the file exist, and the inputs not
+ // be dirty, but the hash of the symbols we will generate must
+ // be the same.
+ let previous_work_product =
+ tcx.dep_graph.previous_work_product(&cgu_id).and_then(|work_product| {
+ if work_product.input_hash == symbol_name_hash {
+ debug!("trans_reuse_previous_work_products: reusing {:?}", work_product);
+ Some(work_product)
+ } else {
+ if tcx.sess.opts.debugging_opts.incremental_info {
+ eprintln!("incremental: CGU `{}` invalidated because of \
+ changed partitioning hash.",
+ cgu.name());
+ }
+ debug!("trans_reuse_previous_work_products: \
+ not reusing {:?} because hash changed to {:?}",
+ work_product, symbol_name_hash);
+ None
+ }
+ });
+
+ if let Some(buf) = previous_work_product {
+ // Don't need to translate this module.
+ let module = ModuleTranslation {
+ name: cgu_name,
+ symbol_name_hash,
+ source: ModuleSource::Preexisting(buf.clone()),
+ kind: ModuleKind::Regular,
+ };
+ return (Stats::default(), module);
+ }
+
+ // Instantiate translation items without filling out definitions yet...
+ let scx = SharedCrateContext::new(tcx);
+ let lcx = LocalCrateContext::new(&scx, cgu);
+ let module = {
+ let ccx = CrateContext::new(&scx, &lcx);
+ let trans_items = ccx.codegen_unit()
+ .items_in_deterministic_order(ccx.tcx());
+ for &(trans_item, (linkage, visibility)) in &trans_items {
+ trans_item.predefine(&ccx, linkage, visibility);
+ }
+
+ // ... and now that we have everything pre-defined, fill out those definitions.
+ for &(trans_item, _) in &trans_items {
+ trans_item.define(&ccx);
+ }
+
+ // If this codegen unit contains the main function, also create the
+ // wrapper here
+ maybe_create_entry_wrapper(&ccx);
+
+ // Run replace-all-uses-with for statics that need it
+ for &(old_g, new_g) in ccx.statics_to_rauw().borrow().iter() {
+ unsafe {
+ let bitcast = llvm::LLVMConstPointerCast(new_g, llvm::LLVMTypeOf(old_g));
+ llvm::LLVMReplaceAllUsesWith(old_g, bitcast);
+ llvm::LLVMDeleteGlobal(old_g);
+ }
+ }
+
+ // Create the llvm.used variable
+ // This variable has type [N x i8*] and is stored in the llvm.metadata section
+ if !ccx.used_statics().borrow().is_empty() {
+ let name = CString::new("llvm.used").unwrap();
+ let section = CString::new("llvm.metadata").unwrap();
+ let array = C_array(Type::i8(&ccx).ptr_to(), &*ccx.used_statics().borrow());
+
+ unsafe {
+ let g = llvm::LLVMAddGlobal(ccx.llmod(),
+ val_ty(array).to_ref(),
+ name.as_ptr());
+ llvm::LLVMSetInitializer(g, array);
+ llvm::LLVMRustSetLinkage(g, llvm::Linkage::AppendingLinkage);
+ llvm::LLVMSetSection(g, section.as_ptr());
+ }
+ }
+
+ // Finalize debuginfo
+ if ccx.sess().opts.debuginfo != NoDebugInfo {
+ debuginfo::finalize(&ccx);
+ }
+
+ let llvm_module = ModuleLlvm {
+ llcx: ccx.llcx(),
+ llmod: ccx.llmod(),
+ };
+
+ // In LTO mode we inject the allocator shim into the existing
+ // module.
+ if ccx.sess().lto() {
+ if let Some(kind) = ccx.sess().allocator_kind.get() {
+ time(ccx.sess().time_passes(), "write allocator module", || {
+ unsafe {
+ allocator::trans(ccx.tcx(), &llvm_module, kind);
+ }
+ });
+ }
+ }
+
+ // Adjust exported symbols for MSVC dllimport
+ if ccx.sess().target.target.options.is_like_msvc &&
+ ccx.sess().crate_types.borrow().iter().any(|ct| *ct == config::CrateTypeRlib) {
+ create_imps(ccx.sess(), &llvm_module);
+ }
+
+ ModuleTranslation {
+ name: cgu_name,
+ symbol_name_hash,
+ source: ModuleSource::Translated(llvm_module),
+ kind: ModuleKind::Regular,
+ }
+ };
+
+ (lcx.into_stats(), module)
+ }
+}
+
+pub fn provide_local(providers: &mut Providers) {
+ providers.collect_and_partition_translation_items =
+ collect_and_partition_translation_items;
+
+ providers.is_translated_function = is_translated_function;
+
+ providers.codegen_unit = |tcx, name| {
+ let (_, all) = tcx.collect_and_partition_translation_items(LOCAL_CRATE);
+ all.iter()
+ .find(|cgu| *cgu.name() == name)
+ .cloned()
+ .expect(&format!("failed to find cgu with name {:?}", name))
+ };
+ providers.compile_codegen_unit = compile_codegen_unit;
+}
+
+pub fn provide_extern(providers: &mut Providers) {
+ providers.is_translated_function = is_translated_function;
+}
+
+pub fn linkage_to_llvm(linkage: Linkage) -> llvm::Linkage {
+ match linkage {
+ Linkage::External => llvm::Linkage::ExternalLinkage,
+ Linkage::AvailableExternally => llvm::Linkage::AvailableExternallyLinkage,
+ Linkage::LinkOnceAny => llvm::Linkage::LinkOnceAnyLinkage,
+ Linkage::LinkOnceODR => llvm::Linkage::LinkOnceODRLinkage,
+ Linkage::WeakAny => llvm::Linkage::WeakAnyLinkage,
+ Linkage::WeakODR => llvm::Linkage::WeakODRLinkage,
+ Linkage::Appending => llvm::Linkage::AppendingLinkage,
+ Linkage::Internal => llvm::Linkage::InternalLinkage,
+ Linkage::Private => llvm::Linkage::PrivateLinkage,
+ Linkage::ExternalWeak => llvm::Linkage::ExternalWeakLinkage,
+ Linkage::Common => llvm::Linkage::CommonLinkage,
+ }
+}
+
+pub fn visibility_to_llvm(linkage: Visibility) -> llvm::Visibility {
+ match linkage {
+ Visibility::Default => llvm::Visibility::Default,
+ Visibility::Hidden => llvm::Visibility::Hidden,
+ Visibility::Protected => llvm::Visibility::Protected,
+ }
+}
+
+// FIXME(mw): Anything that is produced via DepGraph::with_task() must implement
+// the HashStable trait. Normally DepGraph::with_task() calls are
+// hidden behind queries, but CGU creation is a special case in two
+// ways: (1) it's not a query and (2) CGU are output nodes, so their
+// Fingerprints are not actually needed. It remains to be clarified
+// how exactly this case will be handled in the red/green system but
+// for now we content ourselves with providing a no-op HashStable
+// implementation for CGUs.
+mod temp_stable_hash_impls {
+ use rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher,
+ HashStable};
+ use ModuleTranslation;
+
+ impl<HCX> HashStable<HCX> for ModuleTranslation {
+ fn hash_stable<W: StableHasherResult>(&self,
+ _: &mut HCX,
+ _: &mut StableHasher<W>) {
+ // do nothing
+ }
+ }
+}
fn count_insn(&self, category: &str) {
if self.ccx.sess().trans_stats() {
- self.ccx.stats().n_llvm_insns.set(self.ccx.stats().n_llvm_insns.get() + 1);
+ self.ccx.stats().borrow_mut().n_llvm_insns += 1;
}
if self.ccx.sess().count_llvm_insns() {
- let mut h = self.ccx.stats().llvm_insns.borrow_mut();
- *h.entry(category.to_string()).or_insert(0) += 1;
+ *self.ccx.stats()
+ .borrow_mut()
+ .llvm_insns
+ .entry(category.to_string())
+ .or_insert(0) += 1;
}
}
use rustc::hir::def_id::DefId;
use rustc::ty::TypeFoldable;
use rustc::ty::subst::Substs;
-use trans_item::TransItem;
use type_of;
/// Translates a reference to a fn/method item, monomorphizing and
assert!(!instance.substs.has_escaping_regions());
assert!(!instance.substs.has_param_types());
- let fn_ty = common::instance_ty(ccx.shared(), &instance);
+ let fn_ty = common::instance_ty(ccx.tcx(), &instance);
if let Some(&llfn) = ccx.instances().borrow().get(&instance) {
return llfn;
}
let sym = tcx.symbol_name(instance);
debug!("get_fn({:?}: {:?}) => {}", instance, fn_ty, sym);
- // This is subtle and surprising, but sometimes we have to bitcast
- // the resulting fn pointer. The reason has to do with external
- // functions. If you have two crates that both bind the same C
- // library, they may not use precisely the same types: for
- // example, they will probably each declare their own structs,
- // which are distinct types from LLVM's point of view (nominal
- // types).
- //
- // Now, if those two crates are linked into an application, and
- // they contain inlined code, you can wind up with a situation
- // where both of those functions wind up being loaded into this
- // application simultaneously. In that case, the same function
- // (from LLVM's point of view) requires two types. But of course
- // LLVM won't allow one function to have two types.
- //
- // What we currently do, therefore, is declare the function with
- // one of the two types (whichever happens to come first) and then
- // bitcast as needed when the function is referenced to make sure
- // it has the type we expect.
- //
- // This can occur on either a crate-local or crate-external
- // reference. It also occurs when testing libcore and in some
- // other weird situations. Annoying.
-
// Create a fn pointer with the substituted signature.
let fn_ptr_ty = tcx.mk_fn_ptr(common::ty_fn_sig(ccx, fn_ty));
let llptrty = type_of::type_of(ccx, fn_ptr_ty);
let llfn = if let Some(llfn) = declare::get_declared_value(ccx, &sym) {
+ // This is subtle and surprising, but sometimes we have to bitcast
+ // the resulting fn pointer. The reason has to do with external
+ // functions. If you have two crates that both bind the same C
+ // library, they may not use precisely the same types: for
+ // example, they will probably each declare their own structs,
+ // which are distinct types from LLVM's point of view (nominal
+ // types).
+ //
+ // Now, if those two crates are linked into an application, and
+ // they contain inlined code, you can wind up with a situation
+ // where both of those functions wind up being loaded into this
+ // application simultaneously. In that case, the same function
+ // (from LLVM's point of view) requires two types. But of course
+ // LLVM won't allow one function to have two types.
+ //
+ // What we currently do, therefore, is declare the function with
+ // one of the two types (whichever happens to come first) and then
+ // bitcast as needed when the function is referenced to make sure
+ // it has the type we expect.
+ //
+ // This can occur on either a crate-local or crate-external
+ // reference. It also occurs when testing libcore and in some
+ // other weird situations. Annoying.
if common::val_ty(llfn) != llptrty {
debug!("get_fn: casting {:?} to {:?}", llfn, llptrty);
consts::ptrcast(llfn, llptrty)
attributes::unwind(llfn, true);
}
+ // Apply an appropriate linkage/visibility value to our item that we
+ // just declared.
+ //
+ // This is sort of subtle. Inside our codegen unit we started off
+ // compilation by predefining all our own `TransItem` instances. That
+ // is, everything we're translating ourselves is already defined. That
+ // means that anything we're actually translating ourselves will have
+ // hit the above branch in `get_declared_value`. As a result, we're
+ // guaranteed here that we're declaring a symbol that won't get defined,
+ // or in other words we're referencing a foreign value.
+ //
+ // So because this is a foreign value we blanket apply an external
+ // linkage directive because it's coming from a different object file.
+ // The visibility here is where it gets tricky. This symbol could be
+ // referencing some foreign crate or foreign library (an `extern`
+ // block) in which case we want to leave the default visibility. We may
+ // also, though, have multiple codegen units.
+ //
+ // In the situation of multiple codegen units this function may be
+ // referencing a function from another codegen unit. If we're
+ // indeed referencing a symbol in another codegen unit then we're in one
+ // of two cases:
+ //
+ // * This is a symbol defined in a foreign crate and we're just
+ //   monomorphizing in another codegen unit. In this case this symbol
+ // is for sure not exported, so both codegen units will be using
+ // hidden visibility. Hence, we apply a hidden visibility here.
+ //
+ // * This is a symbol defined in our local crate. If the symbol in the
+ // other codegen unit is also not exported then like with the foreign
+ // case we apply a hidden visibility. If the symbol is exported from
+ // the foreign object file, however, then we leave this at the
+ // default visibility as we'll just import it naturally.
unsafe {
llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::ExternalLinkage);
- if ccx.crate_trans_items().contains(&TransItem::Fn(instance)) {
- if let Some(node_id) = tcx.hir.as_local_node_id(instance_def_id) {
- if !ccx.exported_symbols().local_exports().contains(&node_id) {
+ if ccx.tcx().is_translated_function(instance_def_id) {
+ if instance_def_id.is_local() {
+ if !ccx.tcx().is_exported_symbol(instance_def_id) {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
} else {
substs: &'tcx Substs<'tcx>)
-> ValueRef
{
- get_fn(ccx, monomorphize::resolve(ccx.shared(), def_id, substs))
+ get_fn(ccx, monomorphize::resolve(ccx.tcx(), def_id, substs))
}
use rustc::mir::{self, Location};
use rustc::mir::visit::Visitor as MirVisitor;
-use context::SharedCrateContext;
-use common::{def_ty, instance_ty};
+use common::{def_ty, instance_ty, type_is_sized};
use monomorphize::{self, Instance};
use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
-use trans_item::{TransItem, DefPathBasedNames, InstantiationMode};
+use trans_item::{TransItem, TransItemExt, DefPathBasedNames, InstantiationMode};
use rustc_data_structures::bitvec::BitVector;
-use back::symbol_export::ExportedSymbols;
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
pub enum TransItemCollectionMode {
}
}
-pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- exported_symbols: &ExportedSymbols,
+pub fn collect_crate_translation_items<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: TransItemCollectionMode)
-> (FxHashSet<TransItem<'tcx>>,
InliningMap<'tcx>) {
// We are not tracking dependencies of this pass as it has to be re-executed
// every time no matter what.
- scx.tcx().dep_graph.with_ignore(|| {
- let roots = collect_roots(scx, exported_symbols, mode);
+ tcx.dep_graph.with_ignore(|| {
+ let roots = collect_roots(tcx, mode);
debug!("Building translation item graph, beginning at roots");
let mut visited = FxHashSet();
let mut inlining_map = InliningMap::new();
for root in roots {
- collect_items_rec(scx,
+ collect_items_rec(tcx,
root,
&mut visited,
&mut recursion_depths,
// Find all non-generic items by walking the HIR. These items serve as roots to
// start monomorphizing from.
-fn collect_roots<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- exported_symbols: &ExportedSymbols,
+fn collect_roots<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: TransItemCollectionMode)
-> Vec<TransItem<'tcx>> {
debug!("Collecting roots");
{
let mut visitor = RootCollector {
- scx,
+ tcx,
mode,
- exported_symbols,
output: &mut roots,
};
- scx.tcx().hir.krate().visit_all_item_likes(&mut visitor);
+ tcx.hir.krate().visit_all_item_likes(&mut visitor);
}
// We can only translate items that are instantiable - items all of
// whose predicates hold. Luckily, items that aren't instantiable
// can't actually be used, so we can just skip translating them.
- roots.retain(|root| root.is_instantiable(scx.tcx()));
+ roots.retain(|root| root.is_instantiable(tcx));
roots
}
// Collect all monomorphized translation items reachable from `starting_point`
-fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
+fn collect_items_rec<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
starting_point: TransItem<'tcx>,
visited: &mut FxHashSet<TransItem<'tcx>>,
recursion_depths: &mut DefIdMap<usize>,
// We've been here already, no need to search again.
return;
}
- debug!("BEGIN collect_items_rec({})", starting_point.to_string(scx.tcx()));
+ debug!("BEGIN collect_items_rec({})", starting_point.to_string(tcx));
let mut neighbors = Vec::new();
let recursion_depth_reset;
match starting_point {
TransItem::Static(node_id) => {
- let def_id = scx.tcx().hir.local_def_id(node_id);
- let instance = Instance::mono(scx.tcx(), def_id);
+ let def_id = tcx.hir.local_def_id(node_id);
+ let instance = Instance::mono(tcx, def_id);
// Sanity check whether this ended up being collected accidentally
- debug_assert!(should_trans_locally(scx.tcx(), &instance));
+ debug_assert!(should_trans_locally(tcx, &instance));
- let ty = instance_ty(scx, &instance);
- visit_drop_use(scx, ty, true, &mut neighbors);
+ let ty = instance_ty(tcx, &instance);
+ visit_drop_use(tcx, ty, true, &mut neighbors);
recursion_depth_reset = None;
- collect_neighbours(scx, instance, true, &mut neighbors);
+ collect_neighbours(tcx, instance, true, &mut neighbors);
}
TransItem::Fn(instance) => {
// Sanity check whether this ended up being collected accidentally
- debug_assert!(should_trans_locally(scx.tcx(), &instance));
+ debug_assert!(should_trans_locally(tcx, &instance));
// Keep track of the monomorphization recursion depth
- recursion_depth_reset = Some(check_recursion_limit(scx.tcx(),
+ recursion_depth_reset = Some(check_recursion_limit(tcx,
instance,
recursion_depths));
- check_type_length_limit(scx.tcx(), instance);
+ check_type_length_limit(tcx, instance);
- collect_neighbours(scx, instance, false, &mut neighbors);
+ collect_neighbours(tcx, instance, false, &mut neighbors);
}
TransItem::GlobalAsm(..) => {
recursion_depth_reset = None;
}
}
- record_accesses(scx.tcx(), starting_point, &neighbors[..], inlining_map);
+ record_accesses(tcx, starting_point, &neighbors[..], inlining_map);
for neighbour in neighbors {
- collect_items_rec(scx, neighbour, visited, recursion_depths, inlining_map);
+ collect_items_rec(tcx, neighbour, visited, recursion_depths, inlining_map);
}
if let Some((def_id, depth)) = recursion_depth_reset {
recursion_depths.insert(def_id, depth);
}
- debug!("END collect_items_rec({})", starting_point.to_string(scx.tcx()));
+ debug!("END collect_items_rec({})", starting_point.to_string(tcx));
}
fn record_accesses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
struct MirNeighborCollector<'a, 'tcx: 'a> {
- scx: &'a SharedCrateContext<'a, 'tcx>,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a mir::Mir<'tcx>,
output: &'a mut Vec<TransItem<'tcx>>,
param_substs: &'tcx Substs<'tcx>,
// have to instantiate all methods of the trait being cast to, so we
// can build the appropriate vtable.
mir::Rvalue::Cast(mir::CastKind::Unsize, ref operand, target_ty) => {
- let target_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
- &target_ty);
- let source_ty = operand.ty(self.mir, self.scx.tcx());
- let source_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
- &source_ty);
- let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.scx,
+ let target_ty = self.tcx.trans_apply_param_substs(self.param_substs,
+ &target_ty);
+ let source_ty = operand.ty(self.mir, self.tcx);
+ let source_ty = self.tcx.trans_apply_param_substs(self.param_substs,
+ &source_ty);
+ let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.tcx,
source_ty,
target_ty);
// This could also be a different Unsize instruction, like
// from a fixed sized array to a slice. But we are only
// interested in things that produce a vtable.
if target_ty.is_trait() && !source_ty.is_trait() {
- create_trans_items_for_vtable_methods(self.scx,
+ create_trans_items_for_vtable_methods(self.tcx,
target_ty,
source_ty,
self.output);
}
}
mir::Rvalue::Cast(mir::CastKind::ReifyFnPointer, ref operand, _) => {
- let fn_ty = operand.ty(self.mir, self.scx.tcx());
- let fn_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
- &fn_ty);
- visit_fn_use(self.scx, fn_ty, false, &mut self.output);
+ let fn_ty = operand.ty(self.mir, self.tcx);
+ let fn_ty = self.tcx.trans_apply_param_substs(self.param_substs,
+ &fn_ty);
+ visit_fn_use(self.tcx, fn_ty, false, &mut self.output);
}
mir::Rvalue::Cast(mir::CastKind::ClosureFnPointer, ref operand, _) => {
- let source_ty = operand.ty(self.mir, self.scx.tcx());
- let source_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
- &source_ty);
+ let source_ty = operand.ty(self.mir, self.tcx);
+ let source_ty = self.tcx.trans_apply_param_substs(self.param_substs,
+ &source_ty);
match source_ty.sty {
ty::TyClosure(def_id, substs) => {
let instance = monomorphize::resolve_closure(
- self.scx, def_id, substs, ty::ClosureKind::FnOnce);
+ self.tcx, def_id, substs, ty::ClosureKind::FnOnce);
self.output.push(create_fn_trans_item(instance));
}
_ => bug!(),
}
}
mir::Rvalue::NullaryOp(mir::NullOp::Box, _) => {
- let tcx = self.scx.tcx();
+ let tcx = self.tcx;
let exchange_malloc_fn_def_id = tcx
.lang_items()
.require(ExchangeMallocFnLangItem)
- .unwrap_or_else(|e| self.scx.sess().fatal(&e));
+ .unwrap_or_else(|e| tcx.sess.fatal(&e));
let instance = Instance::mono(tcx, exchange_malloc_fn_def_id);
if should_trans_locally(tcx, &instance) {
self.output.push(create_fn_trans_item(instance));
debug!("visiting const {:?} @ {:?}", *constant, location);
if let ConstVal::Unevaluated(def_id, substs) = constant.val {
- let substs = self.scx.tcx().trans_apply_param_substs(self.param_substs,
- &substs);
- let instance = monomorphize::resolve(self.scx, def_id, substs);
- collect_neighbours(self.scx, instance, true, self.output);
+ let substs = self.tcx.trans_apply_param_substs(self.param_substs,
+ &substs);
+ let instance = monomorphize::resolve(self.tcx, def_id, substs);
+ collect_neighbours(self.tcx, instance, true, self.output);
}
self.super_const(constant);
location: Location) {
debug!("visiting terminator {:?} @ {:?}", kind, location);
- let tcx = self.scx.tcx();
+ let tcx = self.tcx;
match *kind {
mir::TerminatorKind::Call { ref func, .. } => {
let callee_ty = func.ty(self.mir, tcx);
let callee_ty = tcx.trans_apply_param_substs(self.param_substs, &callee_ty);
let constness = match (self.const_context, &callee_ty.sty) {
- (true, &ty::TyFnDef(def_id, substs)) if self.scx.tcx().is_const_fn(def_id) => {
- let instance = monomorphize::resolve(self.scx, def_id, substs);
+ (true, &ty::TyFnDef(def_id, substs)) if self.tcx.is_const_fn(def_id) => {
+ let instance = monomorphize::resolve(self.tcx, def_id, substs);
Some(instance)
}
_ => None
// If this is a const fn, called from a const context, we
// have to visit its body in order to find any fn reifications
// it might contain.
- collect_neighbours(self.scx,
+ collect_neighbours(self.tcx,
const_fn_instance,
true,
self.output);
} else {
- visit_fn_use(self.scx, callee_ty, true, &mut self.output);
+ visit_fn_use(self.tcx, callee_ty, true, &mut self.output);
}
}
mir::TerminatorKind::Drop { ref location, .. } |
mir::TerminatorKind::DropAndReplace { ref location, .. } => {
- let ty = location.ty(self.mir, self.scx.tcx())
- .to_ty(self.scx.tcx());
+ let ty = location.ty(self.mir, self.tcx)
+ .to_ty(self.tcx);
let ty = tcx.trans_apply_param_substs(self.param_substs, &ty);
- visit_drop_use(self.scx, ty, true, self.output);
+ visit_drop_use(self.tcx, ty, true, self.output);
}
mir::TerminatorKind::Goto { .. } |
mir::TerminatorKind::SwitchInt { .. } |
location: Location) {
debug!("visiting static {:?} @ {:?}", static_.def_id, location);
- let tcx = self.scx.tcx();
+ let tcx = self.tcx;
let instance = Instance::mono(tcx, static_.def_id);
if should_trans_locally(tcx, &instance) {
let node_id = tcx.hir.as_local_node_id(static_.def_id).unwrap();
}
}
-fn visit_drop_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+fn visit_drop_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: Ty<'tcx>,
is_direct_call: bool,
output: &mut Vec<TransItem<'tcx>>)
{
- let instance = monomorphize::resolve_drop_in_place(scx, ty);
- visit_instance_use(scx, instance, is_direct_call, output);
+ let instance = monomorphize::resolve_drop_in_place(tcx, ty);
+ visit_instance_use(tcx, instance, is_direct_call, output);
}
-fn visit_fn_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+fn visit_fn_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: Ty<'tcx>,
is_direct_call: bool,
output: &mut Vec<TransItem<'tcx>>)
{
if let ty::TyFnDef(def_id, substs) = ty.sty {
- let instance = monomorphize::resolve(scx, def_id, substs);
- visit_instance_use(scx, instance, is_direct_call, output);
+ let instance = monomorphize::resolve(tcx, def_id, substs);
+ visit_instance_use(tcx, instance, is_direct_call, output);
}
}
-fn visit_instance_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+fn visit_instance_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: ty::Instance<'tcx>,
is_direct_call: bool,
output: &mut Vec<TransItem<'tcx>>)
{
debug!("visit_item_use({:?}, is_direct_call={:?})", instance, is_direct_call);
- if !should_trans_locally(scx.tcx(), &instance) {
+ if !should_trans_locally(tcx, &instance) {
return
}
///
/// Finally, there is also the case of custom unsizing coercions, e.g. for
/// smart pointers such as `Rc` and `Arc`.
-fn find_vtable_types_for_unsizing<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+fn find_vtable_types_for_unsizing<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
source_ty: Ty<'tcx>,
target_ty: Ty<'tcx>)
-> (Ty<'tcx>, Ty<'tcx>) {
let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| {
- if !scx.type_is_sized(inner_source) {
+ if !type_is_sized(tcx, inner_source) {
(inner_source, inner_target)
} else {
- scx.tcx().struct_lockstep_tails(inner_source, inner_target)
+ tcx.struct_lockstep_tails(inner_source, inner_target)
}
};
match (&source_ty.sty, &target_ty.sty) {
assert_eq!(source_adt_def, target_adt_def);
let kind =
- monomorphize::custom_coerce_unsize_info(scx, source_ty, target_ty);
+ monomorphize::custom_coerce_unsize_info(tcx, source_ty, target_ty);
let coerce_index = match kind {
CustomCoerceUnsized::Struct(i) => i
assert!(coerce_index < source_fields.len() &&
source_fields.len() == target_fields.len());
- find_vtable_types_for_unsizing(scx,
- source_fields[coerce_index].ty(scx.tcx(),
+ find_vtable_types_for_unsizing(tcx,
+ source_fields[coerce_index].ty(tcx,
source_substs),
- target_fields[coerce_index].ty(scx.tcx(),
+ target_fields[coerce_index].ty(tcx,
target_substs))
}
_ => bug!("find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}",
/// Creates a `TransItem` for each method that is referenced by the vtable for
/// the given trait/impl pair.
-fn create_trans_items_for_vtable_methods<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+fn create_trans_items_for_vtable_methods<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_ty: Ty<'tcx>,
impl_ty: Ty<'tcx>,
output: &mut Vec<TransItem<'tcx>>) {
if let ty::TyDynamic(ref trait_ty, ..) = trait_ty.sty {
if let Some(principal) = trait_ty.principal() {
- let poly_trait_ref = principal.with_self_ty(scx.tcx(), impl_ty);
+ let poly_trait_ref = principal.with_self_ty(tcx, impl_ty);
assert!(!poly_trait_ref.has_escaping_regions());
// Walk all methods of the trait, including those of its supertraits
- let methods = traits::get_vtable_methods(scx.tcx(), poly_trait_ref);
+ let methods = traits::get_vtable_methods(tcx, poly_trait_ref);
let methods = methods.filter_map(|method| method)
- .map(|(def_id, substs)| monomorphize::resolve(scx, def_id, substs))
- .filter(|&instance| should_trans_locally(scx.tcx(), &instance))
+ .map(|(def_id, substs)| monomorphize::resolve(tcx, def_id, substs))
+ .filter(|&instance| should_trans_locally(tcx, &instance))
.map(|instance| create_fn_trans_item(instance));
output.extend(methods);
}
// Also add the destructor
- visit_drop_use(scx, impl_ty, false, output);
+ visit_drop_use(tcx, impl_ty, false, output);
}
}
//=-----------------------------------------------------------------------------
struct RootCollector<'b, 'a: 'b, 'tcx: 'a + 'b> {
- scx: &'b SharedCrateContext<'a, 'tcx>,
- exported_symbols: &'b ExportedSymbols,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: TransItemCollectionMode,
output: &'b mut Vec<TransItem<'tcx>>,
}
hir::ItemImpl(..) => {
if self.mode == TransItemCollectionMode::Eager {
- create_trans_items_for_default_impls(self.scx,
+ create_trans_items_for_default_impls(self.tcx,
item,
self.output);
}
hir::ItemUnion(_, ref generics) => {
if !generics.is_parameterized() {
if self.mode == TransItemCollectionMode::Eager {
- let def_id = self.scx.tcx().hir.local_def_id(item.id);
+ let def_id = self.tcx.hir.local_def_id(item.id);
debug!("RootCollector: ADT drop-glue for {}",
- def_id_to_string(self.scx.tcx(), def_id));
+ def_id_to_string(self.tcx, def_id));
- let ty = def_ty(self.scx, def_id, Substs::empty());
- visit_drop_use(self.scx, ty, true, self.output);
+ let ty = def_ty(self.tcx, def_id, Substs::empty());
+ visit_drop_use(self.tcx, ty, true, self.output);
}
}
}
hir::ItemGlobalAsm(..) => {
debug!("RootCollector: ItemGlobalAsm({})",
- def_id_to_string(self.scx.tcx(),
- self.scx.tcx().hir.local_def_id(item.id)));
+ def_id_to_string(self.tcx,
+ self.tcx.hir.local_def_id(item.id)));
self.output.push(TransItem::GlobalAsm(item.id));
}
hir::ItemStatic(..) => {
debug!("RootCollector: ItemStatic({})",
- def_id_to_string(self.scx.tcx(),
- self.scx.tcx().hir.local_def_id(item.id)));
+ def_id_to_string(self.tcx,
+ self.tcx.hir.local_def_id(item.id)));
self.output.push(TransItem::Static(item.id));
}
hir::ItemConst(..) => {
// actually used somewhere. Just declaring them is insufficient.
}
hir::ItemFn(..) => {
- let tcx = self.scx.tcx();
+ let tcx = self.tcx;
let def_id = tcx.hir.local_def_id(item.id);
if (self.mode == TransItemCollectionMode::Eager ||
- !tcx.is_const_fn(def_id) ||
- self.exported_symbols.local_exports().contains(&item.id)) &&
+ !tcx.is_const_fn(def_id) || tcx.is_exported_symbol(def_id)) &&
!item_has_type_parameters(tcx, def_id) {
debug!("RootCollector: ItemFn({})",
fn visit_impl_item(&mut self, ii: &'v hir::ImplItem) {
match ii.node {
hir::ImplItemKind::Method(hir::MethodSig { .. }, _) => {
- let tcx = self.scx.tcx();
+ let tcx = self.tcx;
let def_id = tcx.hir.local_def_id(ii.id);
if (self.mode == TransItemCollectionMode::Eager ||
!tcx.is_const_fn(def_id) ||
- self.exported_symbols.local_exports().contains(&ii.id)) &&
+ tcx.is_exported_symbol(def_id)) &&
!item_has_type_parameters(tcx, def_id) {
debug!("RootCollector: MethodImplItem({})",
def_id_to_string(tcx, def_id));
generics.parent_types as usize + generics.types.len() > 0
}
-fn create_trans_items_for_default_impls<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+fn create_trans_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
item: &'tcx hir::Item,
output: &mut Vec<TransItem<'tcx>>) {
- let tcx = scx.tcx();
match item.node {
hir::ItemImpl(_,
_,
}
let instance =
- monomorphize::resolve(scx, method.def_id, callee_substs);
+ monomorphize::resolve(tcx, method.def_id, callee_substs);
let trans_item = create_fn_trans_item(instance);
if trans_item.is_instantiable(tcx) && should_trans_locally(tcx, &instance) {
}
/// Scan the MIR in order to find function calls, closures, and drop-glue
-fn collect_neighbours<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+fn collect_neighbours<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: Instance<'tcx>,
const_context: bool,
output: &mut Vec<TransItem<'tcx>>)
{
- let mir = scx.tcx().instance_mir(instance.def);
+ let mir = tcx.instance_mir(instance.def);
let mut visitor = MirNeighborCollector {
- scx,
+ tcx,
mir: &mir,
output,
param_substs: instance.substs,
use monomorphize;
use type_::Type;
use value::Value;
+use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{Layout, LayoutTyper};
use rustc::ty::subst::{Kind, Subst, Substs};
use syntax::abi::Abi;
use syntax::attr;
use syntax::symbol::InternedString;
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};
pub use context::{CrateContext, SharedCrateContext};
!layout.is_unsized() && layout.size(ccx).bytes() == 0
}
+pub fn type_needs_drop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
+ ty.needs_drop(tcx, ty::ParamEnv::empty(traits::Reveal::All))
+}
+
+pub fn type_is_sized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
+ ty.is_sized(tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
+}
+
+pub fn type_is_freeze<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
+ ty.is_freeze(tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
+}
+
/*
* A note on nomenclature of linking: "extern", "foreign", and "upcall".
*
}
/// Given a DefId and some Substs, produces the monomorphic item type.
-pub fn def_ty<'a, 'tcx>(shared: &SharedCrateContext<'a, 'tcx>,
+pub fn def_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
substs: &'tcx Substs<'tcx>)
-> Ty<'tcx>
{
- let ty = shared.tcx().type_of(def_id);
- shared.tcx().trans_apply_param_substs(substs, &ty)
+ let ty = tcx.type_of(def_id);
+ tcx.trans_apply_param_substs(substs, &ty)
}
/// Return the substituted type of an instance.
-pub fn instance_ty<'a, 'tcx>(shared: &SharedCrateContext<'a, 'tcx>,
+pub fn instance_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: &ty::Instance<'tcx>)
-> Ty<'tcx>
{
- let ty = instance.def.def_ty(shared.tcx());
- shared.tcx().trans_apply_param_substs(instance.substs, &ty)
+ let ty = instance.def.def_ty(tcx);
+ tcx.trans_apply_param_substs(instance.substs, &ty)
}
use rustc::middle::const_val::ConstEvalErr;
use {debuginfo, machine};
use base;
-use trans_item::TransItem;
+use trans_item::{TransItem, TransItemExt};
use common::{self, CrateContext, val_ty};
use declare;
use monomorphize::Instance;
return g;
}
- let ty = common::instance_ty(ccx.shared(), &instance);
+ let ty = common::instance_ty(ccx.tcx(), &instance);
let g = if let Some(id) = ccx.tcx().hir.as_local_node_id(def_id) {
let llty = type_of::type_of(ccx, ty);
let g = declare::define_global(ccx, &sym[..], llty).unwrap();
- if !ccx.exported_symbols().local_exports().contains(&id) {
+ if !ccx.tcx().is_exported_symbol(def_id) {
unsafe {
llvm::LLVMRustSetVisibility(g, llvm::Visibility::Hidden);
}
// extern "C" fn() from being non-null, so we can't just declare a
// static and call it a day. Some linkages (like weak) will make it such
// that the static actually has a null value.
- let linkage = match base::llvm_linkage_by_name(&name.as_str()) {
+ let linkage = match base::linkage_by_name(&name.as_str()) {
Some(linkage) => linkage,
None => {
ccx.sess().span_fatal(span, "invalid linkage specified");
unsafe {
// Declare a symbol `foo` with the desired linkage.
let g1 = declare::declare_global(ccx, &sym, llty2);
- llvm::LLVMRustSetLinkage(g1, linkage);
+ llvm::LLVMRustSetLinkage(g1, base::linkage_to_llvm(linkage));
// Declare an internal global `extern_with_linkage_foo` which
// is initialized with the address of `foo`. If `foo` is
};
let instance = Instance::mono(ccx.tcx(), def_id);
- let ty = common::instance_ty(ccx.shared(), &instance);
+ let ty = common::instance_ty(ccx.tcx(), &instance);
let llty = type_of::type_of(ccx, ty);
let g = if val_llty == llty {
g
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use common;
use llvm;
use llvm::{ContextRef, ModuleRef, ValueRef};
use rustc::dep_graph::{DepGraph, DepGraphSafe};
use rustc::hir;
use rustc::hir::def_id::DefId;
+use rustc::ich::StableHashingContext;
use rustc::traits;
use debuginfo;
use callee;
-use back::symbol_export::ExportedSymbols;
use base;
use declare;
use monomorphize::Instance;
use partitioning::CodegenUnit;
-use trans_item::TransItem;
use type_::Type;
use rustc_data_structures::base_n;
-use rustc::session::config::{self, NoDebugInfo, OutputFilenames};
+use rustc::middle::trans::Stats;
+use rustc_data_structures::stable_hasher::StableHashingContextProvider;
+use rustc::session::config::{self, NoDebugInfo};
use rustc::session::Session;
-use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{LayoutCx, LayoutError, LayoutTyper, TyLayout};
-use rustc::util::nodemap::{FxHashMap, FxHashSet};
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::util::nodemap::FxHashMap;
use std::ffi::{CStr, CString};
use std::cell::{Cell, RefCell};
use std::sync::Arc;
use std::marker::PhantomData;
use syntax::symbol::InternedString;
-use syntax_pos::DUMMY_SP;
use abi::Abi;
-#[derive(Clone, Default)]
-pub struct Stats {
- pub n_glues_created: Cell<usize>,
- pub n_null_glues: Cell<usize>,
- pub n_real_glues: Cell<usize>,
- pub n_fns: Cell<usize>,
- pub n_inlines: Cell<usize>,
- pub n_closures: Cell<usize>,
- pub n_llvm_insns: Cell<usize>,
- pub llvm_insns: RefCell<FxHashMap<String, usize>>,
- // (ident, llvm-instructions)
- pub fn_stats: RefCell<Vec<(String, usize)> >,
-}
-
-impl Stats {
- pub fn extend(&mut self, stats: Stats) {
- self.n_glues_created.set(self.n_glues_created.get() + stats.n_glues_created.get());
- self.n_null_glues.set(self.n_null_glues.get() + stats.n_null_glues.get());
- self.n_real_glues.set(self.n_real_glues.get() + stats.n_real_glues.get());
- self.n_fns.set(self.n_fns.get() + stats.n_fns.get());
- self.n_inlines.set(self.n_inlines.get() + stats.n_inlines.get());
- self.n_closures.set(self.n_closures.get() + stats.n_closures.get());
- self.n_llvm_insns.set(self.n_llvm_insns.get() + stats.n_llvm_insns.get());
- self.llvm_insns.borrow_mut().extend(
- stats.llvm_insns.borrow().iter()
- .map(|(key, value)| (key.clone(), value.clone())));
- self.fn_stats.borrow_mut().append(&mut *stats.fn_stats.borrow_mut());
- }
-}
-
/// The shared portion of a `CrateContext`. There is one `SharedCrateContext`
/// per crate. The data here is shared between all compilation units of the
/// crate, so it must not contain references to any LLVM data structures
pub struct SharedCrateContext<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
check_overflow: bool,
-
use_dll_storage_attrs: bool,
-
- output_filenames: &'a OutputFilenames,
}
/// The local portion of a `CrateContext`. There is one `LocalCrateContext`
pub struct LocalCrateContext<'a, 'tcx: 'a> {
llmod: ModuleRef,
llcx: ContextRef,
- stats: Stats,
- codegen_unit: CodegenUnit<'tcx>,
-
- /// The translation items of the whole crate.
- crate_trans_items: Arc<FxHashSet<TransItem<'tcx>>>,
-
- /// Information about which symbols are exported from the crate.
- exported_symbols: Arc<ExportedSymbols>,
+ stats: RefCell<Stats>,
+ codegen_unit: Arc<CodegenUnit<'tcx>>,
/// Cache instances of monomorphic and polymorphic items
instances: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
impl<'a, 'tcx> DepGraphSafe for CrateContext<'a, 'tcx> {
}
+impl<'a, 'tcx> DepGraphSafe for SharedCrateContext<'a, 'tcx> {
+}
+
+impl<'a, 'tcx> StableHashingContextProvider for SharedCrateContext<'a, 'tcx> {
+ type ContextType = StableHashingContext<'tcx>;
+
+ fn create_stable_hashing_context(&self) -> Self::ContextType {
+ self.tcx.create_stable_hashing_context()
+ }
+}
+
pub fn get_reloc_model(sess: &Session) -> llvm::RelocMode {
let reloc_model_arg = match sess.opts.cg.relocation_model {
Some(ref s) => &s[..],
}
impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
- pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
- check_overflow: bool,
- output_filenames: &'b OutputFilenames)
- -> SharedCrateContext<'b, 'tcx> {
+ pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>) -> SharedCrateContext<'b, 'tcx> {
// An interesting part of Windows which MSVC forces our hand on (and
// apparently MinGW didn't) is the usage of `dllimport` and `dllexport`
// attributes in LLVM IR as well as native dependencies (in C these
// start) and then strongly recommending static linkage on MSVC!
let use_dll_storage_attrs = tcx.sess.target.target.options.is_like_msvc;
+ let check_overflow = tcx.sess.overflow_checks();
+
SharedCrateContext {
tcx,
check_overflow,
use_dll_storage_attrs,
- output_filenames,
}
}
pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
- ty.needs_drop(self.tcx, ty::ParamEnv::empty(traits::Reveal::All))
+ common::type_needs_drop(self.tcx, ty)
}
pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
- ty.is_sized(self.tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
+ common::type_is_sized(self.tcx, ty)
}
pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
- ty.is_freeze(self.tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
+ common::type_is_freeze(self.tcx, ty)
}
- pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ pub fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
self.tcx
}
pub fn use_dll_storage_attrs(&self) -> bool {
self.use_dll_storage_attrs
}
-
- pub fn output_filenames(&self) -> &OutputFilenames {
- self.output_filenames
- }
}
impl<'a, 'tcx> LocalCrateContext<'a, 'tcx> {
pub fn new(shared: &SharedCrateContext<'a, 'tcx>,
- codegen_unit: CodegenUnit<'tcx>,
- crate_trans_items: Arc<FxHashSet<TransItem<'tcx>>>,
- exported_symbols: Arc<ExportedSymbols>,)
+ codegen_unit: Arc<CodegenUnit<'tcx>>)
-> LocalCrateContext<'a, 'tcx> {
unsafe {
// Append ".rs" to LLVM module identifier.
let local_ccx = LocalCrateContext {
llmod,
llcx,
- stats: Stats::default(),
+ stats: RefCell::new(Stats::default()),
codegen_unit,
- crate_trans_items,
- exported_symbols,
instances: RefCell::new(FxHashMap()),
vtables: RefCell::new(FxHashMap()),
const_cstr_cache: RefCell::new(FxHashMap()),
}
pub fn into_stats(self) -> Stats {
- self.stats
+ self.stats.into_inner()
}
}
self.local_ccx
}
- pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ pub fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
self.shared.tcx
}
&self.local().codegen_unit
}
- pub fn crate_trans_items(&self) -> &FxHashSet<TransItem<'tcx>> {
- &self.local().crate_trans_items
- }
-
- pub fn exported_symbols(&self) -> &ExportedSymbols {
- &self.local().exported_symbols
- }
-
pub fn td(&self) -> llvm::TargetDataRef {
unsafe { llvm::LLVMRustGetModuleDataLayout(self.llmod()) }
}
&self.local().lltypes
}
- pub fn stats<'a>(&'a self) -> &'a Stats {
+ pub fn stats<'a>(&'a self) -> &'a RefCell<Stats> {
&self.local().stats
}
let gcov_cu_info = [
path_to_mdstring(debug_context.llcontext,
- &scc.output_filenames().with_extension("gcno")),
+ &scc.tcx().output_filenames(LOCAL_CRATE).with_extension("gcno")),
path_to_mdstring(debug_context.llcontext,
- &scc.output_filenames().with_extension("gcda")),
+ &scc.tcx().output_filenames(LOCAL_CRATE).with_extension("gcda")),
cu_desc_metadata,
];
let gcov_metadata = llvm::LLVMMDNodeInContext(debug_context.llcontext,
};
let is_local_to_unit = is_node_local_to_unit(cx, node_id);
- let variable_type = common::def_ty(cx.shared(), node_def_id, Substs::empty());
+ let variable_type = common::def_ty(cx.tcx(), node_def_id, Substs::empty());
let type_metadata = type_metadata(cx, variable_type, span);
let var_name = tcx.item_name(node_def_id).to_string();
let linkage_name = mangled_name_of_item(cx, node_def_id, "");
// If the method does *not* belong to a trait, proceed
if cx.tcx().trait_id_of_impl(impl_def_id).is_none() {
let impl_self_ty =
- common::def_ty(cx.shared(), impl_def_id, instance.substs);
+ common::def_ty(cx.tcx(), impl_def_id, instance.substs);
// Only "class" methods are generally understood by LLVM,
// so avoid methods on other types (e.g. `<*mut T>::null`).
// visible). It might better to use the `exported_items` set from
// `driver::CrateAnalysis` in the future, but (atm) this set is not
// available in the translation pass.
- !cx.exported_symbols().local_exports().contains(&node_id)
+ let def_id = cx.tcx().hir.local_def_id(node_id);
+ !cx.tcx().is_exported_symbol(def_id)
}
#[allow(non_snake_case)]
unsafe { simd_add(i32x1(0), i32x1(1)); } // ok!
```
"##,
+
+E0558: r##"
+The `export_name` attribute was malformed.
+
+Erroneous code example:
+
+```ignore (error-emitted-at-codegen-which-cannot-be-handled-by-compile_fail)
+#[export_name] // error: export_name attribute has invalid format
+pub fn something() {}
+
+fn main() {}
+```
+
+The `export_name` attribute expects a string in order to determine the name of
+the exported symbol. Example:
+
+```
+#[export_name = "some_function"] // ok!
+pub fn something() {}
+
+fn main() {}
+```
+"##,
+
}
use std;
-use llvm;
-use llvm::{ValueRef};
-use rustc::traits;
-use rustc::ty::{self, Ty, TypeFoldable};
-use rustc::ty::layout::LayoutTyper;
+use builder::Builder;
use common::*;
+use llvm::{ValueRef};
+use llvm;
use meth;
use monomorphize;
+use rustc::ty::layout::LayoutTyper;
+use rustc::ty::{self, Ty};
use value::Value;
-use builder::Builder;
-
-pub fn needs_drop_glue<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, t: Ty<'tcx>) -> bool {
- assert!(t.is_normalized_for_trans());
-
- let t = scx.tcx().erase_regions(&t);
-
- // FIXME (#22815): note that type_needs_drop conservatively
- // approximates in some cases and may say a type expression
- // requires drop glue when it actually does not.
- //
- // (In this case it is not clear whether any harm is done, i.e.
- // erroneously returning `true` in some cases where we could have
- // returned `false` does not appear unsound. The impact on
- // code quality is unknown at this time.)
-
- if !scx.type_needs_drop(t) {
- return false;
- }
- match t.sty {
- ty::TyAdt(def, _) if def.is_box() => {
- let typ = t.boxed_ty();
- if !scx.type_needs_drop(typ) && scx.type_is_sized(typ) {
- let layout = t.layout(scx.tcx(), ty::ParamEnv::empty(traits::Reveal::All)).unwrap();
- if layout.size(scx).bytes() == 0 {
- // `Box<ZeroSizeType>` does not allocate.
- false
- } else {
- true
- }
- } else {
- true
- }
- }
- _ => true
- }
-}
pub fn size_and_align_of_dst<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, info: ValueRef)
-> (ValueRef, ValueRef) {
use rustc::dep_graph::WorkProduct;
use syntax_pos::symbol::Symbol;
+#[macro_use]
+extern crate bitflags;
extern crate flate2;
extern crate libc;
extern crate owning_ref;
extern crate rustc_llvm as llvm;
extern crate rustc_platform_intrinsics as intrinsics;
extern crate rustc_const_math;
-#[macro_use]
-#[no_link]
-extern crate rustc_bitflags;
extern crate rustc_demangle;
extern crate jobserver;
extern crate num_cpus;
extern crate gcc; // Used to locate MSVC, not gcc :)
pub use base::trans_crate;
-pub use back::symbol_names::provide;
pub use metadata::LlvmMetadataLoader;
pub use llvm_util::{init, target_features, print_version, print_passes, print, enable_llvm_debug};
use std::rc::Rc;
use rustc::hir::def_id::CrateNum;
-use rustc::util::nodemap::{FxHashSet, FxHashMap};
use rustc::middle::cstore::{NativeLibrary, CrateSource, LibSource};
+use rustc::ty::maps::Providers;
+use rustc::util::nodemap::{FxHashSet, FxHashMap};
+
+mod diagnostics;
pub mod back {
mod archive;
mod rpath;
}
-mod diagnostics;
-
mod abi;
mod adt;
mod allocator;
}
__build_diagnostic_array! { librustc_trans, DIAGNOSTICS }
+
+pub fn provide_local(providers: &mut Providers) {
+ back::symbol_names::provide(providers);
+ back::symbol_export::provide_local(providers);
+ base::provide_local(providers);
+}
+
+pub fn provide_extern(providers: &mut Providers) {
+ back::symbol_names::provide(providers);
+ back::symbol_export::provide_extern(providers);
+ base::provide_extern(providers);
+}
let nullptr = C_null(Type::nil(ccx).ptr_to());
let mut components: Vec<_> = [
- callee::get_fn(ccx, monomorphize::resolve_drop_in_place(ccx.shared(), ty)),
+ callee::get_fn(ccx, monomorphize::resolve_drop_in_place(ccx.tcx(), ty)),
C_usize(ccx, ccx.size_of(ty)),
C_usize(ccx, ccx.align_of(ty) as u64)
].iter().cloned().collect();
mir::TerminatorKind::Drop { ref location, target, unwind } => {
let ty = location.ty(self.mir, bcx.tcx()).to_ty(bcx.tcx());
let ty = self.monomorphize(&ty);
- let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.shared(), ty);
+ let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.tcx(), ty);
if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
// we don't actually need to drop anything.
let (instance, mut llfn) = match callee.ty.sty {
ty::TyFnDef(def_id, substs) => {
- (Some(monomorphize::resolve(bcx.ccx.shared(), def_id, substs)),
+ (Some(monomorphize::resolve(bcx.ccx.tcx(), def_id, substs)),
None)
}
ty::TyFnPtr(_) => {
};
let callee_ty = common::instance_ty(
- bcx.ccx.shared(), instance.as_ref().unwrap());
+ bcx.ccx.tcx(), instance.as_ref().unwrap());
trans_intrinsic_call(&bcx, callee_ty, &fn_ty, &llargs, dest,
terminator.source_info.span);
substs: &'tcx Substs<'tcx>,
args: IndexVec<mir::Local, Result<Const<'tcx>, ConstEvalErr<'tcx>>>)
-> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
- let instance = monomorphize::resolve(ccx.shared(), def_id, substs);
+ let instance = monomorphize::resolve(ccx.tcx(), def_id, substs);
let mir = ccx.tcx().instance_mir(instance.def);
MirConstContext::new(ccx, &mir, instance.substs, args).trans()
}
match operand.ty.sty {
ty::TyClosure(def_id, substs) => {
let instance = monomorphize::resolve_closure(
- bcx.ccx.shared(), def_id, substs, ty::ClosureKind::FnOnce);
+ bcx.ccx.tcx(), def_id, substs, ty::ClosureKind::FnOnce);
OperandValue::Immediate(callee::get_fn(bcx.ccx, instance))
}
_ => {
use abi::Abi;
use common::*;
-use glue;
use rustc::hir::def_id::DefId;
use rustc::middle::lang_items::DropInPlaceFnLangItem;
}
pub fn resolve_closure<'a, 'tcx> (
- scx: &SharedCrateContext<'a, 'tcx>,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
substs: ty::ClosureSubsts<'tcx>,
requested_kind: ty::ClosureKind)
-> Instance<'tcx>
{
- let actual_kind = scx.tcx().closure_kind(def_id);
+ let actual_kind = tcx.closure_kind(def_id);
match needs_fn_once_adapter_shim(actual_kind, requested_kind) {
- Ok(true) => fn_once_adapter_instance(scx.tcx(), def_id, substs),
+ Ok(true) => fn_once_adapter_instance(tcx, def_id, substs),
_ => Instance::new(def_id, substs.substs)
}
}
fn resolve_associated_item<'a, 'tcx>(
- scx: &SharedCrateContext<'a, 'tcx>,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_item: &ty::AssociatedItem,
trait_id: DefId,
rcvr_substs: &'tcx Substs<'tcx>
) -> Instance<'tcx> {
- let tcx = scx.tcx();
let def_id = trait_item.def_id;
debug!("resolve_associated_item(trait_item={:?}, \
trait_id={:?}, \
}
traits::VtableClosure(closure_data) => {
let trait_closure_kind = tcx.lang_items().fn_trait_kind(trait_id).unwrap();
- resolve_closure(scx, closure_data.closure_def_id, closure_data.substs,
+ resolve_closure(tcx, closure_data.closure_def_id, closure_data.substs,
trait_closure_kind)
}
traits::VtableFnPointer(ref data) => {
/// The point where linking happens. Resolve a (def_id, substs)
/// pair to an instance.
pub fn resolve<'a, 'tcx>(
- scx: &SharedCrateContext<'a, 'tcx>,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
substs: &'tcx Substs<'tcx>
) -> Instance<'tcx> {
debug!("resolve(def_id={:?}, substs={:?})",
def_id, substs);
- let result = if let Some(trait_def_id) = scx.tcx().trait_of_item(def_id) {
+ let result = if let Some(trait_def_id) = tcx.trait_of_item(def_id) {
debug!(" => associated item, attempting to find impl");
- let item = scx.tcx().associated_item(def_id);
- resolve_associated_item(scx, &item, trait_def_id, substs)
+ let item = tcx.associated_item(def_id);
+ resolve_associated_item(tcx, &item, trait_def_id, substs)
} else {
- let item_type = def_ty(scx, def_id, substs);
+ let item_type = def_ty(tcx, def_id, substs);
let def = match item_type.sty {
ty::TyFnDef(..) if {
- let f = item_type.fn_sig(scx.tcx());
+ let f = item_type.fn_sig(tcx);
f.abi() == Abi::RustIntrinsic ||
f.abi() == Abi::PlatformIntrinsic
} =>
ty::InstanceDef::Intrinsic(def_id)
}
_ => {
- if Some(def_id) == scx.tcx().lang_items().drop_in_place_fn() {
+ if Some(def_id) == tcx.lang_items().drop_in_place_fn() {
let ty = substs.type_at(0);
- if glue::needs_drop_glue(scx, ty) {
+ if type_needs_drop(tcx, ty) {
debug!(" => nontrivial drop glue");
ty::InstanceDef::DropGlue(def_id, Some(ty))
} else {
}
pub fn resolve_drop_in_place<'a, 'tcx>(
- scx: &SharedCrateContext<'a, 'tcx>,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: Ty<'tcx>)
-> ty::Instance<'tcx>
{
- let def_id = scx.tcx().require_lang_item(DropInPlaceFnLangItem);
- let substs = scx.tcx().intern_substs(&[Kind::from(ty)]);
- resolve(scx, def_id, substs)
+ let def_id = tcx.require_lang_item(DropInPlaceFnLangItem);
+ let substs = tcx.intern_substs(&[Kind::from(ty)]);
+ resolve(tcx, def_id, substs)
}
-pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx>,
- source_ty: Ty<'tcx>,
- target_ty: Ty<'tcx>)
- -> CustomCoerceUnsized {
+pub fn custom_coerce_unsize_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ source_ty: Ty<'tcx>,
+ target_ty: Ty<'tcx>)
+ -> CustomCoerceUnsized {
let trait_ref = ty::Binder(ty::TraitRef {
- def_id: scx.tcx().lang_items().coerce_unsized_trait().unwrap(),
- substs: scx.tcx().mk_substs_trait(source_ty, &[target_ty])
+ def_id: tcx.lang_items().coerce_unsized_trait().unwrap(),
+ substs: tcx.mk_substs_trait(source_ty, &[target_ty])
});
- match scx.tcx().trans_fulfill_obligation(DUMMY_SP, trait_ref) {
+ match tcx.trans_fulfill_obligation(DUMMY_SP, trait_ref) {
traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
- scx.tcx().coerce_unsized_info(impl_def_id).custom_kind.unwrap()
+ tcx.coerce_unsized_info(impl_def_id).custom_kind.unwrap()
}
vtable => {
bug!("invalid CoerceUnsized vtable: {:?}", vtable);
//! source-level module, functions from the same module will be available for
//! inlining, even when they are not marked #[inline].
-use back::symbol_export::ExportedSymbols;
use collector::InliningMap;
use common;
-use context::SharedCrateContext;
-use llvm;
use rustc::dep_graph::{DepNode, WorkProductId};
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathData;
+use rustc::middle::trans::{Linkage, Visibility};
use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
use rustc::ty::{self, TyCtxt, InstanceDef};
use rustc::ty::item_path::characteristic_def_id_of_type;
use std::hash::Hash;
use syntax::ast::NodeId;
use syntax::symbol::{Symbol, InternedString};
-use trans_item::{TransItem, InstantiationMode};
+use trans_item::{TransItem, TransItemExt, InstantiationMode};
+
+pub use rustc::middle::trans::CodegenUnit;
pub enum PartitioningStrategy {
/// Generate one codegen unit per source-level module.
FixedUnitCount(usize)
}
-pub struct CodegenUnit<'tcx> {
- /// A name for this CGU. Incremental compilation requires that
- /// name be unique amongst **all** crates. Therefore, it should
- /// contain something unique to this crate (e.g., a module path)
- /// as well as the crate name and disambiguator.
- name: InternedString,
-
- items: FxHashMap<TransItem<'tcx>, (llvm::Linkage, llvm::Visibility)>,
-}
+pub trait CodegenUnitExt<'tcx> {
+ fn as_codegen_unit(&self) -> &CodegenUnit<'tcx>;
-impl<'tcx> CodegenUnit<'tcx> {
- pub fn new(name: InternedString,
- items: FxHashMap<TransItem<'tcx>, (llvm::Linkage, llvm::Visibility)>)
- -> Self {
- CodegenUnit {
- name,
- items,
- }
+ fn contains_item(&self, item: &TransItem<'tcx>) -> bool {
+ self.items().contains_key(item)
}
- pub fn empty(name: InternedString) -> Self {
- Self::new(name, FxHashMap())
+ fn name<'a>(&'a self) -> &'a InternedString
+ where 'tcx: 'a,
+ {
+ &self.as_codegen_unit().name()
}
- pub fn contains_item(&self, item: &TransItem<'tcx>) -> bool {
- self.items.contains_key(item)
+ fn items(&self) -> &FxHashMap<TransItem<'tcx>, (Linkage, Visibility)> {
+ &self.as_codegen_unit().items()
}
- pub fn name(&self) -> &str {
- &self.name
- }
-
- pub fn items(&self) -> &FxHashMap<TransItem<'tcx>, (llvm::Linkage, llvm::Visibility)> {
- &self.items
- }
-
- pub fn work_product_id(&self) -> WorkProductId {
+ fn work_product_id(&self) -> WorkProductId {
WorkProductId::from_cgu_name(self.name())
}
- pub fn work_product_dep_node(&self) -> DepNode {
+ fn work_product_dep_node(&self) -> DepNode {
self.work_product_id().to_dep_node()
}
- pub fn compute_symbol_name_hash<'a>(&self,
- scx: &SharedCrateContext<'a, 'tcx>)
- -> u64 {
+ fn compute_symbol_name_hash<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> u64 {
let mut state = IchHasher::new();
- let all_items = self.items_in_deterministic_order(scx.tcx());
+ let all_items = self.items_in_deterministic_order(tcx);
for (item, (linkage, visibility)) in all_items {
- let symbol_name = item.symbol_name(scx.tcx());
+ let symbol_name = item.symbol_name(tcx);
symbol_name.len().hash(&mut state);
symbol_name.hash(&mut state);
linkage.hash(&mut state);
state.finish().to_smaller_hash()
}
- pub fn items_in_deterministic_order<'a>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Vec<(TransItem<'tcx>,
- (llvm::Linkage, llvm::Visibility))> {
+ fn items_in_deterministic_order<'a>(&self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> Vec<(TransItem<'tcx>,
+ (Linkage, Visibility))> {
// The codegen tests rely on items being process in the same order as
// they appear in the file, so for local items, we sort by node_id first
#[derive(PartialEq, Eq, PartialOrd, Ord)]
}, item.symbol_name(tcx))
}
- let items: Vec<_> = self.items.iter().map(|(&i, &l)| (i, l)).collect();
+ let items: Vec<_> = self.items().iter().map(|(&i, &l)| (i, l)).collect();
let mut items : Vec<_> = items.iter()
.map(|il| (il, item_sort_key(tcx, il.0))).collect();
items.sort_by(|&(_, ref key1), &(_, ref key2)| key1.cmp(key2));
}
}
+impl<'tcx> CodegenUnitExt<'tcx> for CodegenUnit<'tcx> {
+ fn as_codegen_unit(&self) -> &CodegenUnit<'tcx> {
+ self
+ }
+}
// Anything we can't find a proper codegen unit for goes into this.
const FALLBACK_CODEGEN_UNIT: &'static str = "__rustc_fallback_codegen_unit";
-pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
+pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trans_items: I,
strategy: PartitioningStrategy,
- inlining_map: &InliningMap<'tcx>,
- exported_symbols: &ExportedSymbols)
+ inlining_map: &InliningMap<'tcx>)
-> Vec<CodegenUnit<'tcx>>
where I: Iterator<Item = TransItem<'tcx>>
{
- let tcx = scx.tcx();
-
// In the first step, we place all regular translation items into their
// respective 'home' codegen unit. Regular translation items are all
// functions and statics defined in the local crate.
- let mut initial_partitioning = place_root_translation_items(scx,
- exported_symbols,
+ let mut initial_partitioning = place_root_translation_items(tcx,
trans_items);
debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter());
} = post_inlining;
result.sort_by(|cgu1, cgu2| {
- (&cgu1.name[..]).cmp(&cgu2.name[..])
+ cgu1.name().cmp(cgu2.name())
});
- if scx.sess().opts.enable_dep_node_debug_strs() {
+ if tcx.sess.opts.enable_dep_node_debug_strs() {
for cgu in &result {
let dep_node = cgu.work_product_dep_node();
- scx.tcx().dep_graph.register_dep_node_debug_str(dep_node,
+ tcx.dep_graph.register_dep_node_debug_str(dep_node,
|| cgu.name().to_string());
}
}
internalization_candidates: FxHashSet<TransItem<'tcx>>,
}
-fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
- exported_symbols: &ExportedSymbols,
+fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trans_items: I)
-> PreInliningPartitioning<'tcx>
where I: Iterator<Item = TransItem<'tcx>>
{
- let tcx = scx.tcx();
- let exported_symbols = exported_symbols.local_exports();
-
let mut roots = FxHashSet();
let mut codegen_units = FxHashMap();
let is_incremental_build = tcx.sess.opts.incremental.is_some();
let is_root = trans_item.instantiation_mode(tcx) == InstantiationMode::GloballyShared;
if is_root {
- let characteristic_def_id = characteristic_def_id_of_trans_item(scx, trans_item);
+ let characteristic_def_id = characteristic_def_id_of_trans_item(tcx, trans_item);
let is_volatile = is_incremental_build &&
trans_item.is_generic_fn();
};
let make_codegen_unit = || {
- CodegenUnit::empty(codegen_unit_name.clone())
+ CodegenUnit::new(codegen_unit_name.clone())
};
let codegen_unit = codegen_units.entry(codegen_unit_name.clone())
.or_insert_with(make_codegen_unit);
let (linkage, visibility) = match trans_item.explicit_linkage(tcx) {
- Some(explicit_linkage) => (explicit_linkage, llvm::Visibility::Default),
+ Some(explicit_linkage) => (explicit_linkage, Visibility::Default),
None => {
match trans_item {
TransItem::Fn(ref instance) => {
let visibility = match instance.def {
InstanceDef::Item(def_id) => {
- if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
- if exported_symbols.contains(&node_id) {
- llvm::Visibility::Default
+ if def_id.is_local() {
+ if tcx.is_exported_symbol(def_id) {
+ Visibility::Default
} else {
internalization_candidates.insert(trans_item);
- llvm::Visibility::Hidden
+ Visibility::Hidden
}
} else {
internalization_candidates.insert(trans_item);
- llvm::Visibility::Hidden
+ Visibility::Hidden
}
}
InstanceDef::FnPtrShim(..) |
trans_item)
}
};
- (llvm::ExternalLinkage, visibility)
+ (Linkage::External, visibility)
}
TransItem::Static(node_id) |
TransItem::GlobalAsm(node_id) => {
- let visibility = if exported_symbols.contains(&node_id) {
- llvm::Visibility::Default
+ let def_id = tcx.hir.local_def_id(node_id);
+ let visibility = if tcx.is_exported_symbol(def_id) {
+ Visibility::Default
} else {
internalization_candidates.insert(trans_item);
- llvm::Visibility::Hidden
+ Visibility::Hidden
};
- (llvm::ExternalLinkage, visibility)
+ (Linkage::External, visibility)
}
}
}
};
- codegen_unit.items.insert(trans_item, (linkage, visibility));
+ codegen_unit.items_mut().insert(trans_item, (linkage, visibility));
roots.insert(trans_item);
}
}
if codegen_units.is_empty() {
let codegen_unit_name = Symbol::intern(FALLBACK_CODEGEN_UNIT).as_str();
codegen_units.insert(codegen_unit_name.clone(),
- CodegenUnit::empty(codegen_unit_name.clone()));
+ CodegenUnit::new(codegen_unit_name.clone()));
}
PreInliningPartitioning {
// translation items in a given unit. This could be improved on.
while codegen_units.len() > target_cgu_count {
// Sort small cgus to the back
- codegen_units.sort_by_key(|cgu| -(cgu.items.len() as i64));
- let smallest = codegen_units.pop().unwrap();
+ codegen_units.sort_by_key(|cgu| -(cgu.items().len() as i64));
+ let mut smallest = codegen_units.pop().unwrap();
let second_smallest = codegen_units.last_mut().unwrap();
- for (k, v) in smallest.items.into_iter() {
- second_smallest.items.insert(k, v);
+ for (k, v) in smallest.items_mut().drain() {
+ second_smallest.items_mut().insert(k, v);
}
}
for (index, cgu) in codegen_units.iter_mut().enumerate() {
- cgu.name = numbered_codegen_unit_name(crate_name, index);
+ cgu.set_name(numbered_codegen_unit_name(crate_name, index));
}
// If the initial partitioning contained less than target_cgu_count to begin
// we reach the target count
while codegen_units.len() < target_cgu_count {
let index = codegen_units.len();
- codegen_units.push(
- CodegenUnit::empty(numbered_codegen_unit_name(crate_name, index)));
+ let name = numbered_codegen_unit_name(crate_name, index);
+ codegen_units.push(CodegenUnit::new(name));
}
}
for old_codegen_unit in initial_cgus {
// Collect all items that need to be available in this codegen unit
let mut reachable = FxHashSet();
- for root in old_codegen_unit.items.keys() {
+ for root in old_codegen_unit.items().keys() {
follow_inlining(*root, inlining_map, &mut reachable);
}
- let mut new_codegen_unit = CodegenUnit {
- name: old_codegen_unit.name,
- items: FxHashMap(),
- };
+ let mut new_codegen_unit = CodegenUnit::new(old_codegen_unit.name().clone());
// Add all translation items that are not already there
for trans_item in reachable {
- if let Some(linkage) = old_codegen_unit.items.get(&trans_item) {
+ if let Some(linkage) = old_codegen_unit.items().get(&trans_item) {
// This is a root, just copy it over
- new_codegen_unit.items.insert(trans_item, *linkage);
+ new_codegen_unit.items_mut().insert(trans_item, *linkage);
} else {
if roots.contains(&trans_item) {
bug!("GloballyShared trans-item inlined into other CGU: \
}
// This is a cgu-private copy
- new_codegen_unit.items.insert(trans_item,
- (llvm::InternalLinkage, llvm::Visibility::Default));
+ new_codegen_unit.items_mut().insert(
+ trans_item,
+ (Linkage::Internal, Visibility::Default),
+ );
}
if !single_codegen_unit {
let placement = e.into_mut();
debug_assert!(match *placement {
TransItemPlacement::SingleCgu { ref cgu_name } => {
- *cgu_name != new_codegen_unit.name
+ *cgu_name != *new_codegen_unit.name()
}
TransItemPlacement::MultipleCgus => true,
});
}
Entry::Vacant(e) => {
e.insert(TransItemPlacement::SingleCgu {
- cgu_name: new_codegen_unit.name.clone()
+ cgu_name: new_codegen_unit.name().clone()
});
}
}
// could be accessed from.
for cgu in &mut partitioning.codegen_units {
for candidate in &partitioning.internalization_candidates {
- cgu.items.insert(*candidate, (llvm::InternalLinkage,
- llvm::Visibility::Default));
+ cgu.items_mut().insert(*candidate,
+ (Linkage::Internal, Visibility::Default));
}
}
// accessed from outside its defining codegen unit.
for cgu in &mut partitioning.codegen_units {
let home_cgu = TransItemPlacement::SingleCgu {
- cgu_name: cgu.name.clone()
+ cgu_name: cgu.name().clone()
};
- for (accessee, linkage_and_visibility) in &mut cgu.items {
+ for (accessee, linkage_and_visibility) in cgu.items_mut() {
if !partitioning.internalization_candidates.contains(accessee) {
// This item is no candidate for internalizing, so skip it.
continue
// If we got here, we did not find any accesses from other CGUs,
// so it's fine to make this translation item internal.
- *linkage_and_visibility = (llvm::InternalLinkage, llvm::Visibility::Default);
+ *linkage_and_visibility = (Linkage::Internal, Visibility::Default);
}
}
}
-fn characteristic_def_id_of_trans_item<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+fn characteristic_def_id_of_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trans_item: TransItem<'tcx>)
-> Option<DefId> {
- let tcx = scx.tcx();
match trans_item {
TransItem::Fn(instance) => {
let def_id = match instance.def {
if let Some(impl_def_id) = tcx.impl_of_method(def_id) {
// This is a method within an inherent impl, find out what the
// self-type is:
- let impl_self_ty = common::def_ty(scx, impl_def_id, instance.substs);
+ let impl_self_ty = common::def_ty(tcx, impl_def_id, instance.substs);
if let Some(def_id) = characteristic_def_id_of_type(impl_self_ty) {
return Some(def_id);
}
if cfg!(debug_assertions) {
debug!("{}", label);
for cgu in cgus {
- debug!("CodegenUnit {}:", cgu.name);
+ debug!("CodegenUnit {}:", cgu.name());
- for (trans_item, linkage) in &cgu.items {
+ for (trans_item, linkage) in cgu.items() {
let symbol_name = trans_item.symbol_name(tcx);
let symbol_hash_start = symbol_name.rfind('h');
let symbol_hash = symbol_hash_start.map(|i| &symbol_name[i ..])
use monomorphize::Instance;
use rustc::hir;
use rustc::hir::def_id::DefId;
+use rustc::middle::trans::{Linkage, Visibility};
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::subst::{Subst, Substs};
-use syntax::ast::{self, NodeId};
+use syntax::ast;
use syntax::attr;
use syntax_pos::Span;
use syntax_pos::symbol::Symbol;
use type_of;
-use std::fmt::Write;
+use std::fmt::{self, Write};
use std::iter;
-#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
-pub enum TransItem<'tcx> {
- Fn(Instance<'tcx>),
- Static(NodeId),
- GlobalAsm(NodeId),
-}
+pub use rustc::middle::trans::TransItem;
/// Describes how a translation item will be instantiated in object files.
#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
LocalCopy,
}
-impl<'a, 'tcx> TransItem<'tcx> {
+pub trait TransItemExt<'a, 'tcx>: fmt::Debug {
+ fn as_trans_item(&self) -> &TransItem<'tcx>;
- pub fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
+ fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
debug!("BEGIN IMPLEMENTING '{} ({})' in cgu {}",
- self.to_string(ccx.tcx()),
- self.to_raw_string(),
- ccx.codegen_unit().name());
+ self.to_string(ccx.tcx()),
+ self.to_raw_string(),
+ ccx.codegen_unit().name());
- match *self {
+ match *self.as_trans_item() {
TransItem::Static(node_id) => {
let tcx = ccx.tcx();
let item = tcx.hir.expect_item(node_id);
ccx.codegen_unit().name());
}
- pub fn predefine(&self,
- ccx: &CrateContext<'a, 'tcx>,
- linkage: llvm::Linkage,
- visibility: llvm::Visibility) {
+ fn predefine(&self,
+ ccx: &CrateContext<'a, 'tcx>,
+ linkage: Linkage,
+ visibility: Visibility) {
debug!("BEGIN PREDEFINING '{} ({})' in cgu {}",
self.to_string(ccx.tcx()),
self.to_raw_string(),
debug!("symbol {}", &symbol_name);
- match *self {
+ match *self.as_trans_item() {
TransItem::Static(node_id) => {
- TransItem::predefine_static(ccx, node_id, linkage, visibility, &symbol_name);
+ predefine_static(ccx, node_id, linkage, visibility, &symbol_name);
}
TransItem::Fn(instance) => {
- TransItem::predefine_fn(ccx, instance, linkage, visibility, &symbol_name);
+ predefine_fn(ccx, instance, linkage, visibility, &symbol_name);
}
TransItem::GlobalAsm(..) => {}
}
ccx.codegen_unit().name());
}
- fn predefine_static(ccx: &CrateContext<'a, 'tcx>,
- node_id: ast::NodeId,
- linkage: llvm::Linkage,
- visibility: llvm::Visibility,
- symbol_name: &str) {
- let def_id = ccx.tcx().hir.local_def_id(node_id);
- let instance = Instance::mono(ccx.tcx(), def_id);
- let ty = common::instance_ty(ccx.shared(), &instance);
- let llty = type_of::type_of(ccx, ty);
-
- let g = declare::define_global(ccx, symbol_name, llty).unwrap_or_else(|| {
- ccx.sess().span_fatal(ccx.tcx().hir.span(node_id),
- &format!("symbol `{}` is already defined", symbol_name))
- });
-
- unsafe {
- llvm::LLVMRustSetLinkage(g, linkage);
- llvm::LLVMRustSetVisibility(g, visibility);
- }
-
- ccx.instances().borrow_mut().insert(instance, g);
- ccx.statics().borrow_mut().insert(g, def_id);
- }
-
- fn predefine_fn(ccx: &CrateContext<'a, 'tcx>,
- instance: Instance<'tcx>,
- linkage: llvm::Linkage,
- visibility: llvm::Visibility,
- symbol_name: &str) {
- assert!(!instance.substs.needs_infer() &&
- !instance.substs.has_param_types());
-
- let mono_ty = common::instance_ty(ccx.shared(), &instance);
- let attrs = instance.def.attrs(ccx.tcx());
- let lldecl = declare::declare_fn(ccx, symbol_name, mono_ty);
- unsafe { llvm::LLVMRustSetLinkage(lldecl, linkage) };
- base::set_link_section(ccx, lldecl, &attrs);
- if linkage == llvm::Linkage::LinkOnceODRLinkage ||
- linkage == llvm::Linkage::WeakODRLinkage {
- llvm::SetUniqueComdat(ccx.llmod(), lldecl);
- }
-
- // If we're compiling the compiler-builtins crate, e.g. the equivalent of
- // compiler-rt, then we want to implicitly compile everything with hidden
- // visibility as we're going to link this object all over the place but
- // don't want the symbols to get exported.
- if linkage != llvm::Linkage::InternalLinkage &&
- linkage != llvm::Linkage::PrivateLinkage &&
- attr::contains_name(ccx.tcx().hir.krate_attrs(), "compiler_builtins") {
- unsafe {
- llvm::LLVMRustSetVisibility(lldecl, llvm::Visibility::Hidden);
- }
- } else {
- unsafe {
- llvm::LLVMRustSetVisibility(lldecl, visibility);
- }
- }
-
- debug!("predefine_fn: mono_ty = {:?} instance = {:?}", mono_ty, instance);
- if common::is_inline_instance(ccx.tcx(), &instance) {
- attributes::inline(lldecl, attributes::InlineAttr::Hint);
- }
- attributes::from_fn_attrs(ccx, &attrs, lldecl);
-
- ccx.instances().borrow_mut().insert(instance, lldecl);
- }
-
- pub fn symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::SymbolName {
- match *self {
+ fn symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::SymbolName {
+ match *self.as_trans_item() {
TransItem::Fn(instance) => tcx.symbol_name(instance),
TransItem::Static(node_id) => {
let def_id = tcx.hir.local_def_id(node_id);
}
}
- pub fn local_span(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Span> {
- match *self {
+ fn local_span(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Span> {
+ match *self.as_trans_item() {
TransItem::Fn(Instance { def, .. }) => {
tcx.hir.as_local_node_id(def.def_id())
}
}.map(|node_id| tcx.hir.span(node_id))
}
- pub fn instantiation_mode(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> InstantiationMode {
- match *self {
+ fn instantiation_mode(&self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> InstantiationMode {
+ match *self.as_trans_item() {
TransItem::Fn(ref instance) => {
if self.explicit_linkage(tcx).is_none() &&
common::requests_inline(tcx, instance)
}
}
- pub fn is_generic_fn(&self) -> bool {
- match *self {
+ fn is_generic_fn(&self) -> bool {
+ match *self.as_trans_item() {
TransItem::Fn(ref instance) => {
instance.substs.types().next().is_some()
}
}
}
- pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
- let def_id = match *self {
+ fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Linkage> {
+ let def_id = match *self.as_trans_item() {
TransItem::Fn(ref instance) => instance.def_id(),
TransItem::Static(node_id) => tcx.hir.local_def_id(node_id),
TransItem::GlobalAsm(..) => return None,
let attributes = tcx.get_attrs(def_id);
if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
- if let Some(linkage) = base::llvm_linkage_by_name(&name.as_str()) {
+ if let Some(linkage) = base::linkage_by_name(&name.as_str()) {
Some(linkage)
} else {
let span = tcx.hir.span_if_local(def_id);
/// Similarly, if a vtable method has such a signature, and therefore can't
/// be used, we can just not emit it and have a placeholder (a null pointer,
/// which will never be accessed) in its place.
- pub fn is_instantiable(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+ fn is_instantiable(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
debug!("is_instantiable({:?})", self);
- let (def_id, substs) = match *self {
+ let (def_id, substs) = match *self.as_trans_item() {
TransItem::Fn(ref instance) => (instance.def_id(), instance.substs),
TransItem::Static(node_id) => (tcx.hir.local_def_id(node_id), Substs::empty()),
// global asm never has predicates
traits::normalize_and_test_predicates(tcx, predicates)
}
- pub fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
+ fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
let hir_map = &tcx.hir;
- return match *self {
+ return match *self.as_trans_item() {
TransItem::Fn(instance) => {
to_string_internal(tcx, "fn ", instance)
},
}
}
- pub fn to_raw_string(&self) -> String {
- match *self {
+ fn to_raw_string(&self) -> String {
+ match *self.as_trans_item() {
TransItem::Fn(instance) => {
format!("Fn({:?}, {})",
instance.def,
}
}
+impl<'a, 'tcx> TransItemExt<'a, 'tcx> for TransItem<'tcx> {
+ fn as_trans_item(&self) -> &TransItem<'tcx> {
+ self
+ }
+}
+
+fn predefine_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+ node_id: ast::NodeId,
+ linkage: Linkage,
+ visibility: Visibility,
+ symbol_name: &str) {
+ let def_id = ccx.tcx().hir.local_def_id(node_id);
+ let instance = Instance::mono(ccx.tcx(), def_id);
+ let ty = common::instance_ty(ccx.tcx(), &instance);
+ let llty = type_of::type_of(ccx, ty);
+
+ let g = declare::define_global(ccx, symbol_name, llty).unwrap_or_else(|| {
+ ccx.sess().span_fatal(ccx.tcx().hir.span(node_id),
+ &format!("symbol `{}` is already defined", symbol_name))
+ });
+
+ unsafe {
+ llvm::LLVMRustSetLinkage(g, base::linkage_to_llvm(linkage));
+ llvm::LLVMRustSetVisibility(g, base::visibility_to_llvm(visibility));
+ }
+
+ ccx.instances().borrow_mut().insert(instance, g);
+ ccx.statics().borrow_mut().insert(g, def_id);
+}
+
+fn predefine_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+ instance: Instance<'tcx>,
+ linkage: Linkage,
+ visibility: Visibility,
+ symbol_name: &str) {
+ assert!(!instance.substs.needs_infer() &&
+ !instance.substs.has_param_types());
+
+ let mono_ty = common::instance_ty(ccx.tcx(), &instance);
+ let attrs = instance.def.attrs(ccx.tcx());
+ let lldecl = declare::declare_fn(ccx, symbol_name, mono_ty);
+ unsafe { llvm::LLVMRustSetLinkage(lldecl, base::linkage_to_llvm(linkage)) };
+ base::set_link_section(ccx, lldecl, &attrs);
+ if linkage == Linkage::LinkOnceODR ||
+ linkage == Linkage::WeakODR {
+ llvm::SetUniqueComdat(ccx.llmod(), lldecl);
+ }
+
+ // If we're compiling the compiler-builtins crate, e.g. the equivalent of
+ // compiler-rt, then we want to implicitly compile everything with hidden
+ // visibility as we're going to link this object all over the place but
+ // don't want the symbols to get exported.
+ if linkage != Linkage::Internal && linkage != Linkage::Private &&
+ attr::contains_name(ccx.tcx().hir.krate_attrs(), "compiler_builtins") {
+ unsafe {
+ llvm::LLVMRustSetVisibility(lldecl, llvm::Visibility::Hidden);
+ }
+ } else {
+ unsafe {
+ llvm::LLVMRustSetVisibility(lldecl, base::visibility_to_llvm(visibility));
+ }
+ }
+
+ debug!("predefine_fn: mono_ty = {:?} instance = {:?}", mono_ty, instance);
+ if common::is_inline_instance(ccx.tcx(), &instance) {
+ attributes::inline(lldecl, attributes::InlineAttr::Hint);
+ }
+ attributes::from_fn_attrs(ccx, &attrs, lldecl);
+
+ ccx.instances().borrow_mut().insert(instance, lldecl);
+}
//=-----------------------------------------------------------------------------
// TransItem String Keys
--- /dev/null
+NB: This crate is part of the Rust compiler. For an overview of the
+compiler as a whole, see
+[the README.md file found in `librustc`](../librustc/README.md).
+
+The `rustc_typeck` crate contains the source for "type collection" and
+"type checking", as well as a few other bits of related functionality.
+(It draws heavily on the [type inferencing][infer] and
+[trait solving][traits] code found in librustc.)
+
+[infer]: ../librustc/infer/README.md
+[traits]: ../librustc/traits/README.md
+
+## Type collection
+
+Type "collection" is the process of converting the types found in the
+HIR (`hir::Ty`), which represent the syntactic things that the user
+wrote, into the **internal representation** used by the compiler
+(`Ty<'tcx>`) -- we also do similar conversions for where-clauses and
+other bits of the function signature.
+
+To try and get a sense for the difference, consider this function:
+
+```rust
+struct Foo { }
+fn foo(x: Foo, y: self::Foo) { .. }
+// ^^^ ^^^^^^^^^
+```
+
+Those two parameters `x` and `y` each have the same type: but they
+will have distinct `hir::Ty` nodes. Those nodes will have different
+spans, and of course they encode the path somewhat differently. But
+once they are "collected" into `Ty<'tcx>` nodes, they will be
+represented by the exact same internal type.
+
+Collection is defined as a bundle of queries (e.g., `type_of`) for
+computing information about the various functions, traits, and other
+items in the crate being compiled. Note that each of these queries is
+concerned with *interprocedural* things -- for example, for a function
+definition, collection will figure out the type and signature of the
+function, but it will not visit the *body* of the function in any way,
+nor examine type annotations on local variables (that's the job of
+type *checking*).
+
+For more details, see the `collect` module.
+
+## Type checking
+
+TODO
-> ty::Region<'tcx>
{
let tcx = self.tcx();
+ let lifetime_name = |def_id| {
+ tcx.hir.name(tcx.hir.as_local_node_id(def_id).unwrap())
+ };
+
let hir_id = tcx.hir.node_to_hir_id(lifetime.id);
let r = match tcx.named_region(hir_id) {
Some(rl::Region::Static) => {
}
Some(rl::Region::LateBound(debruijn, id)) => {
- let name = tcx.hir.name(id);
+ let name = lifetime_name(id);
tcx.mk_region(ty::ReLateBound(debruijn,
- ty::BrNamed(tcx.hir.local_def_id(id), name)))
+ ty::BrNamed(id, name)))
}
Some(rl::Region::LateBoundAnon(debruijn, index)) => {
}
Some(rl::Region::EarlyBound(index, id)) => {
- let name = tcx.hir.name(id);
+ let name = lifetime_name(id);
tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
- def_id: tcx.hir.local_def_id(id),
+ def_id: id,
index,
name,
}))
}
Some(rl::Region::Free(scope, id)) => {
- let name = tcx.hir.name(id);
+ let name = lifetime_name(id);
tcx.mk_region(ty::ReFree(ty::FreeRegion {
scope,
- bound_region: ty::BrNamed(tcx.hir.local_def_id(id), name)
+ bound_region: ty::BrNamed(id, name)
}))
// (*) -- not late-bound, won't change
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/*
-
-# Collect phase
-
-The collect phase of type check has the job of visiting all items,
-determining their type, and writing that type into the `tcx.types`
-table. Despite its name, this table does not really operate as a
-*cache*, at least not for the types of items defined within the
-current crate: we assume that after the collect phase, the types of
-all local items will be present in the table.
-
-Unlike most of the types that are present in Rust, the types computed
-for each item are in fact type schemes. This means that they are
-generic types that may have type parameters. TypeSchemes are
-represented by a pair of `Generics` and `Ty`. Type
-parameters themselves are represented as `ty_param()` instances.
-
-The phasing of type conversion is somewhat complicated. There is no
-clear set of phases we can enforce (e.g., converting traits first,
-then types, or something like that) because the user can introduce
-arbitrary interdependencies. So instead we generally convert things
-lazilly and on demand, and include logic that checks for cycles.
-Demand is driven by calls to `AstConv::get_item_type_scheme` or
-`AstConv::trait_def`.
-
-Currently, we "convert" types and traits in two phases (note that
-conversion only affects the types of items / enum variants / methods;
-it does not e.g. compute the types of individual expressions):
-
-0. Intrinsics
-1. Trait/Type definitions
-
-Conversion itself is done by simply walking each of the items in turn
-and invoking an appropriate function (e.g., `trait_def_of_item` or
-`convert_item`). However, it is possible that while converting an
-item, we may need to compute the *type scheme* or *trait definition*
-for other items.
-
-There are some shortcomings in this design:
-- Because the item generics include defaults, cycles through type
- parameter defaults are illegal even if those defaults are never
- employed. This is not necessarily a bug.
-
-*/
+//! "Collection" is the process of determining the type and other external
+//! details of each item in Rust. Collection is specifically concerned
+//! with *interprocedural* things -- for example, for a function
+//! definition, collection will figure out the type and signature of the
+//! function, but it will not visit the *body* of the function in any way,
+//! nor examine type annotations on local variables (that's the job of
+//! type *checking*).
+//!
+//! Collecting is ultimately defined by a bundle of queries that
+//! inquire after various facts about the items in the crate (e.g.,
+//! `type_of`, `generics_of`, `predicates_of`, etc). See the `provide` function
+//! for the full set.
+//!
+//! At present, however, we do run collection across all items in the
+//! crate as a kind of pass. This should eventually be factored away.
use astconv::{AstConv, Bounds};
use lint;
//! We walk the set of items and, for each member, generate new constraints.
use hir::def_id::DefId;
-use rustc::dep_graph::{AssertDepGraphSafe, DepKind};
+use rustc::dep_graph::{DepGraphSafe, DepKind};
+use rustc::ich::StableHashingContext;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use syntax::ast;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc_data_structures::transitive_relation::TransitiveRelation;
+use rustc_data_structures::stable_hasher::StableHashingContextProvider;
use super::terms::*;
use super::terms::VarianceTerm::*;
}
}
+impl<'a, 'tcx> StableHashingContextProvider for ConstraintContext<'a, 'tcx> {
+ type ContextType = StableHashingContext<'tcx>;
+
+ fn create_stable_hashing_context(&self) -> Self::ContextType {
+ self.terms_cx.tcx.create_stable_hashing_context()
+ }
+}
+
+impl<'a, 'tcx> DepGraphSafe for ConstraintContext<'a, 'tcx> {}
+
impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
fn visit_node_helper(&mut self, id: ast::NodeId) {
let tcx = self.terms_cx.tcx;
// on dep-graph management.
let dep_node = def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints);
tcx.dep_graph.with_task(dep_node,
- AssertDepGraphSafe(self),
+ self,
def_id,
visit_item_task);
- fn visit_item_task<'a, 'tcx>(ccx: AssertDepGraphSafe<&mut ConstraintContext<'a, 'tcx>>,
+ fn visit_item_task<'a, 'tcx>(ccx: &mut ConstraintContext<'a, 'tcx>,
def_id: DefId)
{
- ccx.0.build_constraints_for_item(def_id);
+ ccx.build_constraints_for_item(def_id);
}
}
}
}
-fn print_inlined_const(cx: &DocContext, did: DefId) -> String {
- let body = cx.tcx.extern_const_body(did);
+pub fn print_inlined_const(cx: &DocContext, did: DefId) -> String {
+ let body = cx.tcx.extern_const_body(did).body;
let inlined = InlinedConst {
- nested_bodies: cx.tcx.item_body_nested_bodies(did)
+ nested_bodies: cx.tcx.item_body_nested_bodies(did).nested_bodies
};
hir::print::to_string(&inlined, |s| s.print_expr(&body.value))
}
// These are later on moved into `CACHEKEY`, leaving the map empty.
// Only here so that they can be filtered through the rustdoc passes.
pub external_traits: FxHashMap<DefId, Trait>,
+ pub masked_crates: FxHashSet<CrateNum>,
}
impl<'a, 'tcx> Clean<Crate> for visit_ast::RustdocVisitor<'a, 'tcx> {
// Clean the crate, translating the entire libsyntax AST to one that is
// understood by rustdoc.
let mut module = self.module.clean(cx);
+ let mut masked_crates = FxHashSet();
+
+ match module.inner {
+ ModuleItem(ref module) => {
+ for it in &module.items {
+ if it.is_extern_crate() && it.attrs.has_doc_masked() {
+ masked_crates.insert(it.def_id.krate);
+ }
+ }
+ }
+ _ => unreachable!(),
+ }
let ExternalCrate { name, src, primitives, .. } = LOCAL_CRATE.clean(cx);
{
primitives,
access_levels: Arc::new(mem::replace(&mut access_levels, Default::default())),
external_traits: mem::replace(&mut external_traits, Default::default()),
+ masked_crates,
}
}
}
pub fn is_import(&self) -> bool {
self.type_() == ItemType::Import
}
+ pub fn is_extern_crate(&self) -> bool {
+ self.type_() == ItemType::ExternCrate
+ }
pub fn is_stripped(&self) -> bool {
match self.inner { StrippedItem(..) => true, _ => false }
None
}
+ pub fn has_doc_masked(&self) -> bool {
+ for attr in &self.other_attrs {
+ if !attr.check_name("doc") { continue; }
+
+ if let Some(items) = attr.meta_item_list() {
+ if items.iter().filter_map(|i| i.meta_item()).any(|it| it.check_name("masked")) {
+ return true;
+ }
+ }
+ }
+
+ false
+ }
+
pub fn from_ast(diagnostic: &::errors::Handler, attrs: &[ast::Attribute]) -> Attributes {
let mut doc_strings = vec![];
let mut sp = None;
let n = cx.tcx.const_eval(param_env.and((def_id, substs))).unwrap();
let n = if let ConstVal::Integral(ConstInt::Usize(n)) = n.val {
n.to_string()
+ } else if let ConstVal::Unevaluated(def_id, _) = n.val {
+ if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
+ print_const_expr(cx, cx.tcx.hir.body_owned_by(node_id))
+ } else {
+ inline::print_inlined_const(cx, def_id)
+ }
} else {
format!("{:?}", n)
};
for (i, lt_param) in generics.lifetimes.iter().enumerate() {
if let Some(lt) = provided_params.lifetimes.get(i).cloned() {
if !lt.is_elided() {
- lt_substs.insert(lt_param.lifetime.id, lt.clean(cx));
+ let lt_def_id = cx.tcx.hir.local_def_id(lt_param.lifetime.id);
+ lt_substs.insert(lt_def_id, lt.clean(cx));
}
}
}
ty::TyArray(ty, n) => {
let n = if let ConstVal::Integral(ConstInt::Usize(n)) = n.val {
n.to_string()
+ } else if let ConstVal::Unevaluated(def_id, _) = n.val {
+ if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
+ print_const_expr(cx, cx.tcx.hir.body_owned_by(node_id))
+ } else {
+ inline::print_inlined_const(cx, def_id)
+ }
} else {
format!("{:?}", n)
};
use rustc_resolve as resolve;
use rustc_metadata::cstore::CStore;
-use syntax::{ast, codemap};
+use syntax::codemap;
use syntax::feature_gate::UnstableFeatures;
use syntax::fold::Folder;
use errors;
/// Table type parameter definition -> substituted type
pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>,
/// Table node id of lifetime parameter definition -> substituted lifetime
- pub lt_substs: RefCell<FxHashMap<ast::NodeId, clean::Lifetime>>,
+ pub lt_substs: RefCell<FxHashMap<DefId, clean::Lifetime>>,
}
impl<'a, 'tcx> DocContext<'a, 'tcx> {
/// the substitutions for a type alias' RHS.
pub fn enter_alias<F, R>(&self,
ty_substs: FxHashMap<Def, clean::Type>,
- lt_substs: FxHashMap<ast::NodeId, clean::Lifetime>,
+ lt_substs: FxHashMap<DefId, clean::Lifetime>,
f: F) -> R
where F: FnOnce() -> R {
let (old_tys, old_lts) =
let arena = DroplessArena::new();
let arenas = GlobalArenas::new();
- let hir_map = hir_map::map_crate(&mut hir_forest, defs);
+ let hir_map = hir_map::map_crate(&mut hir_forest, &defs);
+ let output_filenames = driver::build_output_filenames(&input,
+ &None,
+ &None,
+ &[],
+ &sess);
abort_on_err(driver::phase_3_run_analysis_passes(&sess,
&*cstore,
&arena,
&arenas,
&name,
- |tcx, analysis, _, result| {
+ &output_filenames,
+ |tcx, analysis, _, _, result| {
if let Err(_) = result {
sess.fatal("Compilation failed, aborting rustdoc");
}
/// Highlights `src`, returning the HTML output.
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>,
- extension: Option<&str>) -> String {
+ extension: Option<&str>,
+ tooltip: Option<(&str, &str)>) -> String {
debug!("highlighting: ================\n{}\n==============", src);
let sess = parse::ParseSess::new(FilePathMapping::empty());
let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new();
+ if let Some((tooltip, class)) = tooltip {
+ write!(out, "<div class='information'><div class='tooltip {}'>⚠<span \
+ class='tooltiptext'>{}</span></div></div>",
+ class, tooltip).unwrap();
+ }
write_header(class, id, &mut out).unwrap();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());
fn next(&mut self) -> Option<Self::Item> {
let event = self.inner.next();
+ let compile_fail;
+ let ignore;
if let Some(Event::Start(Tag::CodeBlock(lang))) = event {
- if !LangString::parse(&lang).rust {
+ let parse_result = LangString::parse(&lang);
+ if !parse_result.rust {
return Some(Event::Start(Tag::CodeBlock(lang)));
}
+ compile_fail = parse_result.compile_fail;
+ ignore = parse_result.ignore;
} else {
return event;
}
url, test_escaped, channel
))
});
+ let tooltip = if ignore {
+ Some(("Be careful when using this code, it's not being tested!", "ignore"))
+ } else if compile_fail {
+ Some(("This code doesn't compile so be extra careful!", "compile_fail"))
+ } else {
+ None
+ };
s.push_str(&highlight::render_with_highlighting(
&text,
- Some("rust-example-rendered"),
+ Some(&format!("rust-example-rendered{}",
+ if ignore { " ignore" }
+ else if compile_fail { " compile_fail" }
+ else { "" })),
None,
- playground_button.as_ref().map(String::as_str)));
+ playground_button.as_ref().map(String::as_str),
+ tooltip));
Some(Event::Html(s.into()))
})
}
let origtext = str::from_utf8(text).unwrap();
let origtext = origtext.trim_left();
debug!("docblock: ==============\n{:?}\n=======", text);
+ let mut compile_fail = false;
+ let mut ignore = false;
+
let rendered = if lang.is_null() || origtext.is_empty() {
false
} else {
let rlang = (*lang).as_bytes();
let rlang = str::from_utf8(rlang).unwrap();
- if !LangString::parse(rlang).rust {
+ let parse_result = LangString::parse(rlang);
+ compile_fail = parse_result.compile_fail;
+ ignore = parse_result.ignore;
+ if !parse_result.rust {
(my_opaque.dfltblk)(ob, orig_text, lang,
opaque as *const hoedown_renderer_data,
line);
url, test_escaped, channel
))
});
+ let tooltip = if ignore {
+ Some(("Be careful when using this code, it's not being tested!", "ignore"))
+ } else if compile_fail {
+ Some(("This code doesn't compile so be extra careful!", "compile_fail"))
+ } else {
+ None
+ };
s.push_str(&highlight::render_with_highlighting(
&text,
- Some("rust-example-rendered"),
+ Some(&format!("rust-example-rendered{}",
+ if ignore { " ignore" }
+ else if compile_fail { " compile_fail" }
+ else { "" })),
None,
- playground_button.as_ref().map(String::as_str)));
+ playground_button.as_ref().map(String::as_str),
+ tooltip));
hoedown_buffer_put(ob, s.as_ptr(), s.len());
})
}
deref_trait_did: Option<DefId>,
deref_mut_trait_did: Option<DefId>,
owned_box_did: Option<DefId>,
+ masked_crates: FxHashSet<CrateNum>,
// In rare case where a structure is defined in one module but implemented
// in another, if the implementing module is parsed before defining module,
deref_trait_did,
deref_mut_trait_did,
owned_box_did,
+ masked_crates: mem::replace(&mut krate.masked_crates, FxHashSet()),
typarams: external_typarams,
};
// Collect all the implementors of traits.
if let clean::ImplItem(ref i) = item.inner {
- if let Some(did) = i.trait_.def_id() {
- self.implementors.entry(did).or_insert(vec![]).push(Implementor {
- def_id: item.def_id,
- stability: item.stability.clone(),
- impl_: i.clone(),
- });
+ if !self.masked_crates.contains(&item.def_id.krate) {
+ if let Some(did) = i.trait_.def_id() {
+ if i.for_.def_id().map_or(true, |d| !self.masked_crates.contains(&d.krate)) {
+ self.implementors.entry(did).or_insert(vec![]).push(Implementor {
+ def_id: item.def_id,
+ stability: item.stability.clone(),
+ impl_: i.clone(),
+ });
+ }
+ }
}
}
// primitive rather than always to a struct/enum.
// Note: matching twice to restrict the lifetime of the `i` borrow.
let did = if let clean::Item { inner: clean::ImplItem(ref i), .. } = item {
- match i.for_ {
- clean::ResolvedPath { did, .. } |
- clean::BorrowedRef {
- type_: box clean::ResolvedPath { did, .. }, ..
- } => {
- Some(did)
- }
- ref t => {
- t.primitive_type().and_then(|t| {
- self.primitive_locations.get(&t).cloned()
- })
+ let masked_trait = i.trait_.def_id().map_or(false,
+ |d| self.masked_crates.contains(&d.krate));
+ if !masked_trait {
+ match i.for_ {
+ clean::ResolvedPath { did, .. } |
+ clean::BorrowedRef {
+ type_: box clean::ResolvedPath { did, .. }, ..
+ } => {
+ Some(did)
+ }
+ ref t => {
+ t.primitive_type().and_then(|t| {
+ self.primitive_locations.get(&t).cloned()
+ })
+ }
}
+ } else {
+ None
}
} else {
unreachable!()
None,
None,
None,
+ None,
)
}
_ => String::new(),
let ns_id = derive_id(format!("{}.{}",
field.name.as_ref().unwrap(),
ItemType::StructField.name_space()));
- write!(w, "<span id='{id}' class=\"{item_type}\">
- <span id='{ns_id}' class='invisible'>
+ write!(w, "<span id=\"{id}\" class=\"{item_type} small-section-header\">
+ <a href=\"#{id}\" class=\"anchor field\"></a>
+ <span id=\"{ns_id}\" class='invisible'>
<code>{name}: {ty}</code>
</span></span>",
item_type = ItemType::StructField,
let ns_id = derive_id(format!("{}.{}",
variant.name.as_ref().unwrap(),
ItemType::Variant.name_space()));
- write!(w, "<span id='{id}' class='variant'>\
+ write!(w, "<span id=\"{id}\" class=\"variant small-section-header\">\
+ <a href=\"#{id}\" class=\"anchor field\"></a>\
<span id='{ns_id}' class='invisible'><code>{name}",
id = id,
ns_id = ns_id,
write!(fmt, "<span id=\"{0}\">{0:1$}</span>\n", i, cols)?;
}
write!(fmt, "</pre>")?;
- write!(fmt, "{}", highlight::render_with_highlighting(s, None, None, None))?;
+ write!(fmt, "{}",
+ highlight::render_with_highlighting(s, None, None, None, None))?;
Ok(())
}
}
w.write_str(&highlight::render_with_highlighting(&t.source,
Some("macro"),
None,
+ None,
None))?;
document(w, cx, it)
}
collapseDocs(i_e.previousSibling.childNodes[0]);
});
});
+
+ onEach(document.getElementsByClassName('rust-example-rendered'), function(e) {
+ if (hasClass(e, 'compile_fail')) {
+ e.addEventListener("mouseover", function(event) {
+ e.previousElementSibling.childNodes[0].style.color = '#f00';
+ });
+ e.addEventListener("mouseout", function(event) {
+ e.previousElementSibling.childNodes[0].style.color = '';
+ });
+ } else if (hasClass(e, 'ignore')) {
+ e.addEventListener("mouseover", function(event) {
+ e.previousElementSibling.childNodes[0].style.color = '#ff9200';
+ });
+ e.addEventListener("mouseout", function(event) {
+ e.previousElementSibling.childNodes[0].style.color = '';
+ });
+ }
+ });
}());
// Sets the focus on the search bar at the top of the page
.docblock {
margin-left: 24px;
+ position: relative;
}
.content .out-of-band {
}
.anchor {
display: none;
+ position: absolute;
+ left: -25px;
}
-.anchor:after {
+.anchor.field {
+ left: -20px;
+}
+.anchor:before {
content: '\2002\00a7\2002';
}
font-weight: bold;
}
-pre.rust { position: relative; }
a.test-arrow {
display: inline-block;
position: absolute;
text-decoration: none;
}
-.section-header:hover a:after {
+.section-header:hover a:before {
+ position: absolute;
+ left: -25px;
content: '\2002\00a7\2002';
}
display: none;
}
}
+
+.information {
+ position: absolute;
+ left: -1px;
+ margin-top: 7px;
+ z-index: 1;
+}
+
+.tooltip {
+ position: relative;
+ display: inline-block;
+ cursor: pointer;
+}
+
+.tooltip .tooltiptext {
+ width: 120px;
+ display: none;
+ background-color: black;
+ color: #fff;
+ text-align: center;
+ padding: 5px 3px;
+ border-radius: 6px;
+ margin-left: 5px;
+ top: -5px;
+ left: 105%;
+ z-index: 1;
+}
+
+.tooltip:hover .tooltiptext {
+ display: inline;
+}
+
+.tooltip .tooltiptext::after {
+ content: " ";
+ position: absolute;
+ top: 50%;
+ left: 11px;
+ margin-top: -5px;
+ border-width: 5px;
+ border-style: solid;
+ border-color: transparent black transparent transparent;
+}
+
+pre.rust {
+ position: relative;
+}
:target > code {
background: #FDFFD3;
-}
\ No newline at end of file
+}
+
+pre.compile_fail {
+ border-left: 2px solid rgba(255,0,0,.4);
+}
+
+pre.compile_fail:hover, .information:hover + pre.compile_fail {
+ border-left: 2px solid #f00;
+}
+
+pre.ignore {
+ border-left: 2px solid rgba(255,142,0,.4);
+}
+
+pre.ignore:hover, .information:hover + pre.ignore {
+ border-left: 2px solid #ff9200;
+}
+
+.tooltip.compile_fail {
+ color: rgba(255,0,0,.3);
+}
+
+.information > .compile_fail:hover {
+ color: #f00;
+}
+
+.tooltip.ignore {
+ color: rgba(255,142,0,.3);
+}
+
+.information > .ignore:hover {
+ color: rgba(255,142,0,1);
+}
render_type);
{
- let map = hir::map::map_crate(&mut hir_forest, defs);
+ let map = hir::map::map_crate(&mut hir_forest, &defs);
let krate = map.krate();
let mut hir_collector = HirCollector {
sess: &sess,
use std::env;
use std::process::Command;
-use build_helper::{run, native_lib_boilerplate};
+use build_helper::{run, native_lib_boilerplate, BuildExpectation};
fn main() {
let target = env::var("TARGET").expect("TARGET was not set");
.env("CC", compiler.path())
.env("AR", &ar)
.env("RANLIB", format!("{} s", ar.display()))
- .env("CFLAGS", cflags));
+ .env("CFLAGS", cflags),
+ BuildExpectation::None);
run(Command::new(build_helper::make(host))
.current_dir(&native.out_dir)
.arg(format!("INCDIR={}", native.src_dir.display()))
- .arg("-j").arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set")));
+ .arg("-j").arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set")),
+ BuildExpectation::None);
+
Ok(())
}
use alloc::allocator;
use any::TypeId;
+use borrow::Cow;
use cell;
use char;
use fmt::{self, Debug, Display};
}
}
+#[stable(feature = "cow_box_error", since = "1.22.0")]
+impl<'a, 'b> From<Cow<'b, str>> for Box<Error + Send + Sync + 'a> {
+ fn from(err: Cow<'b, str>) -> Box<Error + Send + Sync + 'a> {
+ From::from(String::from(err))
+ }
+}
+
+#[stable(feature = "cow_box_error", since = "1.22.0")]
+impl<'a> From<Cow<'a, str>> for Box<Error> {
+ fn from(err: Cow<'a, str>) -> Box<Error> {
+ From::from(String::from(err))
+ }
+}
+
#[unstable(feature = "never_type_impls", issue = "35121")]
impl Error for ! {
fn description(&self) -> &str { *self }
#![feature(allocator_internals)]
#![feature(allow_internal_unsafe)]
#![feature(allow_internal_unstable)]
+#![feature(align_offset)]
#![feature(asm)]
#![feature(box_syntax)]
#![feature(cfg_target_has_atomic)]
#![feature(macro_reexport)]
#![feature(macro_vis_matcher)]
#![feature(needs_panic_runtime)]
-#![feature(needs_drop)]
#![feature(never_type)]
#![feature(num_bits_bytes)]
#![feature(old_wrapping)]
#![feature(unwind_attributes)]
#![feature(vec_push_all)]
#![feature(doc_cfg)]
+#![feature(doc_masked)]
#![cfg_attr(test, feature(update_panic_count))]
#![cfg_attr(not(stage0), feature(const_max_value))]
debug_assert_ne, unreachable, unimplemented, write, writeln, try)]
extern crate core as __core;
-#[allow(deprecated)] extern crate rand as core_rand;
+#[doc(masked)]
+#[allow(deprecated)]
+extern crate rand as core_rand;
#[macro_use]
#[macro_reexport(vec, format)]
extern crate alloc;
extern crate alloc_system;
extern crate std_unicode;
+#[doc(masked)]
extern crate libc;
// We always need an unwinder currently for backtraces
+#[doc(masked)]
#[allow(unused_extern_crates)]
extern crate unwind;
// compiler-rt intrinsics
+#[doc(masked)]
extern crate compiler_builtins;
// During testing, this crate is not actually the "real" std library, but rather
#[test]
fn connect_timeout_unroutable() {
- // this IP is unroutable, so connections should always time out.
+ // this IP is unroutable, so connections should always time out,
+ // provided the network is reachable to begin with.
let addr = "10.255.255.1:80".parse().unwrap();
let e = TcpStream::connect_timeout(&addr, Duration::from_millis(250)).unwrap_err();
- assert_eq!(e.kind(), io::ErrorKind::TimedOut);
+ assert!(e.kind() == io::ErrorKind::TimedOut ||
+ e.kind() == io::ErrorKind::Other,
+ "bad error: {} {:?}", e, e.kind());
}
#[test]
let usize_bytes = mem::size_of::<usize>();
// search up to an aligned boundary
- let align = (ptr as usize) & (usize_bytes- 1);
- let mut offset;
- if align > 0 {
- offset = cmp::min(usize_bytes - align, len);
+ let mut offset = ptr.align_offset(usize_bytes);
+ if offset > 0 {
+ offset = cmp::min(offset, len);
if let Some(index) = text[..offset].iter().position(|elt| *elt == x) {
return Some(index);
}
- } else {
- offset = 0;
}
// search the body of the text
///
/// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
-/// [`into_inner`]: ../../std/sync/struct.Mutex.html#method.into_inner
+/// [`into_inner`]: ../../std/sync/struct.PoisonError.html#method.into_inner
#[stable(feature = "rust1", since = "1.0.0")]
pub type LockResult<Guard> = Result<Guard, PoisonError<Guard>>;
/// Names the thread-to-be. Currently the name is used for identification
/// only in panic messages.
///
+ /// The name must not contain null bytes (`\0`).
+ ///
/// For more information about named threads, see
/// [this module-level documentation][naming-threads].
///
/// [`io::Result`]: ../../std/io/type.Result.html
/// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
///
+ /// # Panics
+ ///
+ /// Panics if a thread name was set and it contained null bytes.
+ ///
/// # Examples
///
/// ```
impl Thread {
// Used only internally to construct a thread object without spawning
+ // Panics if the name contains nuls.
pub(crate) fn new(name: Option<String>) -> Thread {
let cname = name.map(|n| {
CString::new(n).expect("thread name may not contain interior null bytes")
crate-type = ["dylib"]
[dependencies]
+bitflags = "1.0"
serialize = { path = "../libserialize" }
log = "0.3"
-bitflags = "0.8"
syntax_pos = { path = "../libsyntax_pos" }
+rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
rustc_errors = { path = "../librustc_errors" }
rustc_data_structures = { path = "../librustc_data_structures" }
--- /dev/null
+NB: This crate is part of the Rust compiler. For an overview of the
+compiler as a whole, see
+[the README.md file found in `librustc`](../librustc/README.md).
+
+The `syntax` crate contains those things concerned purely with syntax
+– that is, the AST ("abstract syntax tree"), parser, pretty-printer,
+lexer, macro expander, and utilities for traversing ASTs.
Macro(Mac),
}
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy,
+ PartialOrd, Ord)]
pub enum IntTy {
Is,
I8,
}
}
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy,
+ PartialOrd, Ord)]
pub enum UintTy {
Us,
U8,
}
}
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy,
+ PartialOrd, Ord)]
pub enum FloatTy {
F32,
F64,
first_attr_value_str_by_name(attrs, "crate_name")
}
-/// Find the value of #[export_name=*] attribute and check its validity.
-pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option<Symbol> {
- attrs.iter().fold(None, |ia,attr| {
- if attr.check_name("export_name") {
- if let s@Some(_) = attr.value_str() {
- s
- } else {
- struct_span_err!(diag, attr.span, E0558,
- "export_name attribute has invalid format")
- .span_label(attr.span, "did you mean #[export_name=\"*\"]?")
- .emit();
- None
- }
- } else {
- ia
- }
- })
-}
-
-pub fn contains_extern_indicator(diag: &Handler, attrs: &[Attribute]) -> bool {
- contains_name(attrs, "no_mangle") ||
- find_export_name_attr(diag, attrs).is_some()
-}
-
#[derive(Copy, Clone, PartialEq)]
pub enum InlineAttr {
None,
Delete the offending feature attribute.
"##,
-E0558: r##"
-The `export_name` attribute was malformed.
-
-Erroneous code example:
-
-```compile_fail,E0558
-#[export_name] // error: export_name attribute has invalid format
-pub fn something() {}
-
-fn main() {}
-```
-
-The `export_name` attribute expects a string in order to determine the name of
-the exported symbol. Example:
-
-```
-#[export_name = "some_function"] // ok!
-pub fn something() {}
-
-fn main() {}
-```
-"##,
-
E0565: r##"
A literal was used in an attribute that doesn't support literals.
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.parse_sess.span_diagnostic.span_bug(sp, msg);
}
- pub fn trace_macros_diag(&self) {
+ pub fn trace_macros_diag(&mut self) {
for (sp, notes) in self.expansions.iter() {
let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
for note in notes {
}
db.emit();
}
+        // FIXME: does this result in errors?
+ self.expansions.clear();
}
pub fn bug(&self, msg: &str) -> ! {
self.parse_sess.span_diagnostic.bug(msg);
if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit {
let info = self.cx.current_expansion.mark.expn_info().unwrap();
let suggested_limit = self.cx.ecfg.recursion_limit * 2;
- let mut err = self.cx.struct_span_fatal(info.call_site,
+ let mut err = self.cx.struct_span_err(info.call_site,
&format!("recursion limit reached while expanding the macro `{}`",
info.callee.name()));
err.help(&format!(
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
suggested_limit));
err.emit();
+ self.cx.trace_macros_diag();
panic!(FatalError);
}
}
ProcMacroDerive(..) | BuiltinDerive(..) => {
self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path));
+ self.cx.trace_macros_diag();
kind.dummy(attr.span)
}
_ => {
let msg = &format!("macro `{}` may not be used in attributes", attr.path);
self.cx.span_err(attr.span, msg);
+ self.cx.trace_macros_diag();
kind.dummy(attr.span)
}
}
if let Err(msg) = validate_and_set_expn_info(def_span.map(|(_, s)| s),
false, false) {
self.cx.span_err(path.span, &msg);
+ self.cx.trace_macros_diag();
return kind.dummy(span);
}
kind.make_from(expand.expand(self.cx, span, mac.node.stream()))
allow_internal_unstable,
allow_internal_unsafe) {
self.cx.span_err(path.span, &msg);
+ self.cx.trace_macros_diag();
return kind.dummy(span);
}
kind.make_from(expander.expand(self.cx, span, mac.node.stream()))
if ident.name == keywords::Invalid.name() {
self.cx.span_err(path.span,
&format!("macro {}! expects an ident argument", path));
+ self.cx.trace_macros_diag();
return kind.dummy(span);
};
MultiDecorator(..) | MultiModifier(..) | AttrProcMacro(..) => {
self.cx.span_err(path.span,
&format!("`{}` can only be used in attributes", path));
+ self.cx.trace_macros_diag();
return kind.dummy(span);
}
ProcMacroDerive(..) | BuiltinDerive(..) => {
self.cx.span_err(path.span, &format!("`{}` is a derive mode", path));
+ self.cx.trace_macros_diag();
return kind.dummy(span);
}
let msg =
format!("macro {}! expects no ident argument, given '{}'", path, ident);
self.cx.span_err(path.span, &msg);
+ self.cx.trace_macros_diag();
return kind.dummy(span);
}
let msg = format!("non-{kind} macro in {kind} position: {name}",
name = path.segments[0].identifier.name, kind = kind.name());
self.cx.span_err(path.span, &msg);
+ self.cx.trace_macros_diag();
kind.dummy(span)
})
}
_ => {
let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
self.cx.span_err(span, msg);
+ self.cx.trace_macros_diag();
kind.dummy(span)
}
}
Ok(expansion) => expansion,
Err(mut err) => {
err.emit();
+ self.cx.trace_macros_diag();
return kind.dummy(span);
}
};
if !traits.is_empty() &&
(kind == ExpansionKind::TraitItems || kind == ExpansionKind::ImplItems) {
self.cx.span_err(traits[0].span, "`derive` can be only be applied to items");
+ self.cx.trace_macros_diag();
return kind.expect_from_annotatables(::std::iter::once(item));
}
self.collect(kind, InvocationKind::Attr { attr: attr, traits: traits, item: item })
}
let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers"));
- cx.span_fatal(best_fail_spot.substitute_dummy(sp), &best_fail_msg);
+ cx.span_err(best_fail_spot.substitute_dummy(sp), &best_fail_msg);
+ cx.trace_macros_diag();
+ DummyResult::any(sp)
}
// Note that macro-by-example's input is also matched against a token tree:
// #[doc(cfg(...))]
(active, doc_cfg, "1.21.0", Some(43781)),
+ // #[doc(masked)]
+ (active, doc_masked, "1.21.0", None),
// allow `#[must_use]` on functions (RFC 1940)
(active, fn_must_use, "1.21.0", Some(43302)),
gate_feature_post!(&self, doc_cfg, attr.span,
"#[doc(cfg(...))] is experimental"
);
+ } else if content.iter().any(|c| c.check_name("masked")) {
+ gate_feature_post!(&self, doc_masked, attr.span,
+ "#[doc(masked)] is experimental"
+ );
}
}
}
#![feature(rustc_diagnostic_macros)]
#![feature(i128_type)]
+// See librustc_cratesio_shim/Cargo.toml for a comment explaining this.
+#[allow(unused_extern_crates)]
+extern crate rustc_cratesio_shim;
+
+#[macro_use] extern crate bitflags;
extern crate serialize;
#[macro_use] extern crate log;
-#[macro_use] extern crate bitflags;
extern crate std_unicode;
pub extern crate rustc_errors as errors;
extern crate syntax_pos;
use std::slice;
bitflags! {
- pub flags Restrictions: u8 {
- const RESTRICTION_STMT_EXPR = 1 << 0,
- const RESTRICTION_NO_STRUCT_LITERAL = 1 << 1,
+ pub struct Restrictions: u8 {
+ const STMT_EXPR = 1 << 0;
+ const NO_STRUCT_LITERAL = 1 << 1;
}
}
let e = if self.token.can_begin_expr()
&& !(self.token == token::OpenDelim(token::Brace)
&& self.restrictions.contains(
- RESTRICTION_NO_STRUCT_LITERAL)) {
+ Restrictions::NO_STRUCT_LITERAL)) {
Some(self.parse_expr()?)
} else {
None
// This is a struct literal, unless we're prohibited
// from parsing struct literals here.
let prohibited = self.restrictions.contains(
- RESTRICTION_NO_STRUCT_LITERAL
+ Restrictions::NO_STRUCT_LITERAL
);
if !prohibited {
return self.parse_struct_expr(lo, pth, attrs);
token::Ident(..) if self.token.is_keyword(keywords::In) => {
self.bump();
let place = self.parse_expr_res(
- RESTRICTION_NO_STRUCT_LITERAL,
+ Restrictions::NO_STRUCT_LITERAL,
None,
)?;
let blk = self.parse_block()?;
let cur_op_span = self.span;
let restrictions = if op.is_assign_like() {
- self.restrictions & RESTRICTION_NO_STRUCT_LITERAL
+ self.restrictions & Restrictions::NO_STRUCT_LITERAL
} else {
self.restrictions
};
let rhs = match op.fixity() {
Fixity::Right => self.with_res(
- restrictions - RESTRICTION_STMT_EXPR,
+ restrictions - Restrictions::STMT_EXPR,
|this| {
this.parse_assoc_expr_with(op.precedence(),
LhsExpr::NotYetParsed)
}),
Fixity::Left => self.with_res(
- restrictions - RESTRICTION_STMT_EXPR,
+ restrictions - Restrictions::STMT_EXPR,
|this| {
this.parse_assoc_expr_with(op.precedence() + 1,
LhsExpr::NotYetParsed)
// We currently have no non-associative operators that are not handled above by
// the special cases. The code is here only for future convenience.
Fixity::None => self.with_res(
- restrictions - RESTRICTION_STMT_EXPR,
+ restrictions - Restrictions::STMT_EXPR,
|this| {
this.parse_assoc_expr_with(op.precedence() + 1,
LhsExpr::NotYetParsed)
if self.token.can_begin_expr() {
// parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
if self.token == token::OpenDelim(token::Brace) {
- return !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL);
+ return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
}
true
} else {
return self.parse_if_let_expr(attrs);
}
let lo = self.prev_span;
- let cond = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL, None)?;
+ let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
// Verify that the parsed `if` condition makes sense as a condition. If it is a block, then
// verify that the last statement is either an implicit return (no `;`) or an explicit
self.expect_keyword(keywords::Let)?;
let pat = self.parse_pat()?;
self.expect(&token::Eq)?;
- let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL, None)?;
+ let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let thn = self.parse_block()?;
let (hi, els) = if self.eat_keyword(keywords::Else) {
let expr = self.parse_else_expr()?;
let decl_hi = self.prev_span;
let body = match decl.output {
FunctionRetTy::Default(_) => {
- let restrictions = self.restrictions - RESTRICTION_STMT_EXPR;
+ let restrictions = self.restrictions - Restrictions::STMT_EXPR;
self.parse_expr_res(restrictions, None)?
},
_ => {
let pat = self.parse_pat()?;
self.expect_keyword(keywords::In)?;
- let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL, None)?;
+ let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
if self.token.is_keyword(keywords::Let) {
return self.parse_while_let_expr(opt_ident, span_lo, attrs);
}
- let cond = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL, None)?;
+ let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
self.expect_keyword(keywords::Let)?;
let pat = self.parse_pat()?;
self.expect(&token::Eq)?;
- let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL, None)?;
+ let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let match_span = self.prev_span;
let lo = self.prev_span;
- let discriminant = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL,
+ let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
None)?;
if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
if self.token == token::Token::Semi {
None
};
self.expect(&token::FatArrow)?;
- let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR, None)?;
+ let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None)?;
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
&& self.token != token::CloseDelim(token::Brace);
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
// prevent `while catch {} {}`, `if catch {} {} else {}`, etc.
- !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL)
+ !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
fn is_union_item(&self) -> bool {
self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new())
};
- let expr = self.with_res(RESTRICTION_STMT_EXPR, |this| {
+ let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
})?;
// Remainder are line-expr stmts.
let e = self.parse_expr_res(
- RESTRICTION_STMT_EXPR, Some(attrs.into()))?;
+ Restrictions::STMT_EXPR, Some(attrs.into()))?;
Stmt {
id: ast::DUMMY_NODE_ID,
span: lo.to(e.span),
/// Is this expression a successfully-parsed statement?
fn expr_is_complete(&mut self, e: &Expr) -> bool {
- self.restrictions.contains(RESTRICTION_STMT_EXPR) &&
+ self.restrictions.contains(Restrictions::STMT_EXPR) &&
!classify::expr_requires_semi_to_be_stmt(e)
}
pub use self::Token::*;
use ast::{self};
+use parse::ParseSess;
+use print::pprust;
use ptr::P;
use serialize::{Decodable, Decoder, Encodable, Encoder};
use symbol::keywords;
+use syntax::parse::parse_stream_from_source_str;
+use syntax_pos::{self, Span};
use tokenstream::{TokenStream, TokenTree};
+use tokenstream;
use std::cell::Cell;
use std::{cmp, fmt};
pub fn is_reserved_ident(&self) -> bool {
self.is_special_ident() || self.is_used_keyword() || self.is_unused_keyword()
}
+
+ pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
+ -> TokenStream
+ {
+ let nt = match *self {
+ Token::Interpolated(ref nt) => nt,
+ _ => panic!("only works on interpolated tokens"),
+ };
+
+ // An `Interpolated` token means that we have a `Nonterminal`
+ // which is often a parsed AST item. At this point we now need
+ // to convert the parsed AST to an actual token stream, e.g.
+ // un-parse it basically.
+ //
+ // Unfortunately there's not really a great way to do that in a
+ // guaranteed lossless fashion right now. The fallback here is
+ // to just stringify the AST node and reparse it, but this loses
+ // all span information.
+ //
+ // As a result, some AST nodes are annotated with the token
+ // stream they came from. Attempt to extract these lossless
+ // token streams before we fall back to the stringification.
+ let mut tokens = None;
+
+ match nt.0 {
+ Nonterminal::NtItem(ref item) => {
+ tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
+ }
+ Nonterminal::NtTraitItem(ref item) => {
+ tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
+ }
+ Nonterminal::NtImplItem(ref item) => {
+ tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
+ }
+ Nonterminal::NtIdent(ident) => {
+ let token = Token::Ident(ident.node);
+ tokens = Some(TokenTree::Token(ident.span, token).into());
+ }
+ Nonterminal::NtTT(ref tt) => {
+ tokens = Some(tt.clone().into());
+ }
+ _ => {}
+ }
+
+ tokens.unwrap_or_else(|| {
+ nt.1.force(|| {
+ // FIXME(jseyfried): Avoid this pretty-print + reparse hack
+ let name = "<macro expansion>".to_owned();
+ let source = pprust::token_to_string(self);
+ parse_stream_from_source_str(name, source, sess, Some(span))
+ })
+ })
+ }
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash)]
impl ::std::hash::Hash for LazyTokenStream {
fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {}
}
+
+fn prepend_attrs(sess: &ParseSess,
+ attrs: &[ast::Attribute],
+ tokens: Option<&tokenstream::TokenStream>,
+ span: syntax_pos::Span)
+ -> Option<tokenstream::TokenStream>
+{
+ let tokens = match tokens {
+ Some(tokens) => tokens,
+ None => return None,
+ };
+ if attrs.len() == 0 {
+ return Some(tokens.clone())
+ }
+ let mut builder = tokenstream::TokenStreamBuilder::new();
+ for attr in attrs {
+ assert_eq!(attr.style, ast::AttrStyle::Outer,
+ "inner attributes should prevent cached tokens from existing");
+        // FIXME: Avoid this pretty-print + reparse hack as above
+ let name = "<macro expansion>".to_owned();
+ let source = pprust::attr_to_string(attr);
+ let stream = parse_stream_from_source_str(name, source, sess, Some(span));
+ builder.push(stream);
+ }
+ builder.push(tokens.clone());
+ Some(builder.build())
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[doc(masked)] //~ ERROR: #[doc(masked)] is experimental
+extern crate std as realstd;
+
+fn main() {}
// scope.
// revisions: rpass1 rpass2
+// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
use Trait2;
#[rustc_clean(label="Hir", cfg="rpass2")]
- #[rustc_dirty(label="HirBody", cfg="rpass2")]
+ #[rustc_clean(label="HirBody", cfg="rpass2")]
+ #[rustc_dirty(label="TypeckTables", cfg="rpass2")]
fn bar() {
().method();
}
#[rustc_clean(label="Hir", cfg="rpass2")]
#[rustc_clean(label="HirBody", cfg="rpass2")]
+ #[rustc_clean(label="TypeckTables", cfg="rpass2")]
fn baz() {
22; // no method call, traits in scope don't matter
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "foo"]
+
+// ignore-tidy-linelength
+
+// @has foo/fn.bar.html '//*[@class="tooltip compile_fail"]/span' "This code doesn't compile so be extra careful!"
+// @has foo/fn.bar.html '//*[@class="tooltip ignore"]/span' "Be careful when using this code, it's not being tested!"
+
+/// foo
+///
+/// ```compile_fail
+/// foo();
+/// ```
+///
+/// ```ignore (tidy)
+/// goo();
+/// ```
+///
+/// ```
+/// let x = 0;
+/// ```
+pub fn bar() -> usize { 2 }
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+struct Ref<'a> {
+ x: &'a u32,
+}
+
+fn foo<'a, 'b>(mut x: Vec<Ref<'a>>, y: Ref<'b>)
+ where &'a (): Sized,
+ &'b u32: Sized
+{
+ x.push(y);
+}
+
+fn main() {}
--- /dev/null
+error[E0623]: lifetime mismatch
+ --> $DIR/ex3-both-anon-regions-both-are-structs-earlybound-regions.rs:18:12
+ |
+14 | fn foo<'a, 'b>(mut x: Vec<Ref<'a>>, y: Ref<'b>)
+ | ------- ------- these two types are declared with different lifetimes...
+...
+18 | x.push(y);
+ | ^ ...but data from `y` flows into `x` here
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+struct Ref<'a> {
+ x: &'a u32,
+}
+
+fn foo<'a, 'b>(mut x: Vec<Ref<'a>>, y: Ref<'b>) {
+ x.push(y);
+}
+
+fn main() {}
--- /dev/null
+error[E0623]: lifetime mismatch
+ --> $DIR/ex3-both-anon-regions-both-are-structs-latebound-regions.rs:15:12
+ |
+14 | fn foo<'a, 'b>(mut x: Vec<Ref<'a>>, y: Ref<'b>) {
+ | ------- ------- these two types are declared with different lifetimes...
+15 | x.push(y);
+ | ^ ...but data from `y` flows into `x` here
+
+error: aborting due to previous error
+
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(fn_must_use)]
-#![warn(unused_must_use)]
-
-struct MyStruct {
- n: usize
-}
-
-impl MyStruct {
- #[must_use]
- fn need_to_use_this_method_value(&self) -> usize {
- self.n
- }
-}
-
-#[must_use="it's important"]
-fn need_to_use_this_value() -> bool {
- false
-}
-
-fn main() {
- need_to_use_this_value();
-
- let m = MyStruct { n: 2 };
- m.need_to_use_this_method_value();
-}
+++ /dev/null
-warning: unused return value of `need_to_use_this_value` which must be used: it's important
- --> $DIR/fn_must_use.rs:31:5
- |
-31 | need_to_use_this_value();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^
- |
-note: lint level defined here
- --> $DIR/fn_must_use.rs:12:9
- |
-12 | #![warn(unused_must_use)]
- | ^^^^^^^^^^^^^^^
-
-warning: unused return value of `MyStruct::need_to_use_this_method_value` which must be used
- --> $DIR/fn_must_use.rs:34:5
- |
-34 | m.need_to_use_this_method_value();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
12 | assert_eq!(1, 1,);
| ^
+error: aborting due to previous error
+
12 | assert_ne!(1, 2,);
| ^
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z trace-macros
+
+#![recursion_limit="4"]
+
+macro_rules! my_faulty_macro {
+ () => {
+ my_faulty_macro!(bcd);
+ };
+}
+
+macro_rules! pat_macro {
+ () => {
+ pat_macro!(A{a:a, b:0, c:_, ..});
+ };
+ ($a:pat) => {
+ $a
+ };
+}
+
+macro_rules! my_recursive_macro {
+ () => {
+ my_recursive_macro!();
+ };
+}
+
+macro_rules! my_macro {
+ () => {
+
+ };
+}
+
+fn main() {
+ my_faulty_macro!();
+ my_recursive_macro!();
+ test!();
+ non_exisiting!();
+ derive!(Debug);
+ let a = pat_macro!();
+}
+
+#[my_macro]
+fn use_bang_macro_as_attr(){}
+
+#[derive(Debug)]
+fn use_derive_macro_as_attr(){}
--- /dev/null
+error: no rules expected the token `bcd`
+ --> $DIR/trace_faulty_macros.rs:17:26
+ |
+17 | my_faulty_macro!(bcd);
+ | ^^^
+...
+43 | my_faulty_macro!();
+ | ------------------- in this macro invocation
+
+note: trace_macro
+ --> $DIR/trace_faulty_macros.rs:43:5
+ |
+43 | my_faulty_macro!();
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: expanding `my_faulty_macro! { }`
+ = note: to `my_faulty_macro ! ( bcd ) ;`
+ = note: expanding `my_faulty_macro! { bcd }`
+
+error: recursion limit reached while expanding the macro `my_recursive_macro`
+ --> $DIR/trace_faulty_macros.rs:32:9
+ |
+32 | my_recursive_macro!();
+ | ^^^^^^^^^^^^^^^^^^^^^^
+...
+44 | my_recursive_macro!();
+ | ---------------------- in this macro invocation
+ |
+ = help: consider adding a `#![recursion_limit="8"]` attribute to your crate
+
+note: trace_macro
+ --> $DIR/trace_faulty_macros.rs:44:5
+ |
+44 | my_recursive_macro!();
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: expanding `my_recursive_macro! { }`
+ = note: to `my_recursive_macro ! ( ) ;`
+ = note: expanding `my_recursive_macro! { }`
+ = note: to `my_recursive_macro ! ( ) ;`
+ = note: expanding `my_recursive_macro! { }`
+ = note: to `my_recursive_macro ! ( ) ;`
+ = note: expanding `my_recursive_macro! { }`
+ = note: to `my_recursive_macro ! ( ) ;`
+ = note: expanding `my_recursive_macro! { }`
+ = note: to `my_recursive_macro ! ( ) ;`
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(fn_must_use)]
+#![warn(unused_must_use)]
+
+struct MyStruct {
+ n: usize,
+}
+
+impl MyStruct {
+ #[must_use]
+ fn need_to_use_this_method_value(&self) -> usize {
+ self.n
+ }
+}
+
+trait EvenNature {
+ #[must_use = "no side effects"]
+ fn is_even(&self) -> bool;
+}
+
+impl EvenNature for MyStruct {
+ fn is_even(&self) -> bool {
+ self.n % 2 == 0
+ }
+}
+
+trait Replaceable {
+ fn replace(&mut self, substitute: usize) -> usize;
+}
+
+impl Replaceable for MyStruct {
+ // ↓ N.b.: `#[must_use]` attribute on a particular trait implementation
+ // method won't work; the attribute should be on the method signature in
+ // the trait's definition.
+ #[must_use]
+ fn replace(&mut self, substitute: usize) -> usize {
+ let previously = self.n;
+ self.n = substitute;
+ previously
+ }
+}
+
+#[must_use = "it's important"]
+fn need_to_use_this_value() -> bool {
+ false
+}
+
+fn main() {
+ need_to_use_this_value();
+
+ let mut m = MyStruct { n: 2 };
+ m.need_to_use_this_method_value();
+ m.is_even(); // trait method!
+
+ m.replace(3);
+
+ 2.eq(&3);
+
+ // FIXME: operators should probably be `must_use` if underlying method is
+ 2 == 3;
+}
--- /dev/null
+warning: unused return value of `need_to_use_this_value` which must be used: it's important
+ --> $DIR/fn_must_use.rs:58:5
+ |
+58 | need_to_use_this_value();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/fn_must_use.rs:12:9
+ |
+12 | #![warn(unused_must_use)]
+ | ^^^^^^^^^^^^^^^
+
+warning: unused return value of `MyStruct::need_to_use_this_method_value` which must be used
+ --> $DIR/fn_must_use.rs:61:5
+ |
+61 | m.need_to_use_this_method_value();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: unused return value of `EvenNature::is_even` which must be used: no side effects
+ --> $DIR/fn_must_use.rs:62:5
+ |
+62 | m.is_even(); // trait method!
+ | ^^^^^^^^^^^^
+
+warning: unused return value of `std::cmp::PartialEq::eq` which must be used
+ --> $DIR/fn_must_use.rs:66:5
+ |
+66 | 2.eq(&3);
+ | ^^^^^^^^^
+
-Subproject commit 33250c48b4763b01478d780e76206484a1d5b207
+Subproject commit 8118b02ac5ce49b22e049ff03316d5e1574852cf
//
// run-pass/foo/bar/baz.rs
let path =
- PathBuf::from(config.mode.to_string())
+ PathBuf::from(config.src_base.file_name().unwrap())
.join(&testpaths.relative_dir)
.join(&testpaths.file.file_name().unwrap());
test::DynTestName(format!("[{}] {}", config.mode, path.display()))
}
fn run_ui_test(&self) {
- println!("ui: {}", self.testpaths.file.display());
-
let proc_res = self.compile_test();
let expected_stderr_path = self.expected_output_path("stderr");
--- /dev/null
+Subproject commit 80853e2f24a01db96fe9821e468dd2af75a4d2e5
-Subproject commit 52d48656f93eeeb2c568e6c1048e64168e5b209f
+Subproject commit 7221e38023c41ff2532ebbf54a7da296fd488b50
--- /dev/null
+Subproject commit a1fd68da464fc51585f351c81fc2b867211c197e
"src/tools/rls",
"src/tools/clippy",
"src/tools/rust-installer",
+ "src/tools/rustfmt",
+ "src/tools/miri",
];
skip.iter().any(|p| path.ends_with(p))
}
--- /dev/null
+# This file reflects the current status of all tools which are allowed
+# to fail without failing the build.
+#
+# There are three states a tool can be in:
+# 1. Broken: The tool doesn't build
+# 2. Building: The tool builds but its tests are failing
+# 3. Testing: The tool builds and its tests are passing
+#
+# In the future there will be further states like "Distributing", which
+# configures whether the tool is included in the Rust distribution.
+#
+# If a tool was working before your PR but is broken now, consider
+# updating the tool within your PR. How to do that is described in
+# "CONTRIBUTING.md#External Dependencies". If the effort required is not
+# warranted (e.g. due to the tool abusing some API that you changed, and
+# fixing the tool would mean a significant refactoring), you can disable
+# the tool here, by changing its state to `Broken`. Remember to ping
+# the tool authors if you do not fix their tool, so they can proactively
+# fix it, instead of being surprised by the breakage.
+#
+# Each tool has a list of people to ping
+
+# ping @oli-obk @RalfJung @eddyb
+miri = "Testing"