# version that we're using, 8.2, cannot compile LLVM for OSX 10.7.
- env: >
RUST_CHECK_TARGET=check
- RUST_CONFIGURE_ARGS=--build=x86_64-apple-darwin
+ RUST_CONFIGURE_ARGS="--build=x86_64-apple-darwin --enable-sanitizers"
SRC=.
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
SCCACHE_ERROR_LOG=/tmp/sccache.log
os: osx
osx_image: xcode8.2
install: &osx_install_sccache >
- travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-apple-darwin &&
+ travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-apple-darwin &&
chmod +x /usr/local/bin/sccache &&
travis_retry curl -o /usr/local/bin/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
chmod +x /usr/local/bin/stamp
install: *osx_install_sccache
- env: >
RUST_CHECK_TARGET=dist
- RUST_CONFIGURE_ARGS="--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios --enable-extended"
+ RUST_CONFIGURE_ARGS="--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios --enable-extended --enable-sanitizers"
SRC=.
DEPLOY=1
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
# came from the mingw-w64 SourceForge download site. Unfortunately
# SourceForge is notoriously flaky, so we mirror it on our own infrastructure.
- MSYS_BITS: 32
- RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-ninja
+ RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
SCRIPT: python x.py test
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
- MINGW_ARCHIVE: i686-6.2.0-release-posix-dwarf-rt_v5-rev1.7z
+ MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
- MSYS_BITS: 64
SCRIPT: python x.py test
- RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-ninja
+ RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
- MINGW_ARCHIVE: x86_64-6.2.0-release-posix-seh-rt_v5-rev1.7z
+ MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
# 32/64 bit MSVC and GNU deployment
SCRIPT: python x.py dist
DEPLOY: 1
- MSYS_BITS: 32
- RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-extended --enable-ninja
+ RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-extended
SCRIPT: python x.py dist
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
- MINGW_ARCHIVE: i686-6.2.0-release-posix-dwarf-rt_v5-rev1.7z
+ MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
DEPLOY: 1
- MSYS_BITS: 64
SCRIPT: python x.py dist
- RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-extended --enable-ninja
+ RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-extended
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror
- MINGW_ARCHIVE: x86_64-6.2.0-release-posix-seh-rt_v5-rev1.7z
+ MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
DEPLOY: 1
- if defined MINGW_URL 7z x -y %MINGW_ARCHIVE% > nul
- if defined MINGW_URL set PATH=%CD%\%MINGW_DIR%\bin;C:\msys64\usr\bin;%PATH%
+ # Here we do a pretty heinous thing which is to mangle the MinGW installation
+ # we just had above. Currently, as of this writing, we're using MinGW-w64
+ # builds of gcc, and that's currently at 6.3.0. We use 6.3.0 as it appears to
+ # be the first version which contains a fix for #40546, builds randomly
+ # failing during LLVM due to ar.exe/ranlib.exe failures.
+ #
+ # Unfortunately, though, 6.3.0 *also* is the first version of MinGW-w64 builds
+ # to contain a regression in gdb (#40184). As a result if we were to use the
+ # gdb provided (7.11.1) then we would fail all debuginfo tests.
+ #
+ # In order to fix spurious failures (pretty high priority) we use 6.3.0. To
+ # avoid disabling gdb tests we download an *old* version of gdb, specifically
+ # that found inside the 6.2.0 distribution. We then overwrite the 6.3.0 gdb
+ # with the 6.2.0 gdb to get tests passing.
+ #
+ # Note that we don't literally overwrite the gdb.exe binary because it appears
+ # to just use gdborig.exe, so that's the binary we deal with instead.
+ - if defined MINGW_URL appveyor-retry appveyor DownloadFile %MINGW_URL%/2017-04-20-%MSYS_BITS%bit-gdborig.exe
+ - if defined MINGW_URL mv 2017-04-20-%MSYS_BITS%bit-gdborig.exe %MINGW_DIR%\bin\gdborig.exe
+
# Otherwise pull in the MinGW installed on appveyor
- if NOT defined MINGW_URL set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH%
- set PATH=C:\Python27;%PATH%
# Download and install sccache
- - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-pc-windows-msvc
- - mv 2017-04-04-sccache-x86_64-pc-windows-msvc sccache.exe
+ - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-pc-windows-msvc
+ - mv 2017-04-19-sccache-x86_64-pc-windows-msvc sccache.exe
- set PATH=%PATH%;%CD%
# Download and install ninja
# Note that this is originally from the github releases patch of Ninja
- appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-15-ninja-win.zip
- 7z x 2017-03-15-ninja-win.zip
+ - set RUST_CONFIGURE_ARGS=%RUST_CONFIGURE_ARGS% --enable-ninja
# - set PATH=%PATH%;%CD% -- this already happens above for sccache
# Install InnoSetup to get `iscc` used to produce installers
on_failure:
- cat %CD%\sccache.log || exit 0
-cache:
- - "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
- - "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
- - "i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
- - "x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
-
branches:
only:
- auto
"gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "mdbook"
-version = "0.0.19"
+version = "0.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "num_cpus"
-version = "0.2.13"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "qemu-test-client"
-version = "0.1.0"
-
-[[package]]
-name = "qemu-test-server"
-version = "0.1.0"
-
[[package]]
name = "quick-error"
version = "1.1.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "remote-test-client"
+version = "0.1.0"
+
+[[package]]
+name = "remote-test-server"
+version = "0.1.0"
+
[[package]]
name = "rls-data"
version = "0.1.0"
version = "0.1.0"
dependencies = [
"clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "mdbook 0.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"checksum lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4732c563b9a21a406565c4747daa7b46742f082911ae4753f390dc9ec7ee1a97"
"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
"checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
-"checksum mdbook 0.0.19 (registry+https://github.com/rust-lang/crates.io-index)" = "2598843aeda0c5bb2e8e4d714564f1c3fc40f7844157e34563bf96ae3866b56e"
+"checksum mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "f1e2e9d848514dcfad4195788d0d42ae5153a477c191d75d5b84fab10f222fbd"
"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
"checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99"
-"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
+"checksum num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca313f1862c7ec3e0dfe8ace9fa91b1d9cb5c84ace3d00f5ec4216238e93c167"
"checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842"
"checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
"checksum pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9ab1e588ef8efd702c7ed9d2bd774db5e6f4d878bb5a1a9f371828fbdff6973"
"tools/rustbook",
"tools/tidy",
"tools/build-manifest",
- "tools/qemu-test-client",
- "tools/qemu-test-server",
+ "tools/remote-test-client",
+ "tools/remote-test-server",
]
# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit
path = "bin/rustdoc.rs"
test = false
+[[bin]]
+name = "sccache-plus-cl"
+path = "bin/sccache-plus-cl.rs"
+test = false
+
[dependencies]
build_helper = { path = "../build_helper" }
cmake = "0.1.17"
filetime = "0.1"
-num_cpus = "0.2"
+num_cpus = "1.0"
toml = "0.1"
getopts = "0.2"
rustc-serialize = "0.3"
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+extern crate gcc;
+
+use std::env;
+use std::process::{self, Command};
+
+fn main() {
+ let target = env::var("SCCACHE_TARGET").unwrap();
+ // Locate the actual compiler that we're invoking
+ env::remove_var("CC");
+ env::remove_var("CXX");
+ let mut cfg = gcc::Config::new();
+ cfg.cargo_metadata(false)
+ .out_dir("/")
+ .target(&target)
+ .host(&target)
+ .opt_level(0)
+ .debug(false);
+ let compiler = cfg.get_compiler();
+
+ // Invoke sccache with said compiler
+ let sccache_path = env::var_os("SCCACHE_PATH").unwrap();
+ let mut cmd = Command::new(&sccache_path);
+ cmd.arg(compiler.path());
+ for &(ref k, ref v) in compiler.env() {
+ cmd.env(k, v);
+ }
+ for arg in env::args().skip(1) {
+ cmd.arg(arg);
+ }
+
+ let status = cmd.status().expect("failed to spawn");
+ process::exit(status.code().unwrap_or(2))
+}
env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
(os.pathsep + env["DYLD_LIBRARY_PATH"]) \
if "DYLD_LIBRARY_PATH" in env else ""
+ env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
+ (os.pathsep + env["LIBRARY_PATH"]) \
+ if "LIBRARY_PATH" in env else ""
env["PATH"] = os.path.join(self.bin_root(), "bin") + \
os.pathsep + env["PATH"]
if not os.path.isfile(self.cargo()):
# The goal here is to come up with the same triple as LLVM would,
# at least for the subset of platforms we're willing to target.
if ostype == 'Linux':
- ostype = 'unknown-linux-gnu'
+ os = subprocess.check_output(['uname', '-o']).strip().decode(default_encoding)
+ if os == 'Android':
+ ostype = 'linux-android'
+ else:
+ ostype = 'unknown-linux-gnu'
elif ostype == 'FreeBSD':
ostype = 'unknown-freebsd'
elif ostype == 'DragonFly':
cputype = 'i686'
elif cputype in {'xscale', 'arm'}:
cputype = 'arm'
+ if ostype == 'linux-android':
+ ostype = 'linux-androideabi'
elif cputype == 'armv6l':
cputype = 'arm'
- ostype += 'eabihf'
+ if ostype == 'linux-android':
+ ostype = 'linux-androideabi'
+ else:
+ ostype += 'eabihf'
elif cputype in {'armv7l', 'armv8l'}:
cputype = 'armv7'
- ostype += 'eabihf'
- elif cputype == 'aarch64':
- cputype = 'aarch64'
- elif cputype == 'arm64':
+ if ostype == 'linux-android':
+ ostype = 'linux-androideabi'
+ else:
+ ostype += 'eabihf'
+ elif cputype in {'aarch64', 'arm64'}:
cputype = 'aarch64'
elif cputype == 'mips':
if sys.byteorder == 'big':
use dist;
use util::{self, dylib_path, dylib_path_var, exe};
-const ADB_TEST_DIR: &'static str = "/data/tmp";
+const ADB_TEST_DIR: &'static str = "/data/tmp/work";
/// The two modes of the test runner; tests or benchmarks.
#[derive(Copy, Clone)]
.arg("--llvm-cxxflags").arg("");
}
- if build.qemu_rootfs(target).is_some() {
- cmd.arg("--qemu-test-client")
+ if build.remote_tested(target) {
+ cmd.arg("--remote-test-client")
.arg(build.tool(&Compiler::new(0, &build.config.build),
- "qemu-test-client"));
+ "remote-test-client"));
}
// Running a C compiler on MSVC requires a few env vars to be set, to be
dylib_path.insert(0, build.sysroot_libdir(&compiler, target));
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
- if target.contains("android") ||
- target.contains("emscripten") ||
- build.qemu_rootfs(target).is_some() {
+ if target.contains("emscripten") || build.remote_tested(target) {
cargo.arg("--no-run");
}
let _time = util::timeit();
- if target.contains("android") {
- build.run(&mut cargo);
- krate_android(build, &compiler, target, mode);
- } else if target.contains("emscripten") {
+ if target.contains("emscripten") {
build.run(&mut cargo);
krate_emscripten(build, &compiler, target, mode);
- } else if build.qemu_rootfs(target).is_some() {
+ } else if build.remote_tested(target) {
build.run(&mut cargo);
- krate_qemu(build, &compiler, target, mode);
+ krate_remote(build, &compiler, target, mode);
} else {
cargo.args(&build.flags.cmd.test_args());
build.run(&mut cargo);
}
}
-fn krate_android(build: &Build,
- compiler: &Compiler,
- target: &str,
- mode: Mode) {
- let mut tests = Vec::new();
- let out_dir = build.cargo_out(compiler, mode, target);
- find_tests(&out_dir, target, &mut tests);
- find_tests(&out_dir.join("deps"), target, &mut tests);
-
- for test in tests {
- build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
-
- let test_file_name = test.file_name().unwrap().to_string_lossy();
- let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
- ADB_TEST_DIR,
- compiler.stage,
- target,
- compiler.host,
- test_file_name);
- let quiet = if build.config.quiet_tests { "--quiet" } else { "" };
- let program = format!("(cd {dir}; \
- LD_LIBRARY_PATH=./{target} ./{test} \
- --logfile {log} \
- {quiet} \
- {args})",
- dir = ADB_TEST_DIR,
- target = target,
- test = test_file_name,
- log = log,
- quiet = quiet,
- args = build.flags.cmd.test_args().join(" "));
-
- let output = output(Command::new("adb").arg("shell").arg(&program));
- println!("{}", output);
-
- t!(fs::create_dir_all(build.out.join("tmp")));
- build.run(Command::new("adb")
- .arg("pull")
- .arg(&log)
- .arg(build.out.join("tmp")));
- build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
- if !output.contains("result: ok") {
- panic!("some tests failed");
- }
- }
-}
-
fn krate_emscripten(build: &Build,
compiler: &Compiler,
target: &str,
mode: Mode) {
let mut tests = Vec::new();
let out_dir = build.cargo_out(compiler, mode, target);
- find_tests(&out_dir, target, &mut tests);
find_tests(&out_dir.join("deps"), target, &mut tests);
for test in tests {
}
}
-fn krate_qemu(build: &Build,
- compiler: &Compiler,
- target: &str,
- mode: Mode) {
+fn krate_remote(build: &Build,
+ compiler: &Compiler,
+ target: &str,
+ mode: Mode) {
let mut tests = Vec::new();
let out_dir = build.cargo_out(compiler, mode, target);
- find_tests(&out_dir, target, &mut tests);
find_tests(&out_dir.join("deps"), target, &mut tests);
let tool = build.tool(&Compiler::new(0, &build.config.build),
- "qemu-test-client");
+ "remote-test-client");
for test in tests {
let mut cmd = Command::new(&tool);
cmd.arg("run")
}
}
-
fn find_tests(dir: &Path,
target: &str,
dst: &mut Vec<PathBuf>) {
}
pub fn emulator_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
- if target.contains("android") {
- android_copy_libs(build, compiler, target)
- } else if let Some(s) = build.qemu_rootfs(target) {
- qemu_copy_libs(build, compiler, target, s)
- }
-}
-
-fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
- println!("Android copy libs to emulator ({})", target);
- build.run(Command::new("adb").arg("wait-for-device"));
- build.run(Command::new("adb").arg("remount"));
- build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
- build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
- build.run(Command::new("adb")
- .arg("push")
- .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
- .arg(ADB_TEST_DIR));
-
- let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
- build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir]));
-
- for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
- let f = t!(f);
- let name = f.file_name().into_string().unwrap();
- if util::is_dylib(&name) {
- build.run(Command::new("adb")
- .arg("push")
- .arg(f.path())
- .arg(&target_dir));
- }
+ if !build.remote_tested(target) {
+ return
}
-}
-fn qemu_copy_libs(build: &Build,
- compiler: &Compiler,
- target: &str,
- rootfs: &Path) {
- println!("QEMU copy libs to emulator ({})", target);
- assert!(target.starts_with("arm"), "only works with arm for now");
+ println!("REMOTE copy libs to emulator ({})", target);
t!(fs::create_dir_all(build.out.join("tmp")));
- // Copy our freshly compiled test server over to the rootfs
let server = build.cargo_out(compiler, Mode::Tool, target)
- .join(exe("qemu-test-server", target));
- t!(fs::copy(&server, rootfs.join("testd")));
+ .join(exe("remote-test-server", target));
// Spawn the emulator and wait for it to come online
let tool = build.tool(&Compiler::new(0, &build.config.build),
- "qemu-test-client");
- build.run(Command::new(&tool)
- .arg("spawn-emulator")
- .arg(rootfs)
- .arg(build.out.join("tmp")));
+ "remote-test-client");
+ let mut cmd = Command::new(&tool);
+ cmd.arg("spawn-emulator")
+ .arg(target)
+ .arg(&server)
+ .arg(build.out.join("tmp"));
+ if let Some(rootfs) = build.qemu_rootfs(target) {
+ cmd.arg(rootfs);
+ }
+ build.run(&mut cmd);
// Push all our dylibs to the emulator
for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
if target.contains("musl") && !target.contains("mips") {
copy_musl_third_party_objects(build, target, &libdir);
}
+
+ if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
+        // The sanitizers are only built in stage1 or above, so the dylibs will
+        // be missing in stage0 and cause a panic. See the `std()` function above
+        // for the reason why the sanitizers are not built in stage0.
+ copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir);
+ }
}
/// Copies the crt(1,i,n).o startup objects
}
}
+fn copy_apple_sanitizer_dylibs(native_dir: &Path, platform: &str, into: &Path) {
+ for &sanitizer in &["asan", "tsan"] {
+ let filename = format!("libclang_rt.{}_{}_dynamic.dylib", sanitizer, platform);
+ let mut src_path = native_dir.join(sanitizer);
+ src_path.push("build");
+ src_path.push("lib");
+ src_path.push("darwin");
+ src_path.push(&filename);
+ copy(&src_path, &into.join(filename));
+ }
+}
+
/// Build and prepare startup objects like rsbegin.o and rsend.o
///
/// These are primarily used on Windows right now for linking executables/dlls.
# support. You'll need to write a target specification at least, and most
# likely, teach rustc about the C ABI of the target. Get in touch with the
# Rust team and file an issue if you need assistance in porting!
-#targets = "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX"
+#targets = "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon"
# Cap the number of parallel linker invocations when compiling LLVM.
# This can be useful when building LLVM with debug info, which significantly
install(&build.src.join("src/etc/").join(file), &dst, 0o644);
};
if host.contains("windows-msvc") {
- // no debugger scripts
+ // windbg debugger scripts
+ install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
+ 0o755);
+
+ cp_debugger_script("natvis/libcore.natvis");
+ cp_debugger_script("natvis/libcollections.natvis");
} else {
cp_debugger_script("debugger_pretty_printers_common.py");
println!("Dist src");
- let name = pkgname(build, "rust-src");
- let image = tmpdir(build).join(format!("{}-image", name));
- let _ = fs::remove_dir_all(&image);
-
- let dst = image.join("lib/rustlib/src");
- let dst_src = dst.join("rust");
- t!(fs::create_dir_all(&dst_src));
+ // Make sure that the root folder of tarball has the correct name
+ let plain_name = format!("rustc-{}-src", build.rust_package_vers());
+ let plain_dst_src = tmpdir(build).join(&plain_name);
+ let _ = fs::remove_dir_all(&plain_dst_src);
+ t!(fs::create_dir_all(&plain_dst_src));
// This is the set of root paths which will become part of the source package
let src_files = [
// Copy the directories using our filter
for item in &src_dirs {
- let dst = &dst_src.join(item);
+ let dst = &plain_dst_src.join(item);
t!(fs::create_dir(dst));
cp_filtered(&build.src.join(item), dst, &filter_fn);
}
// Copy the files normally
for item in &src_files {
- copy(&build.src.join(item), &dst_src.join(item));
+ copy(&build.src.join(item), &plain_dst_src.join(item));
}
// If we're building from git sources, we need to vendor a complete distribution.
// Vendor all Cargo dependencies
let mut cmd = Command::new(&build.cargo);
cmd.arg("vendor")
- .current_dir(&dst_src.join("src"));
+ .current_dir(&plain_dst_src.join("src"));
build.run(&mut cmd);
}
+ // Create the version file
+ write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
+
+ // Create plain source tarball
+ let tarball = rust_src_location(build);
+ if let Some(dir) = tarball.parent() {
+ t!(fs::create_dir_all(dir));
+ }
+ let mut cmd = Command::new("tar");
+ cmd.arg("-czf").arg(sanitize_sh(&tarball))
+ .arg(&plain_name)
+ .current_dir(tmpdir(build));
+ build.run(&mut cmd);
+
+
+ let name = pkgname(build, "rust-src");
+ let image = tmpdir(build).join(format!("{}-image", name));
+ let _ = fs::remove_dir_all(&image);
+
+ let dst = image.join("lib/rustlib/src");
+ let dst_src = dst.join("rust");
+ t!(fs::create_dir_all(&dst_src));
+
+ // This is the reduced set of paths which will become the rust-src component
+ // (essentially libstd and all of its path dependencies)
+ let std_src_dirs = [
+ "src/build_helper",
+ "src/liballoc",
+ "src/liballoc_jemalloc",
+ "src/liballoc_system",
+ "src/libcollections",
+ "src/libcompiler_builtins",
+ "src/libcore",
+ "src/liblibc",
+ "src/libpanic_abort",
+ "src/libpanic_unwind",
+ "src/librand",
+ "src/librustc_asan",
+ "src/librustc_lsan",
+ "src/librustc_msan",
+ "src/librustc_tsan",
+ "src/libstd",
+ "src/libstd_unicode",
+ "src/libunwind",
+ "src/rustc/libc_shim",
+ ];
+
+ for item in &std_src_dirs {
+ let dst = &dst_src.join(item);
+ t!(fs::create_dir_all(dst));
+ cp_r(&plain_dst_src.join(item), dst);
+ }
+
// Create source tarball in rust-installer format
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
.arg("--legacy-manifest-dirs=rustlib,cargo");
build.run(&mut cmd);
- // Rename directory, so that root folder of tarball has the correct name
- let plain_name = format!("rustc-{}-src", build.rust_package_vers());
- let plain_dst_src = tmpdir(build).join(&plain_name);
- let _ = fs::remove_dir_all(&plain_dst_src);
- t!(fs::create_dir_all(&plain_dst_src));
- cp_r(&dst_src, &plain_dst_src);
-
- // Create the version file
- write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
-
- // Create plain source tarball
- let mut cmd = Command::new("tar");
- cmd.arg("-czf").arg(sanitize_sh(&rust_src_location(build)))
- .arg(&plain_name)
- .current_dir(tmpdir(build));
- build.run(&mut cmd);
-
t!(fs::remove_dir_all(&image));
t!(fs::remove_dir_all(&plain_dst_src));
}
.map(|p| &**p)
}
+ /// Returns whether the target will be tested using the `remote-test-client`
+ /// and `remote-test-server` binaries.
+ fn remote_tested(&self, target: &str) -> bool {
+ self.qemu_rootfs(target).is_some() || target.contains("android")
+ }
+
/// Returns the root of the "rootfs" image that this target will be using,
/// if one was configured.
///
// the dependency graph and what `-p` arguments there are.
let mut cargo = Command::new(&build.cargo);
cargo.arg("metadata")
+ .arg("--format-version").arg("1")
.arg("--manifest-path").arg(build.src.join(krate).join("Cargo.toml"));
let output = output(&mut cargo);
let output: Output = json::decode(&output).unwrap();
//! ensure that they're always in place if needed.
use std::env;
+use std::ffi::OsString;
use std::fs::{self, File};
use std::io::{Read, Write};
use std::path::Path;
// NOTE: remember to also update `config.toml.example` when changing the defaults!
let llvm_targets = match build.config.llvm_targets {
Some(ref s) => s,
- None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX",
+ None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon",
};
let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
.define("LLVM_TABLEGEN", &host);
}
- // MSVC handles compiler business itself
- if !target.contains("msvc") {
- if let Some(ref ccache) = build.config.ccache {
+ let sanitize_cc = |cc: &Path| {
+ if target.contains("msvc") {
+ OsString::from(cc.to_str().unwrap().replace("\\", "/"))
+ } else {
+ cc.as_os_str().to_owned()
+ }
+ };
+
+ let configure_compilers = |cfg: &mut cmake::Config| {
+ // MSVC with CMake uses msbuild by default which doesn't respect these
+ // vars that we'd otherwise configure. In that case we just skip this
+ // entirely.
+ if target.contains("msvc") && !build.config.ninja {
+ return
+ }
+
+ let cc = build.cc(target);
+ let cxx = build.cxx(target);
+
+ // Handle msvc + ninja + ccache specially (this is what the bots use)
+ if target.contains("msvc") &&
+ build.config.ninja &&
+ build.config.ccache.is_some() {
+ let mut cc = env::current_exe().expect("failed to get cwd");
+ cc.set_file_name("sccache-plus-cl.exe");
+
+ cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
+ .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
+ cfg.env("SCCACHE_PATH",
+ build.config.ccache.as_ref().unwrap())
+ .env("SCCACHE_TARGET", target);
+
+        // If ccache is configured we inform the build a little differently how
+ // to invoke ccache while also invoking our compilers.
+ } else if let Some(ref ccache) = build.config.ccache {
cfg.define("CMAKE_C_COMPILER", ccache)
- .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
+ .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
.define("CMAKE_CXX_COMPILER", ccache)
- .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
+ .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
} else {
- cfg.define("CMAKE_C_COMPILER", build.cc(target))
- .define("CMAKE_CXX_COMPILER", build.cxx(target));
+ cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
+ .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
}
- cfg.build_arg("-j").build_arg(build.jobs().to_string());
+ cfg.build_arg("-j").build_arg(build.jobs().to_string());
cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
- }
+ };
+
+ configure_compilers(&mut cfg);
if env::var_os("SCCACHE_ERROR_LOG").is_some() {
cfg.env("RUST_LOG", "sccache=info");
println!("Configuring openssl for {}", target);
build.run_quiet(&mut configure);
println!("Building openssl for {}", target);
- build.run_quiet(Command::new("make").current_dir(&obj));
+ build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
println!("Installing openssl for {}", target);
build.run_quiet(Command::new("make").arg("install").current_dir(&obj));
rules.test("emulator-copy-libs", "path/to/nowhere")
.dep(|s| s.name("libtest"))
.dep(move |s| {
- if build.qemu_rootfs(s.target).is_some() {
- s.name("tool-qemu-test-client").target(s.host).stage(0)
+ if build.remote_tested(s.target) {
+ s.name("tool-remote-test-client").target(s.host).stage(0)
} else {
Step::noop()
}
})
.dep(move |s| {
- if build.qemu_rootfs(s.target).is_some() {
- s.name("tool-qemu-test-server")
+ if build.remote_tested(s.target) {
+ s.name("tool-remote-test-server")
} else {
Step::noop()
}
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
- rules.build("tool-qemu-test-server", "src/tools/qemu-test-server")
+ rules.build("tool-remote-test-server", "src/tools/remote-test-server")
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
- .run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-server"));
- rules.build("tool-qemu-test-client", "src/tools/qemu-test-client")
+ .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-server"));
+ rules.build("tool-remote-test-client", "src/tools/remote-test-client")
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
- .run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-client"));
+ .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
rules.build("tool-cargo", "cargo")
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
"PATH"
} else if cfg!(target_os = "macos") {
"DYLD_LIBRARY_PATH"
+ } else if cfg!(target_os = "haiku") {
+ "LIBRARY_PATH"
} else {
"LD_LIBRARY_PATH"
}
let out_dir = env::var_os("RUSTBUILD_NATIVE_DIR").unwrap_or(env::var_os("OUT_DIR").unwrap());
let out_dir = PathBuf::from(out_dir).join(out_name);
t!(create_dir_racy(&out_dir));
- println!("cargo:rustc-link-lib=static={}", link_name);
+ if link_name.contains('=') {
+ println!("cargo:rustc-link-lib={}", link_name);
+ } else {
+ println!("cargo:rustc-link-lib=static={}", link_name);
+ }
println!("cargo:rustc-link-search=native={}", out_dir.join(search_subdir).display());
let timestamp = out_dir.join("rustbuild.timestamp");
}
}
+pub fn sanitizer_lib_boilerplate(sanitizer_name: &str) -> Result<NativeLibBoilerplate, ()> {
+ let (link_name, search_path) = match &*env::var("TARGET").unwrap() {
+ "x86_64-unknown-linux-gnu" => (
+ format!("clang_rt.{}-x86_64", sanitizer_name),
+ "build/lib/linux",
+ ),
+ "x86_64-apple-darwin" => (
+ format!("dylib=clang_rt.{}_osx_dynamic", sanitizer_name),
+ "build/lib/darwin",
+ ),
+ _ => return Err(()),
+ };
+ native_lib_boilerplate("compiler-rt", sanitizer_name, &link_name, search_path)
+}
+
fn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {
t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {
let meta = t!(e.metadata());
RUN curl -O http://ftp.nl.debian.org/debian/dists/jessie/main/installer-armhf/current/images/device-tree/vexpress-v2p-ca15-tc1.dtb
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/aarch64-unknown-linux-gnueabi/bin
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV TARGETS=arm-linux-androideabi
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabihf/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/armv7-unknown-linux-gnueabihf/bin
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV RUST_CONFIGURE_ARGS \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/rustroot/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV HOSTS=i686-unknown-linux-gnu
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/powerpc-unknown-linux-gnu/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/powerpc64-unknown-linux-gnu/bin
RUN ./build-powerpc64le-toolchain.sh
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/s390x-ibm-linux-gnu/bin
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/rustroot/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV HOSTS=x86_64-unknown-linux-gnu
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV RUST_CONFIGURE_ARGS \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/x86_64-unknown-netbsd/bin
lib32stdc++6
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-04-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-19-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
* [The Rustonomicon][nomicon] is your guidebook to the dark arts of unsafe Rust.
* [The Reference][ref] is not a formal spec, but is more detailed and comprehensive than the book.
+Initially, documentation lands in the Unstable Book, and then, as part of the
+stabilization process, is moved into the Book, Nomicon, or Reference.
+
Another few words about the reference: it is guaranteed to be accurate, but not
-complete. We now have a policy that all new features must be included in the
-reference before stabilization; however, we are still back-filling things that
-landed before then. That work is being tracked [here][38643].
+complete. We have a policy that features must have documentation to be stabilized,
+but we did not always have this policy, and so there are some stable things that
+are not yet in the reference. We're working on back-filling things that landed
+before this policy was put into place. That work is being tracked
+[here][refchecklist].
[Rust Learning]: https://github.com/ctjhoa/rust-learning
[Docs.rs]: https://docs.rs/
[api]: std/index.html
[ref]: reference/index.html
-[38643]: https://github.com/rust-lang/rust/issues/38643
+[refchecklist]: https://github.com/rust-lang-nursery/reference/issues/9
[err]: error-index.html
[book]: book/index.html
[nomicon]: nomicon/index.html
- [Compiler flags](compiler-flags.md)
- [linker_flavor](compiler-flags/linker-flavor.md)
+ - [remap_path_prefix](compiler-flags/remap-path-prefix.md)
- [Language features](language-features.md)
- [abi_msp430_interrupt](language-features/abi-msp430-interrupt.md)
- [abi_ptx](language-features/abi-ptx.md)
- [linked_list_extras](library-features/linked-list-extras.md)
- [lookup_host](library-features/lookup-host.md)
- [manually_drop](library-features/manually-drop.md)
+ - [more_io_inner_methods](library-features/more-io-inner-methods.md)
- [mpsc_select](library-features/mpsc-select.md)
- [n16](library-features/n16.md)
- [never_type_impls](library-features/never-type-impls.md)
- [str_checked_slicing](library-features/str-checked-slicing.md)
- [str_escape](library-features/str-escape.md)
- [str_internals](library-features/str-internals.md)
+ - [str_box_extras](library-features/str-box-extras.md)
- [str_mut_extras](library-features/str-mut-extras.md)
- [test](library-features/test.md)
- [thread_id](library-features/thread-id.md)
--- /dev/null
+# `remap-path-prefix`
+
+The tracking issue for this feature is: [#41555](https://github.com/rust-lang/rust/issues/41555)
+
+------------------------
+
+The `-Z remap-path-prefix-from`, `-Z remap-path-prefix-to` command-line option
+pair allows replacing prefixes of any file paths the compiler emits in various
+places. This is useful for bringing debuginfo paths into a well-known form and
+for achieving reproducible builds independent of the directory the compiler was
+executed in. All paths emitted by the compiler are affected, including those in
+error messages.
+
+In order to map all paths starting with `/home/foo/my-project/src` to
+`/sources/my-project`, one would invoke the compiler as follows:
+
+```text
+rustc -Zremap-path-prefix-from="/home/foo/my-project/src" -Zremap-path-prefix-to="/sources/my-project"
+```
+
+Debuginfo for code from the file `/home/foo/my-project/src/foo/mod.rs`,
+for example, would then point debuggers to `/sources/my-project/foo/mod.rs`
+instead of the original file.
+
+The options can be specified multiple times when multiple prefixes should be
+mapped:
+
+```text
+rustc -Zremap-path-prefix-from="/home/foo/my-project/src" \
+ -Zremap-path-prefix-to="/sources/my-project" \
+ -Zremap-path-prefix-from="/home/foo/my-project/build-dir" \
+ -Zremap-path-prefix-to="/stable-build-dir"
+```
+
+When the options are given multiple times, the nth `-from` will be matched up
+with the nth `-to` and they can appear anywhere on the command line. Mappings
+specified later on the line will take precedence over earlier ones.
--- /dev/null
+# `more_io_inner_methods`
+
+The tracking issue for this feature is: [#41519]
+
+[#41519]: https://github.com/rust-lang/rust/issues/41519
+
+------------------------
+
+This feature enables several internal accessor methods on structures in
+`std::io` including `Take::{get_ref, get_mut}` and `Chain::{into_inner, get_ref,
+get_mut}`.
--- /dev/null
+# `str_box_extras`
+
+The tracking issue for this feature is: [#41119]
+
+[#41119]: https://github.com/rust-lang/rust/issues/41119
+
+------------------------
+
+
+++ /dev/null
-# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-#
-# ignore-tidy-linelength
-#
-# usage : adb_run_wrapper [test dir - where test executables exist] [test executable]
-#
-
-TEST_PATH=$1
-BIN_PATH=/system/bin
-if [ -d "$TEST_PATH" ]
-then
- shift
- RUN=$1
-
- if [ ! -z "$RUN" ]
- then
- shift
-
- # The length of binary path (i.e. ./$RUN) should be shorter than 128 characters.
- cd $TEST_PATH
- TEST_EXEC_ENV=22 LD_LIBRARY_PATH=$TEST_PATH PATH=$BIN_PATH:$TEST_PATH ./$RUN $@ 1>$TEST_PATH/$RUN.stdout 2>$TEST_PATH/$RUN.stderr
- L_RET=$?
-
- echo $L_RET > $TEST_PATH/$RUN.exitcode
-
- fi
-fi
elif key == "libraries":
lib_path.extend(val.lstrip(' =').split(';'))
- target_tools = ["gcc.exe", "ld.exe", "ar.exe", "dlltool.exe"]
+ target_tools = ["gcc.exe", "ld.exe", "ar.exe", "dlltool.exe",
+ "libwinpthread-1.dll"]
rustc_dlls = ["libstdc++-6.dll", "libwinpthread-1.dll"]
if target_triple.startswith("i686-"):
--- /dev/null
+@echo off
+setlocal
+
+REM Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+REM file at the top-level directory of this distribution and at
+REM http://rust-lang.org/COPYRIGHT.
+REM
+REM Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+REM http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+REM <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+REM option. This file may not be copied, modified, or distributed
+REM except according to those terms.
+
+for /f "delims=" %%i in ('rustc --print=sysroot') do set rustc_sysroot=%%i
+
+set rust_etc=%rustc_sysroot%\lib\rustlib\etc
+
+windbg -c ".nvload %rust_etc%\libcore.natvis;.nvload %rust_etc%\libcollections.natvis;" %*
\ No newline at end of file
%token TRAIT
%token TYPE
%token UNSAFE
+%token DEFAULT
%token USE
%token WHILE
%token CONTINUE
| %empty { $$ = mk_none(); }
;
+maybe_default_maybe_unsafe
+: DEFAULT UNSAFE { $$ = mk_atom("DefaultUnsafe"); }
+| DEFAULT { $$ = mk_atom("Default"); }
+| UNSAFE { $$ = mk_atom("Unsafe"); }
+| %empty { $$ = mk_none(); }
+
trait_method
: type_method { $$ = mk_node("Required", 1, $1); }
| method { $$ = mk_node("Provided", 1, $1); }
// they are ambiguous with traits. We do the same here, regrettably,
// by splitting ty into ty and ty_prim.
item_impl
-: maybe_unsafe IMPL generic_params ty_prim_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}'
+: maybe_default_maybe_unsafe IMPL generic_params ty_prim_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}'
{
$$ = mk_node("ItemImpl", 6, $1, $3, $4, $5, $7, $8);
}
-| maybe_unsafe IMPL generic_params '(' ty ')' maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}'
+| maybe_default_maybe_unsafe IMPL generic_params '(' ty ')' maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}'
{
$$ = mk_node("ItemImpl", 6, $1, $3, 5, $6, $9, $10);
}
-| maybe_unsafe IMPL generic_params trait_ref FOR ty_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}'
+| maybe_default_maybe_unsafe IMPL generic_params trait_ref FOR ty_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}'
{
$$ = mk_node("ItemImpl", 6, $3, $4, $6, $7, $9, $10);
}
-| maybe_unsafe IMPL generic_params '!' trait_ref FOR ty_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}'
+| maybe_default_maybe_unsafe IMPL generic_params '!' trait_ref FOR ty_sum maybe_where_clause '{' maybe_inner_attrs maybe_impl_items '}'
{
$$ = mk_node("ItemImplNeg", 7, $1, $3, $5, $7, $8, $10, $11);
}
-| maybe_unsafe IMPL generic_params trait_ref FOR DOTDOT '{' '}'
+| maybe_default_maybe_unsafe IMPL generic_params trait_ref FOR DOTDOT '{' '}'
{
$$ = mk_node("ItemImplDefault", 3, $1, $3, $4);
}
-| maybe_unsafe IMPL generic_params '!' trait_ref FOR DOTDOT '{' '}'
+| maybe_default_maybe_unsafe IMPL generic_params '!' trait_ref FOR DOTDOT '{' '}'
{
$$ = mk_node("ItemImplDefaultNeg", 3, $1, $3, $4);
}
$2,
mk_node("TTTok", 1, mk_atom("]")));
}
-;
+;
\ No newline at end of file
syntax::errors::registry::Registry::new(&[]),
Rc::new(DummyCrateStore));
let filemap = session.parse_sess.codemap()
- .new_filemap("<n/a>".to_string(), None, code);
+ .new_filemap("<n/a>".to_string(), code);
let mut lexer = lexer::StringReader::new(session.diagnostic(), filemap);
let cm = session.codemap();
use core::ops::{BoxPlace, Boxed, InPlace, Place, Placer};
use core::ptr::{self, Unique};
use core::convert::From;
+use str::from_boxed_utf8_unchecked;
/// A value that represents the heap. This is the default place that the `box`
/// keyword allocates into when no place is supplied.
#[stable(feature = "default_box_extra", since = "1.17.0")]
impl Default for Box<str> {
fn default() -> Box<str> {
- let default: Box<[u8]> = Default::default();
- unsafe { mem::transmute(default) }
+ unsafe { from_boxed_utf8_unchecked(Default::default()) }
}
}
let buf = RawVec::with_capacity(len);
unsafe {
ptr::copy_nonoverlapping(self.as_ptr(), buf.ptr(), len);
- mem::transmute(buf.into_box()) // bytes to str ~magic
+ from_boxed_utf8_unchecked(buf.into_box())
}
}
}
#[stable(feature = "box_from_slice", since = "1.17.0")]
impl<'a> From<&'a str> for Box<str> {
fn from(s: &'a str) -> Box<str> {
- let boxed: Box<[u8]> = Box::from(s.as_bytes());
- unsafe { mem::transmute(boxed) }
+ unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
+ }
+}
+
+#[stable(feature = "boxed_str_conv", since = "1.18.0")]
+impl From<Box<str>> for Box<[u8]> {
+ fn from(s: Box<str>) -> Self {
+ unsafe {
+ mem::transmute(s)
+ }
}
}
pub mod arc;
pub mod rc;
pub mod raw_vec;
+#[unstable(feature = "str_box_extras", issue = "41119")]
+pub mod str;
pub mod oom;
pub use oom::oom;
--- /dev/null
+// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Methods for dealing with boxed strings.
+use core::mem;
+
+use boxed::Box;
+
+/// Converts a boxed slice of bytes to a boxed string slice without checking
+/// that the string contains valid UTF-8.
+#[unstable(feature = "str_box_extras", issue = "41119")]
+pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
+ mem::transmute(v)
+}
#![feature(specialization)]
#![feature(staged_api)]
#![feature(str_internals)]
+#![feature(str_box_extras)]
#![feature(str_mut_extras)]
#![feature(trusted_len)]
#![feature(unicode)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{MatchIndices, RMatchIndices};
#[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{from_utf8, Chars, CharIndices, Bytes};
+pub use core::str::{from_utf8, from_utf8_mut, Chars, CharIndices, Bytes};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError};
+#[unstable(feature = "str_box_extras", issue = "41119")]
+pub use alloc::str::from_boxed_utf8_unchecked;
#[stable(feature = "rust1", since = "1.0.0")]
pub use std_unicode::str::SplitWhitespace;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::pattern;
+
#[unstable(feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747")]
core_str::StrExt::parse(self)
}
+ /// Converts a `Box<str>` into a `Box<[u8]>` without copying or allocating.
+ #[unstable(feature = "str_box_extras", issue = "41119")]
+ pub fn into_boxed_bytes(self: Box<str>) -> Box<[u8]> {
+ self.into()
+ }
+
/// Replaces all matches of a pattern with another string.
///
/// `replace` creates a new [`String`], and copies the data from this string slice into it.
#![stable(feature = "rust1", since = "1.0.0")]
+use alloc::str as alloc_str;
+
use core::fmt;
use core::hash;
use core::iter::{FromIterator, FusedIterator};
-use core::mem;
use core::ops::{self, Add, AddAssign, Index, IndexMut};
use core::ptr;
use core::str as core_str;
#[stable(feature = "box_str", since = "1.4.0")]
pub fn into_boxed_str(self) -> Box<str> {
let slice = self.vec.into_boxed_slice();
- unsafe { mem::transmute::<Box<[u8]>, Box<str>>(slice) }
+ unsafe { alloc_str::from_boxed_utf8_unchecked(slice) }
}
}
/// removed data to be erased for security purposes. Even if you drop a `Vec`, its
/// buffer may simply be reused by another `Vec`. Even if you zero a `Vec`'s memory
/// first, that may not actually happen because the optimizer does not consider
-/// this a side-effect that must be preserved.
+/// this a side-effect that must be preserved. There is one case which we will
+/// not break, however: using `unsafe` code to write to the excess capacity,
+/// and then increasing the length to match, is always valid.
///
/// `Vec` does not currently guarantee the order in which elements are dropped
/// (the order has changed in the past, and may change again).
self.truncate(0)
}
- /// Returns the number of elements in the vector.
+ /// Returns the number of elements in the vector, also referred to
+ /// as its 'length'.
///
/// # Examples
///
}
}
+#[stable(feature = "vec_from_mut", since = "1.21.0")]
+impl<'a, T: Clone> From<&'a mut [T]> for Vec<T> {
+ #[cfg(not(test))]
+ fn from(s: &'a mut [T]) -> Vec<T> {
+ s.to_vec()
+ }
+ #[cfg(test)]
+ fn from(s: &'a mut [T]) -> Vec<T> {
+ ::slice::to_vec(s)
+ }
+}
+
#[stable(feature = "vec_from_cow_slice", since = "1.14.0")]
impl<'a, T> From<Cow<'a, [T]>> for Vec<T> where [T]: ToOwned<Owned=Vec<T>> {
fn from(s: Cow<'a, [T]>) -> Vec<T> {
///
/// Note that the underlying iterator is still advanced when [`peek`] is
/// called for the first time: In order to retrieve the next element,
- /// [`next`] is called on the underlying iterator, hence any side effects of
- /// the [`next`] method will occur.
+ /// [`next`] is called on the underlying iterator, hence any side effects (i.e.
+ /// anything other than fetching the next value) of the [`next`] method
+ /// will occur.
///
/// [`peek`]: struct.Peekable.html#method.peek
/// [`next`]: ../../std/iter/trait.Iterator.html#tymethod.next
#[inline]
fn replace_one(&mut self) -> Self {
- mem::replace(self, 0)
+ mem::replace(self, 1)
}
#[inline]
fn replace_zero(&mut self) -> Self {
- mem::replace(self, 1)
+ mem::replace(self, 0)
}
#[inline]
#[inline]
fn replace_one(&mut self) -> Self {
- mem::replace(self, 0)
+ mem::replace(self, 1)
}
#[inline]
fn replace_zero(&mut self) -> Self {
- mem::replace(self, 1)
+ mem::replace(self, 0)
}
#[inline]
#[inline]
fn replace_one(&mut self) -> Self {
- mem::replace(self, 0)
+ mem::replace(self, 1)
}
#[inline]
fn replace_zero(&mut self) -> Self {
- mem::replace(self, 1)
+ mem::replace(self, 0)
}
#[inline]
assert_eq!(&[2, 3, 1, 2, 0], &result[..]);
}
+#[test]
+fn test_step_replace_unsigned() {
+ let mut x = 4u32;
+ let y = x.replace_zero();
+ assert_eq!(x, 0);
+ assert_eq!(y, 4);
+
+ x = 5;
+ let y = x.replace_one();
+ assert_eq!(x, 1);
+ assert_eq!(y, 5);
+}
+
+#[test]
+fn test_step_replace_signed() {
+ let mut x = 4i32;
+ let y = x.replace_zero();
+ assert_eq!(x, 0);
+ assert_eq!(y, 4);
+
+ x = 5;
+ let y = x.replace_one();
+ assert_eq!(x, 1);
+ assert_eq!(y, 5);
+}
+
+#[test]
+fn test_step_replace_no_between() {
+ let mut x = 4u128;
+ let y = x.replace_zero();
+ assert_eq!(x, 0);
+ assert_eq!(y, 4);
+
+ x = 5;
+ let y = x.replace_one();
+ assert_eq!(x, 1);
+ assert_eq!(y, 5);
+}
\ No newline at end of file
#![feature(fixed_size_array)]
#![feature(flt2dec)]
#![feature(fmt_internals)]
+#![feature(i128_type)]
#![feature(iter_rfind)]
#![feature(libc)]
#![feature(nonzero)]
#![feature(sort_internals)]
#![feature(sort_unstable)]
#![feature(step_by)]
+#![feature(step_trait)]
#![feature(test)]
#![feature(try_from)]
#![feature(unicode)]
// Find the tables for this body.
let owner_def_id = tcx.hir.local_def_id(tcx.hir.body_owner(body.id()));
- let tables = tcx.item_tables(owner_def_id);
+ let tables = tcx.typeck_tables_of(owner_def_id);
let mut cfg_builder = CFGBuilder {
tcx: tcx,
TypeckTables(D),
UsedTraitImports(D),
ConstEval(D),
+ SymbolName(D),
// The set of impls for a given trait. Ultimately, it would be
// nice to get more fine-grained here (e.g., to include a
TypeckTables(ref d) => op(d).map(TypeckTables),
UsedTraitImports(ref d) => op(d).map(UsedTraitImports),
ConstEval(ref d) => op(d).map(ConstEval),
+ SymbolName(ref d) => op(d).map(SymbolName),
TraitImpls(ref d) => op(d).map(TraitImpls),
TraitItems(ref d) => op(d).map(TraitItems),
ReprHints(ref d) => op(d).map(ReprHints),
hir::ItemDefaultImpl(self.lower_unsafety(unsafety),
trait_ref)
}
- ItemKind::Impl(unsafety, polarity, ref generics, ref ifce, ref ty, ref impl_items) => {
+ ItemKind::Impl(unsafety,
+ polarity,
+ defaultness,
+ ref generics,
+ ref ifce,
+ ref ty,
+ ref impl_items) => {
let new_impl_items = impl_items.iter()
.map(|item| self.lower_impl_item_ref(item))
.collect();
hir::ItemImpl(self.lower_unsafety(unsafety),
self.lower_impl_polarity(polarity),
+ self.lower_defaultness(defaultness, true /* [1] */),
self.lower_generics(generics),
ifce,
self.lower_ty(ty),
}
ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
}
+
+ // [1] `defaultness.has_value()` is never called for an `impl`, always `true` in order to
+ // not cause an assertion failure inside the `lower_defaultness` function
}
fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem {
/// An implementation, eg `impl<A> Trait for Foo { .. }`
ItemImpl(Unsafety,
ImplPolarity,
+ Defaultness,
Generics,
Option<TraitRef>, // (optional) trait this impl implements
P<Ty>, // self
}
hir::ItemImpl(unsafety,
polarity,
+ defaultness,
ref generics,
ref opt_trait,
ref ty,
ref impl_items) => {
self.head("")?;
self.print_visibility(&item.vis)?;
+ self.print_defaultness(defaultness)?;
self.print_unsafety(unsafety)?;
self.word_nbsp("impl")?;
}
}
+ pub fn print_defaultness(&mut self, defaultness: hir::Defaultness) -> io::Result<()> {
+ match defaultness {
+ hir::Defaultness::Default { .. } => self.word_nbsp("default")?,
+ hir::Defaultness::Final => (),
+ }
+ Ok(())
+ }
+
pub fn print_struct(&mut self,
struct_def: &hir::VariantData,
generics: &hir::Generics,
self.hardbreak_if_not_bol()?;
self.maybe_print_comment(ii.span.lo)?;
self.print_outer_attributes(&ii.attrs)?;
-
- match ii.defaultness {
- hir::Defaultness::Default { .. } => self.word_nbsp("default")?,
- hir::Defaultness::Final => (),
- }
+ self.print_defaultness(ii.defaultness)?;
match ii.node {
hir::ImplItemKind::Const(ref ty, expr) => {
ItemUnion(variant_data, generics),
ItemTrait(unsafety, generics, bounds, item_refs),
ItemDefaultImpl(unsafety, trait_ref),
- ItemImpl(unsafety, impl_polarity, generics, trait_ref, ty, impl_item_refs)
+ ItemImpl(unsafety, impl_polarity, impl_defaultness, generics, trait_ref, ty, impl_item_refs)
});
impl_stable_hash_for!(struct hir::TraitItemRef {
Option<ty::TypeckTables<'tcx>>,
Option<ty::ParameterEnvironment<'tcx>>) {
let item_id = tcx.hir.body_owner(self);
- (Some(tcx.item_tables(tcx.hir.local_def_id(item_id))),
+ (Some(tcx.typeck_tables_of(tcx.hir.local_def_id(item_id))),
None,
Some(ty::ParameterEnvironment::for_item(tcx, item_id)))
}
substs: &[Kind<'tcx>])
-> Ty<'tcx> {
let default = if def.has_default {
- let default = self.tcx.item_type(def.def_id);
+ let default = self.tcx.type_of(def.def_id);
Some(type_variable::Default {
ty: default.subst_spanned(self.tcx, substs, Some(span)),
origin_span: span,
use syntax::attr;
use syntax::ast;
use syntax::symbol::Symbol;
-use syntax_pos::{DUMMY_SP, MultiSpan, Span};
+use syntax_pos::{MultiSpan, Span};
use errors::{self, Diagnostic, DiagnosticBuilder};
use hir;
use hir::def_id::LOCAL_CRATE;
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let _task = tcx.dep_graph.in_task(DepNode::LateLintCheck);
- let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+ let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
let krate = tcx.hir.krate();
use hir;
use hir::def::Def;
use hir::def_id::DefId;
-use ty::{self, TyCtxt};
+use ty::TyCtxt;
use ty::subst::Substs;
use util::common::ErrorReported;
use rustc_const_math::*;
let count_expr = &tcx.hir.body(count).value;
let count_def_id = tcx.hir.body_owner_def_id(count);
let substs = Substs::empty();
- match ty::queries::const_eval::get(tcx, count_expr.span, (count_def_id, substs)) {
+ match tcx.at(count_expr.span).const_eval((count_def_id, substs)) {
Ok(Integral(Usize(count))) => {
let val = count.as_u64(tcx.sess.target.uint_type);
assert_eq!(val as usize as u64, val);
// are *mostly* used as a part of that interface, but these should
// probably get a better home if someone can find one.
-use hir::def::{self, Def};
+use hir::def;
use hir::def_id::{CrateNum, DefId, DefIndex};
use hir::map as hir_map;
use hir::map::definitions::{Definitions, DefKey, DisambiguatedDefPathData};
fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc<Any>;
// item info
- fn describe_def(&self, def: DefId) -> Option<Def>;
fn def_span(&self, sess: &Session, def: DefId) -> Span;
fn stability(&self, def: DefId) -> Option<attr::Stability>;
fn deprecation(&self, def: DefId) -> Option<attr::Deprecation>;
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>;
// impl info
+ fn impl_defaultness(&self, def: DefId) -> hir::Defaultness;
fn impl_parent(&self, impl_def_id: DefId) -> Option<DefId>;
// trait/impl-item info
fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc<Any>
{ bug!("crate_data_as_rc_any") }
// item info
- fn describe_def(&self, def: DefId) -> Option<Def> { bug!("describe_def") }
fn def_span(&self, sess: &Session, def: DefId) -> Span { bug!("def_span") }
fn stability(&self, def: DefId) -> Option<attr::Stability> { bug!("stability") }
fn deprecation(&self, def: DefId) -> Option<attr::Deprecation> { bug!("deprecation") }
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] }
// impl info
+ fn impl_defaultness(&self, def: DefId) -> hir::Defaultness { bug!("impl_defaultness") }
fn impl_parent(&self, def: DefId) -> Option<DefId> { bug!("impl_parent") }
// trait/impl-item info
use syntax::{ast, codemap};
use syntax::attr;
-use syntax::codemap::DUMMY_SP;
use syntax_pos;
// Any local node that may call something in its body block should be
match item.node {
hir::ItemStruct(..) | hir::ItemUnion(..) => {
let def_id = self.tcx.hir.local_def_id(item.id);
- let def = self.tcx.lookup_adt_def(def_id);
+ let def = self.tcx.adt_def(def_id);
self.struct_has_extern_repr = def.repr.c();
intravisit::walk_item(self, &item);
}
fn should_warn_about_field(&mut self, field: &hir::StructField) -> bool {
- let field_type = self.tcx.item_type(self.tcx.hir.local_def_id(field.id));
+ let field_type = self.tcx.type_of(self.tcx.hir.local_def_id(field.id));
let is_marker_field = match field_type.ty_to_def_id() {
Some(def_id) => self.tcx.lang_items.items().iter().any(|item| *item == Some(def_id)),
_ => false
// This is done to handle the case where, for example, the static
// method of a private type is used, but the type itself is never
// called directly.
- if let Some(impl_list) =
- self.tcx.maps.inherent_impls.borrow().get(&self.tcx.hir.local_def_id(id)) {
- for &impl_did in impl_list.iter() {
- for &item_did in &self.tcx.associated_item_def_ids(impl_did)[..] {
- if let Some(item_node_id) = self.tcx.hir.as_local_node_id(item_did) {
- if self.live_symbols.contains(&item_node_id) {
- return true;
- }
+ let def_id = self.tcx.hir.local_def_id(id);
+ let inherent_impls = self.tcx.inherent_impls(def_id);
+ for &impl_did in inherent_impls.iter() {
+ for &item_did in &self.tcx.associated_item_def_ids(impl_did)[..] {
+ if let Some(item_node_id) = self.tcx.hir.as_local_node_id(item_did) {
+ if self.live_symbols.contains(&item_node_id) {
+ return true;
}
}
}
}
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+ let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
let krate = tcx.hir.krate();
let live_symbols = find_live(tcx, access_levels, krate);
let mut visitor = DeadVisitor { tcx: tcx, live_symbols: live_symbols };
Def::Variant(variant_did) |
Def::VariantCtor(variant_did, ..) => {
let enum_did = tcx.parent_def_id(variant_did).unwrap();
- let downcast_cmt = if tcx.lookup_adt_def(enum_did).is_univariant() {
+ let downcast_cmt = if tcx.adt_def(enum_did).is_univariant() {
cmt_pat
} else {
let cmt_pat_ty = cmt_pat.ty;
impl<'a, 'gcx, 'tcx> ExprVisitor<'a, 'gcx, 'tcx> {
fn def_id_is_transmute(&self, def_id: DefId) -> bool {
- let intrinsic = match self.infcx.tcx.item_type(def_id).sty {
+ let intrinsic = match self.infcx.tcx.type_of(def_id).sty {
ty::TyFnDef(.., bfty) => bfty.abi() == RustIntrinsic,
_ => return false
};
entry_ln: LiveNode,
body: &hir::Body)
{
- let fn_ty = self.ir.tcx.item_type(self.ir.tcx.hir.local_def_id(id));
+ let fn_ty = self.ir.tcx.type_of(self.ir.tcx.hir.local_def_id(id));
let fn_sig = match fn_ty.sty {
ty::TyClosure(closure_def_id, substs) => {
self.ir.tcx.closure_type(closure_def_id)
// So peel off one-level, turning the &T into T.
match base_ty.builtin_deref(false, ty::NoPreference) {
Some(t) => t.ty,
- None => { return Err(()); }
+ None => {
+ debug!("By-ref binding of non-derefable type {:?}", base_ty);
+ return Err(());
+ }
}
}
_ => base_ty,
match base_cmt.ty.builtin_index() {
Some(ty) => (ty, ElementKind::VecElement),
None => {
+ debug!("Explicit index of non-indexable type {:?}", base_cmt);
return Err(());
}
}
PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) |
PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => {
match path.def {
- Def::Err => return Err(()),
+ Def::Err => {
+ debug!("access to unresolvable pattern {:?}", pat);
+ return Err(())
+ }
Def::Variant(variant_did) |
Def::VariantCtor(variant_did, ..) => {
// univariant enums do not need downcasts
let enum_did = self.tcx().parent_def_id(variant_did).unwrap();
- if !self.tcx().lookup_adt_def(enum_did).is_univariant() {
+ if !self.tcx().adt_def(enum_did).is_univariant() {
self.cat_downcast(pat, cmt.clone(), cmt.ty, variant_did)
} else {
cmt
let expected_len = match def {
Def::VariantCtor(def_id, CtorKind::Fn) => {
let enum_def = self.tcx().parent_def_id(def_id).unwrap();
- self.tcx().lookup_adt_def(enum_def).variant_with_id(def_id).fields.len()
+ self.tcx().adt_def(enum_def).variant_with_id(def_id).fields.len()
}
Def::StructCtor(_, CtorKind::Fn) => {
match self.pat_ty(&pat)?.sty {
use syntax::abi::Abi;
use syntax::ast;
use syntax::attr;
-use syntax::codemap::DUMMY_SP;
use hir;
use hir::def_id::LOCAL_CRATE;
use hir::intravisit::{Visitor, NestedVisitorMap};
}
match item.node {
- hir::ItemImpl(_, _, ref generics, ..) |
+ hir::ItemImpl(_, _, _, ref generics, ..) |
hir::ItemFn(.., ref generics, _) => {
generics_require_inlining(generics)
}
// does too.
let impl_node_id = self.tcx.hir.as_local_node_id(impl_did).unwrap();
match self.tcx.hir.expect_item(impl_node_id).node {
- hir::ItemImpl(_, _, ref generics, ..) => {
+ hir::ItemImpl(_, _, _, ref generics, ..) => {
generics_require_inlining(generics)
}
_ => false
}
pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Rc<NodeSet> {
- ty::queries::reachable_set::get(tcx, DUMMY_SP, LOCAL_CRATE)
+ tcx.reachable_set(LOCAL_CRATE)
}
fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> Rc<NodeSet> {
debug_assert!(crate_num == LOCAL_CRATE);
- let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+ let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
let any_library = tcx.sess.crate_types.borrow().iter().any(|ty| {
*ty == config::CrateTypeRlib || *ty == config::CrateTypeDylib ||
hir::ItemStruct(_, ref generics) |
hir::ItemUnion(_, ref generics) |
hir::ItemTrait(_, ref generics, ..) |
- hir::ItemImpl(_, _, ref generics, ..) => {
+ hir::ItemImpl(_, _, _, ref generics, ..) => {
// These kinds of items have only early bound lifetime parameters.
let mut index = if let hir::ItemTrait(..) = item.node {
1 // Self comes before lifetimes
}
match parent.node {
hir::ItemTrait(_, ref generics, ..) |
- hir::ItemImpl(_, _, ref generics, ..) => {
+ hir::ItemImpl(_, _, _, ref generics, ..) => {
index += (generics.lifetimes.len() + generics.ty_params.len()) as u32;
}
_ => {}
// (See issue #38412)
fn skip_stability_check_due_to_privacy(self, mut def_id: DefId) -> bool {
// Check if `def_id` is a trait method.
- match self.sess.cstore.describe_def(def_id) {
+ match self.describe_def(def_id) {
Some(Def::Method(_)) |
Some(Def::AssociatedTy(_)) |
Some(Def::AssociatedConst(_)) => {
pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let sess = &tcx.sess;
- let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+ let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
if tcx.stability.borrow().staged_api[&LOCAL_CRATE] && tcx.sess.features.borrow().staged_api {
let krate = tcx.hir.krate();
) -> Self {
Operand::Constant(Constant {
span: span,
- ty: tcx.item_type(def_id).subst(tcx, substs),
+ ty: tcx.type_of(def_id).subst(tcx, substs),
literal: Literal::Value { value: ConstVal::Function(def_id, substs) },
})
}
)
}
AggregateKind::Adt(def, _, substs, _) => {
- tcx.item_type(def.did).subst(tcx, substs)
+ tcx.type_of(def.did).subst(tcx, substs)
}
AggregateKind::Closure(did, substs) => {
tcx.mk_closure_from_closure_substs(did, substs)
use middle::cstore;
use syntax::ast::{self, IntTy, UintTy};
+use syntax::codemap::FilePathMapping;
use syntax::parse::token;
use syntax::parse;
use syntax::symbol::Symbol;
pub uint_type: UintTy,
}
-#[derive(Clone, Hash)]
+#[derive(Clone, Hash, Debug)]
pub enum Sanitizer {
Address,
Leak,
self.incremental.is_none() ||
self.cg.codegen_units == 1
}
+
+ /// Builds the `FilePathMapping` for this compilation from the paired
+ /// `-Zremap-path-prefix-from` / `-Zremap-path-prefix-to` flags, zipping the
+ /// two option vectors into (source-prefix, target-prefix) pairs.
+ // NOTE(review): `zip` silently truncates to the shorter vector; option
+ // parsing is expected to have rejected mismatched counts before this runs —
+ // confirm that validation actually fires.
+ pub fn file_path_mapping(&self) -> FilePathMapping {
+ FilePathMapping::new(
+ self.debugging_opts.remap_path_prefix_from.iter().zip(
+ self.debugging_opts.remap_path_prefix_to.iter()
+ ).map(|(src, dst)| (src.clone(), dst.clone())).collect()
+ )
+ }
}
// The type of entry function, so
"Set the optimization fuel quota for a crate."),
print_fuel: Option<String> = (None, parse_opt_string, [TRACKED],
"Make Rustc print the total optimization fuel used by a crate."),
+ remap_path_prefix_from: Vec<String> = (vec![], parse_string_push, [TRACKED],
+ "add a source pattern to the file path remapping config"),
+ remap_path_prefix_to: Vec<String> = (vec![], parse_string_push, [TRACKED],
+ "add a mapping target to the file path remapping config"),
}
pub fn default_lib_output() -> CrateType {
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {
cfgspecs.into_iter().map(|s| {
- let sess = parse::ParseSess::new();
+ let sess = parse::ParseSess::new(FilePathMapping::empty());
let mut parser =
parse::new_parser_from_source_str(&sess, "cfgspec".to_string(), s.to_string());
output_types.insert(OutputType::Exe, None);
}
+ // Validate that -Zremap-path-prefix-from / -Zremap-path-prefix-to were
+ // supplied in matched pairs: every `from` needs a `to` and vice versa.
+ let remap_path_prefix_sources = debugging_opts.remap_path_prefix_from.len();
+ // Fixed: count the `-to` vector here, not `-from` again. With the
+ // copy-paste bug the two counts were always equal, so the mismatch
+ // diagnostics below could never be emitted.
+ let remap_path_prefix_targets = debugging_opts.remap_path_prefix_to.len();
+
+ if remap_path_prefix_targets < remap_path_prefix_sources {
+ // Report each `from` prefix that has no corresponding `to`.
+ for source in &debugging_opts.remap_path_prefix_from[remap_path_prefix_targets..] {
+ early_error(error_format,
+ &format!("option `-Zremap-path-prefix-from='{}'` does not have \
+ a corresponding `-Zremap-path-prefix-to`", source))
+ }
+ } else if remap_path_prefix_targets > remap_path_prefix_sources {
+ // Report each `to` prefix that has no corresponding `from`.
+ for target in &debugging_opts.remap_path_prefix_to[remap_path_prefix_sources..] {
+ early_error(error_format,
+ &format!("option `-Zremap-path-prefix-to='{}'` does not have \
+ a corresponding `-Zremap-path-prefix-from`", target))
+ }
+ }
+
let mut cg = build_codegen_options(matches, error_format);
// Issue #30063: if user requests llvm-related output to one
// The name of the root source file of the crate, in the local file system.
// The path is always expected to be absolute. `None` means that there is no
// source file.
- pub local_crate_source_file: Option<PathBuf>,
- pub working_dir: PathBuf,
+ pub local_crate_source_file: Option<String>,
+ // The directory the compiler has been executed in plus a flag indicating
+ // if the value stored here has been affected by path remapping.
+ pub working_dir: (String, bool),
pub lint_store: RefCell<lint::LintStore>,
pub lints: RefCell<lint::LintTable>,
/// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics
registry: errors::registry::Registry,
cstore: Rc<CrateStore>)
-> Session {
+ let file_path_mapping = sopts.file_path_mapping();
+
build_session_with_codemap(sopts,
dep_graph,
local_crate_source_file,
registry,
cstore,
- Rc::new(codemap::CodeMap::new()),
+ Rc::new(codemap::CodeMap::new(file_path_mapping)),
None)
}
Ok(t) => t,
Err(e) => {
panic!(span_diagnostic.fatal(&format!("Error loading host specification: {}", e)));
- }
+ }
};
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
let p_s = parse::ParseSess::with_span_handler(span_diagnostic, codemap);
None => Some(filesearch::get_or_default_sysroot())
};
+ let file_path_mapping = sopts.file_path_mapping();
+
// Make the path absolute, if necessary
- let local_crate_source_file = local_crate_source_file.map(|path|
- if path.is_absolute() {
- path.clone()
- } else {
- env::current_dir().unwrap().join(&path)
- }
- );
+ let local_crate_source_file = local_crate_source_file.map(|path| {
+ file_path_mapping.map_prefix(path.to_string_lossy().into_owned()).0
+ });
let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone());
let optimization_fuel_limit = Cell::new(sopts.debugging_opts.fuel.as_ref()
let print_fuel_crate = sopts.debugging_opts.print_fuel.clone();
let print_fuel = Cell::new(0);
+ let working_dir = env::current_dir().unwrap().to_string_lossy().into_owned();
+ let working_dir = file_path_mapping.map_prefix(working_dir);
+
let sess = Session {
dep_graph: dep_graph.clone(),
target: target_cfg,
derive_registrar_fn: Cell::new(None),
default_sysroot: default_sysroot,
local_crate_source_file: local_crate_source_file,
- working_dir: env::current_dir().unwrap(),
+ working_dir: working_dir,
lint_store: RefCell::new(lint::LintStore::new()),
lints: RefCell::new(lint::LintTable::new()),
one_time_diagnostics: RefCell::new(FxHashSet()),
let mut self_match_impls = vec![];
let mut fuzzy_match_impls = vec![];
- self.tcx.lookup_trait_def(trait_ref.def_id)
+ self.tcx.trait_def(trait_ref.def_id)
.for_each_relevant_impl(self.tcx, trait_self_ty, |def_id| {
let impl_substs = self.fresh_substs_for_item(obligation.cause.span, def_id);
let impl_trait_ref = tcx
let trait_str = self.tcx.item_path_str(trait_ref.def_id);
if let Some(istring) = item.value_str() {
let istring = &*istring.as_str();
- let generics = self.tcx.item_generics(trait_ref.def_id);
+ let generics = self.tcx.generics_of(trait_ref.def_id);
let generic_map = generics.types.iter().map(|param| {
(param.name.as_str().to_string(),
trait_ref.substs.type_for_def(param).to_string())
trait_ref.skip_binder().self_ty(),
true);
let mut impl_candidates = Vec::new();
- let trait_def = self.tcx.lookup_trait_def(trait_ref.def_id());
+ let trait_def = self.tcx.trait_def(trait_ref.def_id());
match simp {
Some(simp) => trait_def.for_each_impl(self.tcx, |def_id| {
// do not hold for this particular set of type parameters.
// Note that this method could then never be called, so we
// do not want to try and trans it, in that case (see #23435).
- let predicates = tcx.item_predicates(def_id).instantiate_own(tcx, substs);
+ let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
if !normalize_and_test_predicates(tcx, predicates.predicates) {
debug!("get_vtable_methods: predicates do not hold");
return None;
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn is_object_safe(self, trait_def_id: DefId) -> bool {
// Because we query yes/no results frequently, we keep a cache:
- let def = self.lookup_trait_def(trait_def_id);
+ let def = self.trait_def(trait_def_id);
let result = def.object_safety().unwrap_or_else(|| {
let result = self.object_safety_violations(trait_def_id).is_empty();
substs: Substs::identity_for_item(self, trait_def_id)
});
let predicates = if supertraits_only {
- self.item_super_predicates(trait_def_id)
+ self.super_predicates_of(trait_def_id)
} else {
- self.item_predicates(trait_def_id)
+ self.predicates_of(trait_def_id)
};
predicates
.predicates
// Search for a predicate like `Self : Sized` amongst the trait bounds.
let free_substs = self.construct_free_substs(def_id,
self.region_maps.node_extent(ast::DUMMY_NODE_ID));
- let predicates = self.item_predicates(def_id);
+ let predicates = self.predicates_of(def_id);
let predicates = predicates.instantiate(self, free_substs).predicates;
elaborate_predicates(self, predicates)
.any(|predicate| {
// The `Self` type is erased, so it should not appear in list of
// arguments or return type apart from the receiver.
- let ref sig = self.item_type(method.def_id).fn_sig();
+ let ref sig = self.type_of(method.def_id).fn_sig();
for input_ty in &sig.skip_binder().inputs()[1..] {
if self.contains_illegal_self_type_reference(trait_def_id, input_ty) {
return Some(MethodViolationCode::ReferencesSelf);
}
// We can't monomorphize things like `fn foo<A>(...)`.
- if !self.item_generics(method.def_id).types.is_empty() {
+ if !self.generics_of(method.def_id).types.is_empty() {
return Some(MethodViolationCode::Generic);
}
ty::TyAnon(def_id, substs) if !substs.has_escaping_regions() => { // (*)
// Only normalize `impl Trait` after type-checking, usually in trans.
if self.selcx.projection_mode() == Reveal::All {
- let generic_ty = self.tcx().item_type(def_id);
+ let generic_ty = self.tcx().type_of(def_id);
let concrete_ty = generic_ty.subst(self.tcx(), substs);
self.fold_ty(concrete_ty)
} else {
};
// If so, extract what we know from the trait and try to come up with a good answer.
- let trait_predicates = selcx.tcx().item_predicates(def_id);
+ let trait_predicates = selcx.tcx().predicates_of(def_id);
let bounds = trait_predicates.instantiate(selcx.tcx(), substs);
let bounds = elaborate_predicates(selcx.tcx(), bounds.predicates);
assemble_candidates_from_predicates(selcx,
// being invoked).
node_item.item.defaultness.has_value()
} else {
- node_item.item.defaultness.is_default()
+ node_item.item.defaultness.is_default() ||
+ selcx.tcx().impl_is_default(node_item.node.def_id())
};
// Only reveal a specializable default if we're past type-checking
obligation.predicate.trait_ref);
tcx.types.err
} else {
- tcx.item_type(node_item.item.def_id)
+ tcx.type_of(node_item.item.def_id)
};
let substs = translate_substs(selcx.infcx(), impl_def_id, substs, node_item.node);
Progress {
-> Option<specialization_graph::NodeItem<ty::AssociatedItem>>
{
let trait_def_id = selcx.tcx().impl_trait_ref(impl_def_id).unwrap().def_id;
- let trait_def = selcx.tcx().lookup_trait_def(trait_def_id);
+ let trait_def = selcx.tcx().trait_def(trait_def_id);
if !trait_def.is_complete(selcx.tcx()) {
let impl_node = specialization_graph::Node::Impl(impl_def_id);
fn filter_negative_impls(&self, candidate: SelectionCandidate<'tcx>)
-> SelectionResult<'tcx, SelectionCandidate<'tcx>> {
if let ImplCandidate(def_id) = candidate {
- if self.tcx().trait_impl_polarity(def_id) == hir::ImplPolarity::Negative {
+ if self.tcx().impl_polarity(def_id) == hir::ImplPolarity::Negative {
return Err(Unimplemented)
}
}
def_id={:?}, substs={:?}",
def_id, substs);
- let item_predicates = self.tcx().item_predicates(def_id);
- let bounds = item_predicates.instantiate(self.tcx(), substs);
+ let predicates_of = self.tcx().predicates_of(def_id);
+ let bounds = predicates_of.instantiate(self.tcx(), substs);
debug!("match_projection_obligation_against_definition_bounds: \
bounds={:?}",
bounds);
{
debug!("assemble_candidates_from_impls(obligation={:?})", obligation);
- let def = self.tcx().lookup_trait_def(obligation.predicate.def_id());
+ let def = self.tcx().trait_def(obligation.predicate.def_id());
def.for_each_relevant_impl(
self.tcx(),
// We can resolve the `impl Trait` to its concrete type,
// which enforces a DAG between the functions requiring
// the auto trait bounds in question.
- vec![self.tcx().item_type(def_id).subst(self.tcx(), substs)]
+ vec![self.tcx().type_of(def_id).subst(self.tcx(), substs)]
}
}
}
(&ty::TyAdt(def, substs_a), &ty::TyAdt(_, substs_b)) => {
let fields = def
.all_fields()
- .map(|f| tcx.item_type(f.did))
+ .map(|f| tcx.type_of(f.did))
.collect::<Vec<_>>();
// The last field of the structure has to exist and contain type parameters.
// obligation will normalize to `<$0 as Iterator>::Item = $1` and
// `$1: Copy`, so we must ensure the obligations are emitted in
// that order.
- let predicates = tcx.item_predicates(def_id);
+ let predicates = tcx.predicates_of(def_id);
assert_eq!(predicates.parent, None);
let predicates = predicates.predicates.iter().flat_map(|predicate| {
let predicate = normalize_with_depth(self, cause.clone(), recursion_depth,
assert!(!substs.needs_infer());
let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();
- let trait_def = tcx.lookup_trait_def(trait_def_id);
+ let trait_def = tcx.trait_def(trait_def_id);
let ancestors = trait_def.ancestors(impl_data.impl_def_id);
match ancestors.defs(tcx, item.name, item.kind).next() {
// See RFC 1210 for more details and justification.
// Currently we do not allow e.g. a negative impl to specialize a positive one
- if tcx.trait_impl_polarity(impl1_def_id) != tcx.trait_impl_polarity(impl2_def_id) {
+ if tcx.impl_polarity(impl1_def_id) != tcx.impl_polarity(impl2_def_id) {
return false;
}
use ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef};
use ty::outlives::Component;
use util::nodemap::FxHashSet;
+use hir::{self};
+use traits::specialize::specialization_graph::NodeItem;
use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext, Normalized};
match *predicate {
ty::Predicate::Trait(ref data) => {
// Predicates declared on the trait.
- let predicates = tcx.item_super_predicates(data.def_id());
+ let predicates = tcx.super_predicates_of(data.def_id());
let mut predicates: Vec<_> =
predicates.predicates
None => { return None; }
};
- let predicates = self.tcx.item_super_predicates(def_id);
+ let predicates = self.tcx.super_predicates_of(def_id);
let visited = &mut self.visited;
self.stack.extend(
predicates.predicates
let Normalized { value: impl_trait_ref, obligations: normalization_obligations1 } =
super::normalize(selcx, ObligationCause::dummy(), &impl_trait_ref);
- let predicates = selcx.tcx().item_predicates(impl_def_id);
+ let predicates = selcx.tcx().predicates_of(impl_def_id);
let predicates = predicates.instantiate(selcx.tcx(), impl_substs);
let Normalized { value: predicates, obligations: normalization_obligations2 } =
super::normalize(selcx, ObligationCause::dummy(), &predicates);
};
ty::Binder((trait_ref, sig.skip_binder().output()))
}
+
+ /// Returns `true` if the impl identified by `node_item_def_id` is declared
+ /// `default`, i.e. a specializable impl (RFC 1210). Local impls are
+ /// answered from the HIR; non-local ones via the crate store's
+ /// `impl_defaultness`. A def-id that is local but not an `ItemImpl`
+ /// yields `false`.
+ pub fn impl_is_default(self, node_item_def_id: DefId) -> bool {
+ match self.hir.as_local_node_id(node_item_def_id) {
+ Some(node_id) => {
+ let item = self.hir.expect_item(node_id);
+ if let hir::ItemImpl(_, _, defaultness, ..) = item.node {
+ defaultness.is_default()
+ } else {
+ false
+ }
+ }
+ None => {
+ self.global_tcx()
+ .sess
+ .cstore
+ .impl_defaultness(node_item_def_id)
+ .is_default()
+ }
+ }
+ }
+
+ /// An impl item counts as final (not specializable) only when the item
+ /// itself is final *and* its containing impl is not marked `default`.
+ pub fn impl_item_is_final(self, node_item: &NodeItem<hir::Defaultness>) -> bool {
+ node_item.item.is_final() && !self.impl_is_default(node_item.node.def_id())
+ }
}
pub enum TupleArgumentsFlag { Yes, No }
pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
- let adt_def = self.lookup_adt_def(def_id);
+ let adt_def = self.adt_def(def_id);
let substs = self.mk_substs(iter::once(Kind::from(ty)));
self.mk_ty(TyAdt(adt_def, substs))
}
#[inline]
pub fn def_ty<'a>(&self, tcx: ty::TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
- tcx.item_type(self.def_id())
+ tcx.type_of(self.def_id())
}
#[inline]
use ty::{self, Ty, TyCtxt};
use syntax::ast;
use syntax::symbol::Symbol;
+use syntax_pos::DUMMY_SP;
use std::cell::Cell;
thread_local! {
- static FORCE_ABSOLUTE: Cell<bool> = Cell::new(false)
+ static FORCE_ABSOLUTE: Cell<bool> = Cell::new(false);
+ static FORCE_IMPL_FILENAME_LINE: Cell<bool> = Cell::new(false);
}
-/// Enforces that item_path_str always returns an absolute path.
-/// This is useful when building symbols that contain types,
-/// where we want the crate name to be part of the symbol.
+/// Enforces that item_path_str always returns an absolute path and
+/// also enables "type-based" impl paths. This is used when building
+/// symbols that contain types, where we want the crate name to be
+/// part of the symbol.
pub fn with_forced_absolute_paths<F: FnOnce() -> R, R>(f: F) -> R {
FORCE_ABSOLUTE.with(|force| {
let old = force.get();
})
}
+/// Force us to name impls with just the filename/line number. We
+/// normally try to use types. But at some points, notably while printing
+/// cycle errors, this can result in extra or suboptimal error output,
+/// so this variable disables that check.
+pub fn with_forced_impl_filename_line<F: FnOnce() -> R, R>(f: F) -> R {
+ FORCE_IMPL_FILENAME_LINE.with(|force| {
+ // Save/restore the thread-local flag so nested uses compose.
+ // NOTE(review): the restore is skipped if `f` panics (no drop guard) —
+ // confirm that is acceptable for the cycle-error path this serves.
+ let old = force.get();
+ force.set(true);
+ let result = f();
+ force.set(old);
+ result
+ })
+}
+
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
/// Returns a string identifying this def-id. This string is
/// suitable for user output. It is relative to the current crate
data @ DefPathData::LifetimeDef(..) |
data @ DefPathData::EnumVariant(..) |
data @ DefPathData::Field(..) |
- data @ DefPathData::StructCtor |
data @ DefPathData::Initializer |
data @ DefPathData::MacroDef(..) |
data @ DefPathData::ClosureExpr |
self.push_item_path(buffer, parent_def_id);
buffer.push(&data.as_interned_str());
}
+ DefPathData::StructCtor => { // present `X` instead of `X::{{constructor}}`
+ let parent_def_id = self.parent_def_id(def_id).unwrap();
+ self.push_item_path(buffer, parent_def_id);
+ }
}
}
{
let parent_def_id = self.parent_def_id(impl_def_id).unwrap();
- let use_types = if !impl_def_id.is_local() {
- // always have full types available for extern crates
- true
- } else {
- // for local crates, check whether type info is
- // available; typeck might not have completed yet
- self.maps.impl_trait_ref.borrow().contains_key(&impl_def_id) &&
- self.maps.ty.borrow().contains_key(&impl_def_id)
+ // Always use types for non-local impls, where types are always
+ // available, and filename/line-number is mostly uninteresting.
+ let use_types = !impl_def_id.is_local() || {
+ // Otherwise, use filename/line-number if forced.
+ let force_no_types = FORCE_IMPL_FILENAME_LINE.with(|f| f.get());
+ !force_no_types && {
+ // Otherwise, use types if we can query them without inducing a cycle.
+ ty::queries::impl_trait_ref::try_get(self, DUMMY_SP, impl_def_id).is_ok() &&
+ ty::queries::type_of::try_get(self, DUMMY_SP, impl_def_id).is_ok()
+ }
};
if !use_types {
// users may find it useful. Currently, we omit the parent if
// the impl is either in the same module as the self-type or
// as the trait.
- let self_ty = self.item_type(impl_def_id);
+ let self_ty = self.type_of(impl_def_id);
let in_self_mod = match characteristic_def_id_of_type(self_ty) {
None => false,
Some(ty_def_id) => self.parent_def_id(ty_def_id) == Some(parent_def_id),
use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use hir::def::Def;
use hir;
use middle::const_val;
use middle::privacy::AccessLevels;
use mir;
use session::CompileResult;
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
+use ty::item_path;
use ty::subst::Substs;
use util::nodemap::NodeSet;
use rustc_data_structures::indexed_vec::IndexVec;
use std::cell::{RefCell, RefMut};
+use std::mem;
+use std::ops::Deref;
use std::rc::Rc;
use syntax_pos::{Span, DUMMY_SP};
+use syntax::symbol::Symbol;
trait Key {
fn map_crate(&self) -> CrateNum;
}
}
+// Instances are unconditionally keyed in the local crate's maps, and their
+// default diagnostic span is the definition site of the instance's def-id.
+impl<'tcx> Key for ty::Instance<'tcx> {
+ fn map_crate(&self) -> CrateNum {
+ LOCAL_CRATE
+ }
+
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ tcx.def_span(self.def_id())
+ }
+}
+
// A `CrateNum` key maps to the per-crate maps of that same crate.
impl Key for CrateNum {
fn map_crate(&self) -> CrateNum {
*self
}
}
-
impl<'tcx> Value<'tcx> for ty::DtorckConstraint<'tcx> {
fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
Self::empty()
}
}
+// Recovery value for the symbol-name query when it hits a cycle: a
+// placeholder "<error>" symbol is returned instead of aborting outright.
+impl<'tcx> Value<'tcx> for ty::SymbolName {
+ fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
+ ty::SymbolName { name: Symbol::intern("<error>").as_str() }
+ }
+}
+
pub struct CycleError<'a, 'tcx: 'a> {
span: Span,
cycle: RefMut<'a, [(Span, Query<'tcx>)]>,
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn report_cycle(self, CycleError { span, cycle }: CycleError) {
- assert!(!cycle.is_empty());
-
- let mut err = struct_span_err!(self.sess, span, E0391,
- "unsupported cyclic reference between types/traits detected");
- err.span_label(span, &format!("cyclic reference"));
-
- err.span_note(cycle[0].0, &format!("the cycle begins when {}...",
- cycle[0].1.describe(self)));
-
- for &(span, ref query) in &cycle[1..] {
- err.span_note(span, &format!("...which then requires {}...",
- query.describe(self)));
- }
+ // Subtle: release the refcell lock before invoking `describe()`
+ // below by dropping `cycle`.
+ let stack = cycle.to_vec();
+ mem::drop(cycle);
+
+ assert!(!stack.is_empty());
+
+ // Disable naming impls with types in this path, since that
+ // sometimes cycles itself, leading to extra cycle errors.
+ // (And cycle errors around impls tend to occur during the
+ // collect/coherence phases anyhow.)
+ item_path::with_forced_impl_filename_line(|| {
+ let mut err =
+ struct_span_err!(self.sess, span, E0391,
+ "unsupported cyclic reference between types/traits detected");
+ err.span_label(span, &format!("cyclic reference"));
+
+ err.span_note(stack[0].0, &format!("the cycle begins when {}...",
+ stack[0].1.describe(self)));
+
+ for &(span, ref query) in &stack[1..] {
+ err.span_note(span, &format!("...which then requires {}...",
+ query.describe(self)));
+ }
- err.note(&format!("...which then again requires {}, completing the cycle.",
- cycle[0].1.describe(self)));
+ err.note(&format!("...which then again requires {}, completing the cycle.",
+ stack[0].1.describe(self)));
- err.emit();
+ err.emit();
+ });
}
fn cycle_check<F, R>(self, span: Span, query: Query<'gcx>, compute: F)
}
}
-impl<'tcx> QueryDescription for queries::super_predicates<'tcx> {
+impl<'tcx> QueryDescription for queries::super_predicates_of<'tcx> {
fn describe(tcx: TyCtxt, def_id: DefId) -> String {
format!("computing the supertraits of `{}`",
tcx.item_path_str(def_id))
}
}
+impl<'tcx> QueryDescription for queries::symbol_name<'tcx> {
+ fn describe(_tcx: TyCtxt, instance: ty::Instance<'tcx>) -> String {
+ format!("computing the symbol for `{}`", instance)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::describe_def<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("describe_def")
+ }
+}
+
macro_rules! define_maps {
(<$tcx:tt>
$($(#[$attr:meta])*
- pub $name:ident: $node:ident($K:ty) -> $V:ty),*) => {
+ [$($pub:tt)*] $name:ident: $node:ident($K:ty) -> $V:ty),*) => {
pub struct Maps<$tcx> {
providers: IndexVec<CrateNum, Providers<$tcx>>,
query_stack: RefCell<Vec<(Span, Query<$tcx>)>>,
- $($(#[$attr])* pub $name: RefCell<DepTrackingMap<queries::$name<$tcx>>>),*
+ $($(#[$attr])* $($pub)* $name: RefCell<DepTrackingMap<queries::$name<$tcx>>>),*
}
impl<$tcx> Maps<$tcx> {
-> Result<R, CycleError<'a, $tcx>>
where F: FnOnce(&$V) -> R
{
+ debug!("ty::queries::{}::try_get_with(key={:?}, span={:?})",
+ stringify!($name),
+ key,
+ span);
+
if let Some(result) = tcx.maps.$name.borrow().get(&key) {
return Ok(f(result));
}
Self::try_get_with(tcx, span, key, Clone::clone)
}
- $(#[$attr])*
- pub fn get(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K) -> $V {
- Self::try_get(tcx, span, key).unwrap_or_else(|e| {
- tcx.report_cycle(e);
- Value::from_cycle_error(tcx.global_tcx())
- })
- }
-
pub fn force(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K) {
// FIXME(eddyb) Move away from using `DepTrackingMap`
// so we don't have to explicitly ignore a false edge:
}
})*
+ #[derive(Copy, Clone)]
+ pub struct TyCtxtAt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+ pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ pub span: Span,
+ }
+
+ impl<'a, 'gcx, 'tcx> Deref for TyCtxtAt<'a, 'gcx, 'tcx> {
+ type Target = TyCtxt<'a, 'gcx, 'tcx>;
+ fn deref(&self) -> &Self::Target {
+ &self.tcx
+ }
+ }
+
+ impl<'a, $tcx, 'lcx> TyCtxt<'a, $tcx, 'lcx> {
+ /// Return a transparent wrapper for `TyCtxt` which uses
+ /// `span` as the location of queries performed through it.
+ pub fn at(self, span: Span) -> TyCtxtAt<'a, $tcx, 'lcx> {
+ TyCtxtAt {
+ tcx: self,
+ span
+ }
+ }
+
+ $($(#[$attr])*
+ pub fn $name(self, key: $K) -> $V {
+ self.at(DUMMY_SP).$name(key)
+ })*
+ }
+
+ impl<'a, $tcx, 'lcx> TyCtxtAt<'a, $tcx, 'lcx> {
+ $($(#[$attr])*
+ pub fn $name(self, key: $K) -> $V {
+ queries::$name::try_get(self.tcx, self.span, key).unwrap_or_else(|e| {
+ self.report_cycle(e);
+ Value::from_cycle_error(self.global_tcx())
+ })
+ })*
+ }
+
pub struct Providers<$tcx> {
$(pub $name: for<'a> fn(TyCtxt<'a, $tcx, $tcx>, $K) -> $V),*
}
// the driver creates (using several `rustc_*` crates).
define_maps! { <'tcx>
/// Records the type of every item.
- pub ty: ItemSignature(DefId) -> Ty<'tcx>,
+ [] type_of: ItemSignature(DefId) -> Ty<'tcx>,
/// Maps from the def-id of an item (trait/struct/enum/fn) to its
/// associated generics and predicates.
- pub generics: ItemSignature(DefId) -> &'tcx ty::Generics,
- pub predicates: ItemSignature(DefId) -> ty::GenericPredicates<'tcx>,
+ [] generics_of: ItemSignature(DefId) -> &'tcx ty::Generics,
+ [] predicates_of: ItemSignature(DefId) -> ty::GenericPredicates<'tcx>,
/// Maps from the def-id of a trait to the list of
/// super-predicates. This is a subset of the full list of
/// evaluate them even during type conversion, often before the
/// full predicates are available (note that supertraits have
/// additional acyclicity requirements).
- pub super_predicates: ItemSignature(DefId) -> ty::GenericPredicates<'tcx>,
+ [] super_predicates_of: ItemSignature(DefId) -> ty::GenericPredicates<'tcx>,
/// To avoid cycles within the predicates of a single item we compute
/// per-type-parameter predicates for resolving `T::AssocTy`.
- pub type_param_predicates: TypeParamPredicates((DefId, DefId))
+ [] type_param_predicates: TypeParamPredicates((DefId, DefId))
-> ty::GenericPredicates<'tcx>,
- pub trait_def: ItemSignature(DefId) -> &'tcx ty::TraitDef,
- pub adt_def: ItemSignature(DefId) -> &'tcx ty::AdtDef,
- pub adt_destructor: AdtDestructor(DefId) -> Option<ty::Destructor>,
- pub adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>],
- pub adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>,
+ [] trait_def: ItemSignature(DefId) -> &'tcx ty::TraitDef,
+ [] adt_def: ItemSignature(DefId) -> &'tcx ty::AdtDef,
+ [] adt_destructor: AdtDestructor(DefId) -> Option<ty::Destructor>,
+ [] adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>],
+ [] adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>,
/// True if this is a foreign item (i.e., linked via `extern { ... }`).
- pub is_foreign_item: IsForeignItem(DefId) -> bool,
+ [] is_foreign_item: IsForeignItem(DefId) -> bool,
/// Maps from def-id of a type or region parameter to its
/// (inferred) variance.
- pub variances: ItemSignature(DefId) -> Rc<Vec<ty::Variance>>,
+ [pub] variances_of: ItemSignature(DefId) -> Rc<Vec<ty::Variance>>,
/// Maps from an impl/trait def-id to a list of the def-ids of its items
- pub associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc<Vec<DefId>>,
+ [] associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc<Vec<DefId>>,
/// Maps from a trait item to the trait item "descriptor"
- pub associated_item: AssociatedItems(DefId) -> ty::AssociatedItem,
+ [] associated_item: AssociatedItems(DefId) -> ty::AssociatedItem,
- pub impl_trait_ref: ItemSignature(DefId) -> Option<ty::TraitRef<'tcx>>,
- pub impl_polarity: ItemSignature(DefId) -> hir::ImplPolarity,
+ [] impl_trait_ref: ItemSignature(DefId) -> Option<ty::TraitRef<'tcx>>,
+ [] impl_polarity: ItemSignature(DefId) -> hir::ImplPolarity,
/// Maps a DefId of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
- pub inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
+ [] inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
/// Maps from the def-id of a function/method or const/static
/// to its MIR. Mutation is done at an item granularity to
///
/// Note that cross-crate MIR appears to be always borrowed
/// (in the `RefCell` sense) to prevent accidental mutation.
- pub mir: Mir(DefId) -> &'tcx RefCell<mir::Mir<'tcx>>,
+ [pub] mir: Mir(DefId) -> &'tcx RefCell<mir::Mir<'tcx>>,
/// Maps DefId's that have an associated Mir to the result
/// of the MIR qualify_consts pass. The actual meaning of
/// the value isn't known except to the pass itself.
- pub mir_const_qualif: Mir(DefId) -> u8,
+ [] mir_const_qualif: Mir(DefId) -> u8,
/// Records the type of each closure. The def ID is the ID of the
/// expression defining the closure.
- pub closure_kind: ItemSignature(DefId) -> ty::ClosureKind,
+ [] closure_kind: ItemSignature(DefId) -> ty::ClosureKind,
/// Records the type of each closure. The def ID is the ID of the
/// expression defining the closure.
- pub closure_type: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
+ [] closure_type: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
/// Caches CoerceUnsized kinds for impls on custom types.
- pub coerce_unsized_info: ItemSignature(DefId)
+ [] coerce_unsized_info: ItemSignature(DefId)
-> ty::adjustment::CoerceUnsizedInfo,
- pub typeck_item_bodies: typeck_item_bodies_dep_node(CrateNum) -> CompileResult,
+ [] typeck_item_bodies: typeck_item_bodies_dep_node(CrateNum) -> CompileResult,
+
+ [] typeck_tables_of: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
- pub typeck_tables: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
+ [] has_typeck_tables: TypeckTables(DefId) -> bool,
- pub coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
+ [] coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
- pub borrowck: BorrowCheck(DefId) -> (),
+ [] borrowck: BorrowCheck(DefId) -> (),
/// Gets a complete map from all types to their inherent impls.
/// Not meant to be used directly outside of coherence.
/// (Defined only for LOCAL_CRATE)
- pub crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
+ [] crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
/// Checks all types in the krate for overlap in their inherent impls. Reports errors.
/// Not meant to be used directly outside of coherence.
/// (Defined only for LOCAL_CRATE)
- pub crate_inherent_impls_overlap_check: crate_inherent_impls_dep_node(CrateNum) -> (),
+ [] crate_inherent_impls_overlap_check: crate_inherent_impls_dep_node(CrateNum) -> (),
/// Results of evaluating const items or constants embedded in
/// other items (such as enum variant explicit discriminants).
- pub const_eval: const_eval_dep_node((DefId, &'tcx Substs<'tcx>))
+ [] const_eval: const_eval_dep_node((DefId, &'tcx Substs<'tcx>))
-> const_val::EvalResult<'tcx>,
/// Performs the privacy check and computes "access levels".
- pub privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
+ [] privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
- pub reachable_set: reachability_dep_node(CrateNum) -> Rc<NodeSet>,
+ [] reachable_set: reachability_dep_node(CrateNum) -> Rc<NodeSet>,
- pub mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx RefCell<mir::Mir<'tcx>>
+ [] mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx RefCell<mir::Mir<'tcx>>,
+
+ [] def_symbol_name: SymbolName(DefId) -> ty::SymbolName,
+ [] symbol_name: symbol_name_dep_node(ty::Instance<'tcx>) -> ty::SymbolName,
+
+ [] describe_def: meta_data_node(DefId) -> Option<Def>
}
fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
instance.dep_node()
}
+fn symbol_name_dep_node(instance: ty::Instance) -> DepNode<DefId> {
+ // symbol_name uses the substs only to traverse them to find the
+ // hash, and that does not create any new dep-nodes.
+ DepNode::SymbolName(instance.def.def_id())
+}
+
fn typeck_item_bodies_dep_node(_: CrateNum) -> DepNode<DefId> {
DepNode::TypeckBodiesKrate
}
fn const_eval_dep_node((def_id, _): (DefId, &Substs)) -> DepNode<DefId> {
DepNode::ConstEval(def_id)
}
+
+fn meta_data_node(def_id: DefId) -> DepNode<DefId> {
+ DepNode::MetaData(def_id)
+}
use std::cell::{Cell, RefCell, Ref};
use std::collections::BTreeMap;
use std::cmp;
+use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
use std::ops::Deref;
let header = ImplHeader {
impl_def_id: impl_def_id,
- self_ty: tcx.item_type(impl_def_id),
+ self_ty: tcx.type_of(impl_def_id),
trait_ref: tcx.impl_trait_ref(impl_def_id),
- predicates: tcx.item_predicates(impl_def_id).predicates
+ predicates: tcx.predicates_of(impl_def_id).predicates
}.subst(tcx, impl_substs);
let traits::Normalized { value: mut header, obligations } =
instantiated: &mut InstantiatedPredicates<'tcx>,
substs: &Substs<'tcx>) {
if let Some(def_id) = self.parent {
- tcx.item_predicates(def_id).instantiate_into(tcx, instantiated, substs);
+ tcx.predicates_of(def_id).instantiate_into(tcx, instantiated, substs);
}
instantiated.predicates.extend(self.predicates.iter().map(|p| p.subst(tcx, substs)))
}
#[inline]
pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> GenericPredicates<'gcx> {
- tcx.item_predicates(self.did)
+ tcx.predicates_of(self.did)
}
/// Returns an iterator over all fields contained
let mut discr = prev_discr.map_or(initial, |d| d.wrap_incr());
if let VariantDiscr::Explicit(expr_did) = v.discr {
let substs = Substs::empty();
- match queries::const_eval::get(tcx, DUMMY_SP, (expr_did, substs)) {
+ match tcx.const_eval((expr_did, substs)) {
Ok(ConstVal::Integral(v)) => {
discr = v;
}
}
ty::VariantDiscr::Explicit(expr_did) => {
let substs = Substs::empty();
- match queries::const_eval::get(tcx, DUMMY_SP, (expr_did, substs)) {
+ match tcx.const_eval((expr_did, substs)) {
Ok(ConstVal::Integral(v)) => {
explicit_value = v;
break;
}
pub fn destructor(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Destructor> {
- queries::adt_destructor::get(tcx, DUMMY_SP, self.did)
+ tcx.adt_destructor(self.did)
}
/// Returns a list of types such that `Self: Sized` if and only
def_id: sized_trait,
substs: tcx.mk_substs_trait(ty, &[])
}).to_predicate();
- let predicates = tcx.item_predicates(self.did).predicates;
+ let predicates = tcx.predicates_of(self.did).predicates;
if predicates.into_iter().any(|p| p == sized_predicate) {
vec![]
} else {
impl<'a, 'gcx, 'tcx> FieldDef {
pub fn ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, subst: &Substs<'tcx>) -> Ty<'tcx> {
- tcx.item_type(self.did).subst(tcx, subst)
+ tcx.type_of(self.did).subst(tcx, subst)
}
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn body_tables(self, body: hir::BodyId) -> &'gcx TypeckTables<'gcx> {
- self.item_tables(self.hir.body_owner_def_id(body))
- }
-
- pub fn item_tables(self, def_id: DefId) -> &'gcx TypeckTables<'gcx> {
- queries::typeck_tables::get(self, DUMMY_SP, def_id)
+ self.typeck_tables_of(self.hir.body_owner_def_id(body))
}
pub fn expr_span(self, id: NodeId) -> Span {
.collect()
}
- pub fn trait_impl_polarity(self, id: DefId) -> hir::ImplPolarity {
- queries::impl_polarity::get(self, DUMMY_SP, id)
- }
-
pub fn trait_relevant_for_never(self, did: DefId) -> bool {
self.associated_items(did).any(|item| {
item.relevant_for_never()
})
}
- pub fn coerce_unsized_info(self, did: DefId) -> adjustment::CoerceUnsizedInfo {
- queries::coerce_unsized_info::get(self, DUMMY_SP, did)
- }
+ pub fn opt_associated_item(self, def_id: DefId) -> Option<AssociatedItem> {
+ let is_associated_item = if let Some(node_id) = self.hir.as_local_node_id(def_id) {
+ match self.hir.get(node_id) {
+ hir_map::NodeTraitItem(_) | hir_map::NodeImplItem(_) => true,
+ _ => false,
+ }
+ } else {
+ match self.describe_def(def_id).expect("no def for def-id") {
+ Def::AssociatedConst(_) | Def::Method(_) | Def::AssociatedTy(_) => true,
+ _ => false,
+ }
+ };
- pub fn associated_item(self, def_id: DefId) -> AssociatedItem {
- queries::associated_item::get(self, DUMMY_SP, def_id)
+ if is_associated_item {
+ Some(self.associated_item(def_id))
+ } else {
+ None
+ }
}
fn associated_item_from_trait_item_ref(self,
parent_def_id: DefId,
+ parent_vis: &hir::Visibility,
trait_item_ref: &hir::TraitItemRef)
-> AssociatedItem {
let def_id = self.hir.local_def_id(trait_item_ref.id.node_id);
AssociatedItem {
name: trait_item_ref.name,
kind: kind,
- vis: Visibility::from_hir(&hir::Inherited, trait_item_ref.id.node_id, self),
+ // Visibility of trait items is inherited from their traits.
+ vis: Visibility::from_hir(parent_vis, trait_item_ref.id.node_id, self),
defaultness: trait_item_ref.defaultness,
def_id: def_id,
container: TraitContainer(parent_def_id),
fn associated_item_from_impl_item_ref(self,
parent_def_id: DefId,
- from_trait_impl: bool,
impl_item_ref: &hir::ImplItemRef)
-> AssociatedItem {
let def_id = self.hir.local_def_id(impl_item_ref.id.node_id);
hir::AssociatedItemKind::Type => (ty::AssociatedKind::Type, false),
};
- // Trait impl items are always public.
- let public = hir::Public;
- let vis = if from_trait_impl { &public } else { &impl_item_ref.vis };
-
ty::AssociatedItem {
name: impl_item_ref.name,
kind: kind,
- vis: ty::Visibility::from_hir(vis, impl_item_ref.id.node_id, self),
+ // Visibility of trait impl items doesn't matter.
+ vis: ty::Visibility::from_hir(&impl_item_ref.vis, impl_item_ref.id.node_id, self),
defaultness: impl_item_ref.defaultness,
def_id: def_id,
container: ImplContainer(parent_def_id),
}
}
- pub fn associated_item_def_ids(self, def_id: DefId) -> Rc<Vec<DefId>> {
- queries::associated_item_def_ids::get(self, DUMMY_SP, def_id)
- }
-
#[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait.
pub fn associated_items(self, def_id: DefId)
-> impl Iterator<Item = ty::AssociatedItem> + 'a {
(0..def_ids.len()).map(move |i| self.associated_item(def_ids[i]))
}
- /// Returns the trait-ref corresponding to a given impl, or None if it is
- /// an inherent impl.
- pub fn impl_trait_ref(self, id: DefId) -> Option<TraitRef<'gcx>> {
- queries::impl_trait_ref::get(self, DUMMY_SP, id)
- }
-
/// Returns true if the impls are the same polarity and are implementing
/// a trait which contains no items
pub fn impls_are_allowed_to_overlap(self, def_id1: DefId, def_id2: DefId) -> bool {
.map_or(false, |trait_ref| {
self.associated_item_def_ids(trait_ref.def_id).is_empty()
});
- self.trait_impl_polarity(def_id1) == self.trait_impl_polarity(def_id2)
+ self.impl_polarity(def_id1) == self.impl_polarity(def_id2)
&& trait1_is_empty
&& trait2_is_empty
}
match def {
Def::Variant(did) | Def::VariantCtor(did, ..) => {
let enum_did = self.parent_def_id(did).unwrap();
- self.lookup_adt_def(enum_did).variant_with_id(did)
+ self.adt_def(enum_did).variant_with_id(did)
}
Def::Struct(did) | Def::Union(did) => {
- self.lookup_adt_def(did).struct_variant()
+ self.adt_def(did).struct_variant()
}
Def::StructCtor(ctor_did, ..) => {
let did = self.parent_def_id(ctor_did).expect("struct ctor has no parent");
- self.lookup_adt_def(did).struct_variant()
+ self.adt_def(did).struct_variant()
}
_ => bug!("expect_variant_def used with unexpected def {:?}", def)
}
}
}
- // If the given item is in an external crate, looks up its type and adds it to
- // the type cache. Returns the type parameters and type.
- pub fn item_type(self, did: DefId) -> Ty<'gcx> {
- queries::ty::get(self, DUMMY_SP, did)
- }
-
- /// Given the did of a trait, returns its canonical trait ref.
- pub fn lookup_trait_def(self, did: DefId) -> &'gcx TraitDef {
- queries::trait_def::get(self, DUMMY_SP, did)
- }
-
- /// Given the did of an ADT, return a reference to its definition.
- pub fn lookup_adt_def(self, did: DefId) -> &'gcx AdtDef {
- queries::adt_def::get(self, DUMMY_SP, did)
- }
-
- /// Given the did of an item, returns its generics.
- pub fn item_generics(self, did: DefId) -> &'gcx Generics {
- queries::generics::get(self, DUMMY_SP, did)
- }
-
- /// Given the did of an item, returns its full set of predicates.
- pub fn item_predicates(self, did: DefId) -> GenericPredicates<'gcx> {
- queries::predicates::get(self, DUMMY_SP, did)
- }
-
- /// Given the did of a trait, returns its superpredicates.
- pub fn item_super_predicates(self, did: DefId) -> GenericPredicates<'gcx> {
- queries::super_predicates::get(self, DUMMY_SP, did)
- }
-
/// Given the did of an item, returns its MIR, borrowed immutably.
pub fn item_mir(self, did: DefId) -> Ref<'gcx, Mir<'gcx>> {
- queries::mir::get(self, DUMMY_SP, did).borrow()
+ self.mir(did).borrow()
}
/// Return the possibly-auto-generated MIR of a (DefId, Subst) pair.
{
match instance {
ty::InstanceDef::Item(did) if true => self.item_mir(did),
- _ => queries::mir_shims::get(self, DUMMY_SP, instance).borrow(),
+ _ => self.mir_shims(instance).borrow(),
}
}
self.get_attrs(did).iter().any(|item| item.check_name(attr))
}
- pub fn item_variances(self, item_id: DefId) -> Rc<Vec<ty::Variance>> {
- queries::variances::get(self, DUMMY_SP, item_id)
- }
-
pub fn trait_has_default_impl(self, trait_def_id: DefId) -> bool {
- let def = self.lookup_trait_def(trait_def_id);
+ let def = self.trait_def(trait_def_id);
def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL)
}
// metadata and don't need to track edges.
let _ignore = self.dep_graph.in_ignore();
- let def = self.lookup_trait_def(trait_id);
+ let def = self.trait_def(trait_id);
if def.flags.get().intersects(TraitFlags::HAS_REMOTE_IMPLS) {
return;
}
def.flags.set(def.flags.get() | TraitFlags::HAS_REMOTE_IMPLS);
}
- pub fn closure_kind(self, def_id: DefId) -> ty::ClosureKind {
- queries::closure_kind::get(self, DUMMY_SP, def_id)
- }
-
- pub fn closure_type(self, def_id: DefId) -> ty::PolyFnSig<'tcx> {
- queries::closure_type::get(self, DUMMY_SP, def_id)
- }
-
/// Given the def_id of an impl, return the def_id of the trait it implements.
/// If it implements no trait, return `None`.
pub fn trait_id_of_impl(self, def_id: DefId) -> Option<DefId> {
/// ID of the impl that the method belongs to. Otherwise, return `None`.
pub fn impl_of_method(self, def_id: DefId) -> Option<DefId> {
let item = if def_id.krate != LOCAL_CRATE {
- if let Some(Def::Method(_)) = self.sess.cstore.describe_def(def_id) {
+ if let Some(Def::Method(_)) = self.describe_def(def_id) {
Some(self.associated_item(def_id))
} else {
None
}
} else {
- self.maps.associated_item.borrow().get(&def_id).cloned()
+ self.opt_associated_item(def_id)
};
match item {
if def_id.krate != LOCAL_CRATE {
return self.sess.cstore.trait_of_item(def_id);
}
- match self.maps.associated_item.borrow().get(&def_id) {
- Some(associated_item) => {
+ self.opt_associated_item(def_id)
+ .and_then(|associated_item| {
match associated_item.container {
TraitContainer(def_id) => Some(def_id),
ImplContainer(_) => None
}
- }
- None => None
- }
+ })
}
/// Construct a parameter environment suitable for static contexts or other contexts where there
//
let tcx = self.global_tcx();
- let generic_predicates = tcx.item_predicates(def_id);
+ let generic_predicates = tcx.predicates_of(def_id);
let bounds = generic_predicates.instantiate(tcx, free_substs);
let bounds = tcx.liberate_late_bound_regions(free_id_outlive, &ty::Binder(bounds));
let predicates = bounds.predicates;
let parent_def_id = tcx.hir.local_def_id(parent_id);
let parent_item = tcx.hir.expect_item(parent_id);
match parent_item.node {
- hir::ItemImpl(.., ref impl_trait_ref, _, ref impl_item_refs) => {
+ hir::ItemImpl(.., ref impl_item_refs) => {
if let Some(impl_item_ref) = impl_item_refs.iter().find(|i| i.id.node_id == id) {
- let assoc_item =
- tcx.associated_item_from_impl_item_ref(parent_def_id,
- impl_trait_ref.is_some(),
- impl_item_ref);
+ let assoc_item = tcx.associated_item_from_impl_item_ref(parent_def_id,
+ impl_item_ref);
debug_assert_eq!(assoc_item.def_id, def_id);
return assoc_item;
}
hir::ItemTrait(.., ref trait_item_refs) => {
if let Some(trait_item_ref) = trait_item_refs.iter().find(|i| i.id.node_id == id) {
- let assoc_item =
- tcx.associated_item_from_trait_item_ref(parent_def_id, trait_item_ref);
+ let assoc_item = tcx.associated_item_from_trait_item_ref(parent_def_id,
+ &parent_item.vis,
+ trait_item_ref);
debug_assert_eq!(assoc_item.def_id, def_id);
return assoc_item;
}
}
- ref r => {
- panic!("unexpected container of associated items: {:?}", r)
- }
+ _ => { }
}
- panic!("associated item not found for def_id: {:?}", def_id);
+
+ span_bug!(parent_item.span,
+ "unexpected parent of trait or impl item or item not found: {:?}",
+ parent_item.node)
}
/// Calculates the Sized-constraint.
fn adt_sized_constraint<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> &'tcx [Ty<'tcx>] {
- let def = tcx.lookup_adt_def(def_id);
+ let def = tcx.adt_def(def_id);
let result = tcx.intern_type_list(&def.variants.iter().flat_map(|v| {
v.fields.last()
}).flat_map(|f| {
- def.sized_constraint_for_ty(tcx, tcx.item_type(f.did))
+ def.sized_constraint_for_ty(tcx, tcx.type_of(f.did))
}).collect::<Vec<_>>());
debug!("adt_sized_constraint: {:?} => {:?}", def, result);
fn adt_dtorck_constraint<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> DtorckConstraint<'tcx> {
- let def = tcx.lookup_adt_def(def_id);
+ let def = tcx.adt_def(def_id);
let span = tcx.def_span(def_id);
debug!("dtorck_constraint: {:?}", def);
let result = DtorckConstraint {
outlives: vec![],
dtorck_types: vec![
- tcx.mk_param_from_def(&tcx.item_generics(def_id).types[0])
+ tcx.mk_param_from_def(&tcx.generics_of(def_id).types[0])
]
};
debug!("dtorck_constraint: {:?} => {:?}", def, result);
}
let mut result = def.all_fields()
- .map(|field| tcx.item_type(field.did))
+ .map(|field| tcx.type_of(field.did))
.map(|fty| tcx.dtorck_constraint_for_ty(span, fty, 0, fty))
.collect::<Result<DtorckConstraint, ErrorReported>>()
.unwrap_or(DtorckConstraint::empty());
/// A map for the local crate mapping each type to a vector of its
/// inherent impls. This is not meant to be used outside of coherence;
/// rather, you should request the vector for a specific type via
-/// `ty::queries::inherent_impls::get(def_id)` so as to minimize your
-/// dependencies (constructing this map requires touching the entire
-/// crate).
+/// `tcx.inherent_impls(def_id)` so as to minimize your dependencies
+/// (constructing this map requires touching the entire crate).
#[derive(Clone, Debug)]
pub struct CrateInherentImpls {
pub inherent_impls: DefIdMap<Rc<Vec<DefId>>>,
self.dtorck_types.retain(|&val| dtorck_types.replace(val).is_none());
}
}
+
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
+pub struct SymbolName {
+ // FIXME: we don't rely on interning or equality here - better have
+ // this be a `&'tcx str`.
+ pub name: InternedString
+}
+
+impl Deref for SymbolName {
+ type Target = str;
+
+ fn deref(&self) -> &str { &self.name }
+}
+
+impl fmt::Display for SymbolName {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(&self.name, fmt)
+ }
+}
let variances;
let opt_variances = if relation.tcx().variance_computed.get() {
- variances = relation.tcx().item_variances(item_def_id);
+ variances = relation.tcx().variances_of(item_def_id);
Some(&*variances)
} else {
None
pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'acx>) ->
impl Iterator<Item=Ty<'tcx>> + 'tcx
{
- let generics = tcx.item_generics(def_id);
+ let generics = tcx.generics_of(def_id);
self.substs[self.substs.len()-generics.own_count()..].iter().map(
|t| t.as_type().expect("unexpected region in upvars"))
}
(Trait(_), Trait(_)) => Ordering::Equal,
(Projection(ref a), Projection(ref b)) => a.sort_key(tcx).cmp(&b.sort_key(tcx)),
(AutoTrait(ref a), AutoTrait(ref b)) =>
- tcx.lookup_trait_def(*a).def_path_hash.cmp(&tcx.lookup_trait_def(*b).def_path_hash),
+ tcx.trait_def(*a).def_path_hash.cmp(&tcx.trait_def(*b).def_path_hash),
(Trait(_), _) => Ordering::Less,
(Projection(_), Trait(_)) => Ordering::Greater,
(Projection(_), _) => Ordering::Less,
// We want something here that is stable across crate boundaries.
// The DefId isn't but the `deterministic_hash` of the corresponding
// DefPath is.
- let trait_def = tcx.lookup_trait_def(self.trait_ref.def_id);
+ let trait_def = tcx.trait_def(self.trait_ref.def_id);
let def_path_hash = trait_def.def_path_hash;
// An `ast::Name` is also not stable (it's just an index into an
-> &'tcx Substs<'tcx>
where FR: FnMut(&ty::RegionParameterDef, &[Kind<'tcx>]) -> &'tcx ty::Region,
FT: FnMut(&ty::TypeParameterDef, &[Kind<'tcx>]) -> Ty<'tcx> {
- let defs = tcx.item_generics(def_id);
+ let defs = tcx.generics_of(def_id);
let mut substs = Vec::with_capacity(defs.count());
Substs::fill_item(&mut substs, tcx, defs, &mut mk_region, &mut mk_type);
tcx.intern_substs(&substs)
where FR: FnMut(&ty::RegionParameterDef, &[Kind<'tcx>]) -> &'tcx ty::Region,
FT: FnMut(&ty::TypeParameterDef, &[Kind<'tcx>]) -> Ty<'tcx>
{
- let defs = tcx.item_generics(def_id);
+ let defs = tcx.generics_of(def_id);
let mut result = Vec::with_capacity(defs.count());
result.extend(self[..].iter().cloned());
Substs::fill_single(&mut result, defs, &mut mk_region, &mut mk_type);
FT: FnMut(&ty::TypeParameterDef, &[Kind<'tcx>]) -> Ty<'tcx> {
if let Some(def_id) = defs.parent {
- let parent_defs = tcx.item_generics(def_id);
+ let parent_defs = tcx.generics_of(def_id);
Substs::fill_item(substs, tcx, parent_defs, mk_region, mk_type);
}
Substs::fill_single(substs, defs, mk_region, mk_type)
source_ancestor: DefId,
target_substs: &Substs<'tcx>)
-> &'tcx Substs<'tcx> {
- let defs = tcx.item_generics(source_ancestor);
+ let defs = tcx.generics_of(source_ancestor);
tcx.mk_substs(target_substs.iter().chain(&self[defs.own_count()..]).cloned())
}
trait_id: DefId,
substs: &Substs<'tcx>)
-> ty::TraitRef<'tcx> {
- let defs = tcx.item_generics(trait_id);
+ let defs = tcx.generics_of(trait_id);
ty::TraitRef {
def_id: trait_id,
return None;
};
- ty::queries::coherent_trait::get(self, DUMMY_SP, (LOCAL_CRATE, drop_trait));
+ self.coherent_trait((LOCAL_CRATE, drop_trait));
let mut dtor_did = None;
- let ty = self.item_type(adt_did);
- self.lookup_trait_def(drop_trait).for_each_relevant_impl(self, ty, |impl_did| {
+ let ty = self.type_of(adt_did);
+ self.trait_def(drop_trait).for_each_relevant_impl(self, ty, |impl_did| {
if let Some(item) = self.associated_items(impl_did).next() {
if let Ok(()) = validate(self, impl_did) {
dtor_did = Some(item.def_id);
}
let impl_def_id = self.associated_item(dtor).container.id();
- let impl_generics = self.item_generics(impl_def_id);
+ let impl_generics = self.generics_of(impl_def_id);
// We have a destructor - all the parameters that are not
// pure_wrt_drop (i.e, don't have a #[may_dangle] attribute)
// <P1, P2, P0>, and then look up which of the impl substs refer to
// parameters marked as pure.
- let impl_substs = match self.item_type(impl_def_id).sty {
+ let impl_substs = match self.type_of(impl_def_id).sty {
ty::TyAdt(def_, substs) if def_ == def => substs,
_ => bug!()
};
- let item_substs = match self.item_type(def.did).sty {
+ let item_substs = match self.type_of(def.did).sty {
ty::TyAdt(def_, substs) if def_ == def => substs,
_ => bug!()
};
ty::TyAdt(def, substs) => {
let ty::DtorckConstraint {
dtorck_types, outlives
- } = ty::queries::adt_dtorck_constraint::get(self, span, def.did);
+ } = self.at(span).adt_dtorck_constraint(def.did);
Ok(ty::DtorckConstraint {
// FIXME: we can try to recursively `dtorck_constraint_on_ty`
// there, but that needs some way to handle cycles.
-> Vec<traits::PredicateObligation<'tcx>>
{
let predicates =
- self.infcx.tcx.item_predicates(def_id)
+ self.infcx.tcx.predicates_of(def_id)
.instantiate(self.infcx.tcx, substs);
let cause = self.cause(traits::ItemObligation(def_id));
predicates.predicates
}
}
}
- let mut generics = tcx.item_generics(item_def_id);
+ let mut generics = tcx.generics_of(item_def_id);
let mut path_def_id = did;
verbose = tcx.sess.verbose();
has_self = generics.has_self;
// Methods.
assert!(is_value_path);
child_types = generics.types.len();
- generics = tcx.item_generics(def_id);
+ generics = tcx.generics_of(def_id);
num_regions = generics.regions.len();
num_types = generics.types.len();
if !def.has_default {
break;
}
- if tcx.item_type(def.def_id).subst(tcx, substs) != actual {
+ if tcx.type_of(def.def_id).subst(tcx, substs) != actual {
break;
}
num_supplied_defaults += 1;
ty::tls::with(|tcx| {
// Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
// by looking up the projections associated with the def_id.
- let item_predicates = tcx.item_predicates(def_id);
+ let predicates_of = tcx.predicates_of(def_id);
let substs = tcx.lift(&substs).unwrap_or_else(|| {
tcx.intern_substs(&[])
});
- let bounds = item_predicates.instantiate(tcx, substs);
+ let bounds = predicates_of.instantiate(tcx, substs);
let mut first = true;
let mut is_sized = false;
extern crate cmake;
use std::env;
-use build_helper::native_lib_boilerplate;
+use build_helper::sanitizer_lib_boilerplate;
use cmake::Config;
fn main() {
if let Some(llvm_config) = env::var_os("LLVM_CONFIG") {
- let native = match native_lib_boilerplate("compiler-rt", "asan", "clang_rt.asan-x86_64",
- "build/lib/linux") {
+ let native = match sanitizer_lib_boilerplate("asan") {
Ok(native) => native,
_ => return,
};
"PATH"
} else if cfg!(target_os = "macos") {
"DYLD_LIBRARY_PATH"
+ } else if cfg!(target_os = "haiku") {
+ "LIBRARY_PATH"
} else {
"LD_LIBRARY_PATH"
}
linker: "cc".to_string(),
dynamic_linking: true,
executables: true,
- has_rpath: true,
+ has_rpath: false,
target_family: Some("unix".to_string()),
linker_is_gnu: true,
+ no_integrated_as: true,
.. Default::default()
}
}
use borrowck::BorrowckCtxt;
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
+use rustc::middle::mem_categorization::NoteClosureEnv;
use rustc::middle::mem_categorization::InteriorOffsetKind as Kind;
use rustc::ty;
use syntax::ast;
let mut err = report_cannot_move_out_of(bccx, error.move_from.clone());
let mut is_first_note = true;
for move_to in &error.move_to_places {
- err = note_move_destination(err, move_to.span,
- move_to.name, is_first_note);
+ err = note_move_destination(err, move_to.span, move_to.name, is_first_note);
is_first_note = false;
}
+ if let NoteClosureEnv(upvar_id) = error.move_from.note {
+ err.span_label(bccx.tcx.hir.span(upvar_id.var_id), &"captured outer variable");
+ }
err.emit();
}
}
use std::rc::Rc;
use std::hash::{Hash, Hasher};
use syntax::ast;
-use syntax_pos::{DUMMY_SP, MultiSpan, Span};
+use syntax_pos::{MultiSpan, Span};
use errors::DiagnosticBuilder;
use rustc::hir;
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
- ty::queries::borrowck::get(tcx, DUMMY_SP, body_owner_def_id);
+ tcx.borrowck(body_owner_def_id);
});
}
let owner_id = tcx.hir.as_local_node_id(owner_def_id).unwrap();
let body_id = tcx.hir.body_owned_by(owner_id);
let attributes = tcx.get_attrs(owner_def_id);
- let tables = tcx.item_tables(owner_def_id);
+ let tables = tcx.typeck_tables_of(owner_def_id);
let mut bccx = &mut BorrowckCtxt {
tcx: tcx,
{
let owner_id = tcx.hir.body_owner(body_id);
let owner_def_id = tcx.hir.local_def_id(owner_id);
- let tables = tcx.item_tables(owner_def_id);
+ let tables = tcx.typeck_tables_of(owner_def_id);
let mut bccx = BorrowckCtxt {
tcx: tcx,
use syntax::ast;
use rustc::hir::{self, Expr};
-use syntax_pos::{Span, DUMMY_SP};
+use syntax_pos::Span;
use std::cmp::Ordering;
_ => Some((def_id, substs))
}
} else {
- match tcx.sess.cstore.describe_def(def_id) {
+ match tcx.describe_def(def_id) {
Some(Def::AssociatedConst(_)) => {
// As mentioned in the comments above for in-crate
// constants, we only try to find the expression for a
match cx.tables.qpath_def(qpath, e.id) {
Def::Const(def_id) |
Def::AssociatedConst(def_id) => {
- match ty::queries::const_eval::get(tcx, e.span, (def_id, substs)) {
+ match tcx.at(e.span).const_eval((def_id, substs)) {
Ok(val) => val,
Err(ConstEvalErr { kind: TypeckError, .. }) => {
signal!(e, TypeckError);
debug!("const call({:?})", call_args);
let callee_cx = ConstContext {
tcx: tcx,
- tables: tcx.item_tables(def_id),
+ tables: tcx.typeck_tables_of(def_id),
substs: substs,
fn_args: Some(call_args)
};
Float(f) => cast_const_float(tcx, f, ty),
Char(c) => cast_const_int(tcx, U32(c as u32), ty),
Variant(v) => {
- let adt = tcx.lookup_adt_def(tcx.parent_def_id(v).unwrap());
+ let adt = tcx.adt_def(tcx.parent_def_id(v).unwrap());
let idx = adt.variant_index_with_id(v);
cast_const_int(tcx, adt.discriminant_for_variant(tcx, idx), ty)
}
let cx = ConstContext {
tcx,
- tables: tcx.item_tables(def_id),
+ tables: tcx.typeck_tables_of(def_id),
substs: substs,
fn_args: None
};
let body = if let Some(id) = tcx.hir.as_local_node_id(def_id) {
- ty::queries::mir_const_qualif::get(tcx, DUMMY_SP, def_id);
+ tcx.mir_const_qualif(def_id);
tcx.hir.body(tcx.hir.body_owned_by(id))
} else {
tcx.sess.cstore.item_body(tcx, def_id)
match def {
Def::Variant(variant_id) | Def::VariantCtor(variant_id, ..) => {
let enum_id = self.tcx.parent_def_id(variant_id).unwrap();
- let adt_def = self.tcx.lookup_adt_def(enum_id);
+ let adt_def = self.tcx.adt_def(enum_id);
if adt_def.variants.len() > 1 {
let substs = match ty.sty {
TypeVariants::TyAdt(_, substs) => substs,
Some((def_id, _substs)) => {
// Enter the inlined constant's tables temporarily.
let old_tables = self.tables;
- self.tables = tcx.item_tables(def_id);
+ self.tables = tcx.typeck_tables_of(def_id);
let body = if let Some(id) = tcx.hir.as_local_node_id(def_id) {
tcx.hir.body(tcx.hir.body_owned_by(id))
} else {
pub l_sysid: libc::c_int,
}
+ pub const F_RDLCK: libc::c_short = 0x0040;
pub const F_UNLCK: libc::c_short = 0x0200;
pub const F_WRLCK: libc::c_short = 0x0400;
pub const F_SETLK: libc::c_int = 0x0080;
sess.code_stats.borrow().print_type_sizes();
}
- if ::std::env::var("SKIP_LLVM").is_ok() { ::std::process::exit(0); }
-
let phase5_result = phase_5_run_llvm_passes(sess, &trans, &outputs);
controller_entry_point!(after_llvm,
mir::provide(&mut local_providers);
reachable::provide(&mut local_providers);
rustc_privacy::provide(&mut local_providers);
+ trans::provide(&mut local_providers);
typeck::provide(&mut local_providers);
ty::provide(&mut local_providers);
reachable::provide(&mut local_providers);
let mut extern_providers = ty::maps::Providers::default();
cstore::provide(&mut extern_providers);
+ trans::provide(&mut extern_providers);
ty::provide_extern(&mut extern_providers);
// FIXME(eddyb) get rid of this once we replace const_eval with miri.
rustc_const_eval::provide(&mut extern_providers);
let cstore = Rc::new(CStore::new(&dep_graph));
let loader = file_loader.unwrap_or(box RealFileLoader);
- let codemap = Rc::new(CodeMap::with_file_loader(loader));
+ let codemap = Rc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping()));
let mut sess = session::build_session_with_codemap(
sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest,
);
use std::rc::Rc;
use syntax::ast;
use syntax::abi::Abi;
-use syntax::codemap::CodeMap;
+use syntax::codemap::{CodeMap, FilePathMapping};
use errors;
use errors::emitter::Emitter;
use errors::{Level, DiagnosticBuilder};
&dep_graph,
None,
diagnostic_handler,
- Rc::new(CodeMap::new()),
+ Rc::new(CodeMap::new(FilePathMapping::empty())),
cstore.clone());
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let input = config::Input::Str {
panic!(ExplicitBug);
}
pub fn delay_span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
+ if self.treat_err_as_bug {
+ self.span_bug(sp, msg);
+ }
let mut delayed = self.delayed_span_bug.borrow_mut();
*delayed = Some((sp.into(), msg.to_string()));
}
PlainImpl,
}
-pub fn method_context(cx: &LateContext, id: ast::NodeId, span: Span) -> MethodLateContext {
+pub fn method_context(cx: &LateContext, id: ast::NodeId) -> MethodLateContext {
let def_id = cx.tcx.hir.local_def_id(id);
- match cx.tcx.maps.associated_item.borrow().get(&def_id) {
- None => span_bug!(span, "missing method descriptor?!"),
- Some(item) => {
- match item.container {
- ty::TraitContainer(..) => MethodLateContext::TraitDefaultImpl,
- ty::ImplContainer(cid) => {
- match cx.tcx.impl_trait_ref(cid) {
- Some(_) => MethodLateContext::TraitImpl,
- None => MethodLateContext::PlainImpl,
- }
- }
+ let item = cx.tcx.associated_item(def_id);
+ match item.container {
+ ty::TraitContainer(..) => MethodLateContext::TraitDefaultImpl,
+ ty::ImplContainer(cid) => {
+ match cx.tcx.impl_trait_ref(cid) {
+ Some(_) => MethodLateContext::TraitImpl,
+ None => MethodLateContext::PlainImpl,
}
}
}
id: ast::NodeId) {
match fk {
FnKind::Method(name, ..) => {
- match method_context(cx, id, span) {
+ match method_context(cx, id) {
MethodLateContext::PlainImpl => {
self.check_snake_case(cx, "method", &name.as_str(), Some(span))
}
hir::ItemStruct(..) |
hir::ItemUnion(..) => {
let def_id = cx.tcx.hir.local_def_id(it.id);
- self.check_heap_type(cx, it.span, cx.tcx.item_type(def_id))
+ self.check_heap_type(cx, it.span, cx.tcx.type_of(def_id))
}
_ => ()
}
for struct_field in struct_def.fields() {
let def_id = cx.tcx.hir.local_def_id(struct_field.id);
self.check_heap_type(cx, struct_field.span,
- cx.tcx.item_type(def_id));
+ cx.tcx.type_of(def_id));
}
}
_ => (),
fn check_impl_item(&mut self, cx: &LateContext, impl_item: &hir::ImplItem) {
// If the method is an impl for a trait, don't doc.
- if method_context(cx, impl_item.id, impl_item.span) == MethodLateContext::TraitImpl {
+ if method_context(cx, impl_item.id) == MethodLateContext::TraitImpl {
return;
}
if ast_generics.is_parameterized() {
return;
}
- let def = cx.tcx.lookup_adt_def(cx.tcx.hir.local_def_id(item.id));
+ let def = cx.tcx.adt_def(cx.tcx.hir.local_def_id(item.id));
(def, cx.tcx.mk_adt(def, cx.tcx.intern_substs(&[])))
}
hir::ItemUnion(_, ref ast_generics) => {
if ast_generics.is_parameterized() {
return;
}
- let def = cx.tcx.lookup_adt_def(cx.tcx.hir.local_def_id(item.id));
+ let def = cx.tcx.adt_def(cx.tcx.hir.local_def_id(item.id));
(def, cx.tcx.mk_adt(def, cx.tcx.intern_substs(&[])))
}
hir::ItemEnum(_, ref ast_generics) => {
if ast_generics.is_parameterized() {
return;
}
- let def = cx.tcx.lookup_adt_def(cx.tcx.hir.local_def_id(item.id));
+ let def = cx.tcx.adt_def(cx.tcx.hir.local_def_id(item.id));
(def, cx.tcx.mk_adt(def, cx.tcx.intern_substs(&[])))
}
_ => return,
};
if self.impling_types.is_none() {
- let debug_def = cx.tcx.lookup_trait_def(debug);
+ let debug_def = cx.tcx.trait_def(debug);
let mut impls = NodeSet();
debug_def.for_each_impl(cx.tcx, |d| {
- if let Some(ty_def) = cx.tcx.item_type(d).ty_to_def_id() {
+ if let Some(ty_def) = cx.tcx.type_of(d).ty_to_def_id() {
if let Some(node_id) = cx.tcx.hir.as_local_node_id(ty_def) {
impls.insert(node_id);
}
}
fn def_id_is_transmute(cx: &LateContext, def_id: DefId) -> bool {
- match cx.tcx.item_type(def_id).sty {
+ match cx.tcx.type_of(def_id).sty {
ty::TyFnDef(.., bfty) if bfty.abi() == RustIntrinsic => (),
_ => return false,
}
if let hir::ItemUnion(ref vdata, _) = item.node {
let param_env = &ty::ParameterEnvironment::for_item(ctx.tcx, item.id);
for field in vdata.fields() {
- let field_ty = ctx.tcx.item_type(ctx.tcx.hir.local_def_id(field.id));
+ let field_ty = ctx.tcx.type_of(ctx.tcx.hir.local_def_id(field.id));
if field_ty.needs_drop(ctx.tcx, param_env) {
ctx.span_lint(UNIONS_WITH_DROP_FIELDS,
field.span,
fn check_foreign_fn(&mut self, id: ast::NodeId, decl: &hir::FnDecl) {
let def_id = self.cx.tcx.hir.local_def_id(id);
- let sig = self.cx.tcx.item_type(def_id).fn_sig();
+ let sig = self.cx.tcx.type_of(def_id).fn_sig();
let sig = self.cx.tcx.erase_late_bound_regions(&sig);
for (input_ty, input_hir) in sig.inputs().iter().zip(&decl.inputs) {
fn check_foreign_static(&mut self, id: ast::NodeId, span: Span) {
let def_id = self.cx.tcx.hir.local_def_id(id);
- let ty = self.cx.tcx.item_type(def_id);
+ let ty = self.cx.tcx.type_of(def_id);
self.check_type_for_ffi_and_report_errors(span, ty);
}
}
if let hir::ItemEnum(ref enum_definition, ref gens) = it.node {
if gens.ty_params.is_empty() {
// sizes only make sense for non-generic types
- let t = cx.tcx.item_type(cx.tcx.hir.local_def_id(it.id));
+ let t = cx.tcx.type_of(cx.tcx.hir.local_def_id(it.id));
let layout = cx.tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
let ty = cx.tcx.erase_regions(&t);
ty.layout(&infcx).unwrap_or_else(|e| {
use build_helper::output;
-fn detect_llvm_link(llvm_config: &Path) -> (&'static str, Option<&'static str>) {
- let mut version_cmd = Command::new(llvm_config);
- version_cmd.arg("--version");
- let version_output = output(&mut version_cmd);
- let mut parts = version_output.split('.').take(2)
- .filter_map(|s| s.parse::<u32>().ok());
- if let (Some(major), Some(minor)) = (parts.next(), parts.next()) {
- if major > 3 || (major == 3 && minor >= 9) {
- // Force the link mode we want, preferring static by default, but
- // possibly overridden by `configure --enable-llvm-link-shared`.
- if env::var_os("LLVM_LINK_SHARED").is_some() {
- return ("dylib", Some("--link-shared"));
- } else {
- return ("static", Some("--link-static"));
- }
- } else if major == 3 && minor == 8 {
- // Find out LLVM's default linking mode.
- let mut mode_cmd = Command::new(llvm_config);
- mode_cmd.arg("--shared-mode");
- if output(&mut mode_cmd).trim() == "shared" {
- return ("dylib", None);
- } else {
- return ("static", None);
- }
+fn detect_llvm_link(major: u32, minor: u32, llvm_config: &Path)
+ -> (&'static str, Option<&'static str>) {
+ if major > 3 || (major == 3 && minor >= 9) {
+ // Force the link mode we want, preferring static by default, but
+ // possibly overridden by `configure --enable-llvm-link-shared`.
+ if env::var_os("LLVM_LINK_SHARED").is_some() {
+ return ("dylib", Some("--link-shared"));
+ } else {
+ return ("static", Some("--link-static"));
+ }
+ } else if major == 3 && minor == 8 {
+ // Find out LLVM's default linking mode.
+ let mut mode_cmd = Command::new(llvm_config);
+ mode_cmd.arg("--shared-mode");
+ if output(&mut mode_cmd).trim() == "shared" {
+ return ("dylib", None);
+ } else {
+ return ("static", None);
}
}
("static", None)
let host = env::var("HOST").expect("HOST was not set");
let is_crossed = target != host;
- let optional_components =
- ["x86", "arm", "aarch64", "mips", "powerpc", "pnacl", "systemz", "jsbackend", "msp430",
- "sparc", "nvptx"];
+ let mut optional_components =
+ vec!["x86", "arm", "aarch64", "mips", "powerpc", "pnacl",
+ "systemz", "jsbackend", "msp430", "sparc", "nvptx"];
+
+ let mut version_cmd = Command::new(&llvm_config);
+ version_cmd.arg("--version");
+ let version_output = output(&mut version_cmd);
+ let mut parts = version_output.split('.').take(2)
+ .filter_map(|s| s.parse::<u32>().ok());
+ let (major, minor) =
+ if let (Some(major), Some(minor)) = (parts.next(), parts.next()) {
+ (major, minor)
+ } else {
+ (3, 7)
+ };
+
+ if major > 3 {
+ optional_components.push("hexagon");
+ }
// FIXME: surely we don't need all these components, right? Stuff like mcjit
// or interpreter the compiler itself never uses.
.cpp_link_stdlib(None) // we handle this below
.compile("librustllvm.a");
- let (llvm_kind, llvm_link_arg) = detect_llvm_link(&llvm_config);
+ let (llvm_kind, llvm_link_arg) = detect_llvm_link(major, minor, &llvm_config);
// Link in all LLVM libraries, if we're using the "wrong" llvm-config then
// we don't pick up system libs because unfortunately they're for the host
LLVMInitializeNVPTXTarget,
LLVMInitializeNVPTXTargetMC,
LLVMInitializeNVPTXAsmPrinter);
+ init_target!(llvm_component = "hexagon",
+ LLVMInitializeHexagonTargetInfo,
+ LLVMInitializeHexagonTarget,
+ LLVMInitializeHexagonTargetMC,
+ LLVMInitializeHexagonAsmPrinter,
+ LLVMInitializeHexagonAsmParser);
}
pub fn last_error() -> Option<String> {
extern crate cmake;
use std::env;
-use build_helper::native_lib_boilerplate;
+use build_helper::sanitizer_lib_boilerplate;
use cmake::Config;
fn main() {
if let Some(llvm_config) = env::var_os("LLVM_CONFIG") {
- let native = match native_lib_boilerplate("compiler-rt", "lsan", "clang_rt.lsan-x86_64",
- "build/lib/linux") {
+ let native = match sanitizer_lib_boilerplate("lsan") {
Ok(native) => native,
_ => return,
};
fn inject_sanitizer_runtime(&mut self) {
if let Some(ref sanitizer) = self.sess.opts.debugging_opts.sanitizer {
- // Sanitizers can only be used with x86_64 Linux executables linked
- // to `std`
- if self.sess.target.target.llvm_target != "x86_64-unknown-linux-gnu" {
- self.sess.err(&format!("Sanitizers only work with the \
- `x86_64-unknown-linux-gnu` target."));
+ // Sanitizers can only be used on some tested platforms with
+ // executables linked to `std`
+ const ASAN_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu",
+ "x86_64-apple-darwin"];
+ const TSAN_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu",
+ "x86_64-apple-darwin"];
+ const LSAN_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu"];
+ const MSAN_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu"];
+
+ let supported_targets = match *sanitizer {
+ Sanitizer::Address => ASAN_SUPPORTED_TARGETS,
+ Sanitizer::Thread => TSAN_SUPPORTED_TARGETS,
+ Sanitizer::Leak => LSAN_SUPPORTED_TARGETS,
+ Sanitizer::Memory => MSAN_SUPPORTED_TARGETS,
+ };
+ if !supported_targets.contains(&&*self.sess.target.target.llvm_target) {
+ self.sess.err(&format!("{:?}Sanitizer only works with the `{}` target",
+ sanitizer,
+ supported_targets.join("` or `")
+ ));
return
}
use rustc::middle::cstore::{CrateStore, CrateSource, LibSource, DepKind,
ExternCrate, NativeLibrary, LinkMeta,
LinkagePreference, LoadedMacro, EncodedMetadata};
-use rustc::hir::def::{self, Def};
+use rustc::hir::def;
use rustc::middle::lang_items;
use rustc::session::Session;
use rustc::ty::{self, TyCtxt};
}
provide! { <'tcx> tcx, def_id, cdata
- ty => { cdata.get_type(def_id.index, tcx) }
- generics => { tcx.alloc_generics(cdata.get_generics(def_id.index)) }
- predicates => { cdata.get_predicates(def_id.index, tcx) }
- super_predicates => { cdata.get_super_predicates(def_id.index, tcx) }
+ type_of => { cdata.get_type(def_id.index, tcx) }
+ generics_of => { tcx.alloc_generics(cdata.get_generics(def_id.index)) }
+ predicates_of => { cdata.get_predicates(def_id.index, tcx) }
+ super_predicates_of => { cdata.get_super_predicates(def_id.index, tcx) }
trait_def => {
tcx.alloc_trait_def(cdata.get_trait_def(def_id.index))
}
let _ = cdata;
tcx.calculate_dtor(def_id, &mut |_,_| Ok(()))
}
- variances => { Rc::new(cdata.get_item_variances(def_id.index)) }
+ variances_of => { Rc::new(cdata.get_item_variances(def_id.index)) }
associated_item_def_ids => {
let mut result = vec![];
cdata.each_child_of_item(def_id.index, |child| result.push(child.def.def_id()));
mir
}
mir_const_qualif => { cdata.mir_const_qualif(def_id.index) }
- typeck_tables => { cdata.item_body_tables(def_id.index, tcx) }
+ typeck_tables_of => { cdata.item_body_tables(def_id.index, tcx) }
closure_kind => { cdata.closure_kind(def_id.index) }
closure_type => { cdata.closure_ty(def_id.index, tcx) }
inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
is_foreign_item => { cdata.is_foreign_item(def_id.index) }
+ describe_def => { cdata.get_def(def_id.index) }
}
impl CrateStore for cstore::CStore {
self.get_crate_data(krate)
}
- fn describe_def(&self, def: DefId) -> Option<Def> {
- self.dep_graph.read(DepNode::MetaData(def));
- self.get_crate_data(def.krate).get_def(def.index)
- }
-
fn def_span(&self, sess: &Session, def: DefId) -> Span {
self.dep_graph.read(DepNode::MetaData(def));
self.get_crate_data(def.krate).get_span(def.index, sess)
result
}
+ fn impl_defaultness(&self, def: DefId) -> hir::Defaultness
+ {
+ self.dep_graph.read(DepNode::MetaData(def));
+ self.get_crate_data(def.krate).get_impl_defaultness(def.index)
+ }
+
fn impl_parent(&self, impl_def: DefId) -> Option<DefId> {
self.dep_graph.read(DepNode::MetaData(impl_def));
self.get_crate_data(impl_def.krate).get_parent_impl(impl_def.index)
let (name, def) = data.get_macro(id.index);
let source_name = format!("<{} macros>", name);
- let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
+ let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body);
let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION };
let body = filemap_to_stream(&sess.parse_sess, filemap);
impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::AdtDef> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> {
let def_id = DefId::decode(self)?;
- Ok(self.tcx().lookup_adt_def(def_id))
+ Ok(self.tcx().adt_def(def_id))
}
}
self.get_impl_data(id).polarity
}
+ pub fn get_impl_defaultness(&self, id: DefIndex) -> hir::Defaultness {
+ self.get_impl_data(id).defaultness
+ }
+
pub fn get_coerce_unsized_info(&self,
id: DefIndex)
-> Option<ty::adjustment::CoerceUnsizedInfo> {
EntryKind::AssociatedType(container) => {
(ty::AssociatedKind::Type, container, false)
}
- _ => bug!()
+ _ => bug!("cannot get associated-item of `{:?}`", def_key)
};
ty::AssociatedItem {
// We can't reuse an existing FileMap, so allocate a new one
// containing the information we need.
let syntax_pos::FileMap { name,
- abs_path,
+ name_was_remapped,
start_pos,
end_pos,
lines,
}
let local_version = local_codemap.new_imported_filemap(name,
- abs_path,
+ name_was_remapped,
source_length,
lines,
multibyte_chars);
use std::intrinsics;
use std::io::prelude::*;
use std::io::Cursor;
+use std::path::Path;
use std::rc::Rc;
use std::u32;
use syntax::ast::{self, CRATE_NODE_ID};
use syntax::codemap::Spanned;
use syntax::attr;
use syntax::symbol::Symbol;
-use syntax_pos::{self, DUMMY_SP};
+use syntax_pos;
use rustc::hir::{self, PatKind};
use rustc::hir::itemlikevisit::ItemLikeVisitor;
fn encode_item_variances(&mut self, def_id: DefId) -> LazySeq<ty::Variance> {
debug!("EntryBuilder::encode_item_variances({:?})", def_id);
let tcx = self.tcx;
- self.lazy_seq_from_slice(&tcx.item_variances(def_id))
+ self.lazy_seq_from_slice(&tcx.variances_of(def_id))
}
fn encode_item_type(&mut self, def_id: DefId) -> Lazy<Ty<'tcx>> {
let tcx = self.tcx;
- let ty = tcx.item_type(def_id);
+ let ty = tcx.type_of(def_id);
debug!("EntryBuilder::encode_item_type({:?}) => {:?}", def_id, ty);
self.lazy(&ty)
}
(enum_did, Untracked(index)): (DefId, Untracked<usize>))
-> Entry<'tcx> {
let tcx = self.tcx;
- let def = tcx.lookup_adt_def(enum_did);
+ let def = tcx.adt_def(enum_did);
let variant = &def.variants[index];
let def_id = variant.did;
debug!("EntryBuilder::encode_enum_variant_info({:?})", def_id);
impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
fn encode_fields(&mut self, adt_def_id: DefId) {
- let def = self.tcx.lookup_adt_def(adt_def_id);
+ let def = self.tcx.adt_def(adt_def_id);
for (variant_index, variant) in def.variants.iter().enumerate() {
for (field_index, field) in variant.fields.iter().enumerate() {
self.record(field.did,
usize)>))
-> Entry<'tcx> {
let tcx = self.tcx;
- let variant = &tcx.lookup_adt_def(adt_def_id).variants[variant_index];
+ let variant = &tcx.adt_def(adt_def_id).variants[variant_index];
let field = &variant.fields[field_index];
let def_id = field.did;
fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<'tcx> {
debug!("EntryBuilder::encode_struct_ctor({:?})", def_id);
let tcx = self.tcx;
- let variant = tcx.lookup_adt_def(adt_def_id).struct_variant();
+ let variant = tcx.adt_def(adt_def_id).struct_variant();
let data = VariantData {
ctor_kind: variant.ctor_kind,
fn encode_generics(&mut self, def_id: DefId) -> Lazy<ty::Generics> {
debug!("EntryBuilder::encode_generics({:?})", def_id);
let tcx = self.tcx;
- self.lazy(tcx.item_generics(def_id))
+ self.lazy(tcx.generics_of(def_id))
}
fn encode_predicates(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
debug!("EntryBuilder::encode_predicates({:?})", def_id);
let tcx = self.tcx;
- self.lazy(&tcx.item_predicates(def_id))
+ self.lazy(&tcx.predicates_of(def_id))
}
fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
let kind = match impl_item.kind {
ty::AssociatedKind::Const => {
EntryKind::AssociatedConst(container,
- ty::queries::mir_const_qualif::get(self.tcx, ast_item.span, def_id))
+ self.tcx.at(ast_item.span).mir_const_qualif(def_id))
}
ty::AssociatedKind::Method => {
let fn_data = if let hir::ImplItemKind::Method(ref sig, body) = ast_item.node {
let (ast, mir) = if let hir::ImplItemKind::Const(_, body) = ast_item.node {
(Some(body), true)
} else if let hir::ImplItemKind::Method(ref sig, body) = ast_item.node {
- let generics = self.tcx.item_generics(def_id);
+ let generics = self.tcx.generics_of(def_id);
let types = generics.parent_types as usize + generics.types.len();
let needs_inline = types > 0 || attr::requests_inline(&ast_item.attrs);
let is_const_fn = sig.constness == hir::Constness::Const;
// Encodes the inherent implementations of a structure, enumeration, or trait.
fn encode_inherent_implementations(&mut self, def_id: DefId) -> LazySeq<DefIndex> {
debug!("EntryBuilder::encode_inherent_implementations({:?})", def_id);
- match self.tcx.maps.inherent_impls.borrow().get(&def_id) {
- None => LazySeq::empty(),
- Some(implementations) => {
- self.lazy_seq(implementations.iter().map(|&def_id| {
- assert!(def_id.is_local());
- def_id.index
- }))
- }
+ let implementations = self.tcx.inherent_impls(def_id);
+ if implementations.is_empty() {
+ LazySeq::empty()
+ } else {
+ self.lazy_seq(implementations.iter().map(|&def_id| {
+ assert!(def_id.is_local());
+ def_id.index
+ }))
}
}
hir::ItemStatic(_, hir::MutMutable, _) => EntryKind::MutStatic,
hir::ItemStatic(_, hir::MutImmutable, _) => EntryKind::ImmStatic,
hir::ItemConst(..) => {
- EntryKind::Const(ty::queries::mir_const_qualif::get(tcx, item.span, def_id))
+ EntryKind::Const(tcx.at(item.span).mir_const_qualif(def_id))
}
hir::ItemFn(_, _, constness, .., body) => {
let data = FnData {
hir::ItemTy(..) => EntryKind::Type,
hir::ItemEnum(..) => EntryKind::Enum(get_repr_options(&tcx, def_id)),
hir::ItemStruct(ref struct_def, _) => {
- let variant = tcx.lookup_adt_def(def_id).struct_variant();
+ let variant = tcx.adt_def(def_id).struct_variant();
// Encode def_ids for each field and method
// for methods, write all the stuff get_trait_method
}), repr_options)
}
hir::ItemUnion(..) => {
- let variant = tcx.lookup_adt_def(def_id).struct_variant();
+ let variant = tcx.adt_def(def_id).struct_variant();
let repr_options = get_repr_options(&tcx, def_id);
EntryKind::Union(self.lazy(&VariantData {
hir::ItemDefaultImpl(..) => {
let data = ImplData {
polarity: hir::ImplPolarity::Positive,
+ defaultness: hir::Defaultness::Final,
parent_impl: None,
coerce_unsized_info: None,
trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)),
EntryKind::DefaultImpl(self.lazy(&data))
}
- hir::ItemImpl(_, polarity, ..) => {
+ hir::ItemImpl(_, polarity, defaultness, ..) => {
let trait_ref = tcx.impl_trait_ref(def_id);
let parent = if let Some(trait_ref) = trait_ref {
- let trait_def = tcx.lookup_trait_def(trait_ref.def_id);
+ let trait_def = tcx.trait_def(trait_ref.def_id);
trait_def.ancestors(def_id).skip(1).next().and_then(|node| {
match node {
specialization_graph::Node::Impl(parent) => Some(parent),
let coerce_unsized_info =
trait_ref.and_then(|t| {
if Some(t.def_id) == tcx.lang_items.coerce_unsized_trait() {
- Some(ty::queries::coerce_unsized_info::get(tcx, item.span, def_id))
+ Some(tcx.at(item.span).coerce_unsized_info(def_id))
} else {
None
}
let data = ImplData {
polarity: polarity,
+ defaultness: defaultness,
parent_impl: parent,
coerce_unsized_info: coerce_unsized_info,
trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)),
EntryKind::Impl(self.lazy(&data))
}
hir::ItemTrait(..) => {
- let trait_def = tcx.lookup_trait_def(def_id);
+ let trait_def = tcx.trait_def(def_id);
let data = TraitData {
unsafety: trait_def.unsafety,
paren_sugar: trait_def.paren_sugar,
has_default_impl: tcx.trait_has_default_impl(def_id),
- super_predicates: self.lazy(&tcx.item_super_predicates(def_id)),
+ super_predicates: self.lazy(&tcx.super_predicates_of(def_id)),
};
EntryKind::Trait(self.lazy(&data))
.map(|foreign_item| tcx.hir.local_def_id(foreign_item.id).index))
}
hir::ItemEnum(..) => {
- let def = self.tcx.lookup_adt_def(def_id);
+ let def = self.tcx.adt_def(def_id);
self.lazy_seq(def.variants.iter().map(|v| {
assert!(v.did.is_local());
v.did.index
}
hir::ItemStruct(..) |
hir::ItemUnion(..) => {
- let def = self.tcx.lookup_adt_def(def_id);
+ let def = self.tcx.adt_def(def_id);
self.lazy_seq(def.struct_variant().fields.iter().map(|f| {
assert!(f.did.is_local());
f.did.index
hir::ItemEnum(..) => {
self.encode_fields(def_id);
- let def = self.tcx.lookup_adt_def(def_id);
+ let def = self.tcx.adt_def(def_id);
for (i, variant) in def.variants.iter().enumerate() {
self.record(variant.did,
EntryBuilder::encode_enum_variant_info,
let body = tcx.hir.body_owned_by(id);
Entry {
- kind: EntryKind::Const(ty::queries::mir_const_qualif::get(tcx, DUMMY_SP, def_id)),
+ kind: EntryKind::Const(tcx.mir_const_qualif(def_id)),
visibility: self.lazy(&ty::Visibility::Public),
span: self.lazy(&tcx.def_span(def_id)),
attributes: LazySeq::empty(),
fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> {
let codemap = self.tcx.sess.codemap();
let all_filemaps = codemap.files.borrow();
- self.lazy_seq_ref(all_filemaps.iter()
+ let adapted = all_filemaps.iter()
.filter(|filemap| {
// No need to re-export imported filemaps, as any downstream
// crate will import them from their original source.
!filemap.is_imported()
})
- .map(|filemap| &**filemap))
+ .map(|filemap| {
+ // When exporting FileMaps, we expand all paths to absolute
+ // paths because any relative paths are potentially relative to
+ // a wrong directory.
+ // However, if a path has been modified via
+ // `-Zremap-path-prefix` we assume the user has already set
+ // things up the way they want and don't touch the path values
+ // anymore.
+ let name = Path::new(&filemap.name);
+ let (ref working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir;
+ if filemap.name_was_remapped ||
+ (name.is_relative() && working_dir_was_remapped) {
+ // The path of this FileMap has been modified by
+ // path-remapping, so we use it verbatim (and avoid cloning
+ // the whole map in the process).
+ filemap.clone()
+ } else {
+ let mut adapted = (**filemap).clone();
+ let abs_path = Path::new(working_dir).join(name)
+ .to_string_lossy()
+ .into_owned();
+ adapted.name = abs_path;
+ Rc::new(adapted)
+ }
+ })
+ .collect::<Vec<_>>();
+
+ self.lazy_seq_ref(adapted.iter().map(|fm| &**fm))
}
fn encode_def_path_table(&mut self) -> Lazy<DefPathTable> {
}
pub fn get_repr_options<'a, 'tcx, 'gcx>(tcx: &TyCtxt<'a, 'tcx, 'gcx>, did: DefId) -> ReprOptions {
- let ty = tcx.item_type(did);
+ let ty = tcx.type_of(did);
match ty.sty {
ty::TyAdt(ref def, _) => return def.repr,
_ => bug!("{} is not an ADT", ty),
#[derive(RustcEncodable, RustcDecodable)]
pub struct ImplData<'tcx> {
pub polarity: hir::ImplPolarity,
+ pub defaultness: hir::Defaultness,
pub parent_impl: Option<DefId>,
/// This is `Some` only for impls of `CoerceUnsized`.
impl_stable_hash_for!(struct ImplData<'tcx> {
polarity,
+ defaultness,
parent_impl,
coerce_unsized_info,
trait_ref
TerminatorKind::Call {
func: Operand::Constant(Constant {
span: data.span,
- ty: tcx.item_type(free_func).subst(tcx, substs),
+ ty: tcx.type_of(free_func).subst(tcx, substs),
literal: Literal::Value {
value: ConstVal::Function(free_func, substs),
}
let c = &cx.tcx.hir.body(count).value;
let def_id = cx.tcx.hir.body_owner_def_id(count);
let substs = Substs::empty();
- let count = match ty::queries::const_eval::get(cx.tcx, c.span, (def_id, substs)) {
+ let count = match cx.tcx.at(c.span).const_eval((def_id, substs)) {
Ok(ConstVal::Integral(ConstInt::Usize(u))) => u,
Ok(other) => bug!("constant evaluation of repeat count yielded {:?}", other),
Err(s) => cx.fatal_const_eval_err(&s, c.span, "expression")
let substs = self.tcx.mk_substs_trait(self_ty, params);
for item in self.tcx.associated_items(trait_def_id) {
if item.kind == ty::AssociatedKind::Method && item.name == method_name {
- let method_ty = self.tcx.item_type(item.def_id);
+ let method_ty = self.tcx.type_of(item.def_id);
let method_ty = method_ty.subst(self.tcx, substs);
return (method_ty,
Literal::Value {
// types/lifetimes replaced)
let fn_sig = cx.tables().liberated_fn_sigs[&id].clone();
- let ty = tcx.item_type(tcx.hir.local_def_id(id));
+ let ty = tcx.type_of(tcx.hir.local_def_id(id));
let mut abi = fn_sig.abi;
let implicit_argument = if let ty::TyClosure(..) = ty.sty {
// HACK(eddyb) Avoid having RustCall on closures,
} else {
param_env.free_substs
};
- let fn_ty = tcx.item_type(def_id).subst(tcx, substs);
+ let fn_ty = tcx.type_of(def_id).subst(tcx, substs);
let sig = tcx.erase_late_bound_regions(&fn_ty.fn_sig());
let span = tcx.def_span(def_id);
call_kind={:?}, untuple_args={:?})",
def_id, rcvr_adjustment, call_kind, untuple_args);
- let fn_ty = tcx.item_type(def_id).subst(tcx, param_env.free_substs);
+ let fn_ty = tcx.type_of(def_id).subst(tcx, param_env.free_substs);
let sig = tcx.erase_late_bound_regions(&fn_ty.fn_sig());
let span = tcx.def_span(def_id);
CallKind::Direct(def_id) => (
Operand::Constant(Constant {
span: span,
- ty: tcx.item_type(def_id).subst(tcx, param_env.free_substs),
+ ty: tcx.type_of(def_id).subst(tcx, param_env.free_substs),
literal: Literal::Value {
value: ConstVal::Function(def_id, param_env.free_substs),
},
{
let tcx = infcx.tcx;
let def_id = tcx.hir.local_def_id(ctor_id);
- let sig = match tcx.item_type(def_id).sty {
+ let sig = match tcx.type_of(def_id).sty {
ty::TyFnDef(_, _, fty) => tcx.no_late_bound_regions(&fty)
.expect("LBR in ADT constructor signature"),
_ => bug!("unexpected type for ctor {:?}", def_id)
let mut span = None;
self.tcx
- .lookup_trait_def(drop_trait_id)
+ .trait_def(drop_trait_id)
.for_each_relevant_impl(self.tcx, self.mir.return_ty, |impl_did| {
self.tcx.hir
.as_local_node_id(impl_did)
if substs.types().next().is_some() {
self.add_type(constant.ty);
} else {
- let bits = ty::queries::mir_const_qualif::get(self.tcx,
- constant.span,
- def_id);
+ let bits = self.tcx.at(constant.span).mir_const_qualif(def_id);
let qualif = Qualif::from_bits(bits).expect("invalid mir_const_qualif");
self.add(qualif);
let src = MirSource::from_node(tcx, id);
if let MirSource::Const(_) = src {
- ty::queries::mir_const_qualif::get(tcx, DUMMY_SP, def_id);
+ tcx.mir_const_qualif(def_id);
continue;
}
Lvalue::Local(index) => LvalueTy::Ty { ty: self.mir.local_decls[index].ty },
Lvalue::Static(box Static { def_id, ty: sty }) => {
let sty = self.sanitize_type(lvalue, sty);
- let ty = self.tcx().item_type(def_id);
+ let ty = self.tcx().type_of(def_id);
let ty = self.cx.normalize(&ty);
if let Err(terr) = self.cx.eq_types(self.last_span, ty, sty) {
span_mirbug!(
extern crate cmake;
use std::env;
-use build_helper::native_lib_boilerplate;
+use build_helper::sanitizer_lib_boilerplate;
use cmake::Config;
fn main() {
if let Some(llvm_config) = env::var_os("LLVM_CONFIG") {
- let native = match native_lib_boilerplate("compiler-rt", "msan", "clang_rt.msan-x86_64",
- "build/lib/linux") {
+ let native = match sanitizer_lib_boilerplate("msan") {
Ok(native) => native,
_ => return,
};
};
let outer_tables = self.tables;
- self.tables = self.tcx.item_tables(self.tcx.hir.local_def_id(item_id));
+ self.tables = self.tcx.typeck_tables_of(self.tcx.hir.local_def_id(item_id));
let body = self.tcx.hir.body(body_id);
if !self.in_fn {
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
- html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
- html_root_url = "https://doc.rust-lang.org/nightly/")]
+ html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+ html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefId};
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::hir::itemlikevisit::DeepVisitor;
-use rustc::hir::pat_util::EnumerateAndAdjustIterator;
use rustc::lint;
use rustc::middle::privacy::{AccessLevel, AccessLevels};
use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
use rustc::ty::maps::Providers;
use rustc::util::nodemap::NodeSet;
use syntax::ast;
-use syntax_pos::{DUMMY_SP, Span};
+use syntax_pos::Span;
use std::cmp;
use std::mem::replace;
impl<'a, 'tcx> EmbargoVisitor<'a, 'tcx> {
fn item_ty_level(&self, item_def_id: DefId) -> Option<AccessLevel> {
- let ty_def_id = match self.tcx.item_type(item_def_id).sty {
+ let ty_def_id = match self.tcx.type_of(item_def_id).sty {
ty::TyAdt(adt, _) => adt.did,
ty::TyDynamic(ref obj, ..) if obj.principal().is_some() =>
obj.principal().unwrap().def_id(),
hir::ItemConst(..) | hir::ItemStatic(..) |
hir::ItemFn(..) | hir::ItemTy(..) => {
if item_level.is_some() {
- self.reach(item.id).generics().predicates().item_type();
+ self.reach(item.id).generics().predicates().ty();
}
}
hir::ItemTrait(.., ref trait_item_refs) => {
!trait_item_ref.defaultness.has_value() {
// No type to visit.
} else {
- reach.item_type();
+ reach.ty();
}
}
}
for impl_item_ref in impl_item_refs {
let id = impl_item_ref.id.node_id;
if trait_ref.is_some() || self.get(id).is_some() {
- self.reach(id).generics().predicates().item_type();
+ self.reach(id).generics().predicates().ty();
}
}
}
for variant in &def.variants {
if self.get(variant.node.data.id()).is_some() {
for field in variant.node.data.fields() {
- self.reach(field.id).item_type();
+ self.reach(field.id).ty();
}
// Corner case: if the variant is reachable, but its
// enum is not, make the enum reachable as well.
hir::ItemForeignMod(ref foreign_mod) => {
for foreign_item in &foreign_mod.items {
if self.get(foreign_item.id).is_some() {
- self.reach(foreign_item.id).generics().predicates().item_type();
+ self.reach(foreign_item.id).generics().predicates().ty();
}
}
}
self.reach(item.id).generics().predicates();
for field in struct_def.fields() {
if self.get(field.id).is_some() {
- self.reach(field.id).item_type();
+ self.reach(field.id).ty();
}
}
}
if let hir::TyImplTrait(..) = ty.node {
if self.get(ty.id).is_some() {
// Reach the (potentially private) type and the API being exposed.
- self.reach(ty.id).item_type().predicates();
+ self.reach(ty.id).ty().predicates();
}
}
impl<'b, 'a, 'tcx> ReachEverythingInTheInterfaceVisitor<'b, 'a, 'tcx> {
fn generics(&mut self) -> &mut Self {
- for def in &self.ev.tcx.item_generics(self.item_def_id).types {
+ for def in &self.ev.tcx.generics_of(self.item_def_id).types {
if def.has_default {
- self.ev.tcx.item_type(def.def_id).visit_with(self);
+ self.ev.tcx.type_of(def.def_id).visit_with(self);
}
}
self
}
fn predicates(&mut self) -> &mut Self {
- self.ev.tcx.item_predicates(self.item_def_id).visit_with(self);
+ self.ev.tcx.predicates_of(self.item_def_id).visit_with(self);
self
}
- fn item_type(&mut self) -> &mut Self {
- self.ev.tcx.item_type(self.item_def_id).visit_with(self);
+ fn ty(&mut self) -> &mut Self {
+ self.ev.tcx.type_of(self.item_def_id).visit_with(self);
self
}
}
}
-////////////////////////////////////////////////////////////////////////////////
-/// The privacy visitor, where privacy checks take place (violations reported)
-////////////////////////////////////////////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////////////
+/// Name privacy visitor, checks privacy and reports violations.
+/// Most of name privacy checks are performed during the main resolution phase,
+/// or later in type checking when field accesses and associated items are resolved.
+/// This pass performs remaining checks for fields in struct expressions and patterns.
+//////////////////////////////////////////////////////////////////////////////////////
-struct PrivacyVisitor<'a, 'tcx: 'a> {
+struct NamePrivacyVisitor<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- curitem: DefId,
- in_foreign: bool,
tables: &'a ty::TypeckTables<'tcx>,
+ current_item: DefId,
}
-impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
- fn item_is_accessible(&self, did: DefId) -> bool {
- match self.tcx.hir.as_local_node_id(did) {
- Some(node_id) =>
- ty::Visibility::from_hir(&self.tcx.hir.expect_item(node_id).vis, node_id, self.tcx),
- None => self.tcx.sess.cstore.visibility(did),
- }.is_accessible_from(self.curitem, self.tcx)
- }
-
- // Checks that a field is in scope.
+impl<'a, 'tcx> NamePrivacyVisitor<'a, 'tcx> {
+ // Checks that a field is accessible.
fn check_field(&mut self, span: Span, def: &'tcx ty::AdtDef, field: &'tcx ty::FieldDef) {
- if !def.is_enum() && !field.vis.is_accessible_from(self.curitem, self.tcx) {
+ if !def.is_enum() && !field.vis.is_accessible_from(self.current_item, self.tcx) {
struct_span_err!(self.tcx.sess, span, E0451, "field `{}` of {} `{}` is private",
- field.name, def.variant_descr(), self.tcx.item_path_str(def.did))
+ field.name, def.variant_descr(), self.tcx.item_path_str(def.did))
.span_label(span, &format!("field `{}` is private", field.name))
.emit();
}
}
-
- // Checks that a method is in scope.
- fn check_method(&mut self, span: Span, method_def_id: DefId) {
- match self.tcx.associated_item(method_def_id).container {
- // Trait methods are always all public. The only controlling factor
- // is whether the trait itself is accessible or not.
- ty::TraitContainer(trait_def_id) if !self.item_is_accessible(trait_def_id) => {
- let msg = format!("source trait `{}` is private",
- self.tcx.item_path_str(trait_def_id));
- self.tcx.sess.span_err(span, &msg);
- }
- _ => {}
- }
- }
}
-impl<'a, 'tcx> Visitor<'tcx> for PrivacyVisitor<'a, 'tcx> {
+impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> {
/// We want to visit items in the context of their containing
/// module and so forth, so supply a crate for doing a deep walk.
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
- let old_tables = self.tables;
- self.tables = self.tcx.body_tables(body);
+ let orig_tables = replace(&mut self.tables, self.tcx.body_tables(body));
let body = self.tcx.hir.body(body);
self.visit_body(body);
- self.tables = old_tables;
+ self.tables = orig_tables;
}
fn visit_item(&mut self, item: &'tcx hir::Item) {
- let orig_curitem = replace(&mut self.curitem, self.tcx.hir.local_def_id(item.id));
+ let orig_current_item = replace(&mut self.current_item, self.tcx.hir.local_def_id(item.id));
intravisit::walk_item(self, item);
- self.curitem = orig_curitem;
+ self.current_item = orig_current_item;
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
match expr.node {
- hir::ExprMethodCall(..) => {
- let method_call = ty::MethodCall::expr(expr.id);
- let method = self.tables.method_map[&method_call];
- self.check_method(expr.span, method.def_id);
- }
- hir::ExprStruct(ref qpath, ref expr_fields, _) => {
+ hir::ExprStruct(ref qpath, ref fields, ref base) => {
let def = self.tables.qpath_def(qpath, expr.id);
let adt = self.tables.expr_ty(expr).ty_adt_def().unwrap();
let variant = adt.variant_of_def(def);
- // RFC 736: ensure all unmentioned fields are visible.
- // Rather than computing the set of unmentioned fields
- // (i.e. `all_fields - fields`), just check them all,
- // unless the ADT is a union, then unmentioned fields
- // are not checked.
- if adt.is_union() {
- for expr_field in expr_fields {
- self.check_field(expr.span, adt, variant.field_named(expr_field.name.node));
+ if let Some(ref base) = *base {
+ // If the expression uses FRU we need to make sure all the unmentioned fields
+ // are checked for privacy (RFC 736). Rather than computing the set of
+ // unmentioned fields, just check them all.
+ for variant_field in &variant.fields {
+ let field = fields.iter().find(|f| f.name.node == variant_field.name);
+ let span = if let Some(f) = field { f.span } else { base.span };
+ self.check_field(span, adt, variant_field);
}
} else {
- for field in &variant.fields {
- let expr_field = expr_fields.iter().find(|f| f.name.node == field.name);
- let span = if let Some(f) = expr_field { f.span } else { expr.span };
- self.check_field(span, adt, field);
+ for field in fields {
+ self.check_field(field.span, adt, variant.field_named(field.name.node));
}
}
}
intravisit::walk_expr(self, expr);
}
- fn visit_pat(&mut self, pattern: &'tcx hir::Pat) {
- // Foreign functions do not have their patterns mapped in the def_map,
- // and there's nothing really relevant there anyway, so don't bother
- // checking privacy. If you can name the type then you can pass it to an
- // external C function anyway.
- if self.in_foreign { return }
-
- match pattern.node {
+ fn visit_pat(&mut self, pat: &'tcx hir::Pat) {
+ match pat.node {
PatKind::Struct(ref qpath, ref fields, _) => {
- let def = self.tables.qpath_def(qpath, pattern.id);
- let adt = self.tables.pat_ty(pattern).ty_adt_def().unwrap();
+ let def = self.tables.qpath_def(qpath, pat.id);
+ let adt = self.tables.pat_ty(pat).ty_adt_def().unwrap();
let variant = adt.variant_of_def(def);
for field in fields {
self.check_field(field.span, adt, variant.field_named(field.node.name));
}
}
- PatKind::TupleStruct(_, ref fields, ddpos) => {
- match self.tables.pat_ty(pattern).sty {
- // enum fields have no privacy at this time
- ty::TyAdt(def, _) if !def.is_enum() => {
- let expected_len = def.struct_variant().fields.len();
- for (i, field) in fields.iter().enumerate_and_adjust(expected_len, ddpos) {
- if let PatKind::Wild = field.node {
- continue
- }
- self.check_field(field.span, def, &def.struct_variant().fields[i]);
- }
- }
- _ => {}
- }
- }
_ => {}
}
- intravisit::walk_pat(self, pattern);
- }
-
- fn visit_foreign_item(&mut self, fi: &'tcx hir::ForeignItem) {
- self.in_foreign = true;
- intravisit::walk_foreign_item(self, fi);
- self.in_foreign = false;
+ intravisit::walk_pat(self, pat);
}
}
impl<'a, 'tcx: 'a> SearchInterfaceForPrivateItemsVisitor<'a, 'tcx> {
fn generics(&mut self) -> &mut Self {
- for def in &self.tcx.item_generics(self.item_def_id).types {
+ for def in &self.tcx.generics_of(self.item_def_id).types {
if def.has_default {
- self.tcx.item_type(def.def_id).visit_with(self);
+ self.tcx.type_of(def.def_id).visit_with(self);
}
}
self
}
fn predicates(&mut self) -> &mut Self {
- self.tcx.item_predicates(self.item_def_id).visit_with(self);
+ self.tcx.predicates_of(self.item_def_id).visit_with(self);
self
}
- fn item_type(&mut self) -> &mut Self {
- self.tcx.item_type(self.item_def_id).visit_with(self);
+ fn ty(&mut self) -> &mut Self {
+ self.tcx.type_of(self.item_def_id).visit_with(self);
self
}
// Subitems of these items have inherited publicity
hir::ItemConst(..) | hir::ItemStatic(..) | hir::ItemFn(..) |
hir::ItemTy(..) => {
- self.check(item.id, item_visibility).generics().predicates().item_type();
+ self.check(item.id, item_visibility).generics().predicates().ty();
// Recurse for e.g. `impl Trait` (see `visit_ty`).
self.inner_visibility = item_visibility;
!trait_item_ref.defaultness.has_value() {
// No type to visit.
} else {
- check.item_type();
+ check.ty();
}
}
}
for variant in &def.variants {
for field in variant.node.data.fields() {
- self.check(field.id, item_visibility).item_type();
+ self.check(field.id, item_visibility).ty();
}
}
}
hir::ItemForeignMod(ref foreign_mod) => {
for foreign_item in &foreign_mod.items {
let vis = ty::Visibility::from_hir(&foreign_item.vis, item.id, tcx);
- self.check(foreign_item.id, vis).generics().predicates().item_type();
+ self.check(foreign_item.id, vis).generics().predicates().ty();
}
}
// Subitems of structs and unions have their own publicity
for field in struct_def.fields() {
let field_visibility = ty::Visibility::from_hir(&field.vis, item.id, tcx);
- self.check(field.id, min(item_visibility, field_visibility)).item_type();
+ self.check(field.id, min(item_visibility, field_visibility)).ty();
}
}
// The interface is empty
// Subitems of inherent impls have their own publicity
hir::ItemImpl(.., None, _, ref impl_item_refs) => {
let ty_vis =
- self.check(item.id, ty::Visibility::Invisible).item_type().min_visibility;
+ self.check(item.id, ty::Visibility::Invisible).ty().min_visibility;
self.check(item.id, ty_vis).generics().predicates();
for impl_item_ref in impl_item_refs {
let impl_item_vis =
ty::Visibility::from_hir(&impl_item.vis, item.id, tcx);
self.check(impl_item.id, min(impl_item_vis, ty_vis))
- .generics().predicates().item_type();
+ .generics().predicates().ty();
// Recurse for e.g. `impl Trait` (see `visit_ty`).
self.inner_visibility = impl_item_vis;
// Subitems of trait impls have inherited publicity
hir::ItemImpl(.., Some(_), _, ref impl_item_refs) => {
let vis = self.check(item.id, ty::Visibility::Invisible)
- .item_type().impl_trait_ref().min_visibility;
+ .ty().impl_trait_ref().min_visibility;
self.check(item.id, vis).generics().predicates();
for impl_item_ref in impl_item_refs {
let impl_item = self.tcx.hir.impl_item(impl_item_ref.id);
- self.check(impl_item.id, vis).generics().predicates().item_type();
+ self.check(impl_item.id, vis).generics().predicates().ty();
// Recurse for e.g. `impl Trait` (see `visit_ty`).
self.inner_visibility = vis;
// e.g. `impl Iterator<Item=T>` has two predicates,
// `X: Iterator` and `<X as Iterator>::Item == T`,
// where `X` is the `impl Iterator<Item=T>` itself,
- // stored in `item_predicates`, not in the `Ty` itself.
+ // stored in `predicates_of`, not in the `Ty` itself.
self.check(ty.id, self.inner_visibility).predicates();
}
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Rc<AccessLevels> {
tcx.dep_graph.with_ignore(|| { // FIXME
- ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE)
+ tcx.privacy_access_levels(LOCAL_CRATE)
})
}
let krate = tcx.hir.krate();
- // Use the parent map to check the privacy of everything
- let mut visitor = PrivacyVisitor {
- curitem: DefId::local(CRATE_DEF_INDEX),
- in_foreign: false,
+ // Check privacy of names not checked in previous compilation stages.
+ let mut visitor = NamePrivacyVisitor {
tcx: tcx,
tables: &ty::TypeckTables::empty(),
+ current_item: DefId::local(CRATE_DEF_INDEX),
};
intravisit::walk_crate(&mut visitor, krate);
- tcx.sess.abort_if_errors();
-
// Build up a set of all exported items in the AST. This is a set of all
// items which are reachable from external crates based on visibility.
let mut visitor = EmbargoVisitor {
use std::collections::HashSet;
use std::collections::hash_map::DefaultHasher;
use std::hash::*;
+use std::path::Path;
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::parse::token;
where F: FnOnce(&mut DumpVisitor<'l, 'tcx, 'll, D>)
{
let item_def_id = self.tcx.hir.local_def_id(item_id);
- match self.tcx.maps.typeck_tables.borrow().get(&item_def_id) {
- Some(tables) => {
- let old_tables = self.save_ctxt.tables;
- self.save_ctxt.tables = tables;
- f(self);
- self.save_ctxt.tables = old_tables;
- }
- None => f(self),
+ if self.tcx.has_typeck_tables(item_def_id) {
+ let tables = self.tcx.typeck_tables_of(item_def_id);
+ let old_tables = self.save_ctxt.tables;
+ self.save_ctxt.tables = tables;
+ f(self);
+ self.save_ctxt.tables = old_tables;
+ } else {
+ f(self)
}
}
pub fn dump_crate_info(&mut self, name: &str, krate: &ast::Crate) {
let source_file = self.tcx.sess.local_crate_source_file.as_ref();
let crate_root = source_file.map(|source_file| {
+ let source_file = Path::new(source_file);
match source_file.file_name() {
Some(_) => source_file.parent().unwrap().display().to_string(),
None => source_file.display().to_string(),
}
None => {
if let Some(NodeItem(item)) = self.tcx.hir.get_if_local(id) {
- if let hir::ItemImpl(_, _, _, _, ref ty, _) = item.node {
+ if let hir::ItemImpl(_, _, _, _, _, ref ty, _) = item.node {
trait_id = self.lookup_def_id(ty.id);
}
}
let sub_span = self.span_utils.sub_span_before_token(field.span, token::Colon);
filter!(self.span_utils, sub_span, field.span, None);
let def_id = self.tcx.hir.local_def_id(field.id);
- let typ = self.tcx.item_type(def_id).to_string();
+ let typ = self.tcx.type_of(def_id).to_string();
let span = field.span;
let text = self.span_utils.snippet(field.span);
use cabi_sparc64;
use cabi_nvptx;
use cabi_nvptx64;
+use cabi_hexagon;
use machine::llalign_of_min;
use type_::Type;
use type_of;
"sparc64" => cabi_sparc64::compute_abi_info(ccx, self),
"nvptx" => cabi_nvptx::compute_abi_info(ccx, self),
"nvptx64" => cabi_nvptx64::compute_abi_info(ccx, self),
+ "hexagon" => cabi_hexagon::compute_abi_info(ccx, self),
a => ccx.sess().fatal(&format!("unrecognized arch \"{}\" in target specification", a))
}
cnum: CrateNum) {
let src = sess.cstore.used_crate_source(cnum);
let cratepath = &src.rlib.unwrap().0;
+
+ if sess.target.target.options.is_like_osx {
+ // On Apple platforms, the sanitizer is always built as a dylib, and
+ // LLVM will link to `@rpath/*.dylib`, so we need to specify an
+ // rpath to the library as well (the rpath should be absolute, see
+ // PR #41352 for details).
+ //
+ // FIXME: Move this logic into librustc_*san once Cargo supports it
+ let rpath = cratepath.parent().unwrap();
+ let rpath = rpath.to_str().expect("non-utf8 component in path");
+ cmd.args(&["-Wl,-rpath".into(), "-Xlinker".into(), rpath.into()]);
+ }
+
let dst = tmpdir.join(cratepath.file_name().unwrap());
let cfg = archive_config(sess, &dst, Some(cratepath));
let mut archive = ArchiveBuilder::new(cfg);
use context::SharedCrateContext;
use monomorphize::Instance;
-use symbol_map::SymbolMap;
-use back::symbol_names::symbol_name;
use util::nodemap::FxHashMap;
use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
use rustc::session::config;
use rustc::ty::TyCtxt;
use syntax::attr;
-use trans_item::TransItem;
/// The SymbolExportLevel of a symbols specifies from which kinds of crates
/// the symbol will be exported. `C` symbols will be exported from any
}
impl ExportedSymbols {
-
pub fn empty() -> ExportedSymbols {
ExportedSymbols {
exports: FxHashMap(),
}
}
- pub fn compute_from<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- symbol_map: &SymbolMap<'tcx>)
- -> ExportedSymbols {
+ pub fn compute<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>) -> ExportedSymbols {
let mut local_crate: Vec<_> = scx
.exported_symbols()
.iter()
scx.tcx().hir.local_def_id(node_id)
})
.map(|def_id| {
- let name = symbol_for_def_id(scx.tcx(), def_id, symbol_map);
+ let name = scx.tcx().symbol_name(Instance::mono(scx.tcx(), def_id));
let export_level = export_level(scx, def_id);
debug!("EXPORTED SYMBOL (local): {} ({:?})", name, export_level);
- (name, export_level)
+ (str::to_owned(&name), export_level)
})
.collect();
.exported_symbols(cnum)
.iter()
.map(|&def_id| {
- let name = symbol_name(Instance::mono(scx.tcx(), def_id), scx.tcx());
+ let name = scx.tcx().symbol_name(Instance::mono(scx.tcx(), def_id));
let export_level = if special_runtime_crate {
// We can probably do better here by just ensuring that
// it has hidden visibility rather than public
//
// In general though we won't link right if these
// symbols are stripped, and LTO currently strips them.
- if name == "rust_eh_personality" ||
- name == "rust_eh_register_frames" ||
- name == "rust_eh_unregister_frames" {
+ if &*name == "rust_eh_personality" ||
+ &*name == "rust_eh_register_frames" ||
+ &*name == "rust_eh_unregister_frames" {
SymbolExportLevel::C
} else {
SymbolExportLevel::Rust
export_level(scx, def_id)
};
debug!("EXPORTED SYMBOL (re-export): {} ({:?})", name, export_level);
- (name, export_level)
+ (str::to_owned(&name), export_level)
})
.collect();
level == SymbolExportLevel::C
}
}
-
-fn symbol_for_def_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- symbol_map: &SymbolMap<'tcx>)
- -> String {
- // Just try to look things up in the symbol map. If nothing's there, we
- // recompute.
- if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
- if let Some(sym) = symbol_map.get(TransItem::Static(node_id)) {
- return sym.to_owned();
- }
- }
-
- let instance = Instance::mono(tcx, def_id);
-
- symbol_map.get(TransItem::Fn(instance))
- .map(str::to_owned)
- .unwrap_or_else(|| symbol_name(instance, tcx))
-}
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::fold::TypeVisitor;
use rustc::ty::item_path::{self, ItemPathBuffer, RootMode};
+use rustc::ty::maps::Providers;
use rustc::ty::subst::Substs;
use rustc::hir::map::definitions::DefPathData;
use rustc::util::common::record_time;
use syntax::attr;
+use syntax_pos::symbol::Symbol;
use std::fmt::Write;
+/// Installs this module's query providers (`def_symbol_name` and
+/// `symbol_name`) into the global `Providers` table, leaving all other
+/// providers untouched via struct-update syntax.
+pub fn provide(providers: &mut Providers) {
+ *providers = Providers {
+ def_symbol_name,
+ symbol_name,
+ ..*providers
+ };
+}
+
fn get_symbol_hash<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// the DefId of the item this name is for
// values for generic type parameters,
// if any.
substs: Option<&'tcx Substs<'tcx>>)
- -> String {
+ -> u64 {
debug!("get_symbol_hash(def_id={:?}, parameters={:?})", def_id, substs);
let mut hasher = ty::util::TypeIdHasher::<u64>::new(tcx);
});
// 64 bits should be enough to avoid collisions.
- format!("h{:016x}", hasher.finish())
+ hasher.finish()
+}
+
+/// Query provider: computes the hash-free base symbol name for `def_id`,
+/// i.e. the item's absolute path rendered into a `SymbolPathBuffer` and
+/// interned as a `ty::SymbolName`. The instance-specific hash suffix is
+/// appended later (see `SymbolPathBuffer::finish`).
+fn def_symbol_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
+ -> ty::SymbolName
+{
+ let mut buffer = SymbolPathBuffer::new();
+ // Force absolute (crate-rooted) paths so the name is stable regardless
+ // of where the query is invoked from.
+ item_path::with_forced_absolute_paths(|| {
+ tcx.push_item_path(&mut buffer, def_id);
+ });
+ buffer.into_interned()
+}
-pub fn symbol_name<'a, 'tcx>(instance: Instance<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
+/// Query provider: the full mangled symbol name for a monomorphized
+/// `instance`. Delegates to `compute_symbol_name` and interns the result
+/// so repeated lookups share one allocation.
+fn symbol_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, instance: Instance<'tcx>)
+ -> ty::SymbolName
+{
+ ty::SymbolName { name: Symbol::intern(&compute_symbol_name(tcx, instance)).as_str() }
+}
+
+fn compute_symbol_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, instance: Instance<'tcx>)
+ -> String
+{
let def_id = instance.def_id();
let substs = instance.substs;
match key.disambiguated_data.data {
DefPathData::TypeNs(_) |
DefPathData::ValueNs(_) => {
- instance_ty = tcx.item_type(ty_def_id);
+ instance_ty = tcx.type_of(ty_def_id);
break;
}
_ => {
let hash = get_symbol_hash(tcx, Some(def_id), instance_ty, Some(substs));
- let mut buffer = SymbolPathBuffer::new();
- item_path::with_forced_absolute_paths(|| {
- tcx.push_item_path(&mut buffer, def_id);
- });
- buffer.finish(&hash)
+ SymbolPathBuffer::from_interned(tcx.def_symbol_name(def_id)).finish(hash)
}
// Follow C++ namespace-mangling style, see
result
}
- fn finish(mut self, hash: &str) -> String {
- // end name-sequence
- self.push(hash);
- self.result.push('E');
+ // Re-creates a buffer seeded with a previously interned base symbol
+ // path (as produced by `into_interned`), so that the hash suffix can
+ // be appended with `finish`.
+ fn from_interned(symbol: ty::SymbolName) -> Self {
+ let mut result = SymbolPathBuffer {
+ result: String::with_capacity(64),
+ temp_buf: String::with_capacity(16)
+ };
+ result.result.push_str(&symbol.name);
+ result
+ }
+
+ // Consumes the buffer, interning the accumulated path as the cached
+ // `ty::SymbolName` returned by the `def_symbol_name` query.
+ fn into_interned(self) -> ty::SymbolName {
+ ty::SymbolName { name: Symbol::intern(&self.result).as_str() }
+ }
+
+ // Appends the 64-bit symbol hash as a final mangled component and
+ // terminates the name. "17" is the component's length prefix
+ // (1 for 'h' + 16 hex digits); writing to a String cannot fail, so
+ // the `write!` result is deliberately discarded.
+ fn finish(mut self, hash: u64) -> String {
+ // E = end name-sequence
+ let _ = write!(self.result, "17h{:016x}E", hash);
+ self.result
+ }
}
let hash = get_symbol_hash(tcx, None, t, None);
let mut buffer = SymbolPathBuffer::new();
buffer.push(prefix);
- buffer.finish(&hash)
+ buffer.finish(hash)
}
// Name sanitation. LLVM will happily accept identifiers with weird names, but
use mir;
use monomorphize::{self, Instance};
use partitioning::{self, PartitioningStrategy, CodegenUnit};
-use symbol_cache::SymbolCache;
-use symbol_map::SymbolMap;
use symbol_names_test;
use trans_item::{TransItem, DefPathBasedNames};
use type_::Type;
scx: &SharedCrateContext<'a, 'tcx>,
translation_items: &FxHashSet<TransItem<'tcx>>,
llvm_modules: &[ModuleLlvm],
- symbol_map: &SymbolMap<'tcx>,
exported_symbols: &ExportedSymbols) {
let export_threshold =
symbol_export::crates_export_threshold(&sess.crate_types.borrow());
let mut linkage_fixed_explicitly = FxHashSet();
for trans_item in translation_items {
- let symbol_name = symbol_map.get_or_compute(scx, *trans_item);
+ let symbol_name = str::to_owned(&trans_item.symbol_name(tcx));
if trans_item.explicit_linkage(tcx).is_some() {
linkage_fixed_explicitly.insert(symbol_name.clone());
}
hir_map::NodeImplItem(&hir::ImplItem {
node: hir::ImplItemKind::Method(..), .. }) => {
let def_id = tcx.hir.local_def_id(id);
- let generics = tcx.item_generics(def_id);
+ let generics = tcx.generics_of(def_id);
let attributes = tcx.get_attrs(def_id);
(generics.parent_types == 0 && generics.types.is_empty()) &&
// Functions marked with #[inline] are only ever translated
// Run the translation item collector and partition the collected items into
// codegen units.
- let (translation_items, codegen_units, symbol_map) =
+ let (translation_items, codegen_units) =
collect_and_partition_translation_items(&shared_ccx);
let mut all_stats = Stats::default();
let cgu_name = String::from(cgu.name());
let cgu_id = cgu.work_product_id();
- let symbol_cache = SymbolCache::new(scx.tcx());
- let symbol_name_hash = cgu.compute_symbol_name_hash(scx, &symbol_cache);
+ let symbol_name_hash = cgu.compute_symbol_name_hash(scx);
// Check whether there is a previous work-product we can
// re-use. Not only must the file exist, and the inputs not
}
// Instantiate translation items without filling out definitions yet...
- let lcx = LocalCrateContext::new(scx, cgu, &symbol_cache);
+ let lcx = LocalCrateContext::new(scx, cgu);
let module = {
let ccx = CrateContext::new(scx, &lcx);
let trans_items = ccx.codegen_unit()
- .items_in_deterministic_order(ccx.tcx(), &symbol_cache);
+ .items_in_deterministic_order(ccx.tcx());
for &(trans_item, linkage) in &trans_items {
trans_item.predefine(&ccx, linkage);
}
let sess = shared_ccx.sess();
- let exported_symbols = ExportedSymbols::compute_from(&shared_ccx,
- &symbol_map);
+ let exported_symbols = ExportedSymbols::compute(&shared_ccx);
// Get the list of llvm modules we created. We'll do a few wacky
// transforms on them now.
&shared_ccx,
&translation_items,
&llvm_modules,
- &symbol_map,
&exported_symbols);
});
}
}
+/// Verifies that no two translation items mangle to the same symbol name.
+/// All (item, name) pairs are sorted by name so that any duplicates become
+/// adjacent, then scanned pairwise; on a collision a fatal
+/// "symbol `...` is already defined" error is emitted at a deterministically
+/// chosen span (so the diagnostic is stable across runs).
+#[inline(never)] // give this a place in the profiler
+fn assert_symbols_are_distinct<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trans_items: I)
+ where I: Iterator<Item=&'a TransItem<'tcx>>
+{
+ let mut symbols: Vec<_> = trans_items.map(|trans_item| {
+ (trans_item, trans_item.symbol_name(tcx))
+ }).collect();
+
+ // Sorting by symbol name puts colliding entries next to each other.
+ (&mut symbols[..]).sort_by(|&(_, ref sym1), &(_, ref sym2)|{
+ sym1.cmp(sym2)
+ });
+
+ for pair in (&symbols[..]).windows(2) {
+ let sym1 = &pair[0].1;
+ let sym2 = &pair[1].1;
+
+ if *sym1 == *sym2 {
+ let trans_item1 = pair[0].0;
+ let trans_item2 = pair[1].0;
+
+ let span1 = trans_item1.local_span(tcx);
+ let span2 = trans_item2.local_span(tcx);
+
+ // Deterministically select one of the spans for error reporting
+ // (the one that starts later in the source, when both are local).
+ let span = match (span1, span2) {
+ (Some(span1), Some(span2)) => {
+ Some(if span1.lo.0 > span2.lo.0 {
+ span1
+ } else {
+ span2
+ })
+ }
+ (Some(span), None) |
+ (None, Some(span)) => Some(span),
+ _ => None
+ };
+
+ let error_message = format!("symbol `{}` is already defined", sym1);
+
+ // Non-local items have no span; fall back to a span-less fatal error.
+ if let Some(span) = span {
+ tcx.sess.span_fatal(span, &error_message)
+ } else {
+ tcx.sess.fatal(&error_message)
+ }
+ }
+ }
+}
+
fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>)
-> (FxHashSet<TransItem<'tcx>>,
- Vec<CodegenUnit<'tcx>>,
- SymbolMap<'tcx>) {
+ Vec<CodegenUnit<'tcx>>) {
let time_passes = scx.sess().time_passes();
let collection_mode = match scx.sess().opts.debugging_opts.print_trans_items {
collector::collect_crate_translation_items(&scx, collection_mode)
});
- let symbol_map = SymbolMap::build(scx, items.iter().cloned());
+ assert_symbols_are_distinct(scx.tcx(), items.iter());
let strategy = if scx.sess().opts.debugging_opts.incremental.is_some() {
PartitioningStrategy::PerModule
}
}
- (translation_items, codegen_units, symbol_map)
+ (translation_items, codegen_units)
}
--- /dev/null
+// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(non_upper_case_globals)]
+
+use abi::{FnType, ArgType, LayoutExt};
+use context::CrateContext;
+
+// Classifies the return value for the Hexagon calling convention:
+// aggregates wider than 64 bits are returned indirectly through a hidden
+// pointer; everything else stays direct, with small integers extended to
+// at least 32 bits.
+fn classify_ret_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ret: &mut ArgType<'tcx>) {
+ if ret.layout.is_aggregate() && ret.layout.size(ccx).bits() > 64 {
+ ret.make_indirect(ccx);
+ } else {
+ ret.extend_integer_width_to(32);
+ }
+}
+
+// Classifies a single argument: same rule as the return value — aggregates
+// larger than 64 bits are passed indirectly (by pointer), smaller values
+// are passed directly with integers widened to at least 32 bits.
+fn classify_arg_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, arg: &mut ArgType<'tcx>) {
+ if arg.layout.is_aggregate() && arg.layout.size(ccx).bits() > 64 {
+ arg.make_indirect(ccx);
+ } else {
+ arg.extend_integer_width_to(32);
+ }
+}
+
+// Entry point for Hexagon ABI computation: classifies the return slot
+// (unless it is ignored, e.g. unit) and every non-ignored argument in place.
+pub fn compute_abi_info<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fty: &mut FnType<'tcx>) {
+ if !fty.ret.is_ignore() {
+ classify_ret_ty(ccx, &mut fty.ret);
+ }
+
+ for arg in &mut fty.args {
+ if arg.is_ignore() {
+ continue;
+ }
+ classify_arg_ty(ccx, arg);
+ }
+}
use llvm::{self, ValueRef};
use monomorphize::{self, Instance};
use rustc::hir::def_id::DefId;
-use rustc::ty::{self, TypeFoldable};
+use rustc::ty::TypeFoldable;
use rustc::ty::subst::Substs;
-use syntax_pos::DUMMY_SP;
-use trans_item::TransItem;
use type_of;
/// Translates a reference to a fn/method item, monomorphizing and
return llfn;
}
- let sym = ccx.symbol_cache().get(TransItem::Fn(instance));
+ let sym = tcx.symbol_name(instance);
debug!("get_fn({:?}: {:?}) => {}", instance, fn_ty, sym);
// This is subtle and surprising, but sometimes we have to bitcast
// *in Rust code* may unwind. Foreign items like `extern "C" {
// fn foo(); }` are assumed not to unwind **unless** they have
// a `#[unwind]` attribute.
- if !ty::queries::is_foreign_item::get(tcx, DUMMY_SP, instance.def_id()) {
+ if !tcx.is_foreign_item(instance.def_id()) {
attributes::unwind(llfn, true);
unsafe {
llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::ExternalLinkage);
let parent_node_id = hir_map.get_parent_node(ii.id);
let is_impl_generic = match hir_map.expect_item(parent_node_id) {
&hir::Item {
- node: hir::ItemImpl(_, _, ref generics, ..),
+ node: hir::ItemImpl(_, _, _, ref generics, ..),
..
} => {
generics.is_type_parameterized()
let tcx = scx.tcx();
match item.node {
hir::ItemImpl(_,
+ _,
_,
ref generics,
..,
continue;
}
- if !tcx.item_generics(method.def_id).types.is_empty() {
+ if !tcx.generics_of(method.def_id).types.is_empty() {
continue;
}
let instance =
monomorphize::resolve(scx, method.def_id, callee_substs);
- let predicates = tcx.item_predicates(instance.def_id()).predicates
+ let predicates = tcx.predicates_of(instance.def_id()).predicates
.subst(tcx, instance.substs);
if !traits::normalize_and_test_predicates(tcx, predicates) {
continue;
substs: &'tcx Substs<'tcx>)
-> Ty<'tcx>
{
- let ty = shared.tcx().item_type(def_id);
+ let ty = shared.tcx().type_of(def_id);
shared.tcx().trans_apply_param_substs(substs, &ty)
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-
-use back::symbol_names;
use llvm;
use llvm::{SetUnnamedAddr};
use llvm::{ValueRef, True};
hir_map::NodeItem(&hir::Item {
ref attrs, span, node: hir::ItemStatic(..), ..
}) => {
- let sym = ccx.symbol_cache()
- .get(TransItem::Static(id));
+ let sym = TransItem::Static(id).symbol_name(ccx.tcx());
let defined_in_current_codegen_unit = ccx.codegen_unit()
.items()
hir_map::NodeForeignItem(&hir::ForeignItem {
ref attrs, span, node: hir::ForeignItemStatic(..), ..
}) => {
- let sym = symbol_names::symbol_name(instance, ccx.tcx());
+ let sym = ccx.tcx().symbol_name(instance);
let g = if let Some(name) =
attr::first_attr_value_str_by_name(&attrs, "linkage") {
// If this is a static with a linkage specified, then we need to handle
g
} else {
- let sym = symbol_names::symbol_name(instance, ccx.tcx());
+ let sym = ccx.tcx().symbol_name(instance);
// FIXME(nagisa): perhaps the map of externs could be offloaded to llvm somehow?
// FIXME(nagisa): investigate whether it can be changed into define_global
use session::config::NoDebugInfo;
use session::Session;
use session::config;
-use symbol_cache::SymbolCache;
use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
use std::ffi::{CStr, CString};
use std::ptr;
use std::iter;
use std::str;
+use std::marker::PhantomData;
use syntax::ast;
use syntax::symbol::InternedString;
use syntax_pos::DUMMY_SP;
llcx: ContextRef,
stats: Stats,
codegen_unit: CodegenUnit<'tcx>,
- needs_unwind_cleanup_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
/// Cache instances of monomorphic and polymorphic items
instances: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
/// Cache generated vtables
/// Mapping from static definitions to their DefId's.
statics: RefCell<FxHashMap<ValueRef, DefId>>,
- impl_method_cache: RefCell<FxHashMap<(DefId, ast::Name), DefId>>,
-
- /// Cache of closure wrappers for bare fn's.
- closure_bare_wrapper_cache: RefCell<FxHashMap<ValueRef, ValueRef>>,
-
/// List of globals for static variables which need to be passed to the
/// LLVM function ReplaceAllUsesWith (RAUW) when translation is complete.
/// (We have to make sure we don't invalidate any ValueRefs referring
used_statics: RefCell<Vec<ValueRef>>,
lltypes: RefCell<FxHashMap<Ty<'tcx>, Type>>,
- llsizingtypes: RefCell<FxHashMap<Ty<'tcx>, Type>>,
type_hashcodes: RefCell<FxHashMap<Ty<'tcx>, String>>,
int_type: Type,
opaque_vec_type: Type,
str_slice_type: Type,
- /// Holds the LLVM values for closure IDs.
- closure_vals: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
-
dbg_cx: Option<debuginfo::CrateDebugContext<'tcx>>,
eh_personality: Cell<Option<ValueRef>>,
/// A counter that is used for generating local symbol names
local_gen_sym_counter: Cell<usize>,
- symbol_cache: &'a SymbolCache<'a, 'tcx>,
+ /// A placeholder so we can add lifetimes
+ placeholder: PhantomData<&'a ()>,
}
/// A CrateContext value binds together one LocalCrateContext with the
impl<'a, 'tcx> LocalCrateContext<'a, 'tcx> {
pub fn new(shared: &SharedCrateContext<'a, 'tcx>,
- codegen_unit: CodegenUnit<'tcx>,
- symbol_cache: &'a SymbolCache<'a, 'tcx>)
+ codegen_unit: CodegenUnit<'tcx>)
-> LocalCrateContext<'a, 'tcx> {
unsafe {
// Append ".rs" to LLVM module identifier.
llcx: llcx,
stats: Stats::default(),
codegen_unit: codegen_unit,
- needs_unwind_cleanup_cache: RefCell::new(FxHashMap()),
instances: RefCell::new(FxHashMap()),
vtables: RefCell::new(FxHashMap()),
const_cstr_cache: RefCell::new(FxHashMap()),
const_values: RefCell::new(FxHashMap()),
extern_const_values: RefCell::new(DefIdMap()),
statics: RefCell::new(FxHashMap()),
- impl_method_cache: RefCell::new(FxHashMap()),
- closure_bare_wrapper_cache: RefCell::new(FxHashMap()),
statics_to_rauw: RefCell::new(Vec::new()),
used_statics: RefCell::new(Vec::new()),
lltypes: RefCell::new(FxHashMap()),
- llsizingtypes: RefCell::new(FxHashMap()),
type_hashcodes: RefCell::new(FxHashMap()),
int_type: Type::from_ref(ptr::null_mut()),
opaque_vec_type: Type::from_ref(ptr::null_mut()),
str_slice_type: Type::from_ref(ptr::null_mut()),
- closure_vals: RefCell::new(FxHashMap()),
dbg_cx: dbg_cx,
eh_personality: Cell::new(None),
eh_unwind_resume: Cell::new(None),
intrinsics: RefCell::new(FxHashMap()),
type_of_depth: Cell::new(0),
local_gen_sym_counter: Cell::new(0),
- symbol_cache: symbol_cache,
+ placeholder: PhantomData,
};
let (int_type, opaque_vec_type, str_slice_ty, mut local_ccx) = {
unsafe { llvm::LLVMRustGetModuleDataLayout(self.llmod()) }
}
- pub fn needs_unwind_cleanup_cache(&self) -> &RefCell<FxHashMap<Ty<'tcx>, bool>> {
- &self.local().needs_unwind_cleanup_cache
- }
-
pub fn instances<'a>(&'a self) -> &'a RefCell<FxHashMap<Instance<'tcx>, ValueRef>> {
&self.local().instances
}
&self.local().statics
}
- pub fn impl_method_cache<'a>(&'a self)
- -> &'a RefCell<FxHashMap<(DefId, ast::Name), DefId>> {
- &self.local().impl_method_cache
- }
-
- pub fn closure_bare_wrapper_cache<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, ValueRef>> {
- &self.local().closure_bare_wrapper_cache
- }
-
pub fn statics_to_rauw<'a>(&'a self) -> &'a RefCell<Vec<(ValueRef, ValueRef)>> {
&self.local().statics_to_rauw
}
&self.local().lltypes
}
- pub fn llsizingtypes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, Type>> {
- &self.local().llsizingtypes
- }
-
pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, String>> {
&self.local().type_hashcodes
}
self.local().str_slice_type
}
- pub fn closure_vals<'a>(&'a self) -> &'a RefCell<FxHashMap<Instance<'tcx>, ValueRef>> {
- &self.local().closure_vals
- }
-
pub fn dbg_cx<'a>(&'a self) -> &'a Option<debuginfo::CrateDebugContext<'tcx>> {
&self.local().dbg_cx
}
self.shared.use_dll_storage_attrs()
}
- pub fn symbol_cache(&self) -> &'b SymbolCache<'b, 'tcx> {
- self.local().symbol_cache
- }
-
/// Given the def-id of some item that has no type parameters, make
/// a suitable "empty substs" for it.
pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use super::FunctionDebugContext;
+use super::{FunctionDebugContext, FunctionDebugContextData};
use super::metadata::file_metadata;
use super::utils::{DIB, span_start};
use llvm;
-use llvm::debuginfo::{DIScope, DISubprogram};
+use llvm::debuginfo::DIScope;
use common::CrateContext;
use rustc::mir::{Mir, VisibilityScope};
};
let mut scopes = IndexVec::from_elem(null_scope, &mir.visibility_scopes);
- let fn_metadata = match *debug_context {
- FunctionDebugContext::RegularContext(ref data) => data.fn_metadata,
+ let debug_context = match *debug_context {
+ FunctionDebugContext::RegularContext(ref data) => data,
FunctionDebugContext::DebugInfoDisabled |
FunctionDebugContext::FunctionWithoutDebugInfo => {
return scopes;
// Instantiate all scopes.
for idx in 0..mir.visibility_scopes.len() {
let scope = VisibilityScope::new(idx);
- make_mir_scope(ccx, &mir, &has_variables, fn_metadata, scope, &mut scopes);
+ make_mir_scope(ccx, &mir, &has_variables, debug_context, scope, &mut scopes);
}
scopes
fn make_mir_scope(ccx: &CrateContext,
mir: &Mir,
has_variables: &BitVector,
- fn_metadata: DISubprogram,
+ debug_context: &FunctionDebugContextData,
scope: VisibilityScope,
scopes: &mut IndexVec<VisibilityScope, MirDebugScope>) {
if scopes[scope].is_valid() {
let scope_data = &mir.visibility_scopes[scope];
let parent_scope = if let Some(parent) = scope_data.parent_scope {
- make_mir_scope(ccx, mir, has_variables, fn_metadata, parent, scopes);
+ make_mir_scope(ccx, mir, has_variables, debug_context, parent, scopes);
scopes[parent]
} else {
// The root is the function itself.
let loc = span_start(ccx, mir.span);
scopes[scope] = MirDebugScope {
- scope_metadata: fn_metadata,
+ scope_metadata: debug_context.fn_metadata,
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_pos,
};
// However, we don't skip creating a nested scope if
// our parent is the root, because we might want to
// put arguments in the root and not have shadowing.
- if parent_scope.scope_metadata != fn_metadata {
+ if parent_scope.scope_metadata != debug_context.fn_metadata {
scopes[scope] = parent_scope;
return;
}
}
let loc = span_start(ccx, scope_data.span);
- let file_metadata = file_metadata(ccx, &loc.file.name, &loc.file.abs_path);
+ let file_metadata = file_metadata(ccx,
+ &loc.file.name,
+ debug_context.defining_crate);
+
let scope_metadata = unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlock(
DIB(ccx),
DICompositeType, DILexicalBlock, DIFlags};
use rustc::hir::def::CtorKind;
-use rustc::hir::def_id::{DefId, LOCAL_CRATE};
+use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
use rustc::ty::fold::TypeVisitor;
use rustc::ty::subst::Substs;
use rustc::ty::util::TypeIdHasher;
use rustc::ty::layout::{self, LayoutTyper};
use session::config;
use util::nodemap::FxHashMap;
-use util::common::path2cstr;
use libc::{c_uint, c_longlong};
use std::ffi::CString;
-use std::path::Path;
use std::ptr;
use syntax::ast;
-use syntax::symbol::{Interner, InternedString};
+use syntax::symbol::{Interner, InternedString, Symbol};
use syntax_pos::{self, Span};
assert!(member_descriptions.len() == member_llvm_types.len());
- let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, &loc.file.name, &loc.file.abs_path);
+ let file_metadata = unknown_file_metadata(cx);
let metadata = composite_type_metadata(cx,
slice_llvm_type,
metadata
}
-pub fn file_metadata(cx: &CrateContext, path: &str, full_path: &Option<String>) -> DIFile {
- // FIXME (#9639): This needs to handle non-utf8 paths
- let work_dir = cx.sess().working_dir.to_str().unwrap();
- let file_name =
- full_path.as_ref().map(|p| p.as_str()).unwrap_or_else(|| {
- if path.starts_with(work_dir) {
- &path[work_dir.len() + 1..path.len()]
- } else {
- path
- }
- });
+pub fn file_metadata(cx: &CrateContext,
+ file_name: &str,
+ defining_crate: CrateNum) -> DIFile {
+ debug!("file_metadata: file_name: {}, defining_crate: {}",
+ file_name,
+ defining_crate);
- file_metadata_(cx, path, file_name, &work_dir)
+ let directory = if defining_crate == LOCAL_CRATE {
+ &cx.sess().working_dir.0[..]
+ } else {
+ // If the path comes from an upstream crate we assume it has been made
+ // independent of the compiler's working directory one way or another.
+ ""
+ };
+
+ file_metadata_raw(cx, file_name, directory)
}
pub fn unknown_file_metadata(cx: &CrateContext) -> DIFile {
- // Regular filenames should not be empty, so we abuse an empty name as the
- // key for the special unknown file metadata
- file_metadata_(cx, "", "<unknown>", "")
-
+ file_metadata_raw(cx, "<unknown>", "")
}
-fn file_metadata_(cx: &CrateContext, key: &str, file_name: &str, work_dir: &str) -> DIFile {
- if let Some(file_metadata) = debug_context(cx).created_files.borrow().get(key) {
+fn file_metadata_raw(cx: &CrateContext,
+ file_name: &str,
+ directory: &str)
+ -> DIFile {
+ let key = (Symbol::intern(file_name), Symbol::intern(directory));
+
+ if let Some(file_metadata) = debug_context(cx).created_files.borrow().get(&key) {
return *file_metadata;
}
- debug!("file_metadata: file_name: {}, work_dir: {}", file_name, work_dir);
+ debug!("file_metadata: file_name: {}, directory: {}", file_name, directory);
let file_name = CString::new(file_name).unwrap();
- let work_dir = CString::new(work_dir).unwrap();
+ let directory = CString::new(directory).unwrap();
+
let file_metadata = unsafe {
- llvm::LLVMRustDIBuilderCreateFile(DIB(cx), file_name.as_ptr(),
- work_dir.as_ptr())
+ llvm::LLVMRustDIBuilderCreateFile(DIB(cx),
+ file_name.as_ptr(),
+ directory.as_ptr())
};
let mut created_files = debug_context(cx).created_files.borrow_mut();
- created_files.insert(key.to_string(), file_metadata);
+ created_files.insert(key, file_metadata);
file_metadata
}
debug_context: &CrateDebugContext,
sess: &Session)
-> DIDescriptor {
- let work_dir = &sess.working_dir;
let compile_unit_name = match sess.local_crate_source_file {
None => fallback_path(scc),
- Some(ref abs_path) => {
- if abs_path.is_relative() {
- sess.warn("debuginfo: Invalid path to crate's local root source file!");
- fallback_path(scc)
- } else {
- match abs_path.strip_prefix(work_dir) {
- Ok(ref p) if p.is_relative() => {
- if p.starts_with(Path::new("./")) {
- path2cstr(p)
- } else {
- path2cstr(&Path::new(".").join(p))
- }
- }
- _ => fallback_path(scc)
- }
- }
+ Some(ref path) => {
+ CString::new(&path[..]).unwrap()
}
};
(option_env!("CFG_VERSION")).expect("CFG_VERSION"));
let compile_unit_name = compile_unit_name.as_ptr();
- let work_dir = path2cstr(&work_dir);
+
+ let work_dir = CString::new(&sess.working_dir.0[..]).unwrap();
let producer = CString::new(producer).unwrap();
let flags = "\0";
let split_name = "\0";
let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP {
let loc = span_start(cx, span);
- (file_metadata(cx, &loc.file.name, &loc.file.abs_path), loc.line as c_uint)
+ (file_metadata(cx, &loc.file.name, LOCAL_CRATE), loc.line as c_uint)
} else {
(unknown_file_metadata(cx), UNKNOWN_LINE_NUMBER)
};
// Creates an "extension" of an existing DIScope into another file.
pub fn extend_scope_to_file(ccx: &CrateContext,
scope_metadata: DIScope,
- file: &syntax_pos::FileMap)
+ file: &syntax_pos::FileMap,
+ defining_crate: CrateNum)
-> DILexicalBlock {
- let file_metadata = file_metadata(ccx, &file.name, &file.abs_path);
+ let file_metadata = file_metadata(ccx, &file.name, defining_crate);
unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlockFile(
DIB(ccx),
use llvm;
use llvm::{ModuleRef, ContextRef, ValueRef};
use llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilderRef, DISubprogram, DIArray, DIFlags};
-use rustc::hir::def_id::DefId;
+use rustc::hir::def_id::{DefId, CrateNum};
use rustc::ty::subst::Substs;
use abi::Abi;
use syntax_pos::{self, Span, Pos};
use syntax::ast;
+use syntax::symbol::Symbol;
use rustc::ty::layout;
pub mod gdb;
pub struct CrateDebugContext<'tcx> {
llcontext: ContextRef,
builder: DIBuilderRef,
- created_files: RefCell<FxHashMap<String, DIFile>>,
+ created_files: RefCell<FxHashMap<(Symbol, Symbol), DIFile>>,
created_enum_disr_types: RefCell<FxHashMap<(DefId, layout::Integer), DIType>>,
type_map: RefCell<TypeMap<'tcx>>,
}
impl FunctionDebugContext {
- fn get_ref<'a>(&'a self, span: Span) -> &'a FunctionDebugContextData {
+ pub fn get_ref<'a>(&'a self, span: Span) -> &'a FunctionDebugContextData {
match *self {
FunctionDebugContext::RegularContext(ref data) => data,
FunctionDebugContext::DebugInfoDisabled => {
pub struct FunctionDebugContextData {
fn_metadata: DISubprogram,
source_locations_enabled: Cell<bool>,
+ pub defining_crate: CrateNum,
}
pub enum VariableAccess<'a> {
return FunctionDebugContext::FunctionWithoutDebugInfo;
}
+ let def_id = instance.def_id();
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, &loc.file.name, &loc.file.abs_path);
+ let file_metadata = file_metadata(cx, &loc.file.name, def_id.krate);
let function_type_metadata = unsafe {
let fn_signature = get_function_signature(cx, sig);
};
// Find the enclosing function, in case this is a closure.
- let def_key = cx.tcx().def_key(instance.def_id());
+ let def_key = cx.tcx().def_key(def_id);
let mut name = def_key.disambiguated_data.data.to_string();
let name_len = name.len();
- let fn_def_id = cx.tcx().closure_base_def_id(instance.def_id());
+ let enclosing_fn_def_id = cx.tcx().closure_base_def_id(def_id);
// Get_template_parameters() will append a `<...>` clause to the function
// name if necessary.
- let generics = cx.tcx().item_generics(fn_def_id);
+ let generics = cx.tcx().generics_of(enclosing_fn_def_id);
let substs = instance.substs.truncate_to(cx.tcx(), generics);
let template_parameters = get_template_parameters(cx,
&generics,
let fn_debug_context = FunctionDebugContextData {
fn_metadata: fn_metadata,
source_locations_enabled: Cell::new(false),
+ defining_crate: def_id.krate,
};
return FunctionDebugContext::RegularContext(fn_debug_context);
fn get_type_parameter_names(cx: &CrateContext, generics: &ty::Generics) -> Vec<ast::Name> {
let mut names = generics.parent.map_or(vec![], |def_id| {
- get_type_parameter_names(cx, cx.tcx().item_generics(def_id))
+ get_type_parameter_names(cx, cx.tcx().generics_of(def_id))
});
names.extend(generics.types.iter().map(|param| param.name));
names
let cx = bcx.ccx;
let file = span_start(cx, span).file;
- let filename = file.name.clone();
- let file_metadata = file_metadata(cx, &filename[..], &file.abs_path);
+ let file_metadata = file_metadata(cx,
+ &file.name[..],
+ dbg_context.get_ref(span).defining_crate);
let loc = span_start(cx, span);
let type_metadata = type_metadata(cx, variable_type, span);
let span = ccx.tcx().def_span(def_id);
let (file, line) = if span != DUMMY_SP {
let loc = span_start(ccx, span);
- (file_metadata(ccx, &loc.file.name, &loc.file.abs_path), loc.line as c_uint)
+ (file_metadata(ccx, &loc.file.name, def_id.krate), loc.line as c_uint)
} else {
(unknown_file_metadata(ccx), UNKNOWN_LINE_NUMBER)
};
pub use rustc::util;
pub use base::trans_crate;
+pub use back::symbol_names::provide;
pub mod back {
pub use rustc::hir::svh;
mod cabi_aarch64;
mod cabi_arm;
mod cabi_asmjs;
+mod cabi_hexagon;
mod cabi_mips;
mod cabi_mips64;
mod cabi_msp430;
mod mir;
mod monomorphize;
mod partitioning;
-mod symbol_cache;
-mod symbol_map;
mod symbol_names_test;
mod trans_item;
mod tvec;
if pos < self.scopes[scope_id].file_start_pos ||
pos >= self.scopes[scope_id].file_end_pos {
let cm = self.ccx.sess().codemap();
- debuginfo::extend_scope_to_file(self.ccx, scope_metadata, &cm.lookup_char_pos(pos).file)
+ let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate;
+ debuginfo::extend_scope_to_file(self.ccx,
+ scope_metadata,
+ &cm.lookup_char_pos(pos).file,
+ defining_crate)
} else {
scope_metadata
}
assert!(common::type_is_zero_size(ccx, ty));
let llty = type_of::type_of(ccx, ty);
let val = if common::type_is_imm_pair(ccx, ty) {
+ let layout = ccx.layout_of(ty);
+ let (ix0, ix1) = if let Layout::Univariant { ref variant, .. } = *layout {
+ (adt::struct_llfields_index(variant, 0),
+ adt::struct_llfields_index(variant, 1))
+ } else {
+ (0, 1)
+ };
let fields = llty.field_types();
- OperandValue::Pair(C_null(fields[0]), C_null(fields[1]))
+ OperandValue::Pair(C_null(fields[ix0]), C_null(fields[ix1]))
} else {
OperandValue::Immediate(C_null(llty))
};
if common::type_is_imm_pair(bcx.ccx, self.ty) {
debug!("Operand::unpack_if_pair: unpacking {:?}", self);
- let mut a = bcx.extract_value(llval, 0);
- let mut b = bcx.extract_value(llval, 1);
+ let layout = bcx.ccx.layout_of(self.ty);
+ let (ix0, ix1) = if let Layout::Univariant { ref variant, .. } = *layout {
+ (adt::struct_llfields_index(variant, 0),
+ adt::struct_llfields_index(variant, 1))
+ } else {
+ (0, 1)
+ };
+
+ let mut a = bcx.extract_value(llval, ix0);
+ let mut b = bcx.extract_value(llval, ix1);
let pair_fields = common::type_pair_fields(bcx.ccx, self.ty);
if let Some([a_ty, b_ty]) = pair_fields {
use rustc::ty::{self, TyCtxt};
use rustc::ty::item_path::characteristic_def_id_of_type;
use rustc_incremental::IchHasher;
-use std::cmp::Ordering;
use std::hash::Hash;
use std::sync::Arc;
-use symbol_cache::SymbolCache;
use syntax::ast::NodeId;
use syntax::symbol::{Symbol, InternedString};
use trans_item::{TransItem, InstantiationMode};
}
pub fn compute_symbol_name_hash<'a>(&self,
- scx: &SharedCrateContext<'a, 'tcx>,
- symbol_cache: &SymbolCache<'a, 'tcx>)
+ scx: &SharedCrateContext<'a, 'tcx>)
-> u64 {
let mut state = IchHasher::new();
let exported_symbols = scx.exported_symbols();
- let all_items = self.items_in_deterministic_order(scx.tcx(), symbol_cache);
+ let all_items = self.items_in_deterministic_order(scx.tcx());
for (item, _) in all_items {
- let symbol_name = symbol_cache.get(item);
+ let symbol_name = item.symbol_name(scx.tcx());
symbol_name.len().hash(&mut state);
symbol_name.hash(&mut state);
let exported = match item {
}
pub fn items_in_deterministic_order<'a>(&self,
- tcx: TyCtxt,
- symbol_cache: &SymbolCache<'a, 'tcx>)
+ tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> Vec<(TransItem<'tcx>, llvm::Linkage)> {
- let mut items: Vec<(TransItem<'tcx>, llvm::Linkage)> =
- self.items.iter().map(|(item, linkage)| (*item, *linkage)).collect();
-
// The codegen tests rely on items being process in the same order as
// they appear in the file, so for local items, we sort by node_id first
- items.sort_by(|&(trans_item1, _), &(trans_item2, _)| {
- let node_id1 = local_node_id(tcx, trans_item1);
- let node_id2 = local_node_id(tcx, trans_item2);
-
- match (node_id1, node_id2) {
- (None, None) => {
- let symbol_name1 = symbol_cache.get(trans_item1);
- let symbol_name2 = symbol_cache.get(trans_item2);
- symbol_name1.cmp(&symbol_name2)
- }
- // In the following two cases we can avoid looking up the symbol
- (None, Some(_)) => Ordering::Less,
- (Some(_), None) => Ordering::Greater,
- (Some(node_id1), Some(node_id2)) => {
- let ordering = node_id1.cmp(&node_id2);
-
- if ordering != Ordering::Equal {
- return ordering;
- }
-
- let symbol_name1 = symbol_cache.get(trans_item1);
- let symbol_name2 = symbol_cache.get(trans_item2);
- symbol_name1.cmp(&symbol_name2)
- }
- }
- });
-
- return items;
+ #[derive(PartialEq, Eq, PartialOrd, Ord)]
+ pub struct ItemSortKey(Option<NodeId>, ty::SymbolName);
- fn local_node_id(tcx: TyCtxt, trans_item: TransItem) -> Option<NodeId> {
- match trans_item {
+ fn item_sort_key<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ item: TransItem<'tcx>) -> ItemSortKey {
+ ItemSortKey(match item {
TransItem::Fn(instance) => {
tcx.hir.as_local_node_id(instance.def_id())
}
TransItem::Static(node_id) | TransItem::GlobalAsm(node_id) => {
Some(node_id)
}
- }
+ }, item.symbol_name(tcx))
}
+
+ let items: Vec<_> = self.items.iter().map(|(&i, &l)| (i, l)).collect();
+ let mut items : Vec<_> = items.iter()
+ .map(|il| (il, item_sort_key(tcx, il.0))).collect();
+ items.sort_by(|&(_, ref key1), &(_, ref key2)| key1.cmp(key2));
+ items.into_iter().map(|(&item_linkage, _)| item_linkage).collect()
}
}
{
if cfg!(debug_assertions) {
debug!("{}", label);
- let symbol_cache = SymbolCache::new(tcx);
for cgu in cgus {
debug!("CodegenUnit {}:", cgu.name);
for (trans_item, linkage) in &cgu.items {
- let symbol_name = symbol_cache.get(*trans_item);
+ let symbol_name = trans_item.symbol_name(tcx);
let symbol_hash_start = symbol_name.rfind('h');
let symbol_hash = symbol_hash_start.map(|i| &symbol_name[i ..])
.unwrap_or("<no hash>");
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::ty::TyCtxt;
-use std::cell::RefCell;
-use syntax_pos::symbol::{InternedString, Symbol};
-use trans_item::TransItem;
-use util::nodemap::FxHashMap;
-
-// In the SymbolCache we collect the symbol names of translation items
-// and cache them for later reference. This is just a performance
-// optimization and the cache is populated lazilly; symbol names of
-// translation items are deterministic and fully defined by the item.
-// Thus they can always be recomputed if needed.
-
-pub struct SymbolCache<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- index: RefCell<FxHashMap<TransItem<'tcx>, Symbol>>,
-}
-
-impl<'a, 'tcx> SymbolCache<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
- SymbolCache {
- tcx: tcx,
- index: RefCell::new(FxHashMap())
- }
- }
-
- pub fn get(&self, trans_item: TransItem<'tcx>) -> InternedString {
- let mut index = self.index.borrow_mut();
- index.entry(trans_item)
- .or_insert_with(|| Symbol::intern(&trans_item.compute_symbol_name(self.tcx)))
- .as_str()
- }
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use context::SharedCrateContext;
-use monomorphize::Instance;
-use rustc::ty::TyCtxt;
-use std::borrow::Cow;
-use syntax::codemap::Span;
-use trans_item::TransItem;
-use util::nodemap::FxHashMap;
-
-// In the SymbolMap we collect the symbol names of all translation items of
-// the current crate. This map exists as a performance optimization. Symbol
-// names of translation items are deterministic and fully defined by the item.
-// Thus they could also always be recomputed if needed.
-
-pub struct SymbolMap<'tcx> {
- index: FxHashMap<TransItem<'tcx>, (usize, usize)>,
- arena: String,
-}
-
-impl<'tcx> SymbolMap<'tcx> {
-
- pub fn build<'a, I>(scx: &SharedCrateContext<'a, 'tcx>,
- trans_items: I)
- -> SymbolMap<'tcx>
- where I: Iterator<Item=TransItem<'tcx>>
- {
- // Check for duplicate symbol names
- let tcx = scx.tcx();
- let mut symbols: Vec<_> = trans_items.map(|trans_item| {
- (trans_item, trans_item.compute_symbol_name(tcx))
- }).collect();
-
- (&mut symbols[..]).sort_by(|&(_, ref sym1), &(_, ref sym2)|{
- sym1.cmp(sym2)
- });
-
- for pair in (&symbols[..]).windows(2) {
- let sym1 = &pair[0].1;
- let sym2 = &pair[1].1;
-
- if *sym1 == *sym2 {
- let trans_item1 = pair[0].0;
- let trans_item2 = pair[1].0;
-
- let span1 = get_span(scx.tcx(), trans_item1);
- let span2 = get_span(scx.tcx(), trans_item2);
-
- // Deterministically select one of the spans for error reporting
- let span = match (span1, span2) {
- (Some(span1), Some(span2)) => {
- Some(if span1.lo.0 > span2.lo.0 {
- span1
- } else {
- span2
- })
- }
- (Some(span), None) |
- (None, Some(span)) => Some(span),
- _ => None
- };
-
- let error_message = format!("symbol `{}` is already defined", sym1);
-
- if let Some(span) = span {
- scx.sess().span_fatal(span, &error_message)
- } else {
- scx.sess().fatal(&error_message)
- }
- }
- }
-
- let mut symbol_map = SymbolMap {
- index: FxHashMap(),
- arena: String::with_capacity(1024),
- };
-
- for (trans_item, symbol) in symbols {
- let start_index = symbol_map.arena.len();
- symbol_map.arena.push_str(&symbol[..]);
- let end_index = symbol_map.arena.len();
- let prev_entry = symbol_map.index.insert(trans_item,
- (start_index, end_index));
- if prev_entry.is_some() {
- bug!("TransItem encountered twice?")
- }
- }
-
- fn get_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trans_item: TransItem<'tcx>) -> Option<Span> {
- match trans_item {
- TransItem::Fn(Instance { def, .. }) => {
- tcx.hir.as_local_node_id(def.def_id())
- }
- TransItem::Static(node_id) |
- TransItem::GlobalAsm(node_id) => {
- Some(node_id)
- }
- }.map(|node_id| {
- tcx.hir.span(node_id)
- })
- }
-
- symbol_map
- }
-
- pub fn get(&self, trans_item: TransItem<'tcx>) -> Option<&str> {
- self.index.get(&trans_item).map(|&(start_index, end_index)| {
- &self.arena[start_index .. end_index]
- })
- }
-
- pub fn get_or_compute<'map, 'scx>(&'map self,
- scx: &SharedCrateContext<'scx, 'tcx>,
- trans_item: TransItem<'tcx>)
- -> Cow<'map, str> {
- if let Some(sym) = self.get(trans_item) {
- Cow::from(sym)
- } else {
- Cow::from(trans_item.compute_symbol_name(scx.tcx()))
- }
- }
-}
//! item-path. This is used for unit testing the code that generates
//! paths etc in all kinds of annoying scenarios.
-use back::symbol_names;
use rustc::hir;
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::ty::TyCtxt;
if attr.check_name(SYMBOL_NAME) {
// for now, can only use on monomorphic names
let instance = Instance::mono(tcx, def_id);
- let name = symbol_names::symbol_name(instance, self.tcx);
+ let name = self.tcx.symbol_name(instance);
tcx.sess.span_err(attr.span, &format!("symbol-name({})", name));
} else if attr.check_name(ITEM_PATH) {
let path = tcx.item_path_str(def_id);
use rustc::ty::subst::Substs;
use syntax::ast::{self, NodeId};
use syntax::attr;
+use syntax_pos::Span;
+use syntax_pos::symbol::Symbol;
use type_of;
-use back::symbol_names;
use std::fmt::Write;
use std::iter;
self.to_raw_string(),
ccx.codegen_unit().name());
- let symbol_name = ccx.symbol_cache().get(*self);
+ let symbol_name = self.symbol_name(ccx.tcx());
debug!("symbol {}", &symbol_name);
ccx.instances().borrow_mut().insert(instance, lldecl);
}
- pub fn compute_symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
+ pub fn symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::SymbolName {
match *self {
- TransItem::Fn(instance) => symbol_names::symbol_name(instance, tcx),
+ TransItem::Fn(instance) => tcx.symbol_name(instance),
TransItem::Static(node_id) => {
let def_id = tcx.hir.local_def_id(node_id);
- symbol_names::symbol_name(Instance::mono(tcx, def_id), tcx)
+ tcx.symbol_name(Instance::mono(tcx, def_id))
}
TransItem::GlobalAsm(node_id) => {
let def_id = tcx.hir.local_def_id(node_id);
- format!("global_asm_{:?}", def_id)
+ ty::SymbolName {
+ name: Symbol::intern(&format!("global_asm_{:?}", def_id)).as_str()
+ }
}
}
}
+ pub fn local_span(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Span> {
+ match *self {
+ TransItem::Fn(Instance { def, .. }) => {
+ tcx.hir.as_local_node_id(def.def_id())
+ }
+ TransItem::Static(node_id) |
+ TransItem::GlobalAsm(node_id) => {
+ Some(node_id)
+ }
+ }.map(|node_id| tcx.hir.span(node_id))
+ }
+
pub fn instantiation_mode(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> InstantiationMode {
},
ty::TyClosure(def_id, ref closure_substs) => {
self.push_def_path(def_id, output);
- let generics = self.tcx.item_generics(self.tcx.closure_base_def_id(def_id));
+ let generics = self.tcx.generics_of(self.tcx.closure_base_def_id(def_id));
let substs = closure_substs.substs.truncate_to(self.tcx, generics);
self.push_type_params(substs, iter::empty(), output);
}
extern crate cmake;
use std::env;
-use build_helper::native_lib_boilerplate;
+use build_helper::sanitizer_lib_boilerplate;
use cmake::Config;
fn main() {
if let Some(llvm_config) = env::var_os("LLVM_CONFIG") {
- let native = match native_lib_boilerplate("compiler-rt", "tsan", "clang_rt.tsan-x86_64",
- "build/lib/linux") {
+ let native = match sanitizer_lib_boilerplate("tsan") {
Ok(native) => native,
_ => return,
};
// If the type is parameterized by this region, then replace this
// region with the current anon region binding (in other words,
// whatever & would get replaced with).
- let decl_generics = tcx.item_generics(def_id);
+ let decl_generics = tcx.generics_of(def_id);
let expected_num_region_params = decl_generics.regions.len();
let supplied_num_region_params = lifetimes.len();
if expected_num_region_params != supplied_num_region_params {
let is_object = self_ty.map_or(false, |ty| ty.sty == TRAIT_OBJECT_DUMMY_SELF);
let default_needs_object_self = |p: &ty::TypeParameterDef| {
if is_object && p.has_default {
- if ty::queries::ty::get(tcx, span, p.def_id).has_self_ty() {
+ if tcx.at(span).type_of(p.def_id).has_self_ty() {
// There is no suitable inference default for a type parameter
// that references self, in an object type.
return true;
// This is a default type parameter.
self.normalize_ty(
span,
- ty::queries::ty::get(tcx, span, def.def_id)
+ tcx.at(span).type_of(def.def_id)
.subst_spanned(tcx, substs, Some(span))
)
}
debug!("create_substs_for_ast_trait_ref(trait_segment={:?})",
trait_segment);
- let trait_def = self.tcx().lookup_trait_def(trait_def_id);
+ let trait_def = self.tcx().trait_def(trait_def_id);
match trait_segment.parameters {
hir::AngleBracketedParameters(_) => {
let substs = self.ast_path_substs_for_ty(span, did, item_segment);
self.normalize_ty(
span,
- ty::queries::ty::get(self.tcx(), span, did).subst(self.tcx(), substs)
+ self.tcx().at(span).type_of(did).subst(self.tcx(), substs)
)
}
let ty = self.projected_ty_from_poly_trait_ref(span, bound, assoc_name);
let ty = self.normalize_ty(span, ty);
- let item = tcx.associated_items(trait_did).find(|i| i.name == assoc_name);
- let def_id = item.expect("missing associated type").def_id;
- tcx.check_stability(def_id, ref_id, span);
- (ty, Def::AssociatedTy(def_id))
+ let item = tcx.associated_items(trait_did).find(|i| i.name == assoc_name)
+ .expect("missing associated type");
+ let def = Def::AssociatedTy(item.def_id);
+ if !tcx.vis_is_accessible_from(item.vis, ref_id) {
+ let msg = format!("{} `{}` is private", def.kind_name(), assoc_name);
+ tcx.sess.span_err(span, &msg);
+ }
+ tcx.check_stability(item.def_id, ref_id, span);
+
+ (ty, def)
}
fn qpath_to_ty(&self,
let node_id = tcx.hir.as_local_node_id(did).unwrap();
let item_id = tcx.hir.get_parent_node(node_id);
let item_def_id = tcx.hir.local_def_id(item_id);
- let generics = tcx.item_generics(item_def_id);
+ let generics = tcx.generics_of(item_def_id);
let index = generics.type_param_to_index[&tcx.hir.local_def_id(node_id).index];
tcx.mk_param(index, tcx.hir.name(node_id))
}
assert_eq!(opt_self_ty, None);
self.prohibit_type_params(&path.segments);
- let ty = ty::queries::ty::get(tcx, span, def_id);
+ let ty = tcx.at(span).type_of(def_id);
if let Some(free_substs) = self.get_free_substs() {
ty.subst(tcx, free_substs)
} else {
debug!("compare_impl_method: trait_to_skol_substs={:?}",
trait_to_skol_substs);
- let impl_m_generics = tcx.item_generics(impl_m.def_id);
- let trait_m_generics = tcx.item_generics(trait_m.def_id);
- let impl_m_predicates = tcx.item_predicates(impl_m.def_id);
- let trait_m_predicates = tcx.item_predicates(trait_m.def_id);
+ let impl_m_generics = tcx.generics_of(impl_m.def_id);
+ let trait_m_generics = tcx.generics_of(trait_m.def_id);
+ let impl_m_predicates = tcx.predicates_of(impl_m.def_id);
+ let trait_m_predicates = tcx.predicates_of(trait_m.def_id);
// Check region bounds.
check_region_bounds_on_impl_method(tcx,
// environment. We can't just use `impl_env.caller_bounds`,
// however, because we want to replace all late-bound regions with
// region variables.
- let impl_predicates = tcx.item_predicates(impl_m_predicates.parent.unwrap());
+ let impl_predicates = tcx.predicates_of(impl_m_predicates.parent.unwrap());
let mut hybrid_preds = impl_predicates.instantiate(tcx, impl_to_skol_substs);
debug!("compare_impl_method: impl_bounds={:?}", hybrid_preds);
let tcx = infcx.tcx;
let m_sig = |method: &ty::AssociatedItem| {
- match tcx.item_type(method.def_id).sty {
+ match tcx.type_of(method.def_id).sty {
ty::TyFnDef(_, _, f) => f,
_ => bug!()
}
ty::ImplContainer(_) => impl_trait_ref.self_ty(),
ty::TraitContainer(_) => tcx.mk_self_type()
};
- let method_ty = tcx.item_type(method.def_id);
+ let method_ty = tcx.type_of(method.def_id);
let self_arg_ty = *method_ty.fn_sig().input(0).skip_binder();
match ExplicitSelf::determine(untransformed_self_ty, self_arg_ty) {
ExplicitSelf::ByValue => "self".to_string(),
trait_m: &ty::AssociatedItem,
trait_item_span: Option<Span>)
-> Result<(), ErrorReported> {
- let impl_m_generics = tcx.item_generics(impl_m.def_id);
- let trait_m_generics = tcx.item_generics(trait_m.def_id);
+ let impl_m_generics = tcx.generics_of(impl_m.def_id);
+ let trait_m_generics = tcx.generics_of(trait_m.def_id);
let num_impl_m_type_params = impl_m_generics.types.len();
let num_trait_m_type_params = trait_m_generics.types.len();
if num_impl_m_type_params != num_trait_m_type_params {
trait_item_span: Option<Span>)
-> Result<(), ErrorReported> {
let m_fty = |method: &ty::AssociatedItem| {
- match tcx.item_type(method.def_id).sty {
+ match tcx.type_of(method.def_id).sty {
ty::TyFnDef(_, _, f) => f,
_ => bug!()
}
trait_to_skol_substs);
// Compute skolemized form of impl and trait const tys.
- let impl_ty = tcx.item_type(impl_c.def_id).subst(tcx, impl_to_skol_substs);
- let trait_ty = tcx.item_type(trait_c.def_id).subst(tcx, trait_to_skol_substs);
+ let impl_ty = tcx.type_of(impl_c.def_id).subst(tcx, impl_to_skol_substs);
+ let trait_ty = tcx.type_of(trait_c.def_id).subst(tcx, trait_to_skol_substs);
let mut cause = ObligationCause::misc(impl_c_span, impl_c_node_id);
// There is no "body" here, so just pass dummy id.
trait",
trait_c.name);
- // Add a label to the Span containing just the type of the item
- let trait_c_node_id = tcx.hir.as_local_node_id(trait_c.def_id).unwrap();
- let trait_c_span = match tcx.hir.expect_trait_item(trait_c_node_id).node {
- TraitItemKind::Const(ref ty, _) => ty.span,
- _ => bug!("{:?} is not a trait const", trait_c),
- };
+ let trait_c_node_id = tcx.hir.as_local_node_id(trait_c.def_id);
+ let trait_c_span = trait_c_node_id.map(|trait_c_node_id| {
+ // Add a label to the Span containing just the type of the const
+ match tcx.hir.expect_trait_item(trait_c_node_id).node {
+ TraitItemKind::Const(ref ty, _) => ty.span,
+ _ => bug!("{:?} is not a trait const", trait_c),
+ }
+ });
infcx.note_type_err(&mut diag,
&cause,
- Some((trait_c_span, format!("type in trait"))),
+ trait_c_span.map(|span| (span, format!("type in trait"))),
Some(infer::ValuePairs::Types(ExpectedFound {
expected: trait_ty,
found: impl_ty,
fn has_no_input_arg(&self, method: &AssociatedItem) -> bool {
match method.def() {
Def::Method(def_id) => {
- match self.tcx.item_type(def_id).sty {
+ match self.tcx.type_of(def_id).sty {
ty::TypeVariants::TyFnDef(_, _, sig) => {
sig.inputs().skip_binder().len() == 1
}
pub fn check_drop_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
drop_impl_did: DefId)
-> Result<(), ErrorReported> {
- let dtor_self_type = tcx.item_type(drop_impl_did);
- let dtor_predicates = tcx.item_predicates(drop_impl_did);
+ let dtor_self_type = tcx.type_of(drop_impl_did);
+ let dtor_predicates = tcx.predicates_of(drop_impl_did);
match dtor_self_type.sty {
ty::TyAdt(adt_def, self_to_impl_substs) => {
ensure_drop_params_and_item_params_correspond(tcx,
let tcx = infcx.tcx;
let mut fulfillment_cx = traits::FulfillmentContext::new();
- let named_type = tcx.item_type(self_type_did);
+ let named_type = tcx.type_of(self_type_did);
let named_type = named_type.subst(tcx, &infcx.parameter_environment.free_substs);
let drop_impl_span = tcx.def_span(drop_impl_did);
// We can assume the predicates attached to struct/enum definition
// hold.
- let generic_assumptions = tcx.item_predicates(self_type_did);
+ let generic_assumptions = tcx.predicates_of(self_type_did);
let assumptions_in_impl_context = generic_assumptions.instantiate(tcx, &self_to_impl_substs);
let assumptions_in_impl_context = assumptions_in_impl_context.predicates;
hir::Unsafety::Unsafe,
abi
)));
- let i_n_tps = tcx.item_generics(def_id).types.len();
+ let i_n_tps = tcx.generics_of(def_id).types.len();
if i_n_tps != n_tps {
let span = match it.node {
hir::ForeignItemFn(_, _, ref generics) => generics.span,
&ObligationCause::new(it.span,
it.id,
ObligationCauseCode::IntrinsicType),
- tcx.item_type(def_id),
+ tcx.type_of(def_id),
fty);
}
}
};
let def_id = tcx.hir.local_def_id(it.id);
- let i_n_tps = tcx.item_generics(def_id).types.len();
+ let i_n_tps = tcx.generics_of(def_id).types.len();
let name = it.name.as_str();
let (n_tps, inputs, output) = match &*name {
let mut structural_to_nomimal = FxHashMap();
- let sig = tcx.item_type(def_id).fn_sig();
+ let sig = tcx.type_of(def_id).fn_sig();
let sig = tcx.no_late_bound_regions(&sig).unwrap();
if intr.inputs.len() != sig.inputs().len() {
span_err!(tcx.sess, it.span, E0444,
// If they were not explicitly supplied, just construct fresh
// variables.
let num_supplied_types = supplied_method_types.len();
- let method_generics = self.tcx.item_generics(pick.item.def_id);
+ let method_generics = self.tcx.generics_of(pick.item.def_id);
let num_method_types = method_generics.types.len();
if num_supplied_types > 0 && num_supplied_types != num_method_types {
// type/early-bound-regions substitutions performed. There can
// be no late-bound regions appearing here.
let def_id = pick.item.def_id;
- let method_predicates = self.tcx.item_predicates(def_id)
+ let method_predicates = self.tcx.predicates_of(def_id)
.instantiate(self.tcx, all_substs);
let method_predicates = self.normalize_associated_types_in(self.span,
&method_predicates);
debug!("method_predicates after subst = {:?}", method_predicates);
- let sig = self.tcx.item_type(def_id).fn_sig();
+ let sig = self.tcx.type_of(def_id).fn_sig();
// Instantiate late-bound regions and substitute the trait
// parameters into the method type to get the actual method type.
};
let index_expr_ty = self.node_ty(index_expr.id);
+ let adjusted_base_ty = self.resolve_type_vars_if_possible(&adjusted_base_ty);
+ let index_expr_ty = self.resolve_type_vars_if_possible(&index_expr_ty);
let result = self.try_index_step(ty::MethodCall::expr(expr.id),
expr,
let expr_ty = self.node_ty(expr.id);
self.demand_suptype(expr.span, expr_ty, return_ty);
+ } else {
+ // We could not perform a mutable index. Re-apply the
+ // immutable index adjustments - borrowck will detect
+ // this as an error.
+ if let Some(adjustment) = adjustment {
+ self.apply_adjustment(expr.id, adjustment);
+ }
+ self.tcx.sess.delay_span_bug(
+ expr.span, "convert_lvalue_derefs_to_mutable failed");
}
}
hir::ExprUnary(hir::UnDeref, ref base_expr) => {
let tcx = self.tcx;
let method_item = self.associated_item(trait_def_id, m_name).unwrap();
let def_id = method_item.def_id;
- let generics = tcx.item_generics(def_id);
+ let generics = tcx.generics_of(def_id);
assert_eq!(generics.types.len(), 0);
assert_eq!(generics.regions.len(), 0);
// NB: Instantiate late-bound regions first so that
// `instantiate_type_scheme` can normalize associated types that
// may reference those regions.
- let original_method_ty = tcx.item_type(def_id);
+ let original_method_ty = tcx.type_of(def_id);
let fn_sig = original_method_ty.fn_sig();
let fn_sig = self.replace_late_bound_regions_with_fresh_var(span,
infer::FnCall,
//
// Note that as the method comes from a trait, it should not have
// any late-bound regions appearing in its bounds.
- let bounds = self.tcx.item_predicates(def_id).instantiate(self.tcx, substs);
+ let bounds = self.tcx.predicates_of(def_id).instantiate(self.tcx, substs);
let bounds = match self.normalize_associated_types_in_as_infer_ok(span, &bounds) {
InferOk { value, obligations: o } => {
obligations.extend(o);
}
let def = pick.item.def();
-
self.tcx.check_stability(def.def_id(), expr_id, span);
- if let probe::InherentImplPick = pick.kind {
- if !self.tcx.vis_is_accessible_from(pick.item.vis, self.body_id) {
- let msg = format!("{} `{}` is private", def.kind_name(), method_name);
- self.tcx.sess.span_err(span, &msg);
- }
- }
Ok(def)
}
///////////////////////////////////////////////////////////////////////////
// CANDIDATE ASSEMBLY
+ fn push_inherent_candidate(&mut self, xform_self_ty: Ty<'tcx>, item: ty::AssociatedItem,
+ kind: CandidateKind<'tcx>, import_id: Option<ast::NodeId>) {
+ if self.tcx.vis_is_accessible_from(item.vis, self.body_id) {
+ self.inherent_candidates.push(Candidate { xform_self_ty, item, kind, import_id });
+ } else if self.private_candidate.is_none() {
+ self.private_candidate = Some(item.def());
+ }
+ }
+
+ fn push_extension_candidate(&mut self, xform_self_ty: Ty<'tcx>, item: ty::AssociatedItem,
+ kind: CandidateKind<'tcx>, import_id: Option<ast::NodeId>) {
+ if self.tcx.vis_is_accessible_from(item.vis, self.body_id) {
+ self.extension_candidates.push(Candidate { xform_self_ty, item, kind, import_id });
+ } else if self.private_candidate.is_none() {
+ self.private_candidate = Some(item.def());
+ }
+ }
+
fn assemble_inherent_candidates(&mut self) {
let steps = self.steps.clone();
for step in steps.iter() {
}
fn assemble_inherent_impl_candidates_for_type(&mut self, def_id: DefId) {
- let impl_def_ids = ty::queries::inherent_impls::get(self.tcx, self.span, def_id);
+ let impl_def_ids = self.tcx.at(self.span).inherent_impls(def_id);
for &impl_def_id in impl_def_ids.iter() {
self.assemble_inherent_impl_probe(impl_def_id);
}
continue
}
- if !self.tcx.vis_is_accessible_from(item.vis, self.body_id) {
- self.private_candidate = Some(item.def());
- continue
- }
-
let (impl_ty, impl_substs) = self.impl_ty_and_substs(impl_def_id);
let impl_ty = impl_ty.subst(self.tcx, impl_substs);
debug!("assemble_inherent_impl_probe: xform_self_ty = {:?}",
xform_self_ty);
- self.inherent_candidates.push(Candidate {
- xform_self_ty: xform_self_ty,
- item: item,
- kind: InherentImplCandidate(impl_substs, obligations),
- import_id: None,
- });
+ self.push_inherent_candidate(xform_self_ty, item,
+ InherentImplCandidate(impl_substs, obligations), None);
}
}
let xform_self_ty =
this.xform_self_ty(&item, new_trait_ref.self_ty(), new_trait_ref.substs);
- this.inherent_candidates.push(Candidate {
- xform_self_ty: xform_self_ty,
- item: item,
- kind: ObjectCandidate,
- import_id: None,
- });
+ this.push_inherent_candidate(xform_self_ty, item, ObjectCandidate, None);
});
}
// `WhereClausePick`.
assert!(!trait_ref.substs.needs_infer());
- this.inherent_candidates.push(Candidate {
- xform_self_ty: xform_self_ty,
- item: item,
- kind: WhereClauseCandidate(poly_trait_ref),
- import_id: None,
- });
+ this.push_inherent_candidate(xform_self_ty, item,
+ WhereClauseCandidate(poly_trait_ref), None);
});
}
expected: ty::Ty<'tcx>) -> bool {
match method.def() {
Def::Method(def_id) => {
- let fty = self.tcx.item_type(def_id).fn_sig();
+ let fty = self.tcx.type_of(def_id).fn_sig();
self.probe(|_| {
let substs = self.fresh_substs_for_item(self.span, method.def_id);
let output = fty.output().subst(self.tcx, substs);
import_id: Option<ast::NodeId>,
trait_def_id: DefId,
item: ty::AssociatedItem) {
- let trait_def = self.tcx.lookup_trait_def(trait_def_id);
+ let trait_def = self.tcx.trait_def(trait_def_id);
// FIXME(arielb1): can we use for_each_relevant_impl here?
trait_def.for_each_impl(self.tcx, |impl_def_id| {
debug!("xform_self_ty={:?}", xform_self_ty);
- self.extension_candidates.push(Candidate {
- xform_self_ty: xform_self_ty,
- item: item.clone(),
- kind: ExtensionImplCandidate(impl_def_id, impl_substs, obligations),
- import_id: import_id,
- });
+ self.push_extension_candidate(xform_self_ty, item,
+ ExtensionImplCandidate(impl_def_id, impl_substs, obligations), import_id);
});
}
}
};
- let impl_type = self.tcx.item_type(impl_def_id);
+ let impl_type = self.tcx.type_of(impl_def_id);
let impl_simplified_type =
match ty::fast_reject::simplify_type(self.tcx, impl_type, false) {
Some(simplified_type) => simplified_type,
});
let xform_self_ty = self.xform_self_ty(&item, step.self_ty, substs);
- self.inherent_candidates.push(Candidate {
- xform_self_ty: xform_self_ty,
- item: item.clone(),
- kind: TraitCandidate,
- import_id: import_id,
- });
+ self.push_inherent_candidate(xform_self_ty, item, TraitCandidate, import_id);
}
Ok(())
trait_def_id,
item);
- for step in self.steps.iter() {
+ for step in Rc::clone(&self.steps).iter() {
debug!("assemble_projection_candidates: step={:?}", step);
let (def_id, substs) = match step.self_ty.sty {
def_id,
substs);
- let trait_predicates = self.tcx.item_predicates(def_id);
+ let trait_predicates = self.tcx.predicates_of(def_id);
let bounds = trait_predicates.instantiate(self.tcx, substs);
let predicates = bounds.predicates;
debug!("assemble_projection_candidates: predicates={:?}",
bound,
xform_self_ty);
- self.extension_candidates.push(Candidate {
- xform_self_ty: xform_self_ty,
- item: item.clone(),
- kind: TraitCandidate,
- import_id: import_id,
- });
+ self.push_extension_candidate(xform_self_ty, item, TraitCandidate, import_id);
}
}
}
bound,
xform_self_ty);
- self.extension_candidates.push(Candidate {
- xform_self_ty: xform_self_ty,
- item: item.clone(),
- kind: WhereClauseCandidate(poly_bound),
- import_id: import_id,
- });
+ self.push_extension_candidate(xform_self_ty, item,
+ WhereClauseCandidate(poly_bound), import_id);
}
}
let cause = traits::ObligationCause::misc(self.span, self.body_id);
// Check whether the impl imposes obligations we have to worry about.
- let impl_bounds = self.tcx.item_predicates(impl_def_id);
+ let impl_bounds = self.tcx.predicates_of(impl_def_id);
let impl_bounds = impl_bounds.instantiate(self.tcx, substs);
let traits::Normalized { value: impl_bounds, obligations: norm_obligations } =
traits::normalize(selcx, cause.clone(), &impl_bounds);
impl_ty: Ty<'tcx>,
substs: &Substs<'tcx>)
-> Ty<'tcx> {
- let self_ty = self.tcx.item_type(method).fn_sig().input(0);
+ let self_ty = self.tcx.type_of(method).fn_sig().input(0);
debug!("xform_self_ty(impl_ty={:?}, self_ty={:?}, substs={:?})",
impl_ty,
self_ty,
// are given do not include type/lifetime parameters for the
// method yet. So create fresh variables here for those too,
// if there are any.
- let generics = self.tcx.item_generics(method);
+ let generics = self.tcx.generics_of(method);
assert_eq!(substs.types().count(), generics.parent_types as usize);
assert_eq!(substs.regions().count(), generics.parent_regions as usize);
/// Get the type of an impl and generate substitutions with placeholders.
fn impl_ty_and_substs(&self, impl_def_id: DefId) -> (Ty<'tcx>, &'tcx Substs<'tcx>) {
- let impl_ty = self.tcx.item_type(impl_def_id);
+ let impl_ty = self.tcx.type_of(impl_def_id);
let substs = Substs::for_item(self.tcx,
impl_def_id,
}
pub fn check_item_bodies<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CompileResult {
- ty::queries::typeck_item_bodies::get(tcx, DUMMY_SP, LOCAL_CRATE)
+ tcx.typeck_item_bodies(LOCAL_CRATE)
}
fn typeck_item_bodies<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> CompileResult {
debug_assert!(crate_num == LOCAL_CRATE);
tcx.sess.track_errors(|| {
tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
- tcx.item_tables(body_owner_def_id);
+ tcx.typeck_tables_of(body_owner_def_id);
});
})
}
pub fn provide(providers: &mut Providers) {
*providers = Providers {
typeck_item_bodies,
- typeck_tables,
+ typeck_tables_of,
+ has_typeck_tables,
closure_type,
closure_kind,
adt_destructor,
def_id: DefId)
-> ty::PolyFnSig<'tcx> {
let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
- tcx.item_tables(def_id).closure_tys[&node_id]
+ tcx.typeck_tables_of(def_id).closure_tys[&node_id]
}
fn closure_kind<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> ty::ClosureKind {
let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
- tcx.item_tables(def_id).closure_kinds[&node_id]
+ tcx.typeck_tables_of(def_id).closure_kinds[&node_id]
}
fn adt_destructor<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
tcx.calculate_dtor(def_id, &mut dropck::check_drop_impl)
}
-fn typeck_tables<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> &'tcx ty::TypeckTables<'tcx> {
- // Closures' tables come from their outermost function,
- // as they are part of the same "inference environment".
- let outer_def_id = tcx.closure_base_def_id(def_id);
- if outer_def_id != def_id {
- return tcx.item_tables(outer_def_id);
- }
-
- let id = tcx.hir.as_local_node_id(def_id).unwrap();
- let span = tcx.hir.span(id);
- let unsupported = || {
- span_bug!(span, "can't type-check body of {:?}", def_id);
- };
-
- // Figure out what primary body this item has.
- let mut fn_decl = None;
- let body_id = match tcx.hir.get(id) {
+/// If this def-id is a "primary tables entry", returns `Some((body_id, decl))`
+/// with information about its body-id and fn-decl (if any). Otherwise,
+/// returns `None`.
+///
+/// If this function returns `Some`, then `typeck_tables_of(def_id)` will
+/// succeed; if it returns `None`, then `typeck_tables_of(def_id)` may or
+/// may not succeed. In some cases where this function returns `None`
+/// (notably closures), `typeck_tables_of(def_id)` would wind up
+/// redirecting to the owning function.
+fn primary_body_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ id: ast::NodeId)
+ -> Option<(hir::BodyId, Option<&'tcx hir::FnDecl>)>
+{
+ match tcx.hir.get(id) {
hir::map::NodeItem(item) => {
match item.node {
hir::ItemConst(_, body) |
- hir::ItemStatic(_, _, body) => body,
- hir::ItemFn(ref decl, .., body) => {
- fn_decl = Some(decl);
- body
- }
- _ => unsupported()
+ hir::ItemStatic(_, _, body) =>
+ Some((body, None)),
+ hir::ItemFn(ref decl, .., body) =>
+ Some((body, Some(decl))),
+ _ =>
+ None,
}
}
hir::map::NodeTraitItem(item) => {
match item.node {
- hir::TraitItemKind::Const(_, Some(body)) => body,
- hir::TraitItemKind::Method(ref sig,
- hir::TraitMethod::Provided(body)) => {
- fn_decl = Some(&sig.decl);
- body
- }
- _ => unsupported()
+ hir::TraitItemKind::Const(_, Some(body)) =>
+ Some((body, None)),
+ hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) =>
+ Some((body, Some(&sig.decl))),
+ _ =>
+ None,
}
}
hir::map::NodeImplItem(item) => {
match item.node {
- hir::ImplItemKind::Const(_, body) => body,
- hir::ImplItemKind::Method(ref sig, body) => {
- fn_decl = Some(&sig.decl);
- body
- }
- _ => unsupported()
+ hir::ImplItemKind::Const(_, body) =>
+ Some((body, None)),
+ hir::ImplItemKind::Method(ref sig, body) =>
+ Some((body, Some(&sig.decl))),
+ _ =>
+ None,
}
}
hir::map::NodeExpr(expr) => {
// Assume that everything other than closures
// is a constant "initializer" expression.
match expr.node {
- hir::ExprClosure(..) => {
- // We should've bailed out above for closures.
- span_bug!(expr.span, "unexpected closure")
- }
- _ => hir::BodyId { node_id: expr.id }
+ hir::ExprClosure(..) =>
+ None,
+ _ =>
+ Some((hir::BodyId { node_id: expr.id }, None)),
}
}
- _ => unsupported()
- };
+ _ => None,
+ }
+}
+
+fn has_typeck_tables<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> bool {
+ // Closures' tables come from their outermost function,
+ // as they are part of the same "inference environment".
+ let outer_def_id = tcx.closure_base_def_id(def_id);
+ if outer_def_id != def_id {
+ return tcx.has_typeck_tables(outer_def_id);
+ }
+
+ let id = tcx.hir.as_local_node_id(def_id).unwrap();
+ primary_body_of(tcx, id).is_some()
+}
+
+fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> &'tcx ty::TypeckTables<'tcx> {
+ // Closures' tables come from their outermost function,
+ // as they are part of the same "inference environment".
+ let outer_def_id = tcx.closure_base_def_id(def_id);
+ if outer_def_id != def_id {
+ return tcx.typeck_tables_of(outer_def_id);
+ }
+
+ let id = tcx.hir.as_local_node_id(def_id).unwrap();
+ let span = tcx.hir.span(id);
+
+ // Figure out what primary body this item has.
+ let (body_id, fn_decl) = primary_body_of(tcx, id).unwrap_or_else(|| {
+ span_bug!(span, "can't type-check body of {:?}", def_id);
+ });
let body = tcx.hir.body(body_id);
Inherited::build(tcx, id).enter(|inh| {
let fcx = if let Some(decl) = fn_decl {
- let fn_sig = tcx.item_type(def_id).fn_sig();
+ let fn_sig = tcx.type_of(def_id).fn_sig();
check_abi(tcx, span, fn_sig.abi());
check_fn(&inh, fn_sig, decl, id, body)
} else {
let fcx = FnCtxt::new(&inh, body.value.id);
- let expected_type = tcx.item_type(def_id);
+ let expected_type = tcx.type_of(def_id);
let expected_type = fcx.normalize_associated_types_in(body.value.span, &expected_type);
fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized);
id: ast::NodeId,
span: Span) {
let def_id = tcx.hir.local_def_id(id);
- let def = tcx.lookup_adt_def(def_id);
+ let def = tcx.adt_def(def_id);
def.destructor(tcx); // force the destructor to be evaluated
check_representable(tcx, span, def_id);
// if struct is packed and not aligned, check fields for alignment.
// Checks for combining packed and align attrs on single struct are done elsewhere.
- if tcx.lookup_adt_def(def_id).repr.packed() && tcx.lookup_adt_def(def_id).repr.align == 0 {
+ if tcx.adt_def(def_id).repr.packed() && tcx.adt_def(def_id).repr.align == 0 {
check_packed(tcx, span, def_id);
}
}
id: ast::NodeId,
span: Span) {
let def_id = tcx.hir.local_def_id(id);
- let def = tcx.lookup_adt_def(def_id);
+ let def = tcx.adt_def(def_id);
def.destructor(tcx); // force the destructor to be evaluated
check_representable(tcx, span, def_id);
}
// Consts can play a role in type-checking, so they are included here.
hir::ItemStatic(..) |
hir::ItemConst(..) => {
- tcx.item_tables(tcx.hir.local_def_id(it.id));
+ tcx.typeck_tables_of(tcx.hir.local_def_id(it.id));
}
hir::ItemEnum(ref enum_definition, _) => {
check_enum(tcx,
}
hir::ItemTy(_, ref generics) => {
let def_id = tcx.hir.local_def_id(it.id);
- let pty_ty = tcx.item_type(def_id);
+ let pty_ty = tcx.type_of(def_id);
check_bounds_are_used(tcx, generics, pty_ty);
}
hir::ItemForeignMod(ref m) => {
}
} else {
for item in &m.items {
- let generics = tcx.item_generics(tcx.hir.local_def_id(item.id));
+ let generics = tcx.generics_of(tcx.hir.local_def_id(item.id));
if !generics.types.is_empty() {
let mut err = struct_span_err!(tcx.sess, item.span, E0044,
"foreign items may not have type parameters");
fn check_on_unimplemented<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
item: &hir::Item) {
- let generics = tcx.item_generics(def_id);
+ let generics = tcx.generics_of(def_id);
if let Some(ref attr) = item.attrs.iter().find(|a| {
a.check_name("rustc_on_unimplemented")
}) {
.map(|node_item| node_item.map(|parent| parent.defaultness));
if let Some(parent) = parent {
- if parent.item.is_final() {
+ if tcx.impl_item_is_final(&parent) {
report_forbidden_specialization(tcx, impl_item, parent.node.def_id());
}
}
if impl_trait_ref.references_error() { return; }
// Locate trait definition and items
- let trait_def = tcx.lookup_trait_def(impl_trait_ref.def_id);
+ let trait_def = tcx.trait_def(impl_trait_ref.def_id);
let mut overridden_associated_type = None;
let impl_items = || impl_item_refs.iter().map(|iiref| tcx.hir.impl_item(iiref.id));
let signature = |item: &ty::AssociatedItem| {
match item.kind {
ty::AssociatedKind::Method => {
- format!("{}", tcx.item_type(item.def_id).fn_sig().0)
+ format!("{}", tcx.type_of(item.def_id).fn_sig().0)
}
ty::AssociatedKind::Type => format!("type {};", item.name.to_string()),
ty::AssociatedKind::Const => {
- format!("const {}: {:?};", item.name.to_string(), tcx.item_type(item.def_id))
+ format!("const {}: {:?};", item.name.to_string(), tcx.type_of(item.def_id))
}
}
};
sp: Span,
item_def_id: DefId)
-> bool {
- let rty = tcx.item_type(item_def_id);
+ let rty = tcx.type_of(item_def_id);
// Check that it is possible to represent this type. This call identifies
// (1) types that contain themselves and (2) types that contain a different
}
pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, def_id: DefId) {
- let t = tcx.item_type(def_id);
+ let t = tcx.type_of(def_id);
match t.sty {
ty::TyAdt(def, substs) if def.is_struct() => {
let fields = &def.struct_variant().fields;
fn check_packed_inner<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
stack: &mut Vec<DefId>) -> bool {
- let t = tcx.item_type(def_id);
+ let t = tcx.type_of(def_id);
if stack.contains(&def_id) {
debug!("check_packed_inner: {:?} is recursive", t);
return false;
}
match t.sty {
ty::TyAdt(def, substs) if def.is_struct() => {
- if tcx.lookup_adt_def(def.did).repr.align > 0 {
+ if tcx.adt_def(def.did).repr.align > 0 {
return true;
}
// push struct def_id before checking fields
vs: &'tcx [hir::Variant],
id: ast::NodeId) {
let def_id = tcx.hir.local_def_id(id);
- let def = tcx.lookup_adt_def(def_id);
+ let def = tcx.adt_def(def_id);
def.destructor(tcx); // force the destructor to be evaluated
if vs.is_empty() && tcx.has_attr(def_id, "repr") {
for v in vs {
if let Some(e) = v.node.disr_expr {
- tcx.item_tables(tcx.hir.local_def_id(e.node_id));
+ tcx.typeck_tables_of(tcx.hir.local_def_id(e.node_id));
}
}
let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
let item_id = tcx.hir.ty_param_owner(node_id);
let item_def_id = tcx.hir.local_def_id(item_id);
- let generics = tcx.item_generics(item_def_id);
+ let generics = tcx.generics_of(item_def_id);
let index = generics.type_param_to_index[&def_id.index];
ty::GenericPredicates {
parent: None,
/// generic type scheme.
fn instantiate_bounds(&self, span: Span, def_id: DefId, substs: &Substs<'tcx>)
-> ty::InstantiatedPredicates<'tcx> {
- let bounds = self.tcx.item_predicates(def_id);
+ let bounds = self.tcx.predicates_of(def_id);
let result = bounds.instantiate(self.tcx, substs);
let result = self.normalize_associated_types_in(span, &result);
debug!("instantiate_bounds(bounds={:?}, substs={:?}) = {:?}",
let ty_var = self.next_ty_var(TypeVariableOrigin::TypeInference(span));
self.anon_types.borrow_mut().insert(id, ty_var);
- let item_predicates = self.tcx.item_predicates(def_id);
- let bounds = item_predicates.instantiate(self.tcx, substs);
+ let predicates_of = self.tcx.predicates_of(def_id);
+ let bounds = predicates_of.instantiate(self.tcx, substs);
for predicate in bounds.predicates {
// Change the predicate to refer to the type variable,
span: Span, // (potential) receiver for this impl
did: DefId)
-> TypeAndSubsts<'tcx> {
- let ity = self.tcx.item_type(did);
+ let ity = self.tcx.type_of(did);
debug!("impl_self_ty: ity={:?}", ity);
let substs = self.fresh_substs_for_item(span, did);
Def::VariantCtor(def_id, ..) => {
// Everything but the final segment should have no
// parameters at all.
- let mut generics = self.tcx.item_generics(def_id);
+ let mut generics = self.tcx.generics_of(def_id);
if let Some(def_id) = generics.parent {
// Variant and struct constructors use the
// generics of their parent type definition.
- generics = self.tcx.item_generics(def_id);
+ generics = self.tcx.generics_of(def_id);
}
type_segment = Some((segments.last().unwrap(), generics));
}
Def::Const(def_id) |
Def::Static(def_id, _) => {
fn_segment = Some((segments.last().unwrap(),
- self.tcx.item_generics(def_id)));
+ self.tcx.generics_of(def_id)));
}
// Case 3. Reference to a method or associated const.
ty::ImplContainer(_) => {}
}
- let generics = self.tcx.item_generics(def_id);
+ let generics = self.tcx.generics_of(def_id);
if segments.len() >= 2 {
- let parent_generics = self.tcx.item_generics(generics.parent.unwrap());
+ let parent_generics = self.tcx.generics_of(generics.parent.unwrap());
type_segment = Some((&segments[segments.len() - 2], parent_generics));
} else {
// `<T>::assoc` will end up here, and so can `T::assoc`.
self.to_ty(ast_ty)
} else if !infer_types && def.has_default {
// No type parameter provided, but a default exists.
- let default = self.tcx.item_type(def.def_id);
+ let default = self.tcx.type_of(def.def_id);
self.normalize_ty(
span,
default.subst_spanned(self.tcx, substs, Some(span))
// The things we are substituting into the type should not contain
// escaping late-bound regions, and nor should the base type scheme.
- let ty = self.tcx.item_type(def.def_id());
+ let ty = self.tcx.type_of(def.def_id());
assert!(!substs.has_escaping_regions());
assert!(!ty.has_escaping_regions());
// is inherent, there is no `Self` parameter, instead, the impl needs
// type parameters, which we can infer by unifying the provided `Self`
// with the substituted impl type.
- let ty = self.tcx.item_type(impl_def_id);
+ let ty = self.tcx.type_of(impl_def_id);
let impl_ty = self.instantiate_type_scheme(span, &substs, &ty);
match self.sub_types(false, &self.misc(span), self_ty, impl_ty) {
// ```
//
// we can thus deduce that `<T as SomeTrait<'a>>::SomeType : 'a`.
- let trait_predicates = self.tcx.item_predicates(projection_ty.trait_ref.def_id);
+ let trait_predicates = self.tcx.predicates_of(projection_ty.trait_ref.def_id);
assert_eq!(trait_predicates.parent, None);
let predicates = trait_predicates.predicates.as_slice().to_vec();
traits::elaborate_predicates(self.tcx, predicates)
///
/// won't be allowed unless there's an *explicit* implementation of `Send`
/// for `T`
- hir::ItemImpl(_, hir::ImplPolarity::Positive, _,
+ hir::ItemImpl(_, hir::ImplPolarity::Positive, _, _,
ref trait_ref, ref self_ty, _) => {
self.check_impl(item, self_ty, trait_ref);
}
- hir::ItemImpl(_, hir::ImplPolarity::Negative, _, Some(_), ..) => {
+ hir::ItemImpl(_, hir::ImplPolarity::Negative, _, _, Some(_), ..) => {
// FIXME(#27579) what amount of WF checking do we need for neg impls?
let trait_ref = tcx.impl_trait_ref(tcx.hir.local_def_id(item.id)).unwrap();
let (mut implied_bounds, self_ty) = match item.container {
ty::TraitContainer(_) => (vec![], fcx.tcx.mk_self_type()),
ty::ImplContainer(def_id) => (fcx.impl_implied_bounds(def_id, span),
- fcx.tcx.item_type(def_id))
+ fcx.tcx.type_of(def_id))
};
match item.kind {
ty::AssociatedKind::Const => {
- let ty = fcx.tcx.item_type(item.def_id);
+ let ty = fcx.tcx.type_of(item.def_id);
let ty = fcx.instantiate_type_scheme(span, free_substs, &ty);
fcx.register_wf_obligation(ty, span, code.clone());
}
ty::AssociatedKind::Method => {
reject_shadowing_type_parameters(fcx.tcx, item.def_id);
- let method_ty = fcx.tcx.item_type(item.def_id);
+ let method_ty = fcx.tcx.type_of(item.def_id);
let method_ty = fcx.instantiate_type_scheme(span, free_substs, &method_ty);
let predicates = fcx.instantiate_bounds(span, item.def_id, free_substs);
let sig = method_ty.fn_sig();
}
ty::AssociatedKind::Type => {
if item.defaultness.has_value() {
- let ty = fcx.tcx.item_type(item.def_id);
+ let ty = fcx.tcx.type_of(item.def_id);
let ty = fcx.instantiate_type_scheme(span, free_substs, &ty);
fcx.register_wf_obligation(ty, span, code.clone());
}
//
// 3) that the trait definition does not have any type parameters
- let predicates = self.tcx.item_predicates(trait_def_id);
+ let predicates = self.tcx.predicates_of(trait_def_id);
// We must exclude the Self : Trait predicate contained by all
// traits.
}
});
- let has_ty_params = self.tcx.item_generics(trait_def_id).types.len() > 1;
+ let has_ty_params = self.tcx.generics_of(trait_def_id).types.len() > 1;
// We use an if-else here, since the generics will also trigger
// an extraneous error message when we find predicates like
self.for_item(item).with_fcx(|fcx, this| {
let free_substs = &fcx.parameter_environment.free_substs;
let def_id = fcx.tcx.hir.local_def_id(item.id);
- let ty = fcx.tcx.item_type(def_id);
+ let ty = fcx.tcx.type_of(def_id);
let item_ty = fcx.instantiate_type_scheme(item.span, free_substs, &ty);
let sig = item_ty.fn_sig();
debug!("check_item_type: {:?}", item);
self.for_item(item).with_fcx(|fcx, this| {
- let ty = fcx.tcx.item_type(fcx.tcx.hir.local_def_id(item.id));
+ let ty = fcx.tcx.type_of(fcx.tcx.hir.local_def_id(item.id));
let item_ty = fcx.instantiate_type_scheme(item.span,
&fcx.parameter_environment
.free_substs,
}
}
None => {
- let self_ty = fcx.tcx.item_type(item_def_id);
+ let self_ty = fcx.tcx.type_of(item_def_id);
let self_ty = fcx.instantiate_type_scheme(item.span, free_substs, &self_ty);
fcx.register_wf_obligation(self_ty, ast_self_ty.span, this.code.clone());
}
let span = method_sig.decl.inputs[0].span;
let free_substs = &fcx.parameter_environment.free_substs;
- let method_ty = fcx.tcx.item_type(method.def_id);
+ let method_ty = fcx.tcx.type_of(method.def_id);
let fty = fcx.instantiate_type_scheme(span, free_substs, &method_ty);
let sig = fcx.tcx.liberate_late_bound_regions(free_id_outlive, &fty.fn_sig());
ast_generics: &hir::Generics)
{
let item_def_id = self.tcx.hir.local_def_id(item.id);
- let ty = self.tcx.item_type(item_def_id);
+ let ty = self.tcx.type_of(item_def_id);
if self.tcx.has_error_field(ty) {
return;
}
- let ty_predicates = self.tcx.item_predicates(item_def_id);
+ let ty_predicates = self.tcx.predicates_of(item_def_id);
assert_eq!(ty_predicates.parent, None);
- let variances = self.tcx.item_variances(item_def_id);
+ let variances = self.tcx.variances_of(item_def_id);
let mut constrained_parameters: FxHashSet<_> =
variances.iter().enumerate()
}
fn reject_shadowing_type_parameters(tcx: TyCtxt, def_id: DefId) {
- let generics = tcx.item_generics(def_id);
- let parent = tcx.item_generics(generics.parent.unwrap());
+ let generics = tcx.generics_of(def_id);
+ let parent = tcx.generics_of(generics.parent.unwrap());
let impl_params: FxHashMap<_, _> = parent.types
.iter()
.map(|tp| (tp.name, tp.def_id))
let fields =
struct_def.fields().iter()
.map(|field| {
- let field_ty = self.tcx.item_type(self.tcx.hir.local_def_id(field.id));
+ let field_ty = self.tcx.type_of(self.tcx.hir.local_def_id(field.id));
let field_ty = self.instantiate_type_scheme(field.span,
&self.parameter_environment
.free_substs,
None => {
// Inherent impl: take implied bounds from the self type.
- let self_ty = self.tcx.item_type(impl_def_id);
+ let self_ty = self.tcx.type_of(impl_def_id);
let self_ty = self.instantiate_type_scheme(span, free_substs, &self_ty);
vec![self_ty]
}
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let mut used_trait_imports = DefIdSet();
for &body_id in tcx.hir.krate().bodies.keys() {
- let item_id = tcx.hir.body_owner(body_id);
- let item_def_id = tcx.hir.local_def_id(item_id);
-
- // this will have been written by the main typeck pass
- if let Some(tables) = tcx.maps.typeck_tables.borrow().get(&item_def_id) {
- let imports = &tables.used_trait_imports;
- debug!("GatherVisitor: item_def_id={:?} with imports {:#?}", item_def_id, imports);
- used_trait_imports.extend(imports);
- } else {
- debug!("GatherVisitor: item_def_id={:?} with no imports", item_def_id);
- }
+ let item_def_id = tcx.hir.body_owner_def_id(body_id);
+ let tables = tcx.typeck_tables_of(item_def_id);
+ let imports = &tables.used_trait_imports;
+ debug!("GatherVisitor: item_def_id={:?} with imports {:#?}", item_def_id, imports);
+ used_trait_imports.extend(imports);
}
let mut visitor = CheckVisitor { tcx, used_trait_imports };
fn visit_implementation_of_drop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
_drop_did: DefId,
impl_did: DefId) {
- match tcx.item_type(impl_did).sty {
+ match tcx.type_of(impl_did).sty {
ty::TyAdt(..) => {}
_ => {
// Destructors only work on nominal types.
return;
};
- let self_type = tcx.item_type(impl_did);
+ let self_type = tcx.type_of(impl_did);
debug!("visit_implementation_of_copy: self_type={:?} (bound)",
self_type);
// course.
if impl_did.is_local() {
let span = tcx.def_span(impl_did);
- ty::queries::coerce_unsized_info::get(tcx, span, impl_did);
+ tcx.at(span).coerce_unsized_info(impl_did);
}
}
bug!("coerce_unsized_info: invoked for non-local def-id {:?}", impl_did)
});
- let source = tcx.item_type(impl_did);
+ let source = tcx.type_of(impl_did);
let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
assert_eq!(trait_ref.def_id, coerce_unsized_trait);
let target = trait_ref.substs.type_at(1);
.filter_map(|(i, f)| {
let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b));
- if tcx.item_type(f.did).is_phantom_data() {
+ if tcx.type_of(f.did).is_phantom_data() {
// Ignore PhantomData fields
return None;
}
//! for any change, but it is very cheap to compute. In practice, most
//! code in the compiler never *directly* requests this map. Instead,
//! it requests the inherent impls specific to some type (via
-//! `ty::queries::inherent_impls::get(def_id)`). That value, however,
+//! `tcx.inherent_impls(def_id)`). That value, however,
//! is computed by selecting an idea from this table.
use rustc::dep_graph::DepNode;
use std::rc::Rc;
use syntax::ast;
-use syntax_pos::{DUMMY_SP, Span};
+use syntax_pos::Span;
/// On-demand query: yields a map containing all types mapped to their inherent impls.
pub fn crate_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
//
// [the plan]: https://github.com/rust-lang/rust-roadmap/issues/4
+ thread_local! {
+ static EMPTY_DEF_ID_VEC: Rc<Vec<DefId>> = Rc::new(vec![])
+ }
+
let result = tcx.dep_graph.with_ignore(|| {
- let crate_map = ty::queries::crate_inherent_impls::get(tcx, DUMMY_SP, ty_def_id.krate);
+ let crate_map = tcx.crate_inherent_impls(ty_def_id.krate);
match crate_map.inherent_impls.get(&ty_def_id) {
Some(v) => v.clone(),
- None => Rc::new(vec![]),
+ None => EMPTY_DEF_ID_VEC.with(|v| v.clone())
}
});
}
let def_id = self.tcx.hir.local_def_id(item.id);
- let self_ty = self.tcx.item_type(def_id);
+ let self_ty = self.tcx.type_of(def_id);
match self_ty.sty {
ty::TyAdt(def, _) => {
self.check_def_id(item, def.did);
use rustc::traits::{self, Reveal};
use rustc::ty::{self, TyCtxt};
-use syntax_pos::DUMMY_SP;
-
pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
crate_num: CrateNum) {
assert_eq!(crate_num, LOCAL_CRATE);
}
fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
- let impls = ty::queries::inherent_impls::get(self.tcx, DUMMY_SP, ty_def_id);
+ let impls = self.tcx.inherent_impls(ty_def_id);
for (i, &impl1_def_id) in impls.iter().enumerate() {
for &impl2_def_id in &impls[(i + 1)..] {
// mappings. That mapping code resides here.
use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
-use rustc::ty::{self, TyCtxt, TypeFoldable};
+use rustc::ty::{TyCtxt, TypeFoldable};
use rustc::ty::maps::Providers;
use syntax::ast;
-use syntax_pos::DUMMY_SP;
mod builtin;
mod inherent_impls;
}
enforce_trait_manually_implementable(tcx, impl_def_id, trait_ref.def_id);
- let trait_def = tcx.lookup_trait_def(trait_ref.def_id);
+ let trait_def = tcx.trait_def(trait_ref.def_id);
trait_def.record_local_impl(tcx, impl_def_id, trait_ref);
}
}
pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
for &trait_def_id in tcx.hir.krate().trait_impls.keys() {
- ty::queries::coherent_trait::get(tcx, DUMMY_SP, (LOCAL_CRATE, trait_def_id));
+ tcx.coherent_trait((LOCAL_CRATE, trait_def_id));
}
unsafety::check(tcx);
overlap::check_default_impls(tcx);
// these queries are executed for side-effects (error reporting):
- ty::queries::crate_inherent_impls::get(tcx, DUMMY_SP, LOCAL_CRATE);
- ty::queries::crate_inherent_impls_overlap_check::get(tcx, DUMMY_SP, LOCAL_CRATE);
+ tcx.crate_inherent_impls(LOCAL_CRATE);
+ tcx.crate_inherent_impls_overlap_check(LOCAL_CRATE);
}
let _task =
tcx.dep_graph.in_task(DepNode::CoherenceOverlapCheck(trait_def_id));
- let def = tcx.lookup_trait_def(trait_def_id);
+ let def = tcx.trait_def(trait_def_id);
// attempt to insert into the specialization graph
let insert_result = def.add_impl_for_specialization(tcx, impl_def_id);
None => {}
Some(trait_ref) => {
- let trait_def = self.tcx.lookup_trait_def(trait_ref.def_id);
+ let trait_def = self.tcx.trait_def(trait_ref.def_id);
let unsafe_attr = impl_generics.and_then(|g| g.carries_unsafe_attr());
match (trait_def.unsafety, unsafe_attr, unsafety, polarity) {
(_, _, Unsafety::Unsafe, hir::ImplPolarity::Negative) => {
hir::ItemDefaultImpl(unsafety, _) => {
self.check_unsafety_coherence(item, None, unsafety, hir::ImplPolarity::Positive);
}
- hir::ItemImpl(unsafety, polarity, ref generics, Some(_), _, _) => {
+ hir::ItemImpl(unsafety, polarity, _, ref generics, ..) => {
self.check_unsafety_coherence(item, Some(generics), unsafety, polarity);
}
_ => {}
arbitrary interdependencies. So instead we generally convert things
lazilly and on demand, and include logic that checks for cycles.
Demand is driven by calls to `AstConv::get_item_type_scheme` or
-`AstConv::lookup_trait_def`.
+`AstConv::trait_def`.
Currently, we "convert" types and traits in two phases (note that
conversion only affects the types of items / enum variants / methods;
pub fn provide(providers: &mut Providers) {
*providers = Providers {
- ty,
- generics,
- predicates,
- super_predicates,
+ type_of,
+ generics_of,
+ predicates_of,
+ super_predicates_of,
type_param_predicates,
trait_def,
adt_def,
for param in &generics.ty_params {
if param.default.is_some() {
let def_id = self.tcx.hir.local_def_id(param.id);
- self.tcx.item_type(def_id);
+ self.tcx.type_of(def_id);
}
}
intravisit::walk_generics(self, generics);
fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
if let hir::ExprClosure(..) = expr.node {
let def_id = self.tcx.hir.local_def_id(expr.id);
- self.tcx.item_generics(def_id);
- self.tcx.item_type(def_id);
+ self.tcx.generics_of(def_id);
+ self.tcx.type_of(def_id);
}
intravisit::walk_expr(self, expr);
}
fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
if let hir::TyImplTrait(..) = ty.node {
let def_id = self.tcx.hir.local_def_id(ty.id);
- self.tcx.item_generics(def_id);
- self.tcx.item_predicates(def_id);
+ self.tcx.generics_of(def_id);
+ self.tcx.predicates_of(def_id);
}
intravisit::walk_ty(self, ty);
}
def_id: DefId)
-> ty::GenericPredicates<'tcx>
{
- ty::queries::type_param_predicates::get(self.tcx, span, (self.item_def_id, def_id))
+ self.tcx.at(span).type_param_predicates((self.item_def_id, def_id))
}
fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
let param_id = tcx.hir.as_local_node_id(def_id).unwrap();
let param_owner = tcx.hir.ty_param_owner(param_id);
let param_owner_def_id = tcx.hir.local_def_id(param_owner);
- let generics = tcx.item_generics(param_owner_def_id);
+ let generics = tcx.generics_of(param_owner_def_id);
let index = generics.type_param_to_index[&def_id.index];
let ty = tcx.mk_param(index, tcx.hir.ty_param_name(param_id));
let parent = if item_def_id == param_owner_def_id {
None
} else {
- tcx.item_generics(item_def_id).parent
+ tcx.generics_of(item_def_id).parent
};
let mut result = parent.map_or(ty::GenericPredicates {
NodeItem(item) => {
match item.node {
ItemFn(.., ref generics, _) |
- ItemImpl(_, _, ref generics, ..) |
+ ItemImpl(_, _, _, ref generics, ..) |
ItemTy(_, ref generics) |
ItemEnum(_, ref generics) |
ItemStruct(_, ref generics) |
hir::ItemForeignMod(ref foreign_mod) => {
for item in &foreign_mod.items {
let def_id = tcx.hir.local_def_id(item.id);
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
}
}
hir::ItemEnum(ref enum_definition, _) => {
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
convert_enum_variant_types(tcx, def_id, &enum_definition.variants);
},
hir::ItemDefaultImpl(..) => {
tcx.impl_trait_ref(def_id);
}
hir::ItemImpl(..) => {
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
tcx.impl_trait_ref(def_id);
- tcx.item_predicates(def_id);
+ tcx.predicates_of(def_id);
},
hir::ItemTrait(..) => {
- tcx.item_generics(def_id);
- tcx.lookup_trait_def(def_id);
- ty::queries::super_predicates::get(tcx, it.span, def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.trait_def(def_id);
+ tcx.at(it.span).super_predicates_of(def_id);
+ tcx.predicates_of(def_id);
},
hir::ItemStruct(ref struct_def, _) |
hir::ItemUnion(ref struct_def, _) => {
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
for f in struct_def.fields() {
let def_id = tcx.hir.local_def_id(f.id);
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
}
if !struct_def.is_struct() {
},
hir::ItemTy(_, ref generics) => {
ensure_no_ty_param_bounds(tcx, it.span, generics, "type");
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
}
hir::ItemStatic(..) | hir::ItemConst(..) | hir::ItemFn(..) => {
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
}
}
}
fn convert_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_item_id: ast::NodeId) {
let trait_item = tcx.hir.expect_trait_item(trait_item_id);
let def_id = tcx.hir.local_def_id(trait_item.id);
- tcx.item_generics(def_id);
+ tcx.generics_of(def_id);
match trait_item.node {
hir::TraitItemKind::Const(..) |
hir::TraitItemKind::Type(_, Some(_)) |
hir::TraitItemKind::Method(..) => {
- tcx.item_type(def_id);
+ tcx.type_of(def_id);
}
hir::TraitItemKind::Type(_, None) => {}
};
- tcx.item_predicates(def_id);
+ tcx.predicates_of(def_id);
}
fn convert_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_item_id: ast::NodeId) {
let def_id = tcx.hir.local_def_id(impl_item_id);
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
}
fn convert_variant_ctor<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ctor_id: ast::NodeId) {
let def_id = tcx.hir.local_def_id(ctor_id);
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
}
fn convert_enum_variant_types<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
variants: &[hir::Variant]) {
- let def = tcx.lookup_adt_def(def_id);
+ let def = tcx.adt_def(def_id);
let repr_type = def.repr.discr_type();
let initial = repr_type.initial_discriminant(tcx);
let mut prev_discr = None::<ConstInt>;
prev_discr = Some(if let Some(e) = variant.node.disr_expr {
let expr_did = tcx.hir.local_def_id(e.node_id);
let substs = Substs::empty();
- let result = ty::queries::const_eval::get(tcx, variant.span, (expr_did, substs));
+ let result = tcx.at(variant.span).const_eval((expr_did, substs));
// enum variant evaluation happens before the global constant check
// so we need to report the real error
for f in variant.node.data.fields() {
let def_id = tcx.hir.local_def_id(f.id);
- tcx.item_generics(def_id);
- tcx.item_type(def_id);
- tcx.item_predicates(def_id);
+ tcx.generics_of(def_id);
+ tcx.type_of(def_id);
+ tcx.predicates_of(def_id);
}
// Convert the ctor, if any. This also registers the variant as
/// Ensures that the super-predicates of the trait with def-id
/// trait_def_id are converted and stored. This also ensures that
/// the transitive super-predicates are converted;
-fn super_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_def_id: DefId)
- -> ty::GenericPredicates<'tcx> {
+fn super_predicates_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ trait_def_id: DefId)
+ -> ty::GenericPredicates<'tcx> {
debug!("super_predicates(trait_def_id={:?})", trait_def_id);
let trait_node_id = tcx.hir.as_local_node_id(trait_def_id).unwrap();
// Now require that immediate supertraits are converted,
// which will, in turn, reach indirect supertraits.
for bound in superbounds.iter().filter_map(|p| p.to_opt_poly_trait_ref()) {
- ty::queries::super_predicates::get(tcx, item.span, bound.def_id());
+ tcx.at(item.span).super_predicates_of(bound.def_id());
}
ty::GenericPredicates {
tcx.alloc_trait_def(def)
}
-fn generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> &'tcx ty::Generics {
+fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> &'tcx ty::Generics {
use rustc::hir::map::*;
use rustc::hir::*;
NodeItem(item) => {
match item.node {
ItemFn(.., ref generics, _) |
- ItemImpl(_, _, ref generics, ..) => generics,
+ ItemImpl(_, _, _, ref generics, ..) => generics,
ItemTy(_, ref generics) |
ItemEnum(_, ref generics) |
let mut parent_has_self = false;
let mut own_start = has_self as u32;
let (parent_regions, parent_types) = parent_def_id.map_or((0, 0), |def_id| {
- let generics = tcx.item_generics(def_id);
+ let generics = tcx.generics_of(def_id);
assert_eq!(has_self, false);
parent_has_self = generics.has_self;
own_start = generics.count() as u32;
})
}
-fn ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> Ty<'tcx> {
+fn type_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> Ty<'tcx> {
use rustc::hir::map::*;
use rustc::hir::*;
ItemEnum(..) |
ItemStruct(..) |
ItemUnion(..) => {
- let def = tcx.lookup_adt_def(def_id);
+ let def = tcx.adt_def(def_id);
let substs = Substs::identity_for_item(tcx, def_id);
tcx.mk_adt(def, substs)
}
NodeStructCtor(&ref def) |
NodeVariant(&Spanned { node: hir::Variant_ { data: ref def, .. }, .. }) => {
- let ty = tcx.item_type(tcx.hir.get_parent_did(node_id));
+ let ty = tcx.type_of(tcx.hir.get_parent_did(node_id));
match *def {
VariantData::Unit(..) | VariantData::Struct(..) => ty,
VariantData::Tuple(ref fields, _) => {
let inputs = fields.iter().map(|f| {
- tcx.item_type(tcx.hir.local_def_id(f.id))
+ tcx.type_of(tcx.hir.local_def_id(f.id))
});
let substs = Substs::identity_for_item(tcx, def_id);
tcx.mk_fn_def(def_id, substs, ty::Binder(tcx.mk_fn_sig(
NodeVariant(&Spanned { node: Variant_ { disr_expr: Some(e), .. }, .. })
if e.node_id == node_id => {
- tcx.lookup_adt_def(tcx.hir.get_parent_did(node_id))
+ tcx.adt_def(tcx.hir.get_parent_did(node_id))
.repr.discr_type().to_ty(tcx)
}
NodeTy(&hir::Ty { node: TyImplTrait(..), .. }) => {
let owner = tcx.hir.get_parent_did(node_id);
- tcx.item_tables(owner).node_id_to_type(node_id)
+ tcx.typeck_tables_of(owner).node_id_to_type(node_id)
}
x => {
}
hir::ItemImpl(.., ref opt_trait_ref, _, _) => {
opt_trait_ref.as_ref().map(|ast_trait_ref| {
- let selfty = tcx.item_type(def_id);
+ let selfty = tcx.type_of(def_id);
AstConv::instantiate_mono_trait_ref(&icx, ast_trait_ref, selfty)
})
}
let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
match tcx.hir.expect_item(node_id).node {
hir::ItemImpl(_, polarity, ..) => polarity,
- ref item => bug!("trait_impl_polarity: {:?} not an impl", item)
+ ref item => bug!("impl_polarity: {:?} not an impl", item)
}
}
.filter(move |l| !tcx.named_region_map.late_bound.contains(&l.lifetime.id))
}
-fn predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> ty::GenericPredicates<'tcx> {
+fn predicates_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> ty::GenericPredicates<'tcx> {
use rustc::hir::map::*;
use rustc::hir::*;
NodeItem(item) => {
match item.node {
ItemFn(.., ref generics, _) |
- ItemImpl(_, _, ref generics, ..) |
+ ItemImpl(_, _, _, ref generics, ..) |
ItemTy(_, ref generics) |
ItemEnum(_, ref generics) |
ItemStruct(_, ref generics) |
_ => &no_generics
};
- let generics = tcx.item_generics(def_id);
+ let generics = tcx.generics_of(def_id);
let parent_count = generics.parent_count() as u32;
let has_own_self = generics.has_self && parent_count == 0;
// on a trait we need to add in the supertrait bounds and bounds found on
// associated types.
if let Some((trait_ref, _)) = is_trait {
- predicates = tcx.item_super_predicates(def_id).predicates;
+ predicates = tcx.super_predicates_of(def_id).predicates;
// Add in a predicate that `Self:Trait` (where `Trait` is the
// current trait). This is needed for builtin bounds.
// in trait checking. See `setup_constraining_predicates`
// for details.
if let NodeItem(&Item { node: ItemImpl(..), .. }) = node {
- let self_ty = tcx.item_type(def_id);
+ let self_ty = tcx.type_of(def_id);
let trait_ref = tcx.impl_trait_ref(def_id);
ctp::setup_constraining_predicates(&mut predicates,
trait_ref,
impl_item_refs: &[hir::ImplItemRef])
{
// Every lifetime used in an associated type must be constrained.
- let impl_self_ty = tcx.item_type(impl_def_id);
- let impl_generics = tcx.item_generics(impl_def_id);
- let impl_predicates = tcx.item_predicates(impl_def_id);
+ let impl_self_ty = tcx.type_of(impl_def_id);
+ let impl_generics = tcx.generics_of(impl_def_id);
+ let impl_predicates = tcx.predicates_of(impl_def_id);
let impl_trait_ref = tcx.impl_trait_ref(impl_def_id);
let mut input_parameters = ctp::parameters_for_impl(impl_self_ty, impl_trait_ref);
item.kind == ty::AssociatedKind::Type && item.defaultness.has_value()
})
.flat_map(|def_id| {
- ctp::parameters_for(&tcx.item_type(def_id), true)
+ ctp::parameters_for(&tcx.type_of(def_id), true)
}).collect();
for (ty_lifetime, lifetime) in impl_generics.regions.iter()
.zip(&impl_hir_generics.lifetimes)
main_id: ast::NodeId,
main_span: Span) {
let main_def_id = tcx.hir.local_def_id(main_id);
- let main_t = tcx.item_type(main_def_id);
+ let main_t = tcx.type_of(main_def_id);
match main_t.sty {
ty::TyFnDef(..) => {
match tcx.hir.find(main_id) {
start_id: ast::NodeId,
start_span: Span) {
let start_def_id = tcx.hir.local_def_id(start_id);
- let start_t = tcx.item_type(start_def_id);
+ let start_t = tcx.type_of(start_def_id);
match start_t.sty {
ty::TyFnDef(..) => {
match tcx.hir.find(start_id) {
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemUnion(..) => {
- let generics = tcx.item_generics(did);
+ let generics = tcx.generics_of(did);
// Not entirely obvious: constraints on structs/enums do not
// affect the variance of their type parameters. See discussion
//
// self.add_constraints_from_generics(generics);
- for field in tcx.lookup_adt_def(did).all_fields() {
+ for field in tcx.adt_def(did).all_fields() {
self.add_constraints_from_ty(generics,
- tcx.item_type(field.did),
+ tcx.type_of(field.did),
self.covariant);
}
}
hir::ItemTrait(..) => {
- let generics = tcx.item_generics(did);
+ let generics = tcx.generics_of(did);
let trait_ref = ty::TraitRef {
def_id: did,
substs: Substs::identity_for_item(tcx, did)
} else {
// Parameter on an item defined within another crate:
// variance already inferred, just look it up.
- let variances = self.tcx().item_variances(item_def_id);
+ let variances = self.tcx().variances_of(item_def_id);
self.constant_term(variances[index])
}
}
trait_ref,
variance);
- let trait_generics = self.tcx().item_generics(trait_ref.def_id);
+ let trait_generics = self.tcx().generics_of(trait_ref.def_id);
// This edge is actually implied by the call to
- // `lookup_trait_def`, but I'm trying to be future-proof. See
+ // `trait_def`, but I'm trying to be future-proof. See
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(VarianceDepNode(trait_ref.def_id));
}
ty::TyAdt(def, substs) => {
- let adt_generics = self.tcx().item_generics(def.did);
+ let adt_generics = self.tcx().generics_of(def.did);
// This edge is actually implied by the call to
- // `lookup_trait_def`, but I'm trying to be future-proof. See
+ // `trait_def`, but I'm trying to be future-proof. See
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(VarianceDepNode(def.did));
ty::TyProjection(ref data) => {
let trait_ref = &data.trait_ref;
- let trait_generics = self.tcx().item_generics(trait_ref.def_id);
+ let trait_generics = self.tcx().generics_of(trait_ref.def_id);
// This edge is actually implied by the call to
- // `lookup_trait_def`, but I'm trying to be future-proof. See
+ // `trait_def`, but I'm trying to be future-proof. See
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(VarianceDepNode(trait_ref.def_id));
item_variances);
}
- tcx.maps.variances
- .borrow_mut()
+ tcx.maps.variances_of.borrow_mut()
.insert(item_def_id, Rc::new(item_variances));
}
}
// parameters".
if self.num_inferred() == inferreds_on_entry {
let item_def_id = self.tcx.hir.local_def_id(item_id);
- self.tcx.maps.variances
- .borrow_mut()
+ self.tcx.maps.variances_of.borrow_mut()
.insert(item_def_id, self.empty_variances.clone());
}
}
use std::iter::once;
use syntax::ast;
-use syntax_pos::DUMMY_SP;
use rustc::hir;
use rustc::hir::def::{Def, CtorKind};
pub fn build_external_trait(cx: &DocContext, did: DefId) -> clean::Trait {
let trait_items = cx.tcx.associated_items(did).map(|item| item.clean(cx)).collect();
- let predicates = cx.tcx.item_predicates(did);
- let generics = (cx.tcx.item_generics(did), &predicates).clean(cx);
+ let predicates = cx.tcx.predicates_of(did);
+ let generics = (cx.tcx.generics_of(did), &predicates).clean(cx);
let generics = filter_non_trait_generics(did, generics);
let (generics, supertrait_bounds) = separate_supertrait_bounds(generics);
clean::Trait {
- unsafety: cx.tcx.lookup_trait_def(did).unsafety,
+ unsafety: cx.tcx.trait_def(did).unsafety,
generics: generics,
items: trait_items,
bounds: supertrait_bounds,
}
fn build_external_function(cx: &DocContext, did: DefId) -> clean::Function {
- let sig = cx.tcx.item_type(did).fn_sig();
+ let sig = cx.tcx.type_of(did).fn_sig();
let constness = if cx.tcx.sess.cstore.is_const_fn(did) {
hir::Constness::Const
hir::Constness::NotConst
};
- let predicates = cx.tcx.item_predicates(did);
+ let predicates = cx.tcx.predicates_of(did);
clean::Function {
decl: (did, sig).clean(cx),
- generics: (cx.tcx.item_generics(did), &predicates).clean(cx),
+ generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
unsafety: sig.unsafety(),
constness: constness,
abi: sig.abi(),
}
fn build_enum(cx: &DocContext, did: DefId) -> clean::Enum {
- let predicates = cx.tcx.item_predicates(did);
+ let predicates = cx.tcx.predicates_of(did);
clean::Enum {
- generics: (cx.tcx.item_generics(did), &predicates).clean(cx),
+ generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
variants_stripped: false,
- variants: cx.tcx.lookup_adt_def(did).variants.clean(cx),
+ variants: cx.tcx.adt_def(did).variants.clean(cx),
}
}
fn build_struct(cx: &DocContext, did: DefId) -> clean::Struct {
- let predicates = cx.tcx.item_predicates(did);
- let variant = cx.tcx.lookup_adt_def(did).struct_variant();
+ let predicates = cx.tcx.predicates_of(did);
+ let variant = cx.tcx.adt_def(did).struct_variant();
clean::Struct {
struct_type: match variant.ctor_kind {
CtorKind::Fn => doctree::Tuple,
CtorKind::Const => doctree::Unit,
},
- generics: (cx.tcx.item_generics(did), &predicates).clean(cx),
+ generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
fields: variant.fields.clean(cx),
fields_stripped: false,
}
}
fn build_union(cx: &DocContext, did: DefId) -> clean::Union {
- let predicates = cx.tcx.item_predicates(did);
- let variant = cx.tcx.lookup_adt_def(did).struct_variant();
+ let predicates = cx.tcx.predicates_of(did);
+ let variant = cx.tcx.adt_def(did).struct_variant();
clean::Union {
struct_type: doctree::Plain,
- generics: (cx.tcx.item_generics(did), &predicates).clean(cx),
+ generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
fields: variant.fields.clean(cx),
fields_stripped: false,
}
}
fn build_type_alias(cx: &DocContext, did: DefId) -> clean::Typedef {
- let predicates = cx.tcx.item_predicates(did);
+ let predicates = cx.tcx.predicates_of(did);
clean::Typedef {
- type_: cx.tcx.item_type(did).clean(cx),
- generics: (cx.tcx.item_generics(did), &predicates).clean(cx),
+ type_: cx.tcx.type_of(did).clean(cx),
+ generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
}
}
let tcx = cx.tcx;
let mut impls = Vec::new();
- for &did in ty::queries::inherent_impls::get(tcx, DUMMY_SP, did).iter() {
+ for &did in tcx.inherent_impls(did).iter() {
build_impl(cx, did, &mut impls);
}
});
}
- let for_ = tcx.item_type(did).clean(cx);
+ let for_ = tcx.type_of(did).clean(cx);
// Only inline impl if the implementing type is
// reachable in rustdoc generated documentation
}
}
- let predicates = tcx.item_predicates(did);
+ let predicates = tcx.predicates_of(did);
let trait_items = tcx.associated_items(did).filter_map(|item| {
match item.kind {
ty::AssociatedKind::Const => {
Some(clean::Item {
name: Some(item.name.clean(cx)),
inner: clean::AssociatedConstItem(
- tcx.item_type(item.def_id).clean(cx),
+ tcx.type_of(item.def_id).clean(cx),
default,
),
source: tcx.def_span(item.def_id).clean(cx),
}
ty::AssociatedKind::Type => {
let typedef = clean::Typedef {
- type_: tcx.item_type(item.def_id).clean(cx),
+ type_: tcx.type_of(item.def_id).clean(cx),
generics: clean::Generics {
lifetimes: vec![],
type_params: vec![],
}
}
}).collect::<Vec<_>>();
- let polarity = tcx.trait_impl_polarity(did);
+ let polarity = tcx.impl_polarity(did);
let trait_ = associated_trait.clean(cx).map(|bound| {
match bound {
clean::TraitBound(polyt, _) => polyt.trait_,
provided_trait_methods: provided,
trait_: trait_,
for_: for_,
- generics: (tcx.item_generics(did), &predicates).clean(cx),
+ generics: (tcx.generics_of(did), &predicates).clean(cx),
items: trait_items,
polarity: Some(polarity.clean(cx)),
}),
fn build_const(cx: &DocContext, did: DefId) -> clean::Constant {
clean::Constant {
- type_: cx.tcx.item_type(did).clean(cx),
+ type_: cx.tcx.type_of(did).clean(cx),
expr: print_inlined_const(cx, did)
}
}
fn build_static(cx: &DocContext, did: DefId, mutable: bool) -> clean::Static {
clean::Static {
- type_: cx.tcx.item_type(did).clean(cx),
+ type_: cx.tcx.type_of(did).clean(cx),
mutability: if mutable {clean::Mutable} else {clean::Immutable},
expr: "\n\n\n".to_string(), // trigger the "[definition]" links
}
did: self.def_id,
bounds: vec![], // these are filled in from the where-clauses
default: if self.has_default {
- Some(cx.tcx.item_type(self.def_id).clean(cx))
+ Some(cx.tcx.type_of(self.def_id).clean(cx))
} else {
None
}
fn clean(&self, cx: &DocContext) -> Item {
let inner = match self.kind {
ty::AssociatedKind::Const => {
- let ty = cx.tcx.item_type(self.def_id);
+ let ty = cx.tcx.type_of(self.def_id);
AssociatedConstItem(ty.clean(cx), None)
}
ty::AssociatedKind::Method => {
- let generics = (cx.tcx.item_generics(self.def_id),
- &cx.tcx.item_predicates(self.def_id)).clean(cx);
- let sig = cx.tcx.item_type(self.def_id).fn_sig();
+ let generics = (cx.tcx.generics_of(self.def_id),
+ &cx.tcx.predicates_of(self.def_id)).clean(cx);
+ let sig = cx.tcx.type_of(self.def_id).fn_sig();
let mut decl = (self.def_id, sig).clean(cx);
if self.method_has_self_argument {
let self_ty = match self.container {
ty::ImplContainer(def_id) => {
- cx.tcx.item_type(def_id)
+ cx.tcx.type_of(def_id)
}
ty::TraitContainer(_) => cx.tcx.mk_self_type()
};
// are actually located on the trait/impl itself, so we need to load
// all of the generics from there and then look for bounds that are
// applied to this associated type in question.
- let predicates = cx.tcx.item_predicates(did);
- let generics = (cx.tcx.item_generics(did), &predicates).clean(cx);
+ let predicates = cx.tcx.predicates_of(did);
+ let generics = (cx.tcx.generics_of(did), &predicates).clean(cx);
generics.where_predicates.iter().filter_map(|pred| {
let (name, self_type, trait_, bounds) = match *pred {
WherePredicate::BoundPredicate {
}
let ty = if self.defaultness.has_value() {
- Some(cx.tcx.item_type(self.def_id))
+ Some(cx.tcx.type_of(self.def_id))
} else {
None
};
ty::TyAnon(def_id, substs) => {
// Grab the "TraitA + TraitB" from `impl TraitA + TraitB`,
// by looking up the projections associated with the def_id.
- let item_predicates = cx.tcx.item_predicates(def_id);
+ let predicates_of = cx.tcx.predicates_of(def_id);
let substs = cx.tcx.lift(&substs).unwrap();
- let bounds = item_predicates.instantiate(cx.tcx, substs);
+ let bounds = predicates_of.instantiate(cx.tcx, substs);
ImplTrait(bounds.predicates.into_iter().filter_map(|predicate| {
predicate.to_opt_poly_trait_ref().clean(cx)
}).collect())
stability: get_stability(cx, self.did),
deprecation: get_deprecation(cx, self.did),
def_id: self.did,
- inner: StructFieldItem(cx.tcx.item_type(self.did).clean(cx)),
+ inner: StructFieldItem(cx.tcx.type_of(self.did).clean(cx)),
}
}
}
CtorKind::Const => VariantKind::CLike,
CtorKind::Fn => {
VariantKind::Tuple(
- self.fields.iter().map(|f| cx.tcx.item_type(f.did).clean(cx)).collect()
+ self.fields.iter().map(|f| cx.tcx.type_of(f.did).clean(cx)).collect()
)
}
CtorKind::Fictive => {
def_id: field.did,
stability: get_stability(cx, field.did),
deprecation: get_deprecation(cx, field.did),
- inner: StructFieldItem(cx.tcx.item_type(field.did).clean(cx))
+ inner: StructFieldItem(cx.tcx.type_of(field.did).clean(cx))
}
}).collect()
})
if child == trait_ {
return true
}
- let predicates = cx.tcx.item_super_predicates(child).predicates;
+ let predicates = cx.tcx.super_predicates_of(child).predicates;
predicates.iter().filter_map(|pred| {
if let ty::Predicate::Trait(ref pred) = *pred {
if pred.0.trait_ref.self_ty().is_self() {
..config::basic_options().clone()
};
- let codemap = Rc::new(codemap::CodeMap::new());
+ let codemap = Rc::new(codemap::CodeMap::new(sessopts.file_path_mapping()));
let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
true,
false,
pub struct Impl {
pub unsafety: hir::Unsafety,
pub polarity: hir::ImplPolarity,
+ pub defaultness: hir::Defaultness,
pub generics: hir::Generics,
pub trait_: Option<hir::TraitRef>,
pub for_: P<hir::Ty>,
use std::io;
use std::io::prelude::*;
-use syntax::codemap::CodeMap;
+use syntax::codemap::{CodeMap, FilePathMapping};
use syntax::parse::lexer::{self, TokenAndSpan};
use syntax::parse::token;
use syntax::parse;
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>,
extension: Option<&str>) -> String {
debug!("highlighting: ================\n{}\n==============", src);
- let sess = parse::ParseSess::new();
- let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string());
+ let sess = parse::ParseSess::new(FilePathMapping::empty());
+ let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new();
write_header(class, id, &mut out).unwrap();
/// be inserted into an element. C.f., `render_with_highlighting` which includes
/// an enclosing `<pre>` block.
pub fn render_inner_with_highlighting(src: &str) -> io::Result<String> {
- let sess = parse::ParseSess::new();
- let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string());
+ let sess = parse::ParseSess::new(FilePathMapping::empty());
+ let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());
..config::basic_options().clone()
};
- let codemap = Rc::new(CodeMap::new());
+ let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping()));
let handler =
errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(codemap.clone()));
}
}
let data = Arc::new(Mutex::new(Vec::new()));
- let codemap = Rc::new(CodeMap::new());
+ let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping()));
let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()),
Some(codemap.clone()));
let old = io::set_panic(Some(box Sink(data.clone())));
om.traits.push(t);
},
- hir::ItemImpl(unsafety, polarity, ref gen, ref tr, ref ty, ref item_ids) => {
+ hir::ItemImpl(unsafety,
+ polarity,
+ defaultness,
+ ref gen,
+ ref tr,
+ ref ty,
+ ref item_ids) => {
// Don't duplicate impls when inlining, we'll pick them up
// regardless of where they're located.
if !self.inlining {
let i = Impl {
unsafety: unsafety,
polarity: polarity,
+ defaultness: defaultness,
generics: gen.clone(),
trait_: tr.clone(),
for_: ty.clone(),
std_unicode = { path = "../libstd_unicode" }
unwind = { path = "../libunwind" }
+[target.x86_64-apple-darwin.dependencies]
+rustc_asan = { path = "../librustc_asan" }
+rustc_tsan = { path = "../librustc_tsan" }
+
[target.x86_64-unknown-linux-gnu.dependencies]
rustc_asan = { path = "../librustc_asan" }
rustc_lsan = { path = "../librustc_lsan" }
/// variant will be returned. If an error is returned then it must be
/// guaranteed that no bytes were read.
///
+ /// An error of the `ErrorKind::Interrupted` kind is non-fatal and the read
+ /// operation should be retried if there is nothing else to do.
+ ///
/// # Examples
///
/// [`File`][file]s implement `Read`:
/// let mut f = File::open("foo.txt")?;
/// let mut buffer = [0; 10];
///
- /// // read 10 bytes
+ /// // read up to 10 bytes
/// f.read(&mut buffer[..])?;
/// # Ok(())
/// # }
/// It is **not** considered an error if the entire buffer could not be
/// written to this writer.
///
+ /// An error of the `ErrorKind::Interrupted` kind is non-fatal and the
+ /// write operation should be retried if there is nothing else to do.
+ ///
/// # Examples
///
/// ```
/// # fn foo() -> std::io::Result<()> {
/// let mut buffer = File::create("foo.txt")?;
///
+ /// // Writes some prefix of the byte string, not necessarily all of it.
/// buffer.write(b"some bytes")?;
/// # Ok(())
/// # }
/// Attempts to write an entire buffer into this write.
///
- /// This method will continuously call `write` while there is more data to
- /// write. This method will not return until the entire buffer has been
- /// successfully written or an error occurs. The first error generated from
- /// this method will be returned.
+ /// This method will continuously call `write` until there is no more data
+ /// to be written or an error of non-`ErrorKind::Interrupted` kind is
+ /// returned. This method will not return until the entire buffer has been
+ /// successfully written or such an error occurs. The first error that is
+ /// not of `ErrorKind::Interrupted` kind generated from this method will be
+ /// returned.
///
/// # Errors
///
- /// This function will return the first error that `write` returns.
+ /// This function will return the first error of
+ /// non-`ErrorKind::Interrupted` kind that `write` returns.
///
/// # Examples
///
done_first: bool,
}
+impl<T, U> Chain<T, U> {
+ /// Consumes the `Chain`, returning the wrapped readers.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(more_io_inner_methods)]
+ ///
+ /// # use std::io;
+ /// use std::io::prelude::*;
+ /// use std::fs::File;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let mut foo_file = File::open("foo.txt")?;
+ /// let mut bar_file = File::open("bar.txt")?;
+ ///
+ /// let chain = foo_file.chain(bar_file);
+ /// let (foo_file, bar_file) = chain.into_inner();
+ /// # Ok(())
+ /// # }
+ /// ```
+ #[unstable(feature = "more_io_inner_methods", issue="41519")]
+ pub fn into_inner(self) -> (T, U) {
+ (self.first, self.second)
+ }
+
+ /// Gets references to the underlying readers in this `Chain`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(more_io_inner_methods)]
+ ///
+ /// # use std::io;
+ /// use std::io::prelude::*;
+ /// use std::fs::File;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let mut foo_file = File::open("foo.txt")?;
+ /// let mut bar_file = File::open("bar.txt")?;
+ ///
+ /// let chain = foo_file.chain(bar_file);
+ /// let (foo_file, bar_file) = chain.get_ref();
+ /// # Ok(())
+ /// # }
+ /// ```
+ #[unstable(feature = "more_io_inner_methods", issue="41519")]
+ pub fn get_ref(&self) -> (&T, &U) {
+ (&self.first, &self.second)
+ }
+
+ /// Gets mutable references to the underlying readers in this `Chain`.
+ ///
+ /// Care should be taken to avoid modifying the internal I/O state of the
+ /// underlying readers as doing so may corrupt the internal state of this
+ /// `Chain`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(more_io_inner_methods)]
+ ///
+ /// # use std::io;
+ /// use std::io::prelude::*;
+ /// use std::fs::File;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let mut foo_file = File::open("foo.txt")?;
+ /// let mut bar_file = File::open("bar.txt")?;
+ ///
+ /// let mut chain = foo_file.chain(bar_file);
+ /// let (foo_file, bar_file) = chain.get_mut();
+ /// # Ok(())
+ /// # }
+ /// ```
+ #[unstable(feature = "more_io_inner_methods", issue="41519")]
+ pub fn get_mut(&mut self) -> (&mut T, &mut U) {
+ (&mut self.first, &mut self.second)
+ }
+}
+
#[stable(feature = "std_debug", since = "1.16.0")]
impl<T: fmt::Debug, U: fmt::Debug> fmt::Debug for Chain<T, U> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
pub fn into_inner(self) -> T {
self.inner
}
+
+ /// Gets a reference to the underlying reader.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(more_io_inner_methods)]
+ ///
+ /// use std::io;
+ /// use std::io::prelude::*;
+ /// use std::fs::File;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let mut file = File::open("foo.txt")?;
+ ///
+ /// let mut buffer = [0; 5];
+ /// let mut handle = file.take(5);
+ /// handle.read(&mut buffer)?;
+ ///
+ /// let file = handle.get_ref();
+ /// # Ok(())
+ /// # }
+ /// ```
+ #[unstable(feature = "more_io_inner_methods", issue="41519")]
+ pub fn get_ref(&self) -> &T {
+ &self.inner
+ }
+
+ /// Gets a mutable reference to the underlying reader.
+ ///
+ /// Care should be taken to avoid modifying the internal I/O state of the
+ /// underlying reader as doing so may corrupt the internal limit of this
+ /// `Take`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(more_io_inner_methods)]
+ ///
+ /// use std::io;
+ /// use std::io::prelude::*;
+ /// use std::fs::File;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let mut file = File::open("foo.txt")?;
+ ///
+ /// let mut buffer = [0; 5];
+ /// let mut handle = file.take(5);
+ /// handle.read(&mut buffer)?;
+ ///
+ /// let file = handle.get_mut();
+ /// # Ok(())
+ /// # }
+ /// ```
+ #[unstable(feature = "more_io_inner_methods", issue="41519")]
+ pub fn get_mut(&mut self) -> &mut T {
+ &mut self.inner
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
///
/// # Examples
///
-/// ```no_run
+/// ```
+/// # use std::io;
/// use std::net::{TcpListener, TcpStream};
///
-/// let listener = TcpListener::bind("127.0.0.1:80").unwrap();
-///
/// fn handle_client(stream: TcpStream) {
/// // ...
/// }
///
+/// # fn process() -> io::Result<()> {
+/// let listener = TcpListener::bind("127.0.0.1:80").unwrap();
+///
/// // accept connections and process them serially
/// for stream in listener.incoming() {
-/// match stream {
-/// Ok(stream) => {
-/// handle_client(stream);
-/// }
-/// Err(e) => { /* connection failed */ }
-/// }
+/// handle_client(stream?);
/// }
+/// # Ok(())
+/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct TcpListener(net_imp::TcpListener);
///
/// [`Shutdown`]: ../../std/net/enum.Shutdown.html
///
+ /// # Platform-specific behavior
+ ///
+    /// Calling this function multiple times may result in different behavior,
+    /// depending on the operating system. On Linux, the second call will
+    /// return `Ok(())`, but on macOS, it will return an error of kind
+    /// `ErrorKind::NotConnected`. This may change in the future.
+ ///
/// # Examples
///
/// ```no_run
mod mpsc_queue;
mod spsc_queue;
-/// The receiving-half of Rust's channel type. This half can only be owned by
-/// one thread.
+/// The receiving half of Rust's [`channel`][] (or [`sync_channel`]) type.
+/// This half can only be owned by one thread.
///
/// Messages sent to the channel can be retrieved using [`recv`].
///
-/// [`recv`]: ../../../std/sync/mpsc/struct.Receiver.html#method.recv
+/// [`channel`]: fn.channel.html
+/// [`sync_channel`]: fn.sync_channel.html
+/// [`recv`]: struct.Receiver.html#method.recv
///
/// # Examples
///
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> !Sync for Receiver<T> { }
-/// An iterator over messages on a receiver, this iterator will block whenever
-/// [`next`] is called, waiting for a new message, and [`None`] will be returned
+/// An iterator over messages on a [`Receiver`], created by [`iter`].
+///
+/// This iterator will block whenever [`next`] is called,
+/// waiting for a new message, and [`None`] will be returned
/// when the corresponding channel has hung up.
///
+/// [`iter`]: struct.Receiver.html#method.iter
+/// [`Receiver`]: struct.Receiver.html
/// [`next`]: ../../../std/iter/trait.Iterator.html#tymethod.next
/// [`None`]: ../../../std/option/enum.Option.html#variant.None
+///
+/// # Examples
+///
+/// ```rust
+/// use std::sync::mpsc::channel;
+/// use std::thread;
+///
+/// let (send, recv) = channel();
+///
+/// thread::spawn(move || {
+/// send.send(1u8).unwrap();
+/// send.send(2u8).unwrap();
+/// send.send(3u8).unwrap();
+/// });
+///
+/// for x in recv.iter() {
+/// println!("Got: {}", x);
+/// }
+/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct Iter<'a, T: 'a> {
rx: &'a Receiver<T>
}
-/// An iterator that attempts to yield all pending values for a receiver.
-/// [`None`] will be returned when there are no pending values remaining or if
-/// the corresponding channel has hung up.
+/// An iterator that attempts to yield all pending values for a [`Receiver`],
+/// created by [`try_iter`].
+///
+/// [`None`] will be returned when there are no pending values remaining or
+/// if the corresponding channel has hung up.
///
-/// This Iterator will never block the caller in order to wait for data to
+/// This iterator will never block the caller in order to wait for data to
/// become available. Instead, it will return [`None`].
///
+/// [`Receiver`]: struct.Receiver.html
+/// [`try_iter`]: struct.Receiver.html#method.try_iter
/// [`None`]: ../../../std/option/enum.Option.html#variant.None
+///
+/// # Examples
+///
+/// ```rust
+/// use std::sync::mpsc::channel;
+/// use std::thread;
+/// use std::time::Duration;
+///
+/// let (sender, receiver) = channel();
+///
+/// // Nothing is in the buffer yet
+/// assert!(receiver.try_iter().next().is_none());
+/// println!("Nothing in the buffer...");
+///
+/// thread::spawn(move || {
+/// sender.send(1).unwrap();
+/// sender.send(2).unwrap();
+/// sender.send(3).unwrap();
+/// });
+///
+/// println!("Going to sleep...");
+/// thread::sleep(Duration::from_secs(2)); // block for two seconds
+///
+/// for x in receiver.try_iter() {
+/// println!("Got: {}", x);
+/// }
+/// ```
#[stable(feature = "receiver_try_iter", since = "1.15.0")]
#[derive(Debug)]
pub struct TryIter<'a, T: 'a> {
rx: &'a Receiver<T>
}
-/// An owning iterator over messages on a receiver, this iterator will block
-/// whenever [`next`] is called, waiting for a new message, and [`None`] will be
-/// returned when the corresponding channel has hung up.
+/// An owning iterator over messages on a [`Receiver`],
+/// created by [`into_iter`].
+///
+/// This iterator will block whenever [`next`]
+/// is called, waiting for a new message, and [`None`] will be
+/// returned when the corresponding channel has hung up.
///
+/// [`Receiver`]: struct.Receiver.html
+/// [`into_iter`]: struct.Receiver.html#method.into_iter
/// [`next`]: ../../../std/iter/trait.Iterator.html#tymethod.next
/// [`None`]: ../../../std/option/enum.Option.html#variant.None
///
+/// # Examples
+///
+/// ```rust
+/// use std::sync::mpsc::channel;
+/// use std::thread;
+///
+/// let (send, recv) = channel();
+///
+/// thread::spawn(move || {
+/// send.send(1u8).unwrap();
+/// send.send(2u8).unwrap();
+/// send.send(3u8).unwrap();
+/// });
+///
+/// for x in recv.into_iter() {
+/// println!("Got: {}", x);
+/// }
+/// ```
#[stable(feature = "receiver_into_iter", since = "1.1.0")]
#[derive(Debug)]
pub struct IntoIter<T> {
rx: Receiver<T>
}
-/// The sending-half of Rust's asynchronous channel type. This half can only be
+/// The sending-half of Rust's asynchronous [`channel`] type. This half can only be
/// owned by one thread, but it can be cloned to send to other threads.
///
/// Messages can be sent through this channel with [`send`].
///
-/// [`send`]: ../../../std/sync/mpsc/struct.Sender.html#method.send
+/// [`channel`]: fn.channel.html
+/// [`send`]: struct.Sender.html#method.send
///
/// # Examples
///
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> !Sync for Sender<T> { }
-/// The sending-half of Rust's synchronous channel type. This half can only be
-/// owned by one thread, but it can be cloned to send to other threads.
+/// The sending-half of Rust's synchronous [`sync_channel`] type.
+/// This half can only be owned by one thread, but it can be cloned
+/// to send to other threads.
+///
+/// Messages can be sent through this channel with [`send`] or [`try_send`].
+///
+/// [`send`] will block if there is no space in the internal buffer.
+///
+/// [`sync_channel`]: fn.sync_channel.html
+/// [`send`]: struct.SyncSender.html#method.send
+/// [`try_send`]: struct.SyncSender.html#method.try_send
+///
+/// # Examples
+///
+/// ```rust
+/// use std::sync::mpsc::sync_channel;
+/// use std::thread;
///
-/// [`send`]: ../../../std/sync/mpsc/struct.Sender.html#method.send
-/// [`SyncSender::send`]: ../../../std/sync/mpsc/struct.SyncSender.html#method.send
+/// // Create a sync_channel with buffer size 2
+/// let (sync_sender, receiver) = sync_channel(2);
+/// let sync_sender2 = sync_sender.clone();
///
+/// // First thread owns sync_sender
+/// thread::spawn(move || {
+/// sync_sender.send(1).unwrap();
+/// sync_sender.send(2).unwrap();
+/// });
+///
+/// // Second thread owns sync_sender2
+/// thread::spawn(move || {
+/// sync_sender2.send(3).unwrap();
+/// // thread will now block since the buffer is full
+/// println!("Thread unblocked!");
+/// });
+///
+/// let mut msg;
+///
+/// msg = receiver.recv().unwrap();
+/// println!("message {} received", msg);
+///
+/// // "Thread unblocked!" will be printed now
+///
+/// msg = receiver.recv().unwrap();
+/// println!("message {} received", msg);
+///
+/// msg = receiver.recv().unwrap();
+///
+/// println!("message {} received", msg);
+/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SyncSender<T> {
inner: Arc<sync::Packet<T>>,
/// Note that a successful send does *not* guarantee that the receiver will
/// ever see the data if there is a buffer on this channel. Items may be
/// enqueued in the internal buffer for the receiver to receive at a later
- /// time. If the buffer size is 0, however, it can be guaranteed that the
- /// receiver has indeed received the data if this function returns success.
+ /// time. If the buffer size is 0, however, the channel becomes a rendezvous
+ /// channel and it guarantees that the receiver has indeed received
+ /// the data if this function returns success.
///
/// This function will never panic, but it may return [`Err`] if the
/// [`Receiver`] has disconnected and is no longer able to receive
///
/// [`Err`]: ../../../std/result/enum.Result.html#variant.Err
/// [`Receiver`]: ../../../std/sync/mpsc/struct.Receiver.html
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// use std::sync::mpsc::sync_channel;
+ /// use std::thread;
+ ///
+ /// // Create a rendezvous sync_channel with buffer size 0
+ /// let (sync_sender, receiver) = sync_channel(0);
+ ///
+ /// thread::spawn(move || {
+ /// println!("sending message...");
+ /// sync_sender.send(1).unwrap();
+ /// // Thread is now blocked until the message is received
+ ///
+ /// println!("...message received!");
+ /// });
+ ///
+ /// let msg = receiver.recv().unwrap();
+ /// assert_eq!(1, msg);
+ /// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn send(&self, t: T) -> Result<(), SendError<T>> {
self.inner.send(t).map_err(SendError)
/// data. Compared with [`send`], this function has two failure cases
/// instead of one (one for disconnection, one for a full buffer).
///
- /// See [`SyncSender::send`] for notes about guarantees of whether the
+ /// See [`send`] for notes about guarantees of whether the
/// receiver has received the data or not if this function is successful.
///
- /// [`send`]: ../../../std/sync/mpsc/struct.Sender.html#method.send
- /// [`SyncSender::send`]: ../../../std/sync/mpsc/struct.SyncSender.html#method.send
+ /// [`send`]: ../../../std/sync/mpsc/struct.SyncSender.html#method.send
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// use std::sync::mpsc::sync_channel;
+ /// use std::thread;
+ ///
+ /// // Create a sync_channel with buffer size 1
+ /// let (sync_sender, receiver) = sync_channel(1);
+ /// let sync_sender2 = sync_sender.clone();
+ ///
+ /// // First thread owns sync_sender
+ /// thread::spawn(move || {
+ /// sync_sender.send(1).unwrap();
+ /// sync_sender.send(2).unwrap();
+ /// // Thread blocked
+ /// });
+ ///
+ /// // Second thread owns sync_sender2
+ /// thread::spawn(move || {
+ /// // This will return an error and send
+ /// // no message if the buffer is full
+ /// sync_sender2.try_send(3).is_err();
+ /// });
+ ///
+ /// let mut msg;
+ /// msg = receiver.recv().unwrap();
+ /// println!("message {} received", msg);
+ ///
+ /// msg = receiver.recv().unwrap();
+ /// println!("message {} received", msg);
+ ///
+ /// // Third message may have never been sent
+ /// match receiver.try_recv() {
+ /// Ok(msg) => println!("message {} received", msg),
+ /// Err(_) => println!("the third message was never sent"),
+ /// }
+ /// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn try_send(&self, t: T) -> Result<(), TrySendError<T>> {
self.inner.try_send(t)
///
/// This is useful for a flavor of "optimistic check" before deciding to
/// block on a receiver.
+ ///
+ /// Compared with [`recv`], this function has two failure cases instead of one
+ /// (one for disconnection, one for an empty buffer).
+ ///
+ /// [`recv`]: struct.Receiver.html#method.recv
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// use std::sync::mpsc::{Receiver, channel};
+ ///
+ /// let (_, receiver): (_, Receiver<i32>) = channel();
+ ///
+ /// assert!(receiver.try_recv().is_err());
+ /// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn try_recv(&self) -> Result<T, TryRecvError> {
loop {
///
/// This function will always block the current thread if there is no data
/// available and it's possible for more data to be sent. Once a message is
- /// sent to the corresponding [`Sender`], then this receiver will wake up and
- /// return that message.
+ /// sent to the corresponding [`Sender`][] (or [`SyncSender`]), then this
+ /// receiver will wake up and return that message.
///
/// If the corresponding [`Sender`] has disconnected, or it disconnects while
/// this call is blocking, this call will wake up and return [`Err`] to
/// However, since channels are buffered, messages sent before the disconnect
/// will still be properly received.
///
- /// [`Sender`]: ../../../std/sync/mpsc/struct.Sender.html
+ /// [`Sender`]: struct.Sender.html
+ /// [`SyncSender`]: struct.SyncSender.html
/// [`Err`]: ../../../std/result/enum.Result.html#variant.Err
///
/// # Examples
///
/// This function will always block the current thread if there is no data
/// available and it's possible for more data to be sent. Once a message is
- /// sent to the corresponding [`Sender`], then this receiver will wake up and
- /// return that message.
+ /// sent to the corresponding [`Sender`][] (or [`SyncSender`]), then this
+ /// receiver will wake up and return that message.
///
/// If the corresponding [`Sender`] has disconnected, or it disconnects while
/// this call is blocking, this call will wake up and return [`Err`] to
/// However, since channels are buffered, messages sent before the disconnect
/// will still be properly received.
///
- /// [`Sender`]: ../../../std/sync/mpsc/struct.Sender.html
+ /// [`Sender`]: struct.Sender.html
+ /// [`SyncSender`]: struct.SyncSender.html
/// [`Err`]: ../../../std/result/enum.Result.html#variant.Err
///
/// # Examples
/// user by waiting for values.
///
/// [`panic!`]: ../../../std/macro.panic.html
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// use std::sync::mpsc::channel;
+ /// use std::thread;
+ /// use std::time::Duration;
+ ///
+ /// let (sender, receiver) = channel();
+ ///
+ /// // Nothing is in the buffer yet
+ /// assert!(receiver.try_iter().next().is_none());
+ /// println!("Nothing in the buffer...");
+ ///
+ /// thread::spawn(move || {
+ /// sender.send(1).unwrap();
+ /// sender.send(2).unwrap();
+ /// sender.send(3).unwrap();
+ /// });
+ ///
+ /// println!("Going to sleep...");
+ /// thread::sleep(Duration::from_secs(2)); // block for two seconds
+ ///
+ /// for x in receiver.try_iter() {
+ /// println!("Got: {}", x);
+ /// }
+ /// ```
#[stable(feature = "receiver_try_iter", since = "1.15.0")]
pub fn try_iter(&self) -> TryIter<T> {
TryIter { rx: self }
pub fn sub_duration(&self, other: &Duration) -> Instant {
Instant {
t: self.t.checked_sub(dur2intervals(other))
- .expect("overflow when adding duration to instant"),
+ .expect("overflow when subtracting duration from instant"),
}
}
}
/// E.g. `impl<A> Foo<A> { .. }` or `impl<A> Trait for Foo<A> { .. }`
Impl(Unsafety,
ImplPolarity,
+ Defaultness,
Generics,
Option<TraitRef>, // (optional) trait this impl implements
P<Ty>, // self
pub struct CodeMap {
pub files: RefCell<Vec<Rc<FileMap>>>,
- file_loader: Box<FileLoader>
+ file_loader: Box<FileLoader>,
+ // This is used to apply the file path remapping as specified via
+ // -Zremap-path-prefix to all FileMaps allocated within this CodeMap.
+ path_mapping: FilePathMapping,
}
impl CodeMap {
- pub fn new() -> CodeMap {
+ pub fn new(path_mapping: FilePathMapping) -> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
- file_loader: Box::new(RealFileLoader)
+ file_loader: Box::new(RealFileLoader),
+ path_mapping: path_mapping,
}
}
- pub fn with_file_loader(file_loader: Box<FileLoader>) -> CodeMap {
+ pub fn with_file_loader(file_loader: Box<FileLoader>,
+ path_mapping: FilePathMapping)
+ -> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
- file_loader: file_loader
+ file_loader: file_loader,
+ path_mapping: path_mapping,
}
}
+ pub fn path_mapping(&self) -> &FilePathMapping {
+ &self.path_mapping
+ }
+
pub fn file_exists(&self, path: &Path) -> bool {
self.file_loader.file_exists(path)
}
pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
let src = self.file_loader.read_file(path)?;
- let abs_path = self.file_loader.abs_path(path).map(|p| p.to_str().unwrap().to_string());
- Ok(self.new_filemap(path.to_str().unwrap().to_string(), abs_path, src))
+ Ok(self.new_filemap(path.to_str().unwrap().to_string(), src))
}
fn next_start_pos(&self) -> usize {
/// Creates a new filemap without setting its line information. If you don't
/// intend to set the line information yourself, you should use new_filemap_and_lines.
- pub fn new_filemap(&self, filename: FileName, abs_path: Option<FileName>,
- mut src: String) -> Rc<FileMap> {
+ pub fn new_filemap(&self, filename: FileName, mut src: String) -> Rc<FileMap> {
let start_pos = self.next_start_pos();
let mut files = self.files.borrow_mut();
let end_pos = start_pos + src.len();
+ let (filename, was_remapped) = self.path_mapping.map_prefix(filename);
+
let filemap = Rc::new(FileMap {
name: filename,
- abs_path: abs_path,
+ name_was_remapped: was_remapped,
src: Some(Rc::new(src)),
start_pos: Pos::from_usize(start_pos),
end_pos: Pos::from_usize(end_pos),
}
/// Creates a new filemap and sets its line information.
- pub fn new_filemap_and_lines(&self, filename: &str, abs_path: Option<&str>,
- src: &str) -> Rc<FileMap> {
- let fm = self.new_filemap(filename.to_string(),
- abs_path.map(|s| s.to_owned()),
- src.to_owned());
+ pub fn new_filemap_and_lines(&self, filename: &str, src: &str) -> Rc<FileMap> {
+ let fm = self.new_filemap(filename.to_string(), src.to_owned());
let mut byte_pos: u32 = fm.start_pos.0;
for line in src.lines() {
// register the start of this line
/// information for things inlined from other crates.
pub fn new_imported_filemap(&self,
filename: FileName,
- abs_path: Option<FileName>,
+ name_was_remapped: bool,
source_len: usize,
mut file_local_lines: Vec<BytePos>,
mut file_local_multibyte_chars: Vec<MultiByteChar>)
let filemap = Rc::new(FileMap {
name: filename,
- abs_path: abs_path,
+ name_was_remapped: name_was_remapped,
src: None,
start_pos: start_pos,
end_pos: end_pos,
}
}
+#[derive(Clone)]
+pub struct FilePathMapping {
+ mapping: Vec<(String, String)>,
+}
+
+impl FilePathMapping {
+ pub fn empty() -> FilePathMapping {
+ FilePathMapping {
+ mapping: vec![]
+ }
+ }
+
+ pub fn new(mapping: Vec<(String, String)>) -> FilePathMapping {
+ FilePathMapping {
+ mapping: mapping
+ }
+ }
+
+ /// Applies any path prefix substitution as defined by the mapping.
+ /// The return value is the remapped path and a boolean indicating whether
+ /// the path was affected by the mapping.
+ pub fn map_prefix(&self, path: String) -> (String, bool) {
+ // NOTE: We are iterating over the mapping entries from last to first
+ // because entries specified later on the command line should
+ // take precedence.
+ for &(ref from, ref to) in self.mapping.iter().rev() {
+ if path.starts_with(from) {
+ let mapped = path.replacen(from, to, 1);
+ return (mapped, true);
+ }
+ }
+
+ (path, false)
+ }
+}
+
// _____________________________________________________________________________
// Tests
//
#[test]
fn t1 () {
- let cm = CodeMap::new();
+ let cm = CodeMap::new(FilePathMapping::empty());
let fm = cm.new_filemap("blork.rs".to_string(),
- None,
"first line.\nsecond line".to_string());
fm.next_line(BytePos(0));
// Test we can get lines with partial line info.
#[test]
#[should_panic]
fn t2 () {
- let cm = CodeMap::new();
+ let cm = CodeMap::new(FilePathMapping::empty());
let fm = cm.new_filemap("blork.rs".to_string(),
- None,
"first line.\nsecond line".to_string());
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
}
fn init_code_map() -> CodeMap {
- let cm = CodeMap::new();
+ let cm = CodeMap::new(FilePathMapping::empty());
let fm1 = cm.new_filemap("blork.rs".to_string(),
- None,
"first line.\nsecond line".to_string());
let fm2 = cm.new_filemap("empty.rs".to_string(),
- None,
"".to_string());
let fm3 = cm.new_filemap("blork2.rs".to_string(),
- None,
"first line.\nsecond line".to_string());
fm1.next_line(BytePos(0));
}
fn init_code_map_mbc() -> CodeMap {
- let cm = CodeMap::new();
+ let cm = CodeMap::new(FilePathMapping::empty());
// € is a three byte utf8 char.
let fm1 =
cm.new_filemap("blork.rs".to_string(),
- None,
"fir€st €€€€ line.\nsecond line".to_string());
let fm2 = cm.new_filemap("blork2.rs".to_string(),
- None,
"first line€€.\n€ second line".to_string());
fm1.next_line(BytePos(0));
/// lines in the middle of a file.
#[test]
fn span_to_snippet_and_lines_spanning_multiple_lines() {
- let cm = CodeMap::new();
+ let cm = CodeMap::new(FilePathMapping::empty());
let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
let selection = " \n ~~\n~~~\n~~~~~ \n \n";
- cm.new_filemap_and_lines("blork.rs", None, inputtext);
+ cm.new_filemap_and_lines("blork.rs", inputtext);
let span = span_from_selection(inputtext, selection);
// check that we are extracting the text we thought we were extracting
/// Test failing to merge two spans on different lines
#[test]
fn span_merging_fail() {
- let cm = CodeMap::new();
+ let cm = CodeMap::new(FilePathMapping::empty());
let inputtext = "bbbb BB\ncc CCC\n";
let selection1 = " ~~\n \n";
let selection2 = " \n ~~~\n";
- cm.new_filemap_and_lines("blork.rs", None, inputtext);
+ cm.new_filemap_and_lines("blork.rs", inputtext);
let span1 = span_from_selection(inputtext, selection1);
let span2 = span_from_selection(inputtext, selection2);
fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
let filename = String::from("<macro expansion>");
- filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
+ filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text))
}
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
// Add this input file to the code map to make it available as
// dependency information
let filename = format!("{}", file.display());
- cx.codemap().new_filemap_and_lines(&filename, None, &src);
+ cx.codemap().new_filemap_and_lines(&filename, &src);
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
}
// Add this input file to the code map to make it available as
// dependency information, but don't enter its contents
let filename = format!("{}", file.display());
- cx.codemap().new_filemap_and_lines(&filename, None, "");
+ cx.codemap().new_filemap_and_lines(&filename, "");
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Rc::new(bytes))))
}
and possibly buggy");
}
- ast::ItemKind::Impl(_, polarity, _, _, _, _) => {
+ ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
match polarity {
ast::ImplPolarity::Negative => {
gate_feature_post!(&self, optin_builtin_traits,
},
_ => {}
}
+
+ if let ast::Defaultness::Default = defaultness {
+ gate_feature_post!(&self, specialization,
+ i.span,
+ "specialization is unstable");
+ }
}
_ => {}
ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
ItemKind::DefaultImpl(unsafety, folder.fold_trait_ref((*trait_ref).clone()))
}
- ItemKind::Impl(unsafety, polarity, generics, ifce, ty, impl_items) => ItemKind::Impl(
+ ItemKind::Impl(unsafety,
+ polarity,
+ defaultness,
+ generics,
+ ifce,
+ ty,
+ impl_items) => ItemKind::Impl(
unsafety,
polarity,
+ defaultness,
folder.fold_generics(generics),
ifce.map(|trait_ref| folder.fold_trait_ref(trait_ref.clone())),
folder.fold_ty(ty),
// FIXME spec the JSON output properly.
-use codemap::CodeMap;
+use codemap::{CodeMap, FilePathMapping};
use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
use errors::registry::Registry;
use errors::{DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion, CodeMapper};
}
pub fn basic() -> JsonEmitter {
- JsonEmitter::stderr(None, Rc::new(CodeMap::new()))
+ let file_path_mapping = FilePathMapping::empty();
+ JsonEmitter::stderr(None, Rc::new(CodeMap::new(file_path_mapping)))
}
pub fn new(dst: Box<Write + Send>,
let mut src = Vec::new();
srdr.read_to_end(&mut src).unwrap();
let src = String::from_utf8(src).unwrap();
- let cm = CodeMap::new();
- let filemap = cm.new_filemap(path, None, src);
+ let cm = CodeMap::new(sess.codemap().path_mapping().clone());
+ let filemap = cm.new_filemap(path, src);
let mut rdr = lexer::StringReader::new_raw(sess, filemap);
let mut comments: Vec<Comment> = Vec::new();
use ast::{self, Ident};
use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
-use codemap::CodeMap;
+use codemap::{CodeMap, FilePathMapping};
use errors::{FatalError, DiagnosticBuilder};
use parse::{token, ParseSess};
use str::char_at;
// I guess this is the only way to figure out if
// we're at the beginning of the file...
- let cmap = CodeMap::new();
+ let cmap = CodeMap::new(FilePathMapping::empty());
cmap.files.borrow_mut().push(self.filemap.clone());
let loc = cmap.lookup_char_pos_adj(self.pos);
debug!("Skipping a shebang");
sess: &'a ParseSess,
teststr: String)
-> StringReader<'a> {
- let fm = cm.new_filemap("zebra.rs".to_string(), None, teststr);
+ let fm = cm.new_filemap("zebra.rs".to_string(), teststr);
StringReader::new(sess, fm)
}
#[test]
fn t1() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut string_reader = setup(&cm,
&sh,
#[test]
fn doublecolonparsing() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a b".to_string()),
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
#[test]
fn dcparsing_2() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
#[test]
fn dcparsing_3() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
#[test]
fn dcparsing_4() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
#[test]
fn character_a() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
#[test]
fn character_space() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern(" ")), None));
#[test]
fn character_escaped() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("\\n")), None));
#[test]
fn lifetime_name() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(Ident::from_str("'abc")));
#[test]
fn raw_string() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token()
#[test]
fn literal_suffixes() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
#[test]
fn nested_block_comments() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
match lexer.next_token().tok {
#[test]
fn crlf_comments() {
- let cm = Rc::new(CodeMap::new());
+ let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();
//! The main parser interface
use ast::{self, CrateConfig};
-use codemap::CodeMap;
+use codemap::{CodeMap, FilePathMapping};
use syntax_pos::{self, Span, FileMap, NO_EXPANSION};
use errors::{Handler, ColorConfig, DiagnosticBuilder};
use feature_gate::UnstableFeatures;
}
impl ParseSess {
- pub fn new() -> Self {
- let cm = Rc::new(CodeMap::new());
+ pub fn new(file_path_mapping: FilePathMapping) -> Self {
+ let cm = Rc::new(CodeMap::new(file_path_mapping));
let handler = Handler::with_tty_emitter(ColorConfig::Auto,
true,
false,
pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> TokenStream {
- filemap_to_stream(sess, sess.codemap().new_filemap(name, None, source))
+ filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
}
// Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
-> Parser<'a> {
- filemap_to_parser(sess, sess.codemap().new_filemap(name, None, source))
+ filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
}
/// Create a new parser, handling errors as appropriate
}
#[test] fn parse_ident_pat () {
- let sess = ParseSess::new();
+ let sess = ParseSess::new(FilePathMapping::empty());
let mut parser = string_to_parser(&sess, "b".to_string());
assert!(panictry!(parser.parse_pat())
== P(ast::Pat{
}
#[test] fn crlf_doc_comments() {
- let sess = ParseSess::new();
+ let sess = ParseSess::new(FilePathMapping::empty());
let name = "<source>".to_string();
let source = "/// doc comment\r\nfn foo() {}".to_string();
#[test]
fn ttdelim_span() {
- let sess = ParseSess::new();
+ let sess = ParseSess::new(FilePathMapping::empty());
let expr = parse::parse_expr_from_source_str("foo".to_string(),
"foo!( fn main() { body } )".to_string(), &sess).unwrap();
use symbol::{Symbol, keywords};
use util::ThinVec;
+use std::cmp;
use std::collections::HashSet;
-use std::{cmp, mem, slice};
+use std::mem;
use std::path::{self, Path, PathBuf};
+use std::slice;
bitflags! {
flags Restrictions: u8 {
/// impl<T> Foo { ... }
/// impl<T> ToString for &'static T { ... }
/// impl Send for .. {}
- fn parse_item_impl(&mut self, unsafety: ast::Unsafety) -> PResult<'a, ItemInfo> {
+ fn parse_item_impl(&mut self,
+ unsafety: ast::Unsafety,
+ defaultness: Defaultness) -> PResult<'a, ItemInfo> {
let impl_span = self.span;
// First, parse type parameters if necessary.
allowed to have generics");
}
+ if let ast::Defaultness::Default = defaultness {
+ self.span_err(impl_span, "`default impl` is not allowed for \
+ default trait implementations");
+ }
+
self.expect(&token::OpenDelim(token::Brace))?;
self.expect(&token::CloseDelim(token::Brace))?;
Ok((keywords::Invalid.ident(),
}
Ok((keywords::Invalid.ident(),
- ItemKind::Impl(unsafety, polarity, generics, opt_trait, ty, impl_items),
+ ItemKind::Impl(unsafety, polarity, defaultness, generics, opt_trait, ty, impl_items),
Some(attrs)))
}
}
}
let mut err = self.diagnostic().struct_span_err(id_sp,
"cannot declare a new module at this location");
- let this_module = match self.directory.path.file_name() {
- Some(file_name) => file_name.to_str().unwrap().to_owned(),
- None => self.root_module_name.as_ref().unwrap().clone(),
- };
- err.span_note(id_sp,
- &format!("maybe move this module `{0}` to its own directory \
- via `{0}{1}mod.rs`",
- this_module,
- path::MAIN_SEPARATOR));
+ if id_sp != syntax_pos::DUMMY_SP {
+ let src_path = PathBuf::from(self.sess.codemap().span_to_filename(id_sp));
+ if let Some(stem) = src_path.file_stem() {
+ let mut dest_path = src_path.clone();
+ dest_path.set_file_name(stem);
+ dest_path.push("mod.rs");
+ err.span_note(id_sp,
+ &format!("maybe move this module `{}` to its own \
+ directory via `{}`", src_path.to_string_lossy(),
+ dest_path.to_string_lossy()));
+ }
+ }
if paths.path_exists {
err.span_note(id_sp,
&format!("... or maybe `use` the module `{}` instead \
of possibly redeclaring it",
paths.name));
- Err(err)
- } else {
- Err(err)
}
+ Err(err)
} else {
paths.result.map_err(|err| self.span_fatal_err(id_sp, err))
}
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl))
+ if (self.check_keyword(keywords::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Impl))) ||
+ (self.check_keyword(keywords::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) &&
+ self.look_ahead(2, |t| t.is_keyword(keywords::Impl)))
{
// IMPL ITEM
+ let defaultness = self.parse_defaultness()?;
self.expect_keyword(keywords::Unsafe)?;
self.expect_keyword(keywords::Impl)?;
- let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Unsafe)?;
+ let (ident,
+ item_,
+ extra_attrs) = self.parse_item_impl(ast::Unsafety::Unsafe, defaultness)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Impl) {
+ if (self.check_keyword(keywords::Impl)) ||
+ (self.check_keyword(keywords::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Impl)))
+ {
// IMPL ITEM
- let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Normal)?;
+ let defaultness = self.parse_defaultness()?;
+ self.expect_keyword(keywords::Impl)?;
+ let (ident,
+ item_,
+ extra_attrs) = self.parse_item_impl(ast::Unsafety::Normal, defaultness)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
}
ast::ItemKind::Impl(unsafety,
polarity,
+ defaultness,
ref generics,
ref opt_trait,
ref ty,
ref impl_items) => {
self.head("")?;
self.print_visibility(&item.vis)?;
+ self.print_defaultness(defaultness)?;
self.print_unsafety(unsafety)?;
self.word_nbsp("impl")?;
}
}
+ pub fn print_defaultness(&mut self, defaultness: ast::Defaultness) -> io::Result<()> {
+ if let ast::Defaultness::Default = defaultness {
+ try!(self.word_nbsp("default"));
+ }
+ Ok(())
+ }
+
pub fn print_struct(&mut self,
struct_def: &ast::VariantData,
generics: &ast::Generics,
self.hardbreak_if_not_bol()?;
self.maybe_print_comment(ii.span.lo)?;
self.print_outer_attributes(&ii.attrs)?;
- if let ast::Defaultness::Default = ii.defaultness {
- self.word_nbsp("default")?;
- }
+ self.print_defaultness(ii.defaultness)?;
match ii.node {
ast::ImplItemKind::Const(ref ty, ref expr) => {
self.print_associated_const(ii.ident, &ty, Some(&expr), &ii.vis)?;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use codemap::CodeMap;
+use codemap::{CodeMap, FilePathMapping};
use errors::Handler;
use errors::emitter::EmitterWriter;
use std::io;
fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
let output = Arc::new(Mutex::new(Vec::new()));
- let code_map = Rc::new(CodeMap::new());
- code_map.new_filemap_and_lines("test.rs", None, &file_text);
+ let code_map = Rc::new(CodeMap::new(FilePathMapping::empty()));
+ code_map.new_filemap_and_lines("test.rs", &file_text);
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span);
// except according to those terms.
use ast::{self, Ident};
+use codemap::FilePathMapping;
use parse::{ParseSess, PResult, filemap_to_stream};
use parse::{lexer, new_parser_from_source_str};
use parse::parser::Parser;
/// Map a string to tts, using a made-up filename:
pub fn string_to_stream(source_str: String) -> TokenStream {
- let ps = ParseSess::new();
- filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
+ let ps = ParseSess::new(FilePathMapping::empty());
+ filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str))
}
/// Map string to parser (via tts)
/// Parse a string, return a crate.
pub fn string_to_crate (source_str : String) -> ast::Crate {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_crate_mod()
})
/// Parse a string, return an expr
pub fn string_to_expr (source_str : String) -> P<ast::Expr> {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_expr()
})
/// Parse a string, return an item
pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_item()
})
/// Parse a string, return a stmt
pub fn string_to_stmt(source_str : String) -> Option<ast::Stmt> {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_stmt()
})
/// Parse a string, return a pat. Uses "irrefutable"... which doesn't
/// (currently) affect parsing.
pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_pat()
})
ItemKind::DefaultImpl(_, ref trait_ref) => {
visitor.visit_trait_ref(trait_ref)
}
- ItemKind::Impl(_, _,
+ ItemKind::Impl(_, _, _,
ref type_parameters,
ref opt_trait_reference,
ref typ,
a,
ast::ItemKind::Impl(unsafety,
ast::ImplPolarity::Positive,
+ ast::Defaultness::Final,
trait_generics,
opt_trait_ref,
self_type,
}
/// A single source in the CodeMap.
+#[derive(Clone)]
pub struct FileMap {
/// The name of the file that the source came from, source that doesn't
/// originate from files has names between angle brackets by convention,
/// e.g. `<anon>`
pub name: FileName,
- /// The absolute path of the file that the source came from.
- pub abs_path: Option<FileName>,
+ /// True if the `name` field above has been modified by -Zremap-path-prefix
+ pub name_was_remapped: bool,
/// The complete source code
pub src: Option<Rc<String>>,
/// The start position of this source in the CodeMap
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_struct("FileMap", 6, |s| {
s.emit_struct_field("name", 0, |s| self.name.encode(s))?;
- s.emit_struct_field("abs_path", 1, |s| self.abs_path.encode(s))?;
+ s.emit_struct_field("name_was_remapped", 1, |s| self.name_was_remapped.encode(s))?;
s.emit_struct_field("start_pos", 2, |s| self.start_pos.encode(s))?;
s.emit_struct_field("end_pos", 3, |s| self.end_pos.encode(s))?;
s.emit_struct_field("lines", 4, |s| {
d.read_struct("FileMap", 6, |d| {
let name: String = d.read_struct_field("name", 0, |d| Decodable::decode(d))?;
- let abs_path: Option<String> =
- d.read_struct_field("abs_path", 1, |d| Decodable::decode(d))?;
+ let name_was_remapped: bool =
+ d.read_struct_field("name_was_remapped", 1, |d| Decodable::decode(d))?;
let start_pos: BytePos = d.read_struct_field("start_pos", 2, |d| Decodable::decode(d))?;
let end_pos: BytePos = d.read_struct_field("end_pos", 3, |d| Decodable::decode(d))?;
let lines: Vec<BytePos> = d.read_struct_field("lines", 4, |d| {
d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?;
Ok(FileMap {
name: name,
- abs_path: abs_path,
+ name_was_remapped: name_was_remapped,
start_pos: start_pos,
end_pos: end_pos,
src: None,
println!("cargo:rustc-link-lib=static-nobundle=pthread");
} else if target.contains("fuchsia") {
println!("cargo:rustc-link-lib=unwind");
+ } else if target.contains("haiku") {
+ println!("cargo:rustc-link-lib=gcc_s");
}
}
-Subproject commit a884d21cc5f0b23a1693d1e872fd8998a4fdd17f
+Subproject commit 15745af7683844e43bdec966072b8e7b44772450
#define SUBTARGET_SPARC
#endif
+#ifdef LLVM_COMPONENT_HEXAGON
+#define SUBTARGET_HEXAGON SUBTARGET(Hexagon)
+#else
+#define SUBTARGET_HEXAGON
+#endif
+
#define GEN_SUBTARGETS \
SUBTARGET_X86 \
SUBTARGET_ARM \
SUBTARGET_PPC \
SUBTARGET_SYSTEMZ \
SUBTARGET_MSP430 \
- SUBTARGET_SPARC
+ SUBTARGET_SPARC \
+ SUBTARGET_HEXAGON
#define SUBTARGET(x) \
namespace llvm { \
# If this file is modified, then llvm will be (optionally) cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2017-03-23
+2017-04-26
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+
+// compile-flags: -g -Zremap-path-prefix-from={{cwd}} -Zremap-path-prefix-to=/the/aux-cwd -Zremap-path-prefix-from={{src-base}}/remap_path_prefix/auxiliary -Zremap-path-prefix-to=/the/aux-src
+
+#[inline]
+pub fn some_aux_function() -> i32 {
+ 1234
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-windows
+// ignore-tidy-linelength
+
+// compile-flags: -g -C no-prepopulate-passes -Zremap-path-prefix-from={{cwd}} -Zremap-path-prefix-to=/the/cwd -Zremap-path-prefix-from={{src-base}} -Zremap-path-prefix-to=/the/src
+// aux-build:remap_path_prefix_aux.rs
+
+extern crate remap_path_prefix_aux;
+
+// Here we check that the expansion of the file!() macro is mapped.
+// CHECK: internal constant [34 x i8] c"/the/src/remap_path_prefix/main.rs"
+pub static FILE_PATH: &'static str = file!();
+
+fn main() {
+ remap_path_prefix_aux::some_aux_function();
+}
+
+// Here we check that local debuginfo is mapped correctly.
+// CHECK: !DIFile(filename: "/the/src/remap_path_prefix/main.rs", directory: "/the/cwd")
+
+// And here that debuginfo from other crates are expanded to absolute paths.
+// CHECK: !DIFile(filename: "/the/aux-src/remap_path_prefix_aux.rs", directory: "")
extern crate syntax_pos;
use syntax::ast;
+use syntax::codemap::FilePathMapping;
use syntax::print::pprust;
use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
fn main() {
- let ps = syntax::parse::ParseSess::new();
+ let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(
&ps,
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(associated_consts)]
+
+pub trait Trait {
+ const CONST: u32;
+}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// check that borrowck looks inside consts/statics
-
-static FN : &'static (Fn() -> (Box<Fn()->Box<i32>>) + Sync) = &|| {
- let x = Box::new(0);
- Box::new(|| x) //~ ERROR cannot move out of captured outer variable
-};
-
-fn main() {
- let f = (FN)();
- f();
- f();
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![deny(private_in_public)]
-
-pub use inner::C;
-
-mod inner {
- trait A {
- fn a(&self) { }
- }
-
- pub trait B {
- fn b(&self) { }
- }
-
- pub trait C: A + B { //~ ERROR private trait `inner::A` in public interface
- //~^ WARN will become a hard error
- fn c(&self) { }
- }
-
- impl A for i32 {}
- impl B for i32 {}
- impl C for i32 {}
-
-}
-
-fn main() {
- // A is private
- // B is pub, not reexported
- // C : A + B is pub, reexported
-
- // 0.a(); // can't call
- // 0.b(); // can't call
- 0.c(); // ok
-
- C::a(&0); // can call
- C::b(&0); // can call
- C::c(&0); // ok
-}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:issue_41549.rs
+
+#![feature(associated_consts)]
+
+extern crate issue_41549;
+
+struct S;
+
+impl issue_41549::Trait for S {
+ const CONST: () = (); //~ ERROR incompatible type for trait
+}
+
+fn main() {}
enum Bar { X }
mod foo {
- trait Bar {
+ pub trait Bar {
fn method(&self) {}
fn method2(&self) {}
match s {
S{0: a, 0x1: b, ..} => {}
//~^ ERROR does not have a field named `0x1`
- //~| NOTE struct `S::{{constructor}}` does not have field `0x1`
+ //~| NOTE struct `S` does not have field `0x1`
}
}
}
}
-fn main() {
+fn main() { unsafe {
let u = m::U { a: 0 }; // OK
let u = m::U { b: 0 }; // OK
let u = m::U { c: 0 }; //~ ERROR field `c` of union `m::U` is private
let m::U { a } = u; // OK
let m::U { b } = u; // OK
let m::U { c } = u; //~ ERROR field `c` of union `m::U` is private
-}
+}}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+// Make sure we can't project defaulted associated types
+
+trait Foo {
+ type Assoc;
+}
+
+default impl<T> Foo for T {
+ type Assoc = ();
+}
+
+impl Foo for u8 {
+ type Assoc = String;
+}
+
+fn generic<T>() -> <T as Foo>::Assoc {
+ // `T` could be some downstream crate type that specializes (or,
+ // for that matter, `u8`).
+
+ () //~ ERROR mismatched types
+}
+
+fn monomorphic() -> () {
+ // Even though we know that `()` is not specialized in a
+ // downstream crate, typeck refuses to project here.
+
+ generic::<()>() //~ ERROR mismatched types
+}
+
+fn main() {
+ // No error here, we CAN project from `u8`, as there is no `default`
+ // in that impl.
+ let s: String = generic::<u8>();
+ println!("{}", s); // bad news if this all compiles
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// It should not be possible to use the concrete value of a defaulted
+// associated type in the impl defining it -- otherwise, what happens
+// if it's overridden?
+
+#![feature(specialization)]
+
+trait Example {
+ type Output;
+ fn generate(self) -> Self::Output;
+}
+
+default impl<T> Example for T {
+ type Output = Box<T>;
+ fn generate(self) -> Self::Output {
+ Box::new(self) //~ ERROR mismatched types
+ }
+}
+
+impl Example for bool {
+ type Output = bool;
+ fn generate(self) -> bool { self }
+}
+
+fn trouble<T>(t: T) -> Box<T> {
+ Example::generate(t) //~ ERROR mismatched types
+}
+
+fn weaponize() -> bool {
+ let b: Box<bool> = trouble(true);
+ *b
+}
+
+fn main() {
+ weaponize();
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that specialization must be ungated to use the `default` keyword
+
+trait Foo {
+ fn foo(&self);
+}
+
+default impl<T> Foo for T { //~ ERROR specialization is unstable
+ fn foo(&self) {}
+}
+
+fn main() {}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+#![feature(optin_builtin_traits)]
+
+trait Foo {}
+
+default impl Foo for .. {}
+//~^ ERROR `default impl` is not allowed for default trait implementations
+
+fn main() {}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+// Check a number of scenarios in which one impl tries to override another,
+// without correctly using `default`.
+
+////////////////////////////////////////////////////////////////////////////////
+// Test 1: one layer of specialization, multiple methods, missing `default`
+////////////////////////////////////////////////////////////////////////////////
+
+trait Foo {
+ fn foo(&self);
+ fn bar(&self);
+}
+
+impl<T> Foo for T {
+ fn foo(&self) {}
+ fn bar(&self) {}
+}
+
+impl Foo for u8 {}
+impl Foo for u16 {
+ fn foo(&self) {} //~ ERROR E0520
+}
+impl Foo for u32 {
+ fn bar(&self) {} //~ ERROR E0520
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Test 2: one layer of specialization, missing `default` on associated type
+////////////////////////////////////////////////////////////////////////////////
+
+trait Bar {
+ type T;
+}
+
+impl<T> Bar for T {
+ type T = u8;
+}
+
+impl Bar for u8 {
+ type T = (); //~ ERROR E0520
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Test 3a: multiple layers of specialization, missing interior `default`
+////////////////////////////////////////////////////////////////////////////////
+
+trait Baz {
+ fn baz(&self);
+}
+
+default impl<T> Baz for T {
+ fn baz(&self) {}
+}
+
+impl<T: Clone> Baz for T {
+ fn baz(&self) {}
+}
+
+impl Baz for i32 {
+ fn baz(&self) {} //~ ERROR E0520
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Test 3b: multiple layers of specialization, missing interior `default`,
+// redundant `default` in bottom layer.
+////////////////////////////////////////////////////////////////////////////////
+
+trait Redundant {
+ fn redundant(&self);
+}
+
+default impl<T> Redundant for T {
+ fn redundant(&self) {}
+}
+
+impl<T: Clone> Redundant for T {
+ fn redundant(&self) {}
+}
+
+default impl Redundant for i32 {
+ fn redundant(&self) {} //~ ERROR E0520
+}
+
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(associated_consts)]
+#![feature(associated_type_defaults)]
+
+struct S;
+
+mod method {
+ trait A {
+ fn a(&self) { }
+ }
+
+ pub trait B {
+ fn b(&self) { }
+ }
+
+ pub trait C: A + B {
+ fn c(&self) { }
+ }
+
+ impl A for ::S {}
+ impl B for ::S {}
+ impl C for ::S {}
+}
+
+mod assoc_const {
+ trait A {
+ const A: u8 = 0;
+ }
+
+ pub trait B {
+ const B: u8 = 0;
+ }
+
+ pub trait C: A + B {
+ const C: u8 = 0;
+ }
+
+ impl A for ::S {}
+ impl B for ::S {}
+ impl C for ::S {}
+}
+
+mod assoc_ty {
+ trait A {
+ type A = u8;
+ }
+
+ pub trait B {
+ type B = u8;
+ }
+
+ pub trait C: A + B {
+ type C = u8;
+ }
+
+ impl A for ::S {}
+ impl B for ::S {}
+ impl C for ::S {}
+}
+
+fn check_method() {
+ // A is private
+ // B is pub, not in scope
+ // C : A + B is pub, in scope
+ use method::C;
+
+ // Methods, method call
+ // a, b, c are resolved as trait items, their traits need to be in scope
+ S.a(); //~ ERROR no method named `a` found for type `S` in the current scope
+ S.b(); //~ ERROR no method named `b` found for type `S` in the current scope
+ S.c(); // OK
+ // a, b, c are resolved as inherent items, their traits don't need to be in scope
+ let c = &S as &C;
+ c.a(); //~ ERROR method `a` is private
+ c.b(); // OK
+ c.c(); // OK
+
+ // Methods, UFCS
+ // a, b, c are resolved as trait items, their traits need to be in scope
+ S::a(&S); //~ ERROR no associated item named `a` found for type `S` in the current scope
+ S::b(&S); //~ ERROR no associated item named `b` found for type `S` in the current scope
+ S::c(&S); // OK
+ // a, b, c are resolved as inherent items, their traits don't need to be in scope
+ C::a(&S); //~ ERROR method `a` is private
+ C::b(&S); // OK
+ C::c(&S); // OK
+}
+
+fn check_assoc_const() {
+ // A is private
+ // B is pub, not in scope
+ // C : A + B is pub, in scope
+ use assoc_const::C;
+
+ // Associated constants
+ // A, B, C are resolved as trait items, their traits need to be in scope
+ S::A; //~ ERROR no associated item named `A` found for type `S` in the current scope
+ S::B; //~ ERROR no associated item named `B` found for type `S` in the current scope
+ S::C; // OK
+ // A, B, C are resolved as inherent items, their traits don't need to be in scope
+ C::A; //~ ERROR associated constant `A` is private
+ //~^ ERROR the trait `assoc_const::C` cannot be made into an object
+ //~| ERROR the trait bound `assoc_const::C: assoc_const::A` is not satisfied
+ C::B; // ERROR the trait `assoc_const::C` cannot be made into an object
+ //~^ ERROR the trait bound `assoc_const::C: assoc_const::B` is not satisfied
+ C::C; // OK
+}
+
+fn check_assoc_ty<T: assoc_ty::C>() {
+ // A is private
+ // B is pub, not in scope
+ // C : A + B is pub, in scope
+ use assoc_ty::C;
+
+ // Associated types
+ // A, B, C are resolved as trait items, their traits need to be in scope, not implemented yet
+ let _: S::A; //~ ERROR ambiguous associated type
+ let _: S::B; //~ ERROR ambiguous associated type
+ let _: S::C; //~ ERROR ambiguous associated type
+ // A, B, C are resolved as inherent items, their traits don't need to be in scope
+ let _: T::A; //~ ERROR associated type `A` is private
+ let _: T::B; // OK
+ let _: T::C; // OK
+}
+
+fn main() {}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-mod m {
- trait Priv {
- fn f(&self) {}
- }
- impl Priv for super::S {}
- pub trait Pub: Priv {}
-}
-
-struct S;
-impl m::Pub for S {}
-
-fn g<T: m::Pub>(arg: T) {
- arg.f(); //~ ERROR: source trait `m::Priv` is private
-}
-
-fn main() {
- g(S);
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that a by-ref `FnMut` closure gets an error when it tries to
-// consume a value.
-
-fn call<F>(f: F) where F : Fn() {
- f();
-}
-
-fn main() {
- let y = vec![format!("World")];
- call(|| {
- y.into_iter();
- //~^ ERROR cannot move out of captured outer variable in an `Fn` closure
- });
-}
use syntax_pos::DUMMY_SP;
fn main() {
- let ps = syntax::parse::ParseSess::new();
+ let ps = syntax::parse::ParseSess::new(codemap::FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(
&ps,
-include ../tools.mk
-# NOTE the address sanitizer only supports x86_64 linux
-ifdef SANITIZER_SUPPORT
-all:
- $(RUSTC) -g -Z sanitizer=address -Z print-link-args overflow.rs | grep -q librustc_asan
- $(TMPDIR)/overflow 2>&1 | grep -q stack-buffer-overflow
+# NOTE the address sanitizer only supports x86_64 linux and macOS
+
+ifeq ($(TARGET),x86_64-apple-darwin)
+ASAN_SUPPORT=$(SANITIZER_SUPPORT)
+EXTRA_RUSTFLAG=-C rpath
else
-all:
+ifeq ($(TARGET),x86_64-unknown-linux-gnu)
+ASAN_SUPPORT=$(SANITIZER_SUPPORT)
+EXTRA_RUSTFLAG=
+endif
+endif
+all:
+ifeq ($(ASAN_SUPPORT),1)
+ $(RUSTC) -g -Z sanitizer=address -Z print-link-args $(EXTRA_RUSTFLAG) overflow.rs | grep -q librustc_asan
+ $(TMPDIR)/overflow 2>&1 | grep -q stack-buffer-overflow
endif
-include ../tools.mk
all:
- $(RUSTC) -Z sanitizer=leak --target i686-unknown-linux-gnu hello.rs 2>&1 | grep -q 'Sanitizers only work with the `x86_64-unknown-linux-gnu` target'
+ $(RUSTC) -Z sanitizer=leak --target i686-unknown-linux-gnu hello.rs 2>&1 | grep -q 'LeakSanitizer only works with the `x86_64-unknown-linux-gnu` target'
-include ../tools.mk
-ifdef SANITIZER_SUPPORT
all:
+ifeq ($(TARGET),x86_64-unknown-linux-gnu)
+ifdef SANITIZER_SUPPORT
$(RUSTC) -C opt-level=1 -g -Z sanitizer=leak -Z print-link-args leak.rs | grep -q librustc_lsan
$(TMPDIR)/leak 2>&1 | grep -q 'detected memory leaks'
-else
-all:
-
endif
+endif
+
-include ../tools.mk
-ifdef SANITIZER_SUPPORT
all:
+ifeq ($(TARGET),x86_64-unknown-linux-gnu)
+ifdef SANITIZER_SUPPORT
$(RUSTC) -g -Z sanitizer=memory -Z print-link-args uninit.rs | grep -q librustc_msan
$(TMPDIR)/uninit 2>&1 | grep -q use-of-uninitialized-value
-else
-all:
-
endif
+endif
+
-include ../tools.mk
-# This is a whitelist of crates which are stable, we don't check for the
-# instability of these crates as they're all stable!
+# This is a whitelist of files which are stable crates or simply are not crates;
+# we don't check for the instability of these crates as they're all stable!
STABLE_CRATES := \
std \
core \
rsbegin.o \
rsend.o \
dllcrt2.o \
- crt2.o
+ crt2.o \
+ clang_rt.%_dynamic.dylib
# Generate a list of all crates in the sysroot. To do this we list all files in
# rustc's sysroot, look at the filename, strip everything after the `-`, and
use syntax::ast::*;
use syntax::attr::*;
use syntax::ast;
+use syntax::codemap::FilePathMapping;
use syntax::parse;
use syntax::parse::{ParseSess, PResult};
use syntax::parse::new_parser_from_source_str;
}
fn check_expr_attrs(es: &str, expected: &[&str]) {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
let e = expr(es, &ps).expect("parse error");
let actual = &e.attrs;
str_compare(es,
}
fn check_stmt_attrs(es: &str, expected: &[&str]) {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
let e = stmt(es, &ps).expect("parse error");
let actual = e.node.attrs();
str_compare(es,
}
fn reject_expr_parse(es: &str) {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
match expr(es, &ps) {
Ok(_) => panic!("parser did not reject `{}`", es),
Err(mut e) => e.cancel(),
}
fn reject_stmt_parse(es: &str) {
- let ps = ParseSess::new();
+ let ps = ParseSess::new(FilePathMapping::empty());
match stmt(es, &ps) {
Ok(_) => panic!("parser did not reject `{}`", es),
Err(mut e) => e.cancel(),
extern crate syntax;
extern crate syntax_pos;
+use syntax::codemap::FilePathMapping;
use syntax::print::pprust::*;
use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
fn main() {
- let ps = syntax::parse::ParseSess::new();
+ let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(
&ps,
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+const A: [u32; 1] = [0];
+
+fn test() {
+ let range = A[1]..;
+}
+
+fn main() { }
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn split<A, B>(pair: (A, B)) {
+ let _a = pair.0;
+ let _b = pair.1;
+}
+
+fn main() {
+ split(((), ((), ())));
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// regression test for issue #41498.
+
+struct S;
+impl S {
+ fn mutate(&mut self) {}
+}
+
+fn call_and_ref<T, F: FnOnce() -> T>(x: &mut Option<T>, f: F) -> &mut T {
+ *x = Some(f());
+ x.as_mut().unwrap()
+}
+
+fn main() {
+ let mut n = None;
+ call_and_ref(&mut n, || [S])[0].mutate();
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+// Common code used for tests that model the Fn/FnMut/FnOnce hierarchy.
+
+pub trait Go {
+ fn go(&self, arg: isize);
+}
+
+pub fn go<G:Go>(this: &G, arg: isize) {
+ this.go(arg)
+}
+
+pub trait GoMut {
+ fn go_mut(&mut self, arg: isize);
+}
+
+pub fn go_mut<G:GoMut>(this: &mut G, arg: isize) {
+ this.go_mut(arg)
+}
+
+pub trait GoOnce {
+ fn go_once(self, arg: isize);
+}
+
+pub fn go_once<G:GoOnce>(this: G, arg: isize) {
+ this.go_once(arg)
+}
+
+default impl<G> GoMut for G
+ where G : Go
+{
+ fn go_mut(&mut self, arg: isize) {
+ go(&*self, arg)
+ }
+}
+
+default impl<G> GoOnce for G
+ where G : GoMut
+{
+ fn go_once(mut self, arg: isize) {
+ go_mut(&mut self, arg)
+ }
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+pub trait Foo {
+ fn foo(&self) -> &'static str;
+}
+
+default impl<T> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic"
+ }
+}
+
+default impl<T: Clone> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic Clone"
+ }
+}
+
+default impl<T, U> Foo for (T, U) where T: Clone, U: Clone {
+ fn foo(&self) -> &'static str {
+ "generic pair"
+ }
+}
+
+default impl<T: Clone> Foo for (T, T) {
+ fn foo(&self) -> &'static str {
+ "generic uniform pair"
+ }
+}
+
+default impl Foo for (u8, u32) {
+ fn foo(&self) -> &'static str {
+ "(u8, u32)"
+ }
+}
+
+default impl Foo for (u8, u8) {
+ fn foo(&self) -> &'static str {
+ "(u8, u8)"
+ }
+}
+
+default impl<T: Clone> Foo for Vec<T> {
+ fn foo(&self) -> &'static str {
+ "generic Vec"
+ }
+}
+
+impl Foo for Vec<i32> {
+ fn foo(&self) -> &'static str {
+ "Vec<i32>"
+ }
+}
+
+impl Foo for String {
+ fn foo(&self) -> &'static str {
+ "String"
+ }
+}
+
+impl Foo for i32 {
+ fn foo(&self) -> &'static str {
+ "i32"
+ }
+}
+
+pub trait MyMarker {}
+default impl<T: Clone + MyMarker> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic Clone + MyMarker"
+ }
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+#![feature(specialization)]
+
+// First, test only use of explicit `default` items:
+
+pub trait Foo {
+ fn foo(&self) -> bool;
+}
+
+default impl<T> Foo for T {
+ fn foo(&self) -> bool { false }
+}
+
+impl Foo for i32 {}
+
+impl Foo for i64 {
+ fn foo(&self) -> bool { true }
+}
+
+// Next, test mixture of explicit `default` and provided methods:
+
+pub trait Bar {
+ fn bar(&self) -> i32 { 0 }
+}
+
+impl<T> Bar for T {} // use the provided method
+
+impl Bar for i32 {
+ fn bar(&self) -> i32 { 1 }
+}
+impl<'a> Bar for &'a str {}
+
+default impl<T> Bar for Vec<T> {
+ fn bar(&self) -> i32 { 2 }
+}
+impl Bar for Vec<i32> {}
+impl Bar for Vec<i64> {
+ fn bar(&self) -> i32 { 3 }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:go_trait.rs
+
+#![feature(specialization)]
+
+extern crate go_trait;
+
+use go_trait::{Go,GoMut};
+use std::fmt::Debug;
+use std::default::Default;
+
+struct MyThingy;
+
+impl Go for MyThingy {
+ fn go(&self, arg: isize) { }
+}
+
+impl GoMut for MyThingy {
+ fn go_mut(&mut self, arg: isize) { }
+}
+
+fn main() { }
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that non-method associated functions can be specialized
+
+#![feature(specialization)]
+
+trait Foo {
+ fn mk() -> Self;
+}
+
+default impl<T: Default> Foo for T {
+ fn mk() -> T {
+ T::default()
+ }
+}
+
+impl Foo for Vec<u8> {
+ fn mk() -> Vec<u8> {
+ vec![0]
+ }
+}
+
+fn main() {
+ let v1: Vec<i32> = Foo::mk();
+ let v2: Vec<u8> = Foo::mk();
+
+ assert!(v1.len() == 0);
+ assert!(v2.len() == 1);
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+// Tests a variety of basic specialization scenarios and method
+// dispatch for them.
+
+unsafe trait Foo {
+ fn foo(&self) -> &'static str;
+}
+
+default unsafe impl<T> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic"
+ }
+}
+
+default unsafe impl<T: Clone> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic Clone"
+ }
+}
+
+default unsafe impl<T, U> Foo for (T, U) where T: Clone, U: Clone {
+ fn foo(&self) -> &'static str {
+ "generic pair"
+ }
+}
+
+default unsafe impl<T: Clone> Foo for (T, T) {
+ fn foo(&self) -> &'static str {
+ "generic uniform pair"
+ }
+}
+
+default unsafe impl Foo for (u8, u32) {
+ fn foo(&self) -> &'static str {
+ "(u8, u32)"
+ }
+}
+
+default unsafe impl Foo for (u8, u8) {
+ fn foo(&self) -> &'static str {
+ "(u8, u8)"
+ }
+}
+
+default unsafe impl<T: Clone> Foo for Vec<T> {
+ fn foo(&self) -> &'static str {
+ "generic Vec"
+ }
+}
+
+default unsafe impl Foo for Vec<i32> {
+ fn foo(&self) -> &'static str {
+ "Vec<i32>"
+ }
+}
+
+default unsafe impl Foo for String {
+ fn foo(&self) -> &'static str {
+ "String"
+ }
+}
+
+default unsafe impl Foo for i32 {
+ fn foo(&self) -> &'static str {
+ "i32"
+ }
+}
+
+struct NotClone;
+
+unsafe trait MyMarker {}
+default unsafe impl<T: Clone + MyMarker> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic Clone + MyMarker"
+ }
+}
+
+#[derive(Clone)]
+struct MarkedAndClone;
+unsafe impl MyMarker for MarkedAndClone {}
+
+fn main() {
+ assert!(NotClone.foo() == "generic");
+ assert!(0u8.foo() == "generic Clone");
+ assert!(vec![NotClone].foo() == "generic");
+ assert!(vec![0u8].foo() == "generic Vec");
+ assert!(vec![0i32].foo() == "Vec<i32>");
+ assert!(0i32.foo() == "i32");
+ assert!(String::new().foo() == "String");
+ assert!(((), 0).foo() == "generic pair");
+ assert!(((), ()).foo() == "generic uniform pair");
+ assert!((0u8, 0u32).foo() == "(u8, u32)");
+ assert!((0u8, 0u8).foo() == "(u8, u8)");
+ assert!(MarkedAndClone.foo() == "generic Clone + MyMarker");
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+// Tests a variety of basic specialization scenarios and method
+// dispatch for them.
+
+trait Foo {
+ fn foo(&self) -> &'static str;
+}
+
+default impl<T> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic"
+ }
+}
+
+default impl<T: Clone> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic Clone"
+ }
+}
+
+default impl<T, U> Foo for (T, U) where T: Clone, U: Clone {
+ fn foo(&self) -> &'static str {
+ "generic pair"
+ }
+}
+
+default impl<T: Clone> Foo for (T, T) {
+ fn foo(&self) -> &'static str {
+ "generic uniform pair"
+ }
+}
+
+default impl Foo for (u8, u32) {
+ fn foo(&self) -> &'static str {
+ "(u8, u32)"
+ }
+}
+
+default impl Foo for (u8, u8) {
+ fn foo(&self) -> &'static str {
+ "(u8, u8)"
+ }
+}
+
+default impl<T: Clone> Foo for Vec<T> {
+ fn foo(&self) -> &'static str {
+ "generic Vec"
+ }
+}
+
+impl Foo for Vec<i32> {
+ fn foo(&self) -> &'static str {
+ "Vec<i32>"
+ }
+}
+
+impl Foo for String {
+ fn foo(&self) -> &'static str {
+ "String"
+ }
+}
+
+impl Foo for i32 {
+ fn foo(&self) -> &'static str {
+ "i32"
+ }
+}
+
+struct NotClone;
+
+trait MyMarker {}
+default impl<T: Clone + MyMarker> Foo for T {
+ fn foo(&self) -> &'static str {
+ "generic Clone + MyMarker"
+ }
+}
+
+#[derive(Clone)]
+struct MarkedAndClone;
+impl MyMarker for MarkedAndClone {}
+
+fn main() {
+ assert!(NotClone.foo() == "generic");
+ assert!(0u8.foo() == "generic Clone");
+ assert!(vec![NotClone].foo() == "generic");
+ assert!(vec![0u8].foo() == "generic Vec");
+ assert!(vec![0i32].foo() == "Vec<i32>");
+ assert!(0i32.foo() == "i32");
+ assert!(String::new().foo() == "String");
+ assert!(((), 0).foo() == "generic pair");
+ assert!(((), ()).foo() == "generic uniform pair");
+ assert!((0u8, 0u32).foo() == "(u8, u32)");
+ assert!((0u8, 0u8).foo() == "(u8, u8)");
+ assert!(MarkedAndClone.foo() == "generic Clone + MyMarker");
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:specialization_cross_crate_defaults.rs
+
+#![feature(specialization)]
+
+extern crate specialization_cross_crate_defaults;
+
+use specialization_cross_crate_defaults::*;
+
+struct LocalDefault;
+struct LocalOverride;
+
+impl Foo for LocalDefault {}
+
+impl Foo for LocalOverride {
+ fn foo(&self) -> bool { true }
+}
+
+fn test_foo() {
+ assert!(!0i8.foo());
+ assert!(!0i32.foo());
+ assert!(0i64.foo());
+
+ assert!(!LocalDefault.foo());
+ assert!(LocalOverride.foo());
+}
+
+fn test_bar() {
+ assert!(0u8.bar() == 0);
+ assert!(0i32.bar() == 1);
+ assert!("hello".bar() == 0);
+ assert!(vec![()].bar() == 2);
+ assert!(vec![0i32].bar() == 2);
+ assert!(vec![0i64].bar() == 3);
+}
+
+fn main() {
+ test_foo();
+ test_bar();
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that specialization works even if only the upstream crate enables it
+
+// aux-build:specialization_cross_crate.rs
+
+extern crate specialization_cross_crate;
+
+use specialization_cross_crate::*;
+
+fn main() {
+ assert!(0u8.foo() == "generic Clone");
+ assert!(vec![0u8].foo() == "generic Vec");
+ assert!(vec![0i32].foo() == "Vec<i32>");
+ assert!(0i32.foo() == "i32");
+ assert!(String::new().foo() == "String");
+ assert!(((), 0).foo() == "generic pair");
+ assert!(((), ()).foo() == "generic uniform pair");
+ assert!((0u8, 0u32).foo() == "(u8, u32)");
+ assert!((0u8, 0u8).foo() == "(u8, u8)");
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:specialization_cross_crate.rs
+
+#![feature(specialization)]
+
+extern crate specialization_cross_crate;
+
+use specialization_cross_crate::*;
+
+struct NotClone;
+
+#[derive(Clone)]
+struct MarkedAndClone;
+impl MyMarker for MarkedAndClone {}
+
+struct MyType<T>(T);
+default impl<T> Foo for MyType<T> {
+ fn foo(&self) -> &'static str {
+ "generic MyType"
+ }
+}
+
+impl Foo for MyType<u8> {
+ fn foo(&self) -> &'static str {
+ "MyType<u8>"
+ }
+}
+
+struct MyOtherType;
+impl Foo for MyOtherType {}
+
+fn main() {
+ assert!(NotClone.foo() == "generic");
+ assert!(0u8.foo() == "generic Clone");
+ assert!(vec![NotClone].foo() == "generic");
+ assert!(vec![0u8].foo() == "generic Vec");
+ assert!(vec![0i32].foo() == "Vec<i32>");
+ assert!(0i32.foo() == "i32");
+ assert!(String::new().foo() == "String");
+ assert!(((), 0).foo() == "generic pair");
+ assert!(((), ()).foo() == "generic uniform pair");
+ assert!((0u8, 0u32).foo() == "(u8, u32)");
+ assert!((0u8, 0u8).foo() == "(u8, u8)");
+ assert!(MarkedAndClone.foo() == "generic Clone + MyMarker");
+
+ assert!(MyType(()).foo() == "generic MyType");
+ assert!(MyType(0u8).foo() == "MyType<u8>");
+ assert!(MyOtherType.foo() == "generic");
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+// Test that default methods are cascaded correctly
+
+// First, test only use of explicit `default` items:
+
+trait Foo {
+ fn foo(&self) -> bool;
+}
+
+// Specialization tree for Foo:
+//
+// T
+// / \
+// i32 i64
+
+default impl<T> Foo for T {
+ fn foo(&self) -> bool { false }
+}
+
+impl Foo for i32 {}
+
+impl Foo for i64 {
+ fn foo(&self) -> bool { true }
+}
+
+fn test_foo() {
+ assert!(!0i8.foo());
+ assert!(!0i32.foo());
+ assert!(0i64.foo());
+}
+
+// Next, test mixture of explicit `default` and provided methods:
+
+trait Bar {
+ fn bar(&self) -> i32 { 0 }
+}
+
+// Specialization tree for Bar.
+// Uses of $ designate that method is provided
+//
+// $Bar (the trait)
+// |
+// T
+// /|\
+// / | \
+// / | \
+// / | \
+// / | \
+// / | \
+// $i32 &str $Vec<T>
+// /\
+// / \
+// Vec<i32> $Vec<i64>
+
+// use the provided method
+impl<T> Bar for T {}
+
+impl Bar for i32 {
+ fn bar(&self) -> i32 { 1 }
+}
+impl<'a> Bar for &'a str {}
+
+default impl<T> Bar for Vec<T> {
+ fn bar(&self) -> i32 { 2 }
+}
+impl Bar for Vec<i32> {}
+impl Bar for Vec<i64> {
+ fn bar(&self) -> i32 { 3 }
+}
+
+fn test_bar() {
+ assert!(0u8.bar() == 0);
+ assert!(0i32.bar() == 1);
+ assert!("hello".bar() == 0);
+ assert!(vec![()].bar() == 2);
+ assert!(vec![0i32].bar() == 2);
+ assert!(vec![0i64].bar() == 3);
+}
+
+fn main() {
+ test_foo();
+ test_bar();
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that you can list the more specific impl before the more general one.
+
+#![feature(specialization)]
+
+trait Foo {
+ type Out;
+}
+
+impl Foo for bool {
+ type Out = ();
+}
+
+default impl<T> Foo for T {
+ type Out = bool;
+}
+
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that impls on projected self types can resolve overlap, even when the
+// projections involve specialization, so long as the associated type is
+// provided by the most specialized impl.
+
+#![feature(specialization)]
+
+trait Assoc {
+ type Output;
+}
+
+default impl<T> Assoc for T {
+ type Output = bool;
+}
+
+impl Assoc for u8 { type Output = u8; }
+impl Assoc for u16 { type Output = u16; }
+
+trait Foo {}
+impl Foo for u32 {}
+impl Foo for <u8 as Assoc>::Output {}
+impl Foo for <u16 as Assoc>::Output {}
+
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+// Regression test for ICE when combining specialized associated types and type
+// aliases
+
+trait Id_ {
+ type Out;
+}
+
+type Id<T> = <T as Id_>::Out;
+
+default impl<T> Id_ for T {
+ type Out = T;
+}
+
+fn test_projection() {
+ let x: Id<bool> = panic!();
+}
+
+fn main() {
+
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(specialization)]
+
+// Make sure we *can* project non-defaulted associated types
+// cf compile-fail/specialization-default-projection.rs
+
+// First, do so without any use of specialization
+
+trait Foo {
+ type Assoc;
+}
+
+impl<T> Foo for T {
+ type Assoc = ();
+}
+
+fn generic_foo<T>() -> <T as Foo>::Assoc {
+ ()
+}
+
+// Next, allow for one layer of specialization
+
+trait Bar {
+ type Assoc;
+}
+
+default impl<T> Bar for T {
+ type Assoc = ();
+}
+
+impl<T: Clone> Bar for T {
+ type Assoc = u8;
+}
+
+fn generic_bar_clone<T: Clone>() -> <T as Bar>::Assoc {
+ 0u8
+}
+
+fn main() {
+}
#![feature(rand)]
#![feature(const_fn)]
-use std::sync::atomic::{AtomicUsize, Ordering};
use std::__rand::{thread_rng, Rng};
+use std::panic;
+use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
+use std::cell::Cell;
const MAX_LEN: usize = 80;
let mut panic_countdown = panic_countdown;
v.sort_by(|a, b| {
if panic_countdown == 0 {
+ SILENCE_PANIC.with(|s| s.set(true));
panic!();
}
panic_countdown -= 1;
}
}
+thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));
+
fn main() {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ if !SILENCE_PANIC.with(|s| s.get()) {
+ prev(info);
+ }
+ }));
for len in (1..20).chain(70..MAX_LEN) {
// Test on a random array.
let mut rng = thread_rng();
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// check that borrowck looks inside consts/statics
+
+static FN : &'static (Fn() -> (Box<Fn()->Box<i32>>) + Sync) = &|| {
+ let x = Box::new(0); //~ NOTE moved
+ Box::new(|| x) //~ ERROR cannot move out of captured outer variable
+};
+
+fn main() {
+ let f = (FN)();
+ f();
+ f();
+}
--- /dev/null
+error[E0507]: cannot move out of captured outer variable in an `Fn` closure
+ --> $DIR/borrowck-in-static.rs:15:17
+ |
+14 | let x = Box::new(0); //~ NOTE moved
+ | - captured outer variable
+15 | Box::new(|| x) //~ ERROR cannot move out of captured outer variable
+ | ^ cannot move out of captured outer variable in an `Fn` closure
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that a by-ref `FnMut` closure gets an error when it tries to
+// consume a value.
+
+fn call<F>(f: F) where F : Fn() {
+ f();
+}
+
+fn main() {
+ let y = vec![format!("World")]; //~ NOTE moved
+ call(|| {
+ y.into_iter();
+ //~^ ERROR cannot move out of captured outer variable in an `Fn` closure
+ });
+}
--- /dev/null
+error[E0507]: cannot move out of captured outer variable in an `Fn` closure
+ --> $DIR/unboxed-closures-move-upvar-from-non-once-ref-closure.rs:21:9
+ |
+19 | let y = vec![format!("World")]; //~ NOTE moved
+ | - captured outer variable
+20 | call(|| {
+21 | y.into_iter();
+ | ^ cannot move out of captured outer variable in an `Fn` closure
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub mod baz;
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub mod bar;
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+
+// error-pattern: cannot declare a new module at this location
+// error-pattern: maybe move this module
+
+mod auxiliary {
+ mod foo;
+}
+
+fn main() {}
--- /dev/null
+error: cannot declare a new module at this location
+ --> $DIR/auxiliary/foo/bar.rs:11:9
+ |
+11 | pub mod baz;
+ | ^^^
+ |
+note: maybe move this module `$DIR/auxiliary/foo/bar.rs` to its own directory via `$DIR/auxiliary/foo/bar/mod.rs`
+ --> $DIR/auxiliary/foo/bar.rs:11:9
+ |
+11 | pub mod baz;
+ | ^^^
+
+error: aborting due to previous error
+
fn call<F>(mut f: F) where F: FnMut(Fn) {
f(Box::new(|| {
//~^ ERROR: cannot borrow `f` as mutable more than once
+ //~| NOTE first mutable borrow occurs here
+ //~| NOTE second mutable borrow occurs here
f((Box::new(|| {})))
}));
+ //~^ NOTE first borrow ends here
}
fn test1() {
}
fn test2<F>(f: &F) where F: FnMut() {
- (*f)(); //~ ERROR: cannot borrow immutable borrowed content `*f` as mutable
+ //~^ NOTE use `&mut F` here to make mutable
+ (*f)();
+ //~^ ERROR cannot borrow immutable borrowed content `*f` as mutable
+ //~| NOTE cannot borrow as mutable
}
fn test3<F>(f: &mut F) where F: FnMut() {
}
fn test4(f: &Test) {
- f.f.call_mut(()) //~ ERROR: cannot borrow immutable `Box` content `*f.f` as mutable
+ //~^ NOTE use `&mut Test` here to make mutable
+ f.f.call_mut(())
+ //~^ ERROR: cannot borrow immutable `Box` content `*f.f` as mutable
+ //~| NOTE cannot borrow as mutable
}
fn test5(f: &mut Test) {
fn test7() {
fn foo<F>(_: F) where F: FnMut(Box<FnMut(isize)>, isize) {}
let mut f = |g: Box<FnMut(isize)>, b: isize| {};
+ //~^ NOTE moved
f(Box::new(|a| {
+ //~^ NOTE borrow of `f` occurs here
foo(f);
//~^ ERROR cannot move `f` into closure because it is borrowed
//~| ERROR cannot move out of captured outer variable in an `FnMut` closure
+ //~| NOTE move into closure occurs here
+ //~| NOTE cannot move out of captured outer variable in an `FnMut` closure
}), 3);
}
| - ^^ second mutable borrow occurs here
| |
| first mutable borrow occurs here
-23 | //~^ ERROR: cannot borrow `f` as mutable more than once
-24 | f((Box::new(|| {})))
+...
+26 | f((Box::new(|| {})))
| - borrow occurs due to use of `f` in closure
-25 | }));
+27 | }));
| - first borrow ends here
error: cannot borrow immutable borrowed content `*f` as mutable
- --> $DIR/borrowck-call-is-borrow-issue-12224.rs:35:5
+ --> $DIR/borrowck-call-is-borrow-issue-12224.rs:39:5
|
-34 | fn test2<F>(f: &F) where F: FnMut() {
+37 | fn test2<F>(f: &F) where F: FnMut() {
| -- use `&mut F` here to make mutable
-35 | (*f)(); //~ ERROR: cannot borrow immutable borrowed content `*f` as mutable
+38 | //~^ NOTE use `&mut F` here to make mutable
+39 | (*f)();
| ^^^^ cannot borrow as mutable
error: cannot borrow immutable `Box` content `*f.f` as mutable
- --> $DIR/borrowck-call-is-borrow-issue-12224.rs:43:5
+ --> $DIR/borrowck-call-is-borrow-issue-12224.rs:50:5
|
-42 | fn test4(f: &Test) {
+48 | fn test4(f: &Test) {
| ----- use `&mut Test` here to make mutable
-43 | f.f.call_mut(()) //~ ERROR: cannot borrow immutable `Box` content `*f.f` as mutable
+49 | //~^ NOTE use `&mut Test` here to make mutable
+50 | f.f.call_mut(())
| ^^^ cannot borrow as mutable
error[E0504]: cannot move `f` into closure because it is borrowed
- --> $DIR/borrowck-call-is-borrow-issue-12224.rs:61:13
+ --> $DIR/borrowck-call-is-borrow-issue-12224.rs:72:13
|
-60 | f(Box::new(|a| {
+70 | f(Box::new(|a| {
| - borrow of `f` occurs here
-61 | foo(f);
+71 | //~^ NOTE borrow of `f` occurs here
+72 | foo(f);
| ^ move into closure occurs here
error[E0507]: cannot move out of captured outer variable in an `FnMut` closure
- --> $DIR/borrowck-call-is-borrow-issue-12224.rs:61:13
+ --> $DIR/borrowck-call-is-borrow-issue-12224.rs:72:13
|
-61 | foo(f);
+68 | let mut f = |g: Box<FnMut(isize)>, b: isize| {};
+ | ----- captured outer variable
+...
+72 | foo(f);
| ^ cannot move out of captured outer variable in an `FnMut` closure
error: aborting due to 5 previous errors
// Print one character per test instead of one line
pub quiet: bool,
- // where to find the qemu test client process, if we're using it
- pub qemu_test_client: Option<PathBuf>,
+ // where to find the remote test client process, if we're using it
+ pub remote_test_client: Option<PathBuf>,
// Configuration for various run-make tests frobbing things like C compilers
// or querying about various LLVM component information.
None,
&mut |ln| {
props.ignore =
- props.ignore || parse_name_directive(ln, "ignore-test") ||
- parse_name_directive(ln, &ignore_target(config)) ||
- parse_name_directive(ln, &ignore_architecture(config)) ||
- parse_name_directive(ln, &ignore_stage(config)) ||
- parse_name_directive(ln, &ignore_env(config)) ||
- (config.mode == common::Pretty && parse_name_directive(ln, "ignore-pretty")) ||
+ props.ignore || config.parse_name_directive(ln, "ignore-test") ||
+ config.parse_name_directive(ln, &ignore_target(config)) ||
+ config.parse_name_directive(ln, &ignore_architecture(config)) ||
+ config.parse_name_directive(ln, &ignore_stage(config)) ||
+ config.parse_name_directive(ln, &ignore_env(config)) ||
+ (config.mode == common::Pretty &&
+ config.parse_name_directive(ln, "ignore-pretty")) ||
(config.target != config.host &&
- parse_name_directive(ln, "ignore-cross-compile")) ||
+ config.parse_name_directive(ln, "ignore-cross-compile")) ||
ignore_gdb(config, ln) ||
ignore_lldb(config, ln) ||
ignore_llvm(config, ln);
- if let Some(s) = parse_aux_build(ln) {
+ if let Some(s) = config.parse_aux_build(ln) {
props.aux.push(s);
}
- props.should_fail = props.should_fail || parse_name_directive(ln, "should-fail");
+ props.should_fail = props.should_fail || config.parse_name_directive(ln, "should-fail");
});
return props;
}
if !line.contains("ignore-gdb-version") &&
- parse_name_directive(line, "ignore-gdb") {
+ config.parse_name_directive(line, "ignore-gdb") {
return true;
}
return false;
}
- if parse_name_directive(line, "ignore-lldb") {
+ if config.parse_name_directive(line, "ignore-lldb") {
return true;
}
}
}
- pub fn from_aux_file(&self, testfile: &Path, cfg: Option<&str>) -> Self {
+ pub fn from_aux_file(&self,
+ testfile: &Path,
+ cfg: Option<&str>,
+ config: &Config)
+ -> Self {
let mut props = TestProps::new();
// copy over select properties to the aux build:
props.incremental_dir = self.incremental_dir.clone();
- props.load_from(testfile, cfg);
+ props.load_from(testfile, cfg, config);
props
}
- pub fn from_file(testfile: &Path) -> Self {
+ pub fn from_file(testfile: &Path, config: &Config) -> Self {
let mut props = TestProps::new();
- props.load_from(testfile, None);
+ props.load_from(testfile, None, config);
props
}
/// tied to a particular revision `foo` (indicated by writing
/// `//[foo]`), then the property is ignored unless `cfg` is
/// `Some("foo")`.
- pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>) {
+ pub fn load_from(&mut self,
+ testfile: &Path,
+ cfg: Option<&str>,
+ config: &Config) {
iter_header(testfile,
cfg,
&mut |ln| {
- if let Some(ep) = parse_error_pattern(ln) {
+ if let Some(ep) = config.parse_error_pattern(ln) {
self.error_patterns.push(ep);
}
- if let Some(flags) = parse_compile_flags(ln) {
+ if let Some(flags) = config.parse_compile_flags(ln) {
self.compile_flags.extend(flags.split_whitespace()
.map(|s| s.to_owned()));
}
- if let Some(r) = parse_revisions(ln) {
+ if let Some(r) = config.parse_revisions(ln) {
self.revisions.extend(r);
}
if self.run_flags.is_none() {
- self.run_flags = parse_run_flags(ln);
+ self.run_flags = config.parse_run_flags(ln);
}
if self.pp_exact.is_none() {
- self.pp_exact = parse_pp_exact(ln, testfile);
+ self.pp_exact = config.parse_pp_exact(ln, testfile);
}
if !self.build_aux_docs {
- self.build_aux_docs = parse_build_aux_docs(ln);
+ self.build_aux_docs = config.parse_build_aux_docs(ln);
}
if !self.force_host {
- self.force_host = parse_force_host(ln);
+ self.force_host = config.parse_force_host(ln);
}
if !self.check_stdout {
- self.check_stdout = parse_check_stdout(ln);
+ self.check_stdout = config.parse_check_stdout(ln);
}
if !self.no_prefer_dynamic {
- self.no_prefer_dynamic = parse_no_prefer_dynamic(ln);
+ self.no_prefer_dynamic = config.parse_no_prefer_dynamic(ln);
}
if !self.pretty_expanded {
- self.pretty_expanded = parse_pretty_expanded(ln);
+ self.pretty_expanded = config.parse_pretty_expanded(ln);
}
- if let Some(m) = parse_pretty_mode(ln) {
+ if let Some(m) = config.parse_pretty_mode(ln) {
self.pretty_mode = m;
}
if !self.pretty_compare_only {
- self.pretty_compare_only = parse_pretty_compare_only(ln);
+ self.pretty_compare_only = config.parse_pretty_compare_only(ln);
}
- if let Some(ab) = parse_aux_build(ln) {
+ if let Some(ab) = config.parse_aux_build(ln) {
self.aux_builds.push(ab);
}
- if let Some(ee) = parse_env(ln, "exec-env") {
+ if let Some(ee) = config.parse_env(ln, "exec-env") {
self.exec_env.push(ee);
}
- if let Some(ee) = parse_env(ln, "rustc-env") {
+ if let Some(ee) = config.parse_env(ln, "rustc-env") {
self.rustc_env.push(ee);
}
- if let Some(cl) = parse_check_line(ln) {
+ if let Some(cl) = config.parse_check_line(ln) {
self.check_lines.push(cl);
}
- if let Some(of) = parse_forbid_output(ln) {
+ if let Some(of) = config.parse_forbid_output(ln) {
self.forbid_output.push(of);
}
if !self.must_compile_successfully {
- self.must_compile_successfully = parse_must_compile_successfully(ln);
+ self.must_compile_successfully = config.parse_must_compile_successfully(ln);
}
if !self.check_test_line_numbers_match {
- self.check_test_line_numbers_match = parse_check_test_line_numbers_match(ln);
+ self.check_test_line_numbers_match = config.parse_check_test_line_numbers_match(ln);
}
});
return;
}
-fn parse_error_pattern(line: &str) -> Option<String> {
- parse_name_value_directive(line, "error-pattern")
-}
+impl Config {
-fn parse_forbid_output(line: &str) -> Option<String> {
- parse_name_value_directive(line, "forbid-output")
-}
+ fn parse_error_pattern(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "error-pattern")
+ }
-fn parse_aux_build(line: &str) -> Option<String> {
- parse_name_value_directive(line, "aux-build")
-}
+ fn parse_forbid_output(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "forbid-output")
+ }
-fn parse_compile_flags(line: &str) -> Option<String> {
- parse_name_value_directive(line, "compile-flags")
-}
+ fn parse_aux_build(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "aux-build")
+ }
-fn parse_revisions(line: &str) -> Option<Vec<String>> {
- parse_name_value_directive(line, "revisions")
- .map(|r| r.split_whitespace().map(|t| t.to_string()).collect())
-}
+ fn parse_compile_flags(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "compile-flags")
+ }
-fn parse_run_flags(line: &str) -> Option<String> {
- parse_name_value_directive(line, "run-flags")
-}
+ fn parse_revisions(&self, line: &str) -> Option<Vec<String>> {
+ self.parse_name_value_directive(line, "revisions")
+ .map(|r| r.split_whitespace().map(|t| t.to_string()).collect())
+ }
-fn parse_check_line(line: &str) -> Option<String> {
- parse_name_value_directive(line, "check")
-}
+ fn parse_run_flags(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "run-flags")
+ }
-fn parse_force_host(line: &str) -> bool {
- parse_name_directive(line, "force-host")
-}
+ fn parse_check_line(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "check")
+ }
-fn parse_build_aux_docs(line: &str) -> bool {
- parse_name_directive(line, "build-aux-docs")
-}
+ fn parse_force_host(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "force-host")
+ }
-fn parse_check_stdout(line: &str) -> bool {
- parse_name_directive(line, "check-stdout")
-}
+ fn parse_build_aux_docs(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "build-aux-docs")
+ }
-fn parse_no_prefer_dynamic(line: &str) -> bool {
- parse_name_directive(line, "no-prefer-dynamic")
-}
+ fn parse_check_stdout(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "check-stdout")
+ }
-fn parse_pretty_expanded(line: &str) -> bool {
- parse_name_directive(line, "pretty-expanded")
-}
+ fn parse_no_prefer_dynamic(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "no-prefer-dynamic")
+ }
-fn parse_pretty_mode(line: &str) -> Option<String> {
- parse_name_value_directive(line, "pretty-mode")
-}
+ fn parse_pretty_expanded(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "pretty-expanded")
+ }
-fn parse_pretty_compare_only(line: &str) -> bool {
- parse_name_directive(line, "pretty-compare-only")
-}
+ fn parse_pretty_mode(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "pretty-mode")
+ }
-fn parse_must_compile_successfully(line: &str) -> bool {
- parse_name_directive(line, "must-compile-successfully")
-}
+ fn parse_pretty_compare_only(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "pretty-compare-only")
+ }
-fn parse_check_test_line_numbers_match(line: &str) -> bool {
- parse_name_directive(line, "check-test-line-numbers-match")
-}
+ fn parse_must_compile_successfully(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "must-compile-successfully")
+ }
-fn parse_env(line: &str, name: &str) -> Option<(String, String)> {
- parse_name_value_directive(line, name).map(|nv| {
- // nv is either FOO or FOO=BAR
- let mut strs: Vec<String> = nv.splitn(2, '=')
- .map(str::to_owned)
- .collect();
+ fn parse_check_test_line_numbers_match(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "check-test-line-numbers-match")
+ }
- match strs.len() {
- 1 => (strs.pop().unwrap(), "".to_owned()),
- 2 => {
- let end = strs.pop().unwrap();
- (strs.pop().unwrap(), end)
+ fn parse_env(&self, line: &str, name: &str) -> Option<(String, String)> {
+ self.parse_name_value_directive(line, name).map(|nv| {
+ // nv is either FOO or FOO=BAR
+ let mut strs: Vec<String> = nv.splitn(2, '=')
+ .map(str::to_owned)
+ .collect();
+
+ match strs.len() {
+ 1 => (strs.pop().unwrap(), "".to_owned()),
+ 2 => {
+ let end = strs.pop().unwrap();
+ (strs.pop().unwrap(), end)
+ }
+ n => panic!("Expected 1 or 2 strings, not {}", n),
}
- n => panic!("Expected 1 or 2 strings, not {}", n),
- }
- })
-}
+ })
+ }
-fn parse_pp_exact(line: &str, testfile: &Path) -> Option<PathBuf> {
- if let Some(s) = parse_name_value_directive(line, "pp-exact") {
- Some(PathBuf::from(&s))
- } else {
- if parse_name_directive(line, "pp-exact") {
- testfile.file_name().map(PathBuf::from)
+ fn parse_pp_exact(&self, line: &str, testfile: &Path) -> Option<PathBuf> {
+ if let Some(s) = self.parse_name_value_directive(line, "pp-exact") {
+ Some(PathBuf::from(&s))
} else {
- None
+ if self.parse_name_directive(line, "pp-exact") {
+ testfile.file_name().map(PathBuf::from)
+ } else {
+ None
+ }
}
}
-}
-fn parse_name_directive(line: &str, directive: &str) -> bool {
- // This 'no-' rule is a quick hack to allow pretty-expanded and no-pretty-expanded to coexist
- line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
-}
+ fn parse_name_directive(&self, line: &str, directive: &str) -> bool {
+ // This 'no-' rule is a quick hack to allow pretty-expanded and
+ // no-pretty-expanded to coexist
+ line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
+ }
-pub fn parse_name_value_directive(line: &str, directive: &str) -> Option<String> {
- let keycolon = format!("{}:", directive);
- if let Some(colon) = line.find(&keycolon) {
- let value = line[(colon + keycolon.len())..line.len()].to_owned();
- debug!("{}: {}", directive, value);
- Some(value)
- } else {
- None
+ pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> {
+ let keycolon = format!("{}:", directive);
+ if let Some(colon) = line.find(&keycolon) {
+ let value = line[(colon + keycolon.len())..line.len()].to_owned();
+ debug!("{}: {}", directive, value);
+ Some(expand_variables(value, self))
+ } else {
+ None
+ }
}
}
let major: isize = version_string.parse().ok().expect(&error_string);
return major;
}
+
+fn expand_variables(mut value: String, config: &Config) -> String {
+ const CWD: &'static str = "{{cwd}}";
+ const SRC_BASE: &'static str = "{{src-base}}";
+ const BUILD_BASE: &'static str = "{{build-base}}";
+
+ if value.contains(CWD) {
+ let cwd = env::current_dir().unwrap();
+ value = value.replace(CWD, &cwd.to_string_lossy());
+ }
+
+ if value.contains(SRC_BASE) {
+ value = value.replace(SRC_BASE, &config.src_base.to_string_lossy());
+ }
+
+ if value.contains(BUILD_BASE) {
+ value = value.replace(BUILD_BASE, &config.build_base.to_string_lossy());
+ }
+
+ value
+}
reqopt("", "llvm-components", "list of LLVM components built in", "LIST"),
reqopt("", "llvm-cxxflags", "C++ flags for LLVM", "FLAGS"),
optopt("", "nodejs", "the name of nodejs", "PATH"),
- optopt("", "qemu-test-client", "path to the qemu test client", "PATH"),
+ optopt("", "remote-test-client", "path to the remote test client", "PATH"),
optflag("h", "help", "show this message")];
let (argv0, args_) = args.split_first().unwrap();
llvm_version: matches.opt_str("llvm-version"),
android_cross_path: opt_path(matches, "android-cross-path"),
adb_path: opt_str2(matches.opt_str("adb-path")),
- adb_test_dir: format!("{}/{}",
- opt_str2(matches.opt_str("adb-test-dir")),
- opt_str2(matches.opt_str("target"))),
+ adb_test_dir: opt_str2(matches.opt_str("adb-test-dir")),
adb_device_status:
opt_str2(matches.opt_str("target")).contains("android") &&
"(none)" != opt_str2(matches.opt_str("adb-test-dir")) &&
lldb_python_dir: matches.opt_str("lldb-python-dir"),
verbose: matches.opt_present("verbose"),
quiet: matches.opt_present("quiet"),
- qemu_test_client: matches.opt_str("qemu-test-client").map(PathBuf::from),
+ remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
cc: matches.opt_str("cc").unwrap(),
cxx: matches.opt_str("cxx").unwrap(),
if let DebugInfoGdb = config.mode {
println!("{} debug-info test uses tcp 5039 port.\
please reserve it", config.target);
- }
-
- // android debug-info test uses remote debugger
- // so, we test 1 thread at once.
- // also trying to isolate problems with adb_run_wrapper.sh ilooping
- match config.mode {
- // These tests don't actually run code or don't run for android, so
- // we don't need to limit ourselves there
- Mode::Ui |
- Mode::CompileFail |
- Mode::ParseFail |
- Mode::RunMake |
- Mode::Codegen |
- Mode::CodegenUnits |
- Mode::Pretty |
- Mode::Rustdoc => {}
-
- _ => {
- env::set_var("RUST_TEST_THREADS", "1");
- }
+ // android debug-info test uses remote debugger, so we test 1 thread
+ // at once as they're all sharing the same TCP port to communicate
+ // over.
+ //
+ // we should figure out how to lift this restriction! (run them all
+ // on different ports allocated dynamically).
+ env::set_var("RUST_TEST_THREADS", "1");
}
}
}
DebugInfoGdb => {
- if config.qemu_test_client.is_some() {
+ if config.remote_test_client.is_some() &&
+ !config.target.contains("android") {
println!("WARNING: debuginfo tests are not available when \
- testing with QEMU");
+ testing with remote");
return
}
}
"PATH"
} else if cfg!(target_os = "macos") {
"DYLD_LIBRARY_PATH"
+ } else if cfg!(target_os = "haiku") {
+ "LIBRARY_PATH"
} else {
"LD_LIBRARY_PATH"
}
use filetime::FileTime;
use json;
use header::TestProps;
-use header;
use procsrv;
use test::TestPaths;
use uidiff;
use std::collections::HashSet;
use std::env;
-use std::fmt;
use std::fs::{self, File, create_dir_all};
use std::io::prelude::*;
use std::io::{self, BufReader};
print!("\n\n");
}
debug!("running {:?}", testpaths.file.display());
- let base_props = TestProps::from_file(&testpaths.file);
+ let base_props = TestProps::from_file(&testpaths.file, &config);
let base_cx = TestCx { config: &config,
props: &base_props,
} else {
for revision in &base_props.revisions {
let mut revision_props = base_props.clone();
- revision_props.load_from(&testpaths.file, Some(&revision));
+ revision_props.load_from(&testpaths.file, Some(&revision), &config);
let rev_cx = TestCx {
config: &config,
props: &revision_props,
let debugger_run_result;
match &*self.config.target {
- "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" => {
+ "arm-linux-androideabi" |
+ "armv7-linux-androideabi" |
+ "aarch64-linux-android" => {
cmds = cmds.replace("run", "continue");
exe_file.file_name().unwrap().to_str()
.unwrap());
+ debug!("adb arg: {}", adb_arg);
let mut process = procsrv::run_background("",
&self.config.adb_path
,
};
debugger_run_result = ProcRes {
- status: Status::Normal(status),
+ status: status,
stdout: out,
stderr: err,
cmdline: cmdline
self.dump_output(&out, &err);
ProcRes {
- status: Status::Normal(status),
+ status: status,
stdout: out,
stderr: err,
cmdline: format!("{:?}", cmd)
}
for &(ref command_directive, ref check_directive) in &directives {
- header::parse_name_value_directive(
+ self.config.parse_name_value_directive(
&line,
&command_directive).map(|cmd| {
commands.push(cmd)
});
- header::parse_name_value_directive(
+ self.config.parse_name_value_directive(
&line,
&check_directive).map(|cmd| {
check_lines.push(cmd)
if self.props.build_aux_docs {
for rel_ab in &self.props.aux_builds {
let aux_testpaths = self.compute_aux_test_paths(rel_ab);
- let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision);
+ let aux_props = self.props.from_aux_file(&aux_testpaths.file,
+ self.revision,
+ self.config);
let aux_cx = TestCx {
config: self.config,
props: &aux_props,
let env = self.props.exec_env.clone();
match &*self.config.target {
-
- "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" => {
- self._arm_exec_compiled_test(env)
- }
-
// This is pretty similar to below, we're transforming:
//
// program arg1 arg2
//
// into
//
- // qemu-test-client run program:support-lib.so arg1 arg2
+ // remote-test-client run program:support-lib.so arg1 arg2
//
// The test-client program will upload `program` to the emulator
// along with all other support libraries listed (in this case
// `support-lib.so`. It will then execute the program on the
// emulator with the arguments specified (in the environment we give
// the process) and then report back the same result.
- _ if self.config.qemu_test_client.is_some() => {
+ _ if self.config.remote_test_client.is_some() => {
let aux_dir = self.aux_output_dir_name();
let mut args = self.make_run_args();
let mut program = args.prog.clone();
}
args.args.insert(0, program);
args.args.insert(0, "run".to_string());
- args.prog = self.config.qemu_test_client.clone().unwrap()
+ args.prog = self.config.remote_test_client.clone().unwrap()
.into_os_string().into_string().unwrap();
self.compose_and_run(args,
env,
for rel_ab in &self.props.aux_builds {
let aux_testpaths = self.compute_aux_test_paths(rel_ab);
- let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision);
+ let aux_props = self.props.from_aux_file(&aux_testpaths.file,
+ self.revision,
+ self.config);
let mut crate_type = if aux_props.no_prefer_dynamic {
Vec::new()
} else {
aux_testpaths.file.display()),
&auxres);
}
-
- match &*self.config.target {
- "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" => {
- self._arm_push_aux_shared_library();
- }
- _ => {}
- }
}
self.compose_and_run(args,
input).expect(&format!("failed to exec `{}`", prog));
self.dump_output(&out, &err);
return ProcRes {
- status: Status::Normal(status),
+ status: status,
stdout: out,
stderr: err,
cmdline: cmdline,
println!("---------------------------------------------------");
}
- fn _arm_exec_compiled_test(&self, env: Vec<(String, String)>) -> ProcRes {
- let args = self.make_run_args();
- let cmdline = self.make_cmdline("", &args.prog, &args.args);
-
- // get bare program string
- let mut tvec: Vec<String> = args.prog
- .split('/')
- .map(str::to_owned)
- .collect();
- let prog_short = tvec.pop().unwrap();
-
- // copy to target
- let copy_result = procsrv::run("",
- &self.config.adb_path,
- None,
- &[
- "push".to_owned(),
- args.prog.clone(),
- self.config.adb_test_dir.clone()
- ],
- vec![("".to_owned(), "".to_owned())],
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", self.config.adb_path));
-
- if self.config.verbose {
- println!("push ({}) {} {} {}",
- self.config.target,
- args.prog,
- copy_result.out,
- copy_result.err);
- }
-
- logv(self.config, format!("executing ({}) {}", self.config.target, cmdline));
-
- let mut runargs = Vec::new();
-
- // run test via adb_run_wrapper
- runargs.push("shell".to_owned());
- for (key, val) in env {
- runargs.push(format!("{}={}", key, val));
- }
- runargs.push(format!("{}/../adb_run_wrapper.sh", self.config.adb_test_dir));
- runargs.push(format!("{}", self.config.adb_test_dir));
- runargs.push(format!("{}", prog_short));
-
- for tv in &args.args {
- runargs.push(tv.to_owned());
- }
- procsrv::run("",
- &self.config.adb_path,
- None,
- &runargs,
- vec![("".to_owned(), "".to_owned())], Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", self.config.adb_path));
-
- // get exitcode of result
- runargs = Vec::new();
- runargs.push("shell".to_owned());
- runargs.push("cat".to_owned());
- runargs.push(format!("{}/{}.exitcode", self.config.adb_test_dir, prog_short));
-
- let procsrv::Result{ out: exitcode_out, err: _, status: _ } =
- procsrv::run("",
- &self.config.adb_path,
- None,
- &runargs,
- vec![("".to_owned(), "".to_owned())],
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", self.config.adb_path));
-
- let mut exitcode: i32 = 0;
- for c in exitcode_out.chars() {
- if !c.is_numeric() { break; }
- exitcode = exitcode * 10 + match c {
- '0' ... '9' => c as i32 - ('0' as i32),
- _ => 101,
- }
- }
-
- // get stdout of result
- runargs = Vec::new();
- runargs.push("shell".to_owned());
- runargs.push("cat".to_owned());
- runargs.push(format!("{}/{}.stdout", self.config.adb_test_dir, prog_short));
-
- let procsrv::Result{ out: stdout_out, err: _, status: _ } =
- procsrv::run("",
- &self.config.adb_path,
- None,
- &runargs,
- vec![("".to_owned(), "".to_owned())],
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", self.config.adb_path));
-
- // get stderr of result
- runargs = Vec::new();
- runargs.push("shell".to_owned());
- runargs.push("cat".to_owned());
- runargs.push(format!("{}/{}.stderr", self.config.adb_test_dir, prog_short));
-
- let procsrv::Result{ out: stderr_out, err: _, status: _ } =
- procsrv::run("",
- &self.config.adb_path,
- None,
- &runargs,
- vec![("".to_owned(), "".to_owned())],
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", self.config.adb_path));
-
- self.dump_output(&stdout_out, &stderr_out);
-
- ProcRes {
- status: Status::Parsed(exitcode),
- stdout: stdout_out,
- stderr: stderr_out,
- cmdline: cmdline
- }
- }
-
- fn _arm_push_aux_shared_library(&self) {
- let tdir = self.aux_output_dir_name();
-
- let dirs = fs::read_dir(&tdir).unwrap();
- for file in dirs {
- let file = file.unwrap().path();
- if file.extension().and_then(|s| s.to_str()) == Some("so") {
- // FIXME (#9639): This needs to handle non-utf8 paths
- let copy_result = procsrv::run("",
- &self.config.adb_path,
- None,
- &[
- "push".to_owned(),
- file.to_str()
- .unwrap()
- .to_owned(),
- self.config.adb_test_dir.to_owned(),
- ],
- vec![("".to_owned(),
- "".to_owned())],
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", self.config.adb_path));
-
- if self.config.verbose {
- println!("push ({}) {:?} {} {}",
- self.config.target, file.display(),
- copy_result.out, copy_result.err);
- }
- }
- }
- }
-
// codegen tests (using FileCheck)
fn compile_test_and_save_ir(&self) -> ProcRes {
let output = cmd.output().expect("failed to spawn `make`");
if !output.status.success() {
let res = ProcRes {
- status: Status::Normal(output.status),
+ status: output.status,
stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
cmdline: format!("{:?}", cmd),
}
pub struct ProcRes {
- status: Status,
+ status: ExitStatus,
stdout: String,
stderr: String,
cmdline: String,
}
-enum Status {
- Parsed(i32),
- Normal(ExitStatus),
-}
-
impl ProcRes {
pub fn fatal(&self, err: Option<&str>) -> ! {
if let Some(e) = err {
}
}
-impl Status {
- fn code(&self) -> Option<i32> {
- match *self {
- Status::Parsed(i) => Some(i),
- Status::Normal(ref e) => e.code(),
- }
- }
-
- fn success(&self) -> bool {
- match *self {
- Status::Parsed(i) => i == 0,
- Status::Normal(ref e) => e.success(),
- }
- }
-}
-
-impl fmt::Display for Status {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match *self {
- Status::Parsed(i) => write!(f, "exit code: {}", i),
- Status::Normal(ref e) => e.fmt(f),
- }
- }
-}
-
enum TargetLocation {
ThisFile(PathBuf),
ThisDirectory(PathBuf),
+++ /dev/null
-[package]
-name = "qemu-test-client"
-version = "0.1.0"
-authors = ["The Rust Project Developers"]
-
-[dependencies]
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-/// This is a small client program intended to pair with `qemu-test-server` in
-/// this repository. This client connects to the server over TCP and is used to
-/// push artifacts and run tests on the server instead of locally.
-///
-/// Here is also where we bake in the support to spawn the QEMU emulator as
-/// well.
-
-use std::env;
-use std::fs::File;
-use std::io::prelude::*;
-use std::io::{self, BufWriter};
-use std::net::TcpStream;
-use std::path::Path;
-use std::process::{Command, Stdio};
-use std::thread;
-use std::time::Duration;
-
-macro_rules! t {
- ($e:expr) => (match $e {
- Ok(e) => e,
- Err(e) => panic!("{} failed with {}", stringify!($e), e),
- })
-}
-
-fn main() {
- let mut args = env::args().skip(1);
-
- match &args.next().unwrap()[..] {
- "spawn-emulator" => {
- spawn_emulator(Path::new(&args.next().unwrap()),
- Path::new(&args.next().unwrap()))
- }
- "push" => {
- push(Path::new(&args.next().unwrap()))
- }
- "run" => {
- run(args.next().unwrap(), args.collect())
- }
- cmd => panic!("unknown command: {}", cmd),
- }
-}
-
-fn spawn_emulator(rootfs: &Path, tmpdir: &Path) {
- // Generate a new rootfs image now that we've updated the test server
- // executable. This is the equivalent of:
- //
- // find $rootfs -print 0 | cpio --null -o --format=newc > rootfs.img
- let rootfs_img = tmpdir.join("rootfs.img");
- let mut cmd = Command::new("cpio");
- cmd.arg("--null")
- .arg("-o")
- .arg("--format=newc")
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .current_dir(rootfs);
- let mut child = t!(cmd.spawn());
- let mut stdin = child.stdin.take().unwrap();
- let rootfs = rootfs.to_path_buf();
- thread::spawn(move || add_files(&mut stdin, &rootfs, &rootfs));
- t!(io::copy(&mut child.stdout.take().unwrap(),
- &mut t!(File::create(&rootfs_img))));
- assert!(t!(child.wait()).success());
-
- // Start up the emulator, in the background
- let mut cmd = Command::new("qemu-system-arm");
- cmd.arg("-M").arg("vexpress-a15")
- .arg("-m").arg("1024")
- .arg("-kernel").arg("/tmp/zImage")
- .arg("-initrd").arg(&rootfs_img)
- .arg("-dtb").arg("/tmp/vexpress-v2p-ca15-tc1.dtb")
- .arg("-append").arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init")
- .arg("-nographic")
- .arg("-redir").arg("tcp:12345::12345");
- t!(cmd.spawn());
-
- // Wait for the emulator to come online
- loop {
- let dur = Duration::from_millis(100);
- if let Ok(mut client) = TcpStream::connect("127.0.0.1:12345") {
- t!(client.set_read_timeout(Some(dur)));
- t!(client.set_write_timeout(Some(dur)));
- if client.write_all(b"ping").is_ok() {
- let mut b = [0; 4];
- if client.read_exact(&mut b).is_ok() {
- break
- }
- }
- }
- thread::sleep(dur);
- }
-
- fn add_files(w: &mut Write, root: &Path, cur: &Path) {
- for entry in t!(cur.read_dir()) {
- let entry = t!(entry);
- let path = entry.path();
- let to_print = path.strip_prefix(root).unwrap();
- t!(write!(w, "{}\u{0}", to_print.to_str().unwrap()));
- if t!(entry.file_type()).is_dir() {
- add_files(w, root, &path);
- }
- }
- }
-}
-
-fn push(path: &Path) {
- let client = t!(TcpStream::connect("127.0.0.1:12345"));
- let mut client = BufWriter::new(client);
- t!(client.write_all(b"push"));
- t!(client.write_all(path.file_name().unwrap().to_str().unwrap().as_bytes()));
- t!(client.write_all(&[0]));
- let mut file = t!(File::open(path));
- t!(io::copy(&mut file, &mut client));
- t!(client.flush());
- println!("done pushing {:?}", path);
-}
-
-fn run(files: String, args: Vec<String>) {
- let client = t!(TcpStream::connect("127.0.0.1:12345"));
- let mut client = BufWriter::new(client);
- t!(client.write_all(b"run "));
-
- // Send over the args
- for arg in args {
- t!(client.write_all(arg.as_bytes()));
- t!(client.write_all(&[0]));
- }
- t!(client.write_all(&[0]));
-
- // Send over env vars
- for (k, v) in env::vars() {
- if k != "PATH" && k != "LD_LIBRARY_PATH" {
- t!(client.write_all(k.as_bytes()));
- t!(client.write_all(&[0]));
- t!(client.write_all(v.as_bytes()));
- t!(client.write_all(&[0]));
- }
- }
- t!(client.write_all(&[0]));
-
- // Send over support libraries
- let mut files = files.split(':');
- let exe = files.next().unwrap();
- for file in files.map(Path::new) {
- t!(client.write_all(file.file_name().unwrap().to_str().unwrap().as_bytes()));
- t!(client.write_all(&[0]));
- send(&file, &mut client);
- }
- t!(client.write_all(&[0]));
-
- // Send over the client executable as the last piece
- send(exe.as_ref(), &mut client);
-
- println!("uploaded {:?}, waiting for result", exe);
-
- // Ok now it's time to read all the output. We're receiving "frames"
- // representing stdout/stderr, so we decode all that here.
- let mut header = [0; 5];
- let mut stderr_done = false;
- let mut stdout_done = false;
- let mut client = t!(client.into_inner());
- let mut stdout = io::stdout();
- let mut stderr = io::stderr();
- while !stdout_done || !stderr_done {
- t!(client.read_exact(&mut header));
- let amt = ((header[1] as u64) << 24) |
- ((header[2] as u64) << 16) |
- ((header[3] as u64) << 8) |
- ((header[4] as u64) << 0);
- if header[0] == 0 {
- if amt == 0 {
- stdout_done = true;
- } else {
- t!(io::copy(&mut (&mut client).take(amt), &mut stdout));
- t!(stdout.flush());
- }
- } else {
- if amt == 0 {
- stderr_done = true;
- } else {
- t!(io::copy(&mut (&mut client).take(amt), &mut stderr));
- t!(stderr.flush());
- }
- }
- }
-
- // Finally, read out the exit status
- let mut status = [0; 5];
- t!(client.read_exact(&mut status));
- let code = ((status[1] as i32) << 24) |
- ((status[2] as i32) << 16) |
- ((status[3] as i32) << 8) |
- ((status[4] as i32) << 0);
- if status[0] == 0 {
- std::process::exit(code);
- } else {
- println!("died due to signal {}", code);
- std::process::exit(3);
- }
-}
-
-fn send(path: &Path, dst: &mut Write) {
- let mut file = t!(File::open(&path));
- let amt = t!(file.metadata()).len();
- t!(dst.write_all(&[
- (amt >> 24) as u8,
- (amt >> 16) as u8,
- (amt >> 8) as u8,
- (amt >> 0) as u8,
- ]));
- t!(io::copy(&mut file, dst));
-}
+++ /dev/null
-[package]
-name = "qemu-test-server"
-version = "0.1.0"
-authors = ["The Rust Project Developers"]
-
-[dependencies]
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-/// This is a small server which is intended to run inside of an emulator. This
-/// server pairs with the `qemu-test-client` program in this repository. The
-/// `qemu-test-client` connects to this server over a TCP socket and performs
-/// work such as:
-///
-/// 1. Pushing shared libraries to the server
-/// 2. Running tests through the server
-///
-/// The server supports running tests concurrently and also supports tests
-/// themselves having support libraries. All data over the TCP sockets is in a
-/// basically custom format suiting our needs.
-
-use std::fs::{self, File, Permissions};
-use std::io::prelude::*;
-use std::io::{self, BufReader};
-use std::net::{TcpListener, TcpStream};
-use std::os::unix::prelude::*;
-use std::sync::{Arc, Mutex};
-use std::path::Path;
-use std::str;
-use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
-use std::thread;
-use std::process::{Command, Stdio};
-
-macro_rules! t {
- ($e:expr) => (match $e {
- Ok(e) => e,
- Err(e) => panic!("{} failed with {}", stringify!($e), e),
- })
-}
-
-static TEST: AtomicUsize = ATOMIC_USIZE_INIT;
-
-fn main() {
- println!("starting test server");
- let listener = t!(TcpListener::bind("10.0.2.15:12345"));
- println!("listening!");
-
- let work = Path::new("/tmp/work");
- t!(fs::create_dir_all(work));
-
- let lock = Arc::new(Mutex::new(()));
-
- for socket in listener.incoming() {
- let mut socket = t!(socket);
- let mut buf = [0; 4];
- t!(socket.read_exact(&mut buf));
- if &buf[..] == b"ping" {
- t!(socket.write_all(b"pong"));
- } else if &buf[..] == b"push" {
- handle_push(socket, work);
- } else if &buf[..] == b"run " {
- let lock = lock.clone();
- thread::spawn(move || handle_run(socket, work, &lock));
- } else {
- panic!("unknown command {:?}", buf);
- }
- }
-}
-
-fn handle_push(socket: TcpStream, work: &Path) {
- let mut reader = BufReader::new(socket);
- let mut filename = Vec::new();
- t!(reader.read_until(0, &mut filename));
- filename.pop(); // chop off the 0
- let filename = t!(str::from_utf8(&filename));
-
- let path = work.join(filename);
- t!(io::copy(&mut reader, &mut t!(File::create(&path))));
- t!(fs::set_permissions(&path, Permissions::from_mode(0o755)));
-}
-
-struct RemoveOnDrop<'a> {
- inner: &'a Path,
-}
-
-impl<'a> Drop for RemoveOnDrop<'a> {
- fn drop(&mut self) {
- t!(fs::remove_dir_all(self.inner));
- }
-}
-
-fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) {
- let mut arg = Vec::new();
- let mut reader = BufReader::new(socket);
-
- // Allocate ourselves a directory that we'll delete when we're done to save
- // space.
- let n = TEST.fetch_add(1, Ordering::SeqCst);
- let path = work.join(format!("test{}", n));
- let exe = path.join("exe");
- t!(fs::create_dir(&path));
- let _a = RemoveOnDrop { inner: &path };
-
- // First up we'll get a list of arguments delimited with 0 bytes. An empty
- // argument means that we're done.
- let mut cmd = Command::new(&exe);
- while t!(reader.read_until(0, &mut arg)) > 1 {
- cmd.arg(t!(str::from_utf8(&arg[..arg.len() - 1])));
- arg.truncate(0);
- }
-
- // Next we'll get a bunch of env vars in pairs delimited by 0s as well
- arg.truncate(0);
- while t!(reader.read_until(0, &mut arg)) > 1 {
- let key_len = arg.len() - 1;
- let val_len = t!(reader.read_until(0, &mut arg)) - 1;
- {
- let key = &arg[..key_len];
- let val = &arg[key_len + 1..][..val_len];
- let key = t!(str::from_utf8(key));
- let val = t!(str::from_utf8(val));
- cmd.env(key, val);
- }
- arg.truncate(0);
- }
-
- // The section of code from here down to where we drop the lock is going to
- // be a critical section for us. On Linux you can't execute a file which is
- // open somewhere for writing, as you'll receive the error "text file busy".
- // Now here we never have the text file open for writing when we spawn it,
- // so why do we still need a critical section?
- //
- // Process spawning first involves a `fork` on Unix, which clones all file
- // descriptors into the child process. This means that it's possible for us
- // to open the file for writing (as we're downloading it), then some other
- // thread forks, then we close the file and try to exec. At that point the
- // other thread created a child process with the file open for writing, and
- // we attempt to execute it, so we get an error.
- //
- // This race is resolve by ensuring that only one thread can writ ethe file
- // and spawn a child process at once. Kinda an unfortunate solution, but we
- // don't have many other choices with this sort of setup!
- //
- // In any case the lock is acquired here, before we start writing any files.
- // It's then dropped just after we spawn the child. That way we don't lock
- // the execution of the child, just the creation of its files.
- let lock = lock.lock();
-
- // Next there's a list of dynamic libraries preceded by their filenames.
- arg.truncate(0);
- while t!(reader.read_until(0, &mut arg)) > 1 {
- let dst = path.join(t!(str::from_utf8(&arg[..arg.len() - 1])));
- let amt = read_u32(&mut reader) as u64;
- t!(io::copy(&mut reader.by_ref().take(amt),
- &mut t!(File::create(&dst))));
- t!(fs::set_permissions(&dst, Permissions::from_mode(0o755)));
- arg.truncate(0);
- }
-
- // Finally we'll get the binary. The other end will tell us how big the
- // binary is and then we'll download it all to the exe path we calculated
- // earlier.
- let amt = read_u32(&mut reader) as u64;
- t!(io::copy(&mut reader.by_ref().take(amt),
- &mut t!(File::create(&exe))));
- t!(fs::set_permissions(&exe, Permissions::from_mode(0o755)));
-
- // Support libraries were uploaded to `work` earlier, so make sure that's
- // in `LD_LIBRARY_PATH`. Also include our own current dir which may have
- // had some libs uploaded.
- cmd.env("LD_LIBRARY_PATH",
- format!("{}:{}", work.display(), path.display()));
-
- // Spawn the child and ferry over stdout/stderr to the socket in a framed
- // fashion (poor man's style)
- let mut child = t!(cmd.stdin(Stdio::null())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .spawn());
- drop(lock);
- let mut stdout = child.stdout.take().unwrap();
- let mut stderr = child.stderr.take().unwrap();
- let socket = Arc::new(Mutex::new(reader.into_inner()));
- let socket2 = socket.clone();
- let thread = thread::spawn(move || my_copy(&mut stdout, 0, &*socket2));
- my_copy(&mut stderr, 1, &*socket);
- thread.join().unwrap();
-
- // Finally send over the exit status.
- let status = t!(child.wait());
- let (which, code) = match status.code() {
- Some(n) => (0, n),
- None => (1, status.signal().unwrap()),
- };
- t!(socket.lock().unwrap().write_all(&[
- which,
- (code >> 24) as u8,
- (code >> 16) as u8,
- (code >> 8) as u8,
- (code >> 0) as u8,
- ]));
-}
-
-fn my_copy(src: &mut Read, which: u8, dst: &Mutex<Write>) {
- let mut b = [0; 1024];
- loop {
- let n = t!(src.read(&mut b));
- let mut dst = dst.lock().unwrap();
- t!(dst.write_all(&[
- which,
- (n >> 24) as u8,
- (n >> 16) as u8,
- (n >> 8) as u8,
- (n >> 0) as u8,
- ]));
- if n > 0 {
- t!(dst.write_all(&b[..n]));
- } else {
- break
- }
- }
-}
-
-fn read_u32(r: &mut Read) -> u32 {
- let mut len = [0; 4];
- t!(r.read_exact(&mut len));
- ((len[0] as u32) << 24) |
- ((len[1] as u32) << 16) |
- ((len[2] as u32) << 8) |
- ((len[3] as u32) << 0)
-}
--- /dev/null
+[package]
+name = "remote-test-client"
+version = "0.1.0"
+authors = ["The Rust Project Developers"]
+
+[dependencies]
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// This is a small client program intended to pair with `remote-test-server` in
+/// this repository. This client connects to the server over TCP and is used to
+/// push artifacts and run tests on the server instead of locally.
+///
+/// Here is also where we bake in the support to spawn the QEMU emulator as
+/// well.
+
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::{self, BufWriter};
+use std::net::TcpStream;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Stdio};
+use std::thread;
+use std::time::Duration;
+
+// `t!` unwraps a `Result`, panicking with the stringified failing expression
+// and the error on failure. A tiny stand-in for `?` in this binary, where
+// any I/O failure is fatal anyway.
+macro_rules! t {
+ ($e:expr) => (match $e {
+ Ok(e) => e,
+ Err(e) => panic!("{} failed with {}", stringify!($e), e),
+ })
+}
+
+// Entry point: dispatch on the first CLI argument to one of three
+// subcommands. Panics (via `unwrap`) on a missing argument and on an
+// unknown subcommand — acceptable for this internal test harness tool.
+fn main() {
+ let mut args = env::args().skip(1);
+
+ match &args.next().unwrap()[..] {
+ "spawn-emulator" => {
+ // args: <target-triple> <server-binary> <tmpdir> [rootfs]
+ // (rootfs is optional; only needed for non-Android targets)
+ spawn_emulator(&args.next().unwrap(),
+ Path::new(&args.next().unwrap()),
+ Path::new(&args.next().unwrap()),
+ args.next().map(|s| s.into()))
+ }
+ "push" => {
+ // args: <file-to-upload>
+ push(Path::new(&args.next().unwrap()))
+ }
+ "run" => {
+ // args: <exe[:support-lib...]> [program args...]
+ run(args.next().unwrap(), args.collect())
+ }
+ cmd => panic!("unknown command: {}", cmd),
+ }
+}
+
+// Boot the device that will run tests: an adb-managed Android emulator for
+// `android` targets, otherwise a QEMU guest built from `rootfs`. In both
+// cases the server ends up reachable on 127.0.0.1:12345, and this function
+// blocks until that server answers a ping.
+fn spawn_emulator(target: &str,
+ server: &Path,
+ tmpdir: &Path,
+ rootfs: Option<PathBuf>) {
+ if target.contains("android") {
+ start_android_emulator(server);
+ } else {
+ // QEMU needs a root filesystem image to boot from.
+ let rootfs = rootfs.as_ref().expect("need rootfs on non-android");
+ start_qemu_emulator(rootfs, server, tmpdir);
+ }
+
+ // Wait for the emulator to come online: poll every 100ms until a
+ // ping/pong round-trip with the test server succeeds. Short read/write
+ // timeouts keep a half-open connection from stalling the poll loop.
+ loop {
+ let dur = Duration::from_millis(100);
+ if let Ok(mut client) = TcpStream::connect("127.0.0.1:12345") {
+ t!(client.set_read_timeout(Some(dur)));
+ t!(client.set_write_timeout(Some(dur)));
+ if client.write_all(b"ping").is_ok() {
+ let mut b = [0; 4];
+ if client.read_exact(&mut b).is_ok() {
+ break
+ }
+ }
+ }
+ thread::sleep(dur);
+ }
+}
+
+// Prepare an already-booting Android device via adb: wait for it, upload
+// the test server binary, forward the server's TCP port to the host, and
+// launch the server. The final `adb shell` is spawned detached (not
+// waited on) since the server runs for the lifetime of the test session.
+fn start_android_emulator(server: &Path) {
+ println!("waiting for device to come online");
+ let status = Command::new("adb")
+ .arg("wait-for-device")
+ .status()
+ .unwrap();
+ assert!(status.success());
+
+ println!("pushing server");
+ let status = Command::new("adb")
+ .arg("push")
+ .arg(server)
+ .arg("/data/tmp/testd")
+ .status()
+ .unwrap();
+ assert!(status.success());
+
+ println!("forwarding tcp");
+ // Map host 127.0.0.1:12345 onto the device's port 12345 so the client
+ // can talk to the server without knowing the emulator's address.
+ let status = Command::new("adb")
+ .arg("forward")
+ .arg("tcp:12345")
+ .arg("tcp:12345")
+ .status()
+ .unwrap();
+ assert!(status.success());
+
+ println!("executing server");
+ Command::new("adb")
+ .arg("shell")
+ .arg("/data/tmp/testd")
+ .spawn()
+ .unwrap();
+}
+
+// Build a fresh initramfs containing the test server and boot it in QEMU.
+// The guest's port 12345 is redirected to the host so the client can reach
+// the server at 127.0.0.1:12345.
+fn start_qemu_emulator(rootfs: &Path, server: &Path, tmpdir: &Path) {
+ // Generate a new rootfs image now that we've updated the test server
+ // executable. This is the equivalent of:
+ //
+ // find $rootfs -print0 | cpio --null -o --format=newc > rootfs.img
+ t!(fs::copy(server, rootfs.join("testd")));
+ let rootfs_img = tmpdir.join("rootfs.img");
+ let mut cmd = Command::new("cpio");
+ cmd.arg("--null")
+ .arg("-o")
+ .arg("--format=newc")
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .current_dir(rootfs);
+ let mut child = t!(cmd.spawn());
+ let mut stdin = child.stdin.take().unwrap();
+ let rootfs = rootfs.to_path_buf();
+ // Feed the NUL-delimited file list to cpio's stdin from a helper thread
+ // while this thread drains cpio's stdout into the image file; doing both
+ // on one thread could deadlock on full pipe buffers.
+ thread::spawn(move || add_files(&mut stdin, &rootfs, &rootfs));
+ t!(io::copy(&mut child.stdout.take().unwrap(),
+ &mut t!(File::create(&rootfs_img))));
+ assert!(t!(child.wait()).success());
+
+ // Start up the emulator, in the background. Kernel image and device tree
+ // are expected at fixed /tmp paths — presumably staged by CI beforehand
+ // (not visible here; confirm against the CI scripts).
+ let mut cmd = Command::new("qemu-system-arm");
+ cmd.arg("-M").arg("vexpress-a15")
+ .arg("-m").arg("1024")
+ .arg("-kernel").arg("/tmp/zImage")
+ .arg("-initrd").arg(&rootfs_img)
+ .arg("-dtb").arg("/tmp/vexpress-v2p-ca15-tc1.dtb")
+ .arg("-append").arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init")
+ .arg("-nographic")
+ .arg("-redir").arg("tcp:12345::12345")
+ t!(cmd.spawn());
+
+ // Recursively emit every path under `root`, NUL-terminated and relative
+ // to `root`, in the format `cpio --null` expects on stdin.
+ fn add_files(w: &mut Write, root: &Path, cur: &Path) {
+ for entry in t!(cur.read_dir()) {
+ let entry = t!(entry);
+ let path = entry.path();
+ let to_print = path.strip_prefix(root).unwrap();
+ t!(write!(w, "{}\u{0}", to_print.to_str().unwrap()));
+ if t!(entry.file_type()).is_dir() {
+ add_files(w, root, &path);
+ }
+ }
+ }
+}
+
+// Upload a single file to the test server: send the `push` command followed
+// by the framed file (name + 4-byte length + contents, see `send`), then
+// block until the server acknowledges receipt.
+fn push(path: &Path) {
+ let client = t!(TcpStream::connect("127.0.0.1:12345"));
+ let mut client = BufWriter::new(client);
+ t!(client.write_all(b"push"));
+ send(path, &mut client);
+ t!(client.flush());
+
+ // Wait for an acknowledgement that all the data was received. No idea
+ // why this is necessary, seems like it shouldn't be!
+ let mut client = client.into_inner().unwrap();
+ let mut buf = [0; 4];
+ t!(client.read_exact(&mut buf));
+ assert_eq!(&buf, b"ack ");
+ println!("done pushing {:?}", path);
+}
+
+// Execute a test on the remote server. `files` is a colon-separated list
+// whose first entry is the executable and whose remaining entries are
+// support libraries to upload alongside it. The wire protocol, in order:
+// "run " command, NUL-delimited args (empty arg terminates), NUL-delimited
+// env key/value pairs (empty key terminates), framed support libraries
+// (empty name terminates), then the framed executable. The server streams
+// back stdout/stderr frames and finally a 5-byte exit status.
+fn run(files: String, args: Vec<String>) {
+ let client = t!(TcpStream::connect("127.0.0.1:12345"));
+ let mut client = BufWriter::new(client);
+ t!(client.write_all(b"run "));
+
+ // Send over the args
+ for arg in args {
+ t!(client.write_all(arg.as_bytes()));
+ t!(client.write_all(&[0]));
+ }
+ t!(client.write_all(&[0]));
+
+ // Send over env vars
+ //
+ // Don't send over *everything* though as some env vars are set by and used
+ // by the client.
+ for (k, v) in env::vars() {
+ match &k[..] {
+ "PATH" |
+ "LD_LIBRARY_PATH" |
+ "PWD" => continue,
+ _ => {}
+ }
+ t!(client.write_all(k.as_bytes()));
+ t!(client.write_all(&[0]));
+ t!(client.write_all(v.as_bytes()));
+ t!(client.write_all(&[0]));
+ }
+ t!(client.write_all(&[0]));
+
+ // Send over support libraries
+ let mut files = files.split(':');
+ let exe = files.next().unwrap();
+ for file in files.map(Path::new) {
+ send(&file, &mut client);
+ }
+ t!(client.write_all(&[0]));
+
+ // Send over the client executable as the last piece
+ send(exe.as_ref(), &mut client);
+
+ println!("uploaded {:?}, waiting for result", exe);
+
+ // Ok now it's time to read all the output. We're receiving "frames"
+ // representing stdout/stderr, so we decode all that here. Each frame is
+ // 5 bytes of header — stream id (0 = stdout, nonzero = stderr) followed
+ // by a big-endian u32 payload length — then the payload itself. A
+ // zero-length frame marks that stream as finished.
+ let mut header = [0; 5];
+ let mut stderr_done = false;
+ let mut stdout_done = false;
+ let mut client = t!(client.into_inner());
+ let mut stdout = io::stdout();
+ let mut stderr = io::stderr();
+ while !stdout_done || !stderr_done {
+ t!(client.read_exact(&mut header));
+ let amt = ((header[1] as u64) << 24) |
+ ((header[2] as u64) << 16) |
+ ((header[3] as u64) << 8) |
+ ((header[4] as u64) << 0);
+ if header[0] == 0 {
+ if amt == 0 {
+ stdout_done = true;
+ } else {
+ t!(io::copy(&mut (&mut client).take(amt), &mut stdout));
+ t!(stdout.flush());
+ }
+ } else {
+ if amt == 0 {
+ stderr_done = true;
+ } else {
+ t!(io::copy(&mut (&mut client).take(amt), &mut stderr));
+ t!(stderr.flush());
+ }
+ }
+ }
+
+ // Finally, read out the exit status: 1 tag byte (0 = exited normally,
+ // nonzero = killed by signal) followed by a big-endian i32. Mirror a
+ // normal exit code directly; report a signal death and exit 3.
+ let mut status = [0; 5];
+ t!(client.read_exact(&mut status));
+ let code = ((status[1] as i32) << 24) |
+ ((status[2] as i32) << 16) |
+ ((status[3] as i32) << 8) |
+ ((status[4] as i32) << 0);
+ if status[0] == 0 {
+ std::process::exit(code);
+ } else {
+ println!("died due to signal {}", code);
+ std::process::exit(3);
+ }
+}
+
+// Write one framed file to `dst`: NUL-terminated file name (basename only),
+// then the file length as a big-endian u32, then the raw contents. This is
+// the framing the server's receive side is expected to decode.
+fn send(path: &Path, dst: &mut Write) {
+ t!(dst.write_all(path.file_name().unwrap().to_str().unwrap().as_bytes()));
+ t!(dst.write_all(&[0]));
+ let mut file = t!(File::open(&path));
+ let amt = t!(file.metadata()).len();
+ t!(dst.write_all(&[
+ (amt >> 24) as u8,
+ (amt >> 16) as u8,
+ (amt >> 8) as u8,
+ (amt >> 0) as u8,
+ ]));
+ t!(io::copy(&mut file, dst));
+}
--- /dev/null
+[package]
+name = "remote-test-server"
+version = "0.1.0"
+authors = ["The Rust Project Developers"]
+
+[dependencies]
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// This is a small server which is intended to run inside of an emulator. This
+/// server pairs with the `remote-test-client` program in this repository. The
+/// `remote-test-client` connects to this server over a TCP socket and performs
+/// work such as:
+///
+/// 1. Pushing shared libraries to the server
+/// 2. Running tests through the server
+///
+/// The server supports running tests concurrently and also supports tests
+/// themselves having support libraries. All data over the TCP sockets is in a
+/// basically custom format suiting our needs.
+
+use std::cmp;
+use std::fs::{self, File, Permissions};
+use std::io::prelude::*;
+use std::io::{self, BufReader};
+use std::net::{TcpListener, TcpStream};
+use std::os::unix::prelude::*;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Stdio};
+use std::str;
+use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
+use std::sync::{Arc, Mutex};
+use std::thread;
+
+// `t!` unwraps a `Result`, panicking with the stringified failing expression
+// and the error on failure — same helper as in the client binary.
+macro_rules! t {
+ ($e:expr) => (match $e {
+ Ok(e) => e,
+ Err(e) => panic!("{} failed with {}", stringify!($e), e),
+ })
+}
+
+static TEST: AtomicUsize = ATOMIC_USIZE_INIT;
+
+// Server entry point: bind the control socket, create the work directory,
+// then loop accepting connections and dispatching on the 4-byte command
+// ("ping"/"push"/"run "). Pushes are handled inline; runs get their own
+// thread so tests can execute concurrently.
+fn main() {
+ println!("starting test server");
+ // On Android the client reaches us through `adb forward`, so bind all
+ // interfaces; under QEMU user networking the guest's address is
+ // 10.0.2.15 and the host redirects into it.
+ let (listener, work) = if cfg!(target_os = "android") {
+ (t!(TcpListener::bind("0.0.0.0:12345")), "/data/tmp/work")
+ } else {
+ (t!(TcpListener::bind("10.0.2.15:12345")), "/tmp/work")
+ };
+ println!("listening!");
+
+ let work = Path::new(work);
+ t!(fs::create_dir_all(work));
+
+ // Serializes file-write + spawn critical sections across run threads
+ // (shared with `handle_run`).
+ let lock = Arc::new(Mutex::new(()));
+
+ for socket in listener.incoming() {
+ let mut socket = t!(socket);
+ let mut buf = [0; 4];
+ // A client that connects and drops before sending a command (e.g.
+ // the liveness probe racing shutdown) shouldn't kill the server.
+ if socket.read_exact(&mut buf).is_err() {
+ continue
+ }
+ if &buf[..] == b"ping" {
+ t!(socket.write_all(b"pong"));
+ } else if &buf[..] == b"push" {
+ handle_push(socket, work);
+ } else if &buf[..] == b"run " {
+ let lock = lock.clone();
+ thread::spawn(move || handle_run(socket, work, &lock));
+ } else {
+ panic!("unknown command {:?}", buf);
+ }
+ }
+}
+
+// Handles a `push` request: receives one file into the shared `work`
+// directory and replies with a 4-byte "ack " once it is fully on disk.
+fn handle_push(socket: TcpStream, work: &Path) {
+    let mut reader = BufReader::new(socket);
+    recv(&work, &mut reader);
+
+    // Reclaim the raw socket from the buffered reader to send the reply.
+    let mut socket = reader.into_inner();
+    t!(socket.write_all(b"ack "));
+}
+
+// Guard that removes the directory tree at `inner` when it goes out of
+// scope, so each test's scratch directory is cleaned up and space on the
+// (typically small) emulated filesystem is reclaimed.
+struct RemoveOnDrop<'a> {
+    inner: &'a Path,
+}
+
+impl<'a> Drop for RemoveOnDrop<'a> {
+    fn drop(&mut self) {
+        // Panics via `t!` if removal fails (e.g. a file is still held open).
+        t!(fs::remove_dir_all(self.inner));
+    }
+}
+
+// Handles a `run ` request: receives the argument list, environment
+// variables, support libraries, and finally the test binary itself over
+// `socket`; spawns the binary in a fresh scratch directory; streams its
+// stdout/stderr back over the socket in framed form; and reports the exit
+// status (or signal) in a trailing 5-byte record.
+fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) {
+    let mut arg = Vec::new();
+    let mut reader = BufReader::new(socket);
+
+    // Allocate ourselves a directory that we'll delete when we're done to save
+    // space.
+    let n = TEST.fetch_add(1, Ordering::SeqCst);
+    let path = work.join(format!("test{}", n));
+    t!(fs::create_dir(&path));
+    let _a = RemoveOnDrop { inner: &path };
+
+    // First up we'll get a list of arguments delimited with 0 bytes. An empty
+    // argument means that we're done.
+    let mut args = Vec::new();
+    while t!(reader.read_until(0, &mut arg)) > 1 {
+        args.push(t!(str::from_utf8(&arg[..arg.len() - 1])).to_string());
+        arg.truncate(0);
+    }
+
+    // Next we'll get a bunch of env vars in pairs delimited by 0s as well
+    let mut env = Vec::new();
+    arg.truncate(0);
+    while t!(reader.read_until(0, &mut arg)) > 1 {
+        let key_len = arg.len() - 1;
+        let val_len = t!(reader.read_until(0, &mut arg)) - 1;
+        {
+            // `arg` now holds "key\0val\0"; slice out each half, excluding
+            // the NUL terminators.
+            let key = &arg[..key_len];
+            let val = &arg[key_len + 1..][..val_len];
+            let key = t!(str::from_utf8(key)).to_string();
+            let val = t!(str::from_utf8(val)).to_string();
+            env.push((key, val));
+        }
+        arg.truncate(0);
+    }
+
+    // The section of code from here down to where we drop the lock is going to
+    // be a critical section for us. On Linux you can't execute a file which is
+    // open somewhere for writing, as you'll receive the error "text file busy".
+    // Now here we never have the text file open for writing when we spawn it,
+    // so why do we still need a critical section?
+    //
+    // Process spawning first involves a `fork` on Unix, which clones all file
+    // descriptors into the child process. This means that it's possible for us
+    // to open the file for writing (as we're downloading it), then some other
+    // thread forks, then we close the file and try to exec. At that point the
+    // other thread created a child process with the file open for writing, and
+    // we attempt to execute it, so we get an error.
+    //
+    // This race is resolved by ensuring that only one thread can write the
+    // file and spawn a child process at once. Kinda an unfortunate solution,
+    // but we don't have many other choices with this sort of setup!
+    //
+    // In any case the lock is acquired here, before we start writing any files.
+    // It's then dropped just after we spawn the child. That way we don't lock
+    // the execution of the child, just the creation of its files.
+    //
+    // NOTE(review): the `LockResult` is held rather than unwrapped; the guard
+    // inside keeps the mutex held either way, so poisoning is ignored here.
+    let lock = lock.lock();
+
+    // Next there's a list of dynamic libraries preceded by their filenames.
+    // A leading 0 byte (peeked via `fill_buf`) terminates the list.
+    while t!(reader.fill_buf())[0] != 0 {
+        recv(&path, &mut reader);
+    }
+    // Consume the terminating 0 byte.
+    assert_eq!(t!(reader.read(&mut [0])), 1);
+
+    // Finally we'll get the binary. The other end will tell us how big the
+    // binary is and then we'll download it all to the exe path we calculated
+    // earlier.
+    let exe = recv(&path, &mut reader);
+
+    let mut cmd = Command::new(&exe);
+    for arg in args {
+        cmd.arg(arg);
+    }
+    for (k, v) in env {
+        cmd.env(k, v);
+    }
+
+    // Support libraries were uploaded to `work` earlier, so make sure that's
+    // in `LD_LIBRARY_PATH`. Also include our own current dir which may have
+    // had some libs uploaded.
+    cmd.env("LD_LIBRARY_PATH",
+            format!("{}:{}", work.display(), path.display()));
+
+    // Spawn the child and ferry over stdout/stderr to the socket in a framed
+    // fashion (poor man's style)
+    let mut child = t!(cmd.stdin(Stdio::null())
+                          .stdout(Stdio::piped())
+                          .stderr(Stdio::piped())
+                          .spawn());
+    drop(lock);
+    // Stream stdout (tag 0) on a helper thread and stderr (tag 1) on this
+    // one; `my_copy` takes the shared socket lock per frame to avoid
+    // interleaving the two streams mid-frame.
+    let mut stdout = child.stdout.take().unwrap();
+    let mut stderr = child.stderr.take().unwrap();
+    let socket = Arc::new(Mutex::new(reader.into_inner()));
+    let socket2 = socket.clone();
+    let thread = thread::spawn(move || my_copy(&mut stdout, 0, &*socket2));
+    my_copy(&mut stderr, 1, &*socket);
+    thread.join().unwrap();
+
+    // Finally send over the exit status.
+    // Byte 0 distinguishes a normal exit (0, followed by the exit code) from
+    // death by signal (1, followed by the signal number); the value itself is
+    // encoded big-endian in the remaining four bytes.
+    let status = t!(child.wait());
+    let (which, code) = match status.code() {
+        Some(n) => (0, n),
+        None => (1, status.signal().unwrap()),
+    };
+    t!(socket.lock().unwrap().write_all(&[
+        which,
+        (code >> 24) as u8,
+        (code >> 16) as u8,
+        (code >> 8) as u8,
+        (code >> 0) as u8,
+    ]));
+}
+
+// Receives a single file into `dir`. Wire format: a NUL-terminated filename
+// followed by a big-endian 4-byte content length and that many bytes of
+// data. Returns the path the file was written to.
+fn recv<B: BufRead>(dir: &Path, io: &mut B) -> PathBuf {
+    let mut filename = Vec::new();
+    t!(io.read_until(0, &mut filename));
+
+    // We've got some tests with *really* long names. We try to name the test
+    // executable the same on the target as it is on the host to aid with
+    // debugging, but the targets we're emulating are often more restrictive
+    // than the hosts as well.
+    //
+    // To ensure we can run a maximum number of tests without modifications we
+    // just arbitrarily truncate the filename to 50 bytes. That should
+    // hopefully allow us to still identify what's running while staying under
+    // the filesystem limits.
+    let len = cmp::min(filename.len() - 1, 50);
+    let dst = dir.join(t!(str::from_utf8(&filename[..len])));
+    let amt = read_u32(io) as u64;
+    t!(io::copy(&mut io.take(amt),
+                &mut t!(File::create(&dst))));
+    // Mark the file executable so pushed test binaries can be spawned
+    // directly from this path.
+    t!(fs::set_permissions(&dst, Permissions::from_mode(0o755)));
+    return dst
+}
+
+// Ferries everything from `src` into `dst` using a simple framing scheme:
+// each frame is the stream tag `which` (0 = stdout, 1 = stderr at the call
+// sites), a big-endian 4-byte payload length, then the payload. A
+// zero-length frame signals end-of-stream.
+fn my_copy(src: &mut Read, which: u8, dst: &Mutex<Write>) {
+    let mut b = [0; 1024];
+    loop {
+        let n = t!(src.read(&mut b));
+        // Hold the lock across header + payload so a frame is written
+        // atomically with respect to the other copy thread.
+        let mut dst = dst.lock().unwrap();
+        t!(dst.write_all(&[
+            which,
+            (n >> 24) as u8,
+            (n >> 16) as u8,
+            (n >> 8) as u8,
+            (n >> 0) as u8,
+        ]));
+        if n > 0 {
+            t!(dst.write_all(&b[..n]));
+        } else {
+            // Read of 0 bytes means EOF; the zero-length frame just sent
+            // tells the client this stream is done.
+            break
+        }
+    }
+}
+
+// Reads exactly four bytes from `r` and decodes them as a big-endian
+// `u32`. Panics (via `t!`) if the reader can't supply four bytes.
+fn read_u32(r: &mut Read) -> u32 {
+    let mut len = [0; 4];
+    t!(r.read_exact(&mut len));
+    ((len[0] as u32) << 24) |
+    ((len[1] as u32) << 16) |
+    ((len[2] as u32) << 8) |
+    ((len[3] as u32) << 0)
+}
clap = "2.19.3"
[dependencies.mdbook]
-version = "0.0.19"
+version = "0.0.21"
default-features = false
// Build command implementation
fn build(args: &ArgMatches) -> Result<(), Box<Error>> {
- let book_dir = get_book_dir(args);
- let book = MDBook::new(&book_dir).read_config();
+ let book = build_mdbook_struct(args);
let mut book = match args.value_of("dest-dir") {
Some(dest_dir) => book.set_dest(Path::new(dest_dir)),
}
fn test(args: &ArgMatches) -> Result<(), Box<Error>> {
- let book_dir = get_book_dir(args);
- let mut book = MDBook::new(&book_dir).read_config();
+ let mut book = build_mdbook_struct(args);
try!(book.test());
Ok(())
}
+// Constructs an `MDBook` from the directory named on the command line,
+// loads its configuration, and disables implicit file creation. Shared by
+// the `build` and `test` subcommands.
+fn build_mdbook_struct(args: &ArgMatches) -> mdbook::MDBook {
+    let book_dir = get_book_dir(args);
+    let mut book = MDBook::new(&book_dir).read_config();
+
+    // By default mdbook will attempt to create non-existent files referenced
+    // from SUMMARY.md files. This is problematic on CI where we mount the
+    // source directory as readonly. To avoid any issues, we'll disable
+    // mdbook's implicit file creation feature.
+    book.create_missing = false;
+
+    book
+}
+
fn get_book_dir(args: &ArgMatches) -> PathBuf {
if let Some(dir) = args.value_of("dir") {
// Check if path is relative from current dir, or absolute...