Chris C Cerami <chrisccerami@users.noreply.github.com> Chris C Cerami <chrisccerami@gmail.com>
Chris Pressey <cpressey@gmail.com>
Chris Thorn <chris@thorn.co> Chris Thorn <thorn@thoughtbot.com>
+Chris Vittal <christopher.vittal@gmail.com> Christopher Vittal <christopher.vittal@gmail.com>
Clark Gaebel <cg.wowus.cg@gmail.com> <cgaebel@mozilla.com>
Clinton Ryan <clint.ryan3@gmail.com>
Corey Richardson <corey@octayn.net> Elaine "See More" Nemo <corey@octayn.net>
# Whether to deny warnings in crates
#deny-warnings = true
+# Print backtrace on internal compiler errors during bootstrap
+#backtrace-on-ice = false
+
# =============================================================================
# Options for specific targets
#
[[package]]
name = "clippy"
-version = "0.0.195"
+version = "0.0.197"
dependencies = [
"cargo_metadata 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy-mini-macro-test 0.2.0",
- "clippy_lints 0.0.195",
+ "clippy_lints 0.0.197",
"compiletest_rs 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"derive-new 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "clippy_lints"
-version = "0.0.195"
+version = "0.0.197"
dependencies = [
+ "cargo_metadata 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"if_chain 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
dependencies = [
"cargo 0.28.0",
"cargo_metadata 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "clippy_lints 0.0.195",
+ "clippy_lints 0.0.197",
"env_logger 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"json 0.11.13 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"racer 2.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rls-analysis 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-analysis 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-blacklist 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-data 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-rustc 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "rls-analysis"
-version = "0.12.0"
+version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"derive-new 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"checksum regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "aec3f58d903a7d2a9dc2bf0e41a746f4530e0cab6b615494e058f67a3ef947fb"
"checksum regex-syntax 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "bd90079345f4a4c3409214734ae220fd773c6f2e8a543d07370c6c1c369cfbfb"
"checksum remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3488ba1b9a2084d38645c4c08276a1752dcbf2c7130d74f1569681ad5d2799c5"
-"checksum rls-analysis 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b339561571efd8d2d4ae1b16eb27f760cad46907d49e9726242844dbbde14e79"
+"checksum rls-analysis 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a41488cf5dc99d6ce383319d2978756567b70d4ed0539eb0d9ce07763e732e46"
"checksum rls-blacklist 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e4a9cc2545ccb7e05b355bfe047b8039a6ec12270d5f3c996b766b340a50f7d2"
"checksum rls-data 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bea04462e94b5512a78499837eecb7db182ff082144cd1b4bc32ef5d43de6510"
"checksum rls-rustc 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "885f66b92757420572cbb02e033d4a9558c7413ca9b7ac206f28fd58ffdb44ea"
# execute tests in the standard library in stage0
./x.py test --stage 0 src/libstd
+ # execute tests in the core and standard library in stage0,
+ # without running doc tests (thus avoiding a dependency on building the compiler)
+ ./x.py test --stage 0 --no-doc src/libcore src/libstd
+
# execute all doc tests
./x.py test src/doc
```
env::join_paths(&dylib_path).unwrap());
let mut maybe_crate = None;
+ // Print backtrace in case of ICE
+ if env::var("RUSTC_BACKTRACE_ON_ICE").is_ok() && env::var("RUST_BACKTRACE").is_err() {
+ cmd.env("RUST_BACKTRACE", "1");
+ }
+
+ cmd.env("RUSTC_BREAK_ON_ICE", "1");
+
if let Some(target) = target {
// The stage0 compiler has a special sysroot distinct from what we
// actually downloaded, so we just always pass the `--sysroot` option.
use install;
use dist;
use util::{exe, libdir, add_lib_path};
-use {Build, Mode};
+use {Build, Mode, DocTests};
use cache::{INTERNER, Interned, Cache};
use check;
use test;
format!("{} {}", env::var("RUSTFLAGS").unwrap_or_default(), extra_args));
}
+ let want_rustdoc = self.doc_tests != DocTests::No;
+
// Customize the compiler we're running. Specify the compiler to cargo
// as our shim and then pass it some various options used to configure
// how the actual compiler itself is called.
.env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
.env("RUSTC_RPATH", self.config.rust_rpath.to_string())
.env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
- .env("RUSTDOC_REAL", if cmd == "doc" || cmd == "test" {
+ .env("RUSTDOC_REAL", if cmd == "doc" || (cmd == "test" && want_rustdoc) {
self.rustdoc(compiler.host)
} else {
PathBuf::from("/path/to/nowhere/rustdoc/not/required")
if let Some(ref error_format) = self.config.rustc_error_format {
cargo.env("RUSTC_ERROR_FORMAT", error_format);
}
- if cmd != "build" && cmd != "check" {
+ if cmd != "build" && cmd != "check" && want_rustdoc {
cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.config.build)));
}
cargo.env("RUSTC_PRINT_STEP_TIMINGS", "1");
}
+ if self.config.backtrace_on_ice {
+ cargo.env("RUSTC_BACKTRACE_ON_ICE", "1");
+ }
+
cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity));
// in std, we want to avoid denying warnings for stage 0 as that makes cfg's painful.
},
]);
}
+
+ #[test]
+ fn test_with_no_doc_stage0() {
+ let mut config = configure(&[], &[]);
+ config.stage = Some(0);
+ config.cmd = Subcommand::Test {
+ paths: vec!["src/libstd".into()],
+ test_args: vec![],
+ rustc_args: vec![],
+ fail_fast: true,
+ doc_tests: DocTests::No,
+ };
+
+ let build = Build::new(config);
+ let mut builder = Builder::new(&build);
+
+ let host = INTERNER.intern_str("A");
+
+ builder.run_step_descriptions(
+ &[StepDescription::from::<test::Crate>()],
+ &["src/libstd".into()],
+ );
+
+ // Ensure we don't build any compiler artifacts.
+ assert!(builder.cache.all::<compile::Rustc>().is_empty());
+ assert_eq!(first(builder.cache.all::<test::Crate>()), &[
+ test::Crate {
+ compiler: Compiler { host, stage: 0 },
+ target: host,
+ mode: Mode::Libstd,
+ test_kind: test::TestKind::Test,
+ krate: INTERNER.intern_str("std"),
+ },
+ ]);
+ }
}
pub dry_run: bool,
pub deny_warnings: bool,
+ pub backtrace_on_ice: bool,
// llvm codegen options
pub llvm_enabled: bool,
wasm_syscall: Option<bool>,
lld: Option<bool>,
deny_warnings: Option<bool>,
+ backtrace_on_ice: Option<bool>,
}
/// TOML representation of how each build target is configured.
config.musl_root = rust.musl_root.clone().map(PathBuf::from);
config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from);
set(&mut config.deny_warnings, rust.deny_warnings.or(flags.warnings));
+ set(&mut config.backtrace_on_ice, rust.backtrace_on_ice);
if let Some(ref backends) = rust.codegen_backends {
config.rust_codegen_backends = backends.iter()
"arm-unknown-linux-musleabi install directory")
v("musl-root-armhf", "target.arm-unknown-linux-musleabihf.musl-root",
"arm-unknown-linux-musleabihf install directory")
+v("musl-root-armv5te", "target.armv5te-unknown-linux-musleabi.musl-root",
+ "armv5te-unknown-linux-musleabi install directory")
v("musl-root-armv7", "target.armv7-unknown-linux-musleabihf.musl-root",
"armv7-unknown-linux-musleabihf install directory")
v("musl-root-aarch64", "target.aarch64-unknown-linux-musl.musl-root",
use getopts::Options;
-use Build;
+use {Build, DocTests};
use config::Config;
use metadata;
use builder::Builder;
test_args: Vec<String>,
rustc_args: Vec<String>,
fail_fast: bool,
- doc_tests: bool,
+ doc_tests: DocTests,
},
Bench {
paths: Vec<PathBuf>,
"extra options to pass the compiler when running tests",
"ARGS",
);
- opts.optflag("", "doc", "run doc tests");
+ opts.optflag("", "no-doc", "do not run doc tests");
+ opts.optflag("", "doc", "only run doc tests");
},
"bench" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); },
"clean" => { opts.optflag("", "all", "clean all build artifacts"); },
test_args: matches.opt_strs("test-args"),
rustc_args: matches.opt_strs("rustc-args"),
fail_fast: !matches.opt_present("no-fail-fast"),
- doc_tests: matches.opt_present("doc"),
+ doc_tests: if matches.opt_present("doc") {
+ DocTests::Only
+ } else if matches.opt_present("no-doc") {
+ DocTests::No
+ } else {
+ DocTests::Yes
+ }
}
}
"bench" => {
}
}
- pub fn doc_tests(&self) -> bool {
+ pub fn doc_tests(&self) -> DocTests {
match *self {
Subcommand::Test { doc_tests, .. } => doc_tests,
- _ => false,
+ _ => DocTests::Yes,
}
}
}
}
pub unsafe fn setup(build: &mut Build) {
- // Tell Windows to not show any UI on errors (such as not finding a required dll
- // during startup or terminating abnormally). This is important for running tests,
- // since some of them use abnormal termination by design.
- // This mode is inherited by all child processes.
- let mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
- SetErrorMode(mode | SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX);
+ // Enable the Windows Error Reporting dialog which msys disables,
+ // so we can JIT debug rustc
+ let mode = SetErrorMode(0);
+ SetErrorMode(mode & !SEM_NOGPFAULTERRORBOX);
// Create a new job object for us to use
let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
host: Interned<String>,
}
+#[derive(PartialEq, Eq, Copy, Clone, Debug)]
+pub enum DocTests {
+ // Default, run normal tests and doc tests.
+ Yes,
+ // Do not run any doc tests.
+ No,
+ // Only run doc tests.
+ Only,
+}
+
/// Global configuration for the build system.
///
/// This structure transitively contains all configuration for the build system.
rustfmt_info: channel::GitInfo,
local_rebuild: bool,
fail_fast: bool,
- doc_tests: bool,
+ doc_tests: DocTests,
verbosity: usize,
// Targets for which to build.
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mod generating output in a different directory.
-#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Mode {
/// Build the standard library, placing output in the "stageN-std" directory.
Libstd,
use native;
use tool::{self, Tool};
use util::{self, dylib_path, dylib_path_var};
-use Mode;
+use {Mode, DocTests};
use toolstate::ToolState;
const ADB_TEST_DIR: &str = "/data/tmp/work";
/// The two modes of the test runner; tests or benchmarks.
-#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
+#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)]
pub enum TestKind {
/// Run `cargo test`
Test,
// Don't build tests dynamically, just a pain to work with
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+ let dir = testdir(builder, compiler.host);
+ t!(fs::create_dir_all(&dir));
+ cargo.env("RUSTFMT_TEST_DIR", dir);
builder.add_rustc_lib_path(compiler, &mut cargo);
host: true
});
-default_test!(RunMake {
+host_test!(RunMake {
path: "src/test/run-make",
mode: "run-make",
suite: "run-make"
// Only pass correct values for these flags for the `run-make` suite as it
// requires that a C++ compiler was configured which isn't always the case.
- if !builder.config.dry_run && suite == "run-make-fulldeps" {
+ if !builder.config.dry_run && mode == "run-make" {
let llvm_components = output(Command::new(&llvm_config).arg("--components"));
let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
cmd.arg("--cc").arg(builder.cc(target))
}
}
}
- if suite == "run-make-fulldeps" && !builder.config.llvm_enabled {
+ if mode == "run-make" && !builder.config.llvm_enabled {
builder.info(
&format!("Ignoring run-make test suite as they generally don't work without LLVM"));
return;
}
- if suite != "run-make-fulldeps" {
+ if mode != "run-make" {
cmd.arg("--cc").arg("")
.arg("--cxx").arg("")
.arg("--cflags").arg("")
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Crate {
- compiler: Compiler,
- target: Interned<String>,
- mode: Mode,
- test_kind: TestKind,
- krate: Interned<String>,
+ pub compiler: Compiler,
+ pub target: Interned<String>,
+ pub mode: Mode,
+ pub test_kind: TestKind,
+ pub krate: Interned<String>,
}
impl Step for Crate {
if test_kind.subcommand() == "test" && !builder.fail_fast {
cargo.arg("--no-fail-fast");
}
- if builder.doc_tests {
- cargo.arg("--doc");
+ match builder.doc_tests {
+ DocTests::Only => {
+ cargo.arg("--doc");
+ }
+ DocTests::No => {
+ cargo.args(&["--lib", "--bins", "--examples", "--tests", "--benches"]);
+ }
+ DocTests::Yes => {}
}
cargo.arg("-p").arg(krate);
use std::fs;
use std::env;
+use std::iter;
use std::path::PathBuf;
use std::process::{Command, exit};
/// right location to run `compiler`.
fn prepare_tool_cmd(&self, compiler: Compiler, cmd: &mut Command) {
let host = &compiler.host;
- let mut paths: Vec<PathBuf> = vec![
+ let mut lib_paths: Vec<PathBuf> = vec![
PathBuf::from(&self.sysroot_libdir(compiler, compiler.host)),
self.cargo_out(compiler, Mode::Tool, *host).join("deps"),
];
}
for path in env::split_paths(v) {
if !curpaths.contains(&path) {
- paths.push(path);
+ lib_paths.push(path);
}
}
}
}
- add_lib_path(paths, cmd);
+
+ // Add the llvm/bin directory to PATH since it contains lots of
+ // useful, platform-independent tools
+ if let Some(llvm_bin_path) = self.llvm_bin_path() {
+ if host.contains("windows") {
+ // On Windows, PATH and the dynamic library path are the same,
+ // so we just add the LLVM bin path to lib_path
+ lib_paths.push(llvm_bin_path);
+ } else {
+ let old_path = env::var_os("PATH").unwrap_or_default();
+ let new_path = env::join_paths(iter::once(llvm_bin_path)
+ .chain(env::split_paths(&old_path)))
+ .expect("Could not add LLVM bin path to PATH");
+ cmd.env("PATH", new_path);
+ }
+ }
+
+ add_lib_path(lib_paths, cmd);
+ }
+
+ fn llvm_bin_path(&self) -> Option<PathBuf> {
+ if self.config.llvm_enabled && !self.config.dry_run {
+ let llvm_config = self.ensure(native::Llvm {
+ target: self.config.build,
+ emscripten: false,
+ });
+
+ // Add the llvm/bin directory to PATH since it contains lots of
+ // useful, platform-independent tools
+ let llvm_bin_path = llvm_config.parent()
+ .expect("Expected llvm-config to be contained in directory");
+ assert!(llvm_bin_path.is_dir());
+ Some(llvm_bin_path.to_path_buf())
+ } else {
+ None
+ }
}
}
COPY scripts/musl.sh /build
RUN env \
+ CC=arm-linux-gnueabi-gcc CFLAGS="-march=armv5te -marm -mfloat-abi=soft" \
+ CXX=arm-linux-gnueabi-g++ CXXFLAGS="-march=armv5te -marm -mfloat-abi=soft" \
+ bash musl.sh armv5te && \
+ env \
CC=arm-linux-gnueabi-gcc CFLAGS="-march=armv6 -marm" \
CXX=arm-linux-gnueabi-g++ CXXFLAGS="-march=armv6 -marm" \
bash musl.sh arm && \
ENV TARGETS=$TARGETS,arm-unknown-linux-musleabi
ENV TARGETS=$TARGETS,arm-unknown-linux-musleabihf
ENV TARGETS=$TARGETS,armv5te-unknown-linux-gnueabi
+ENV TARGETS=$TARGETS,armv5te-unknown-linux-musleabi
ENV TARGETS=$TARGETS,armv7-unknown-linux-musleabihf
ENV TARGETS=$TARGETS,aarch64-unknown-linux-musl
ENV TARGETS=$TARGETS,sparc64-unknown-linux-gnu
CC_sparc64_unknown_linux_gnu=sparc64-linux-gnu-gcc \
CC_x86_64_unknown_redox=x86_64-unknown-redox-gcc \
CC_armv5te_unknown_linux_gnueabi=arm-linux-gnueabi-gcc \
- CFLAGS_armv5te_unknown_linux_gnueabi="-march=armv5te -marm -mfloat-abi=soft"
+ CFLAGS_armv5te_unknown_linux_gnueabi="-march=armv5te -marm -mfloat-abi=soft" \
+ CC_armv5te_unknown_linux_musleabi=arm-linux-gnueabi-gcc \
+ CFLAGS_armv5te_unknown_linux_musleabi="-march=armv5te -marm -mfloat-abi=soft"
ENV RUST_CONFIGURE_ARGS \
+ --musl-root-armv5te=/musl-armv5te \
--musl-root-arm=/musl-arm \
--musl-root-armhf=/musl-armhf \
--musl-root-armv7=/musl-armv7 \
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
+ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1
ENV SCRIPT python2.7 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
-ENV PARALLEL_CHECK 1
+ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1
ENV RUST_CONFIGURE_ARGS \
--build=x86_64-unknown-linux-gnu \
--enable-debug \
# sccache server at the start of the build, but no need to worry if this fails.
SCCACHE_IDLE_TIMEOUT=10800 sccache --start-server || true
-if [ "$PARALLEL_CHECK" != "" ]; then
+if [ "$RUN_CHECK_WITH_PARALLEL_QUERIES" != "" ]; then
$SRC/configure --enable-experimental-parallel-queries
- python2.7 ../x.py check
+ CARGO_INCREMENTAL=0 python2.7 ../x.py check
rm -f config.toml
rm -rf build
fi
-Subproject commit 6237a75790cd2e0ca22961b55f64a83319e73464
+Subproject commit f51127530d46b9acbf4747c859da185e771cfcf3
-Subproject commit 3c56329d1bd9038e5341f1962bcd8d043312a712
+Subproject commit 748a5e6742db4a21c4c630a58087f818828e8a0a
-Subproject commit 76296346e97c3702974d3398fdb94af9e10111a2
+Subproject commit 134f419ee62714590b04712fe6072253bc2a7822
-Subproject commit d5ec87eabe5733cc2348c7dada89fc67c086f391
+Subproject commit eebda16e4b45f2eed4310cf7b9872cc752278163
| ^^^^^^^^^^^^^^^^^^^^
|
```
+
+## incoherent-fundamental-impls
+
+This lint detects potentially-conflicting impls that were erroneously allowed. Some
+example code that triggers this lint:
+
+```rust,ignore
+pub trait Trait1<X> {
+ type Output;
+}
+
+pub trait Trait2<X> {}
+
+pub struct A;
+
+impl<X, T> Trait1<X> for T where T: Trait2<X> {
+ type Output = ();
+}
+
+impl<X> Trait1<Box<X>> for A {
+ type Output = i32;
+}
+```
+
+This will produce:
+
+```text
+error: conflicting implementations of trait `Trait1<std::boxed::Box<_>>` for type `A`: (E0119)
+ --> src/main.rs:13:1
+ |
+9 | impl<X, T> Trait1<X> for T where T: Trait2<X> {
+ | --------------------------------------------- first implementation here
+...
+13 | impl<X> Trait1<Box<X>> for A {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `A`
+ |
+ = note: #[deny(incoherent_fundamental_impls)] on by default
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #46205 <https://github.com/rust-lang/rust/issues/46205>
+ = note: downstream crates may implement trait `Trait2<std::boxed::Box<_>>` for type `A`
+```
|
```
-## incoherent-fundamental-impls
-
-This lint detects potentially-conflicting impls that were erroneously allowed. Some
-example code that triggers this lint:
-
-```rust
-pub trait Trait1<X> {
- type Output;
-}
-
-pub trait Trait2<X> {}
-
-pub struct A;
-
-impl<X, T> Trait1<X> for T where T: Trait2<X> {
- type Output = ();
-}
-
-impl<X> Trait1<Box<X>> for A {
- type Output = i32;
-}
-```
-
-This will produce:
-
-```text
-warning: conflicting implementations of trait `Trait1<std::boxed::Box<_>>` for type `A`: (E0119)
- --> src/main.rs:13:1
- |
-9 | impl<X, T> Trait1<X> for T where T: Trait2<X> {
- | --------------------------------------------- first implementation here
-...
-13 | impl<X> Trait1<Box<X>> for A {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `A`
- |
- = note: #[warn(incoherent_fundamental_impls)] on by default
- = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46205 <https://github.com/rust-lang/rust/issues/46205>
- = note: downstream crates may implement trait `Trait2<std::boxed::Box<_>>` for type `A`
-```
-
## late-bound-lifetime-arguments
This lint detects generic lifetime arguments in path segments with
use core::ops::{Deref, DerefMut};
use core::iter::{FromIterator, FusedIterator};
-use core::mem::{swap, size_of};
+use core::mem::{swap, size_of, ManuallyDrop};
use core::ptr;
use core::fmt;
/// position with the value that was originally removed.
struct Hole<'a, T: 'a> {
data: &'a mut [T],
- /// `elt` is always `Some` from new until drop.
- elt: Option<T>,
+ elt: ManuallyDrop<T>,
pos: usize,
}
let elt = ptr::read(&data[pos]);
Hole {
data,
- elt: Some(elt),
+ elt: ManuallyDrop::new(elt),
pos,
}
}
/// Returns a reference to the element removed.
#[inline]
fn element(&self) -> &T {
- self.elt.as_ref().unwrap()
+ &self.elt
}
/// Returns a reference to the element at `index`.
// fill the hole again
unsafe {
let pos = self.pos;
- ptr::write(self.data.get_unchecked_mut(pos), self.elt.take().unwrap());
+ ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
}
}
}
use core::hash::{Hash, Hasher};
use core::iter::FusedIterator;
use core::marker::{Unpin, Unsize};
-use core::mem::{self, Pin};
+use core::mem::{self, PinMut};
use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState};
use core::ptr::{self, NonNull, Unique};
use core::convert::From;
#[unstable(feature = "pin", issue = "49150")]
impl<T: ?Sized> PinBox<T> {
/// Get a pinned reference to the data in this PinBox.
- pub fn as_pin<'a>(&'a mut self) -> Pin<'a, T> {
- unsafe { Pin::new_unchecked(&mut *self.inner) }
+ pub fn as_pin_mut<'a>(&'a mut self) -> PinMut<'a, T> {
+ unsafe { PinMut::new_unchecked(&mut *self.inner) }
}
/// Get a mutable reference to the data inside this PinBox.
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
- on(crate_local, label="`{Self}` cannot be formatted using `:?`; \
- add `#[derive(Debug)]` or manually implement `{Debug}`"),
+ on(crate_local, label="`{Self}` cannot be formatted using `{{:?}}`",
+ note="add `#[derive(Debug)]` or manually implement `{Debug}`"),
message="`{Self}` doesn't implement `{Debug}`",
- label="`{Self}` cannot be formatted using `:?` because it doesn't implement `{Debug}`",
+ label="`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`",
)]
#[doc(alias = "{:?}")]
#[lang = "debug_trait"]
/// ```
#[rustc_on_unimplemented(
message="`{Self}` doesn't implement `{Display}`",
- label="`{Self}` cannot be formatted with the default formatter; \
- try using `:?` instead if you are using a format string",
+ label="`{Self}` cannot be formatted with the default formatter",
+ note="in format strings you may be able to use `{{:?}}` \
+ (or {{:#?}} for pretty-print) instead",
)]
#[doc(alias = "{}")]
#[stable(feature = "rust1", since = "1.0.0")]
// Since libcore defines many fundamental lang items, all tests live in a
// separate crate, libcoretest, to avoid bizarre issues.
+//
+// Here we explicitly #[cfg]-out this whole crate when testing. If we don't do
+// this, both the generated test artifact and the linked libtest (which
+// transitively includes libcore) will both define the same set of lang items,
+// and this will cause the E0152 "duplicate lang item found" error. See
+// discussion in #50466 for details.
+//
+// This cfg won't affect doc tests.
+#![cfg(not(test))]
#![stable(feature = "core", since = "1.6.0")]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
unsafe impl<'a, T: ?Sized> Freeze for &'a T {}
unsafe impl<'a, T: ?Sized> Freeze for &'a mut T {}
-/// Types which can be moved out of a `Pin`.
+/// Types which can be moved out of a `PinMut`.
///
-/// The `Unpin` trait is used to control the behavior of the [`Pin`] type. If a
+/// The `Unpin` trait is used to control the behavior of the [`PinMut`] type. If a
/// type implements `Unpin`, it is safe to move a value of that type out of the
-/// `Pin` pointer.
+/// `PinMut` pointer.
///
/// This trait is automatically implemented for almost every type.
///
-/// [`Pin`]: ../mem/struct.Pin.html
+/// [`PinMut`]: ../mem/struct.PinMut.html
#[unstable(feature = "pin", issue = "49150")]
pub unsafe auto trait Unpin {}
/// ManuallyDrop::new(Box::new(()));
/// ```
#[stable(feature = "manually_drop", since = "1.20.0")]
+ #[rustc_const_unstable(feature = "const_manually_drop_new")]
#[inline]
- pub fn new(value: T) -> ManuallyDrop<T> {
+ pub const fn new(value: T) -> ManuallyDrop<T> {
ManuallyDrop { value: value }
}
/// value implements the `Unpin` trait.
#[unstable(feature = "pin", issue = "49150")]
#[fundamental]
-pub struct Pin<'a, T: ?Sized + 'a> {
+pub struct PinMut<'a, T: ?Sized + 'a> {
inner: &'a mut T,
}
#[unstable(feature = "pin", issue = "49150")]
-impl<'a, T: ?Sized + Unpin> Pin<'a, T> {
- /// Construct a new `Pin` around a reference to some data of a type that
+impl<'a, T: ?Sized + Unpin> PinMut<'a, T> {
+ /// Construct a new `PinMut` around a reference to some data of a type that
/// implements `Unpin`.
#[unstable(feature = "pin", issue = "49150")]
- pub fn new(reference: &'a mut T) -> Pin<'a, T> {
- Pin { inner: reference }
+ pub fn new(reference: &'a mut T) -> PinMut<'a, T> {
+ PinMut { inner: reference }
}
}
#[unstable(feature = "pin", issue = "49150")]
-impl<'a, T: ?Sized> Pin<'a, T> {
- /// Construct a new `Pin` around a reference to some data of a type that
+impl<'a, T: ?Sized> PinMut<'a, T> {
+ /// Construct a new `PinMut` around a reference to some data of a type that
/// may or may not implement `Unpin`.
///
/// This constructor is unsafe because we do not know what will happen with
/// that data after the reference ends. If you cannot guarantee that the
/// data will never move again, calling this constructor is invalid.
#[unstable(feature = "pin", issue = "49150")]
- pub unsafe fn new_unchecked(reference: &'a mut T) -> Pin<'a, T> {
- Pin { inner: reference }
+ pub unsafe fn new_unchecked(reference: &'a mut T) -> PinMut<'a, T> {
+ PinMut { inner: reference }
}
- /// Borrow a Pin for a shorter lifetime than it already has.
+ /// Reborrow a `PinMut` for a shorter lifetime.
+ ///
+ /// For example, `PinMut::get_mut(x.reborrow())` (unsafely) returns a
+ /// short-lived mutable reference reborrowing from `x`.
#[unstable(feature = "pin", issue = "49150")]
- pub fn borrow<'b>(this: &'b mut Pin<'a, T>) -> Pin<'b, T> {
- Pin { inner: this.inner }
+ pub fn reborrow<'b>(&'b mut self) -> PinMut<'b, T> {
+ PinMut { inner: self.inner }
}
- /// Get a mutable reference to the data inside of this `Pin`.
+ /// Get a mutable reference to the data inside of this `PinMut`.
///
/// This function is unsafe. You must guarantee that you will never move
/// the data out of the mutable reference you receive when you call this
/// function.
#[unstable(feature = "pin", issue = "49150")]
- pub unsafe fn get_mut<'b>(this: &'b mut Pin<'a, T>) -> &'b mut T {
+ pub unsafe fn get_mut(this: PinMut<'a, T>) -> &'a mut T {
this.inner
}
/// Construct a new pin by mapping the interior value.
///
- /// For example, if you wanted to get a `Pin` of a field of something, you
+ /// For example, if you wanted to get a `PinMut` of a field of something, you
/// could use this to get access to that field in one line of code.
///
/// This function is unsafe. You must guarantee that the data you return
/// because it is one of the fields of that value), and also that you do
/// not move out of the argument you receive to the interior function.
#[unstable(feature = "pin", issue = "49150")]
- pub unsafe fn map<'b, U, F>(this: &'b mut Pin<'a, T>, f: F) -> Pin<'b, U> where
+ pub unsafe fn map<U, F>(this: PinMut<'a, T>, f: F) -> PinMut<'a, U> where
F: FnOnce(&mut T) -> &mut U
{
- Pin { inner: f(this.inner) }
+ PinMut { inner: f(this.inner) }
}
}
#[unstable(feature = "pin", issue = "49150")]
-impl<'a, T: ?Sized> Deref for Pin<'a, T> {
+impl<'a, T: ?Sized> Deref for PinMut<'a, T> {
type Target = T;
fn deref(&self) -> &T {
}
#[unstable(feature = "pin", issue = "49150")]
-impl<'a, T: ?Sized + Unpin> DerefMut for Pin<'a, T> {
+impl<'a, T: ?Sized + Unpin> DerefMut for PinMut<'a, T> {
fn deref_mut(&mut self) -> &mut T {
self.inner
}
}
#[unstable(feature = "pin", issue = "49150")]
-impl<'a, T: fmt::Debug + ?Sized> fmt::Debug for Pin<'a, T> {
+impl<'a, T: fmt::Debug + ?Sized> fmt::Debug for PinMut<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[unstable(feature = "pin", issue = "49150")]
-impl<'a, T: fmt::Display + ?Sized> fmt::Display for Pin<'a, T> {
+impl<'a, T: fmt::Display + ?Sized> fmt::Display for PinMut<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[unstable(feature = "pin", issue = "49150")]
-impl<'a, T: ?Sized> fmt::Pointer for Pin<'a, T> {
+impl<'a, T: ?Sized> fmt::Pointer for PinMut<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&(&*self.inner as *const T), f)
}
}
#[unstable(feature = "pin", issue = "49150")]
-impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Pin<'a, U>> for Pin<'a, T> {}
+impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<PinMut<'a, U>> for PinMut<'a, T> {}
#[unstable(feature = "pin", issue = "49150")]
-unsafe impl<'a, T: ?Sized> Unpin for Pin<'a, T> {}
+unsafe impl<'a, T: ?Sized> Unpin for PinMut<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
pub const LOG2_E: f32 = 1.44269504088896340735992468100189214_f32;
+ /// log<sub>2</sub>(10)
+ #[unstable(feature = "extra_log_consts", issue = "50540")]
+ pub const LOG2_10: f32 = 3.32192809488736234787031942948939018_f32;
+
/// log<sub>10</sub>(e)
#[stable(feature = "rust1", since = "1.0.0")]
pub const LOG10_E: f32 = 0.434294481903251827651128918916605082_f32;
+ /// log<sub>10</sub>(2)
+ #[unstable(feature = "extra_log_consts", issue = "50540")]
+ pub const LOG10_2: f32 = 0.301029995663981195213738894724493027_f32;
+
/// ln(2)
#[stable(feature = "rust1", since = "1.0.0")]
pub const LN_2: f32 = 0.693147180559945309417232121458176568_f32;
#[stable(feature = "rust1", since = "1.0.0")]
pub const E: f64 = 2.71828182845904523536028747135266250_f64;
+ /// log<sub>2</sub>(10)
+ #[unstable(feature = "extra_log_consts", issue = "50540")]
+ pub const LOG2_10: f64 = 3.32192809488736234787031942948939018_f64;
+
/// log<sub>2</sub>(e)
#[stable(feature = "rust1", since = "1.0.0")]
pub const LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
+ /// log<sub>10</sub>(2)
+ #[unstable(feature = "extra_log_consts", issue = "50540")]
+ pub const LOG10_2: f64 = 0.301029995663981195213738894724493027_f64;
+
/// log<sub>10</sub>(e)
#[stable(feature = "rust1", since = "1.0.0")]
pub const LOG10_E: f64 = 0.434294481903251827651128918916605082_f64;
// find the byte before the point the body loop stopped
text[..offset].iter().rposition(|elt| *elt == x)
}
-
-// test fallback implementations on all platforms
-#[test]
-fn matches_one() {
- assert_eq!(Some(0), memchr(b'a', b"a"));
-}
-
-#[test]
-fn matches_begin() {
- assert_eq!(Some(0), memchr(b'a', b"aaaa"));
-}
-
-#[test]
-fn matches_end() {
- assert_eq!(Some(4), memchr(b'z', b"aaaaz"));
-}
-
-#[test]
-fn matches_nul() {
- assert_eq!(Some(4), memchr(b'\x00', b"aaaa\x00"));
-}
-
-#[test]
-fn matches_past_nul() {
- assert_eq!(Some(5), memchr(b'z', b"aaaa\x00z"));
-}
-
-#[test]
-fn no_match_empty() {
- assert_eq!(None, memchr(b'a', b""));
-}
-
-#[test]
-fn no_match() {
- assert_eq!(None, memchr(b'a', b"xyz"));
-}
-
-#[test]
-fn matches_one_reversed() {
- assert_eq!(Some(0), memrchr(b'a', b"a"));
-}
-
-#[test]
-fn matches_begin_reversed() {
- assert_eq!(Some(3), memrchr(b'a', b"aaaa"));
-}
-
-#[test]
-fn matches_end_reversed() {
- assert_eq!(Some(0), memrchr(b'z', b"zaaaa"));
-}
-
-#[test]
-fn matches_nul_reversed() {
- assert_eq!(Some(4), memrchr(b'\x00', b"aaaa\x00"));
-}
-
-#[test]
-fn matches_past_nul_reversed() {
- assert_eq!(Some(0), memrchr(b'z', b"z\x00aaaa"));
-}
-
-#[test]
-fn no_match_empty_reversed() {
- assert_eq!(None, memrchr(b'a', b""));
-}
-
-#[test]
-fn no_match_reversed() {
- assert_eq!(None, memrchr(b'a', b"xyz"));
-}
-
-#[test]
-fn each_alignment_reversed() {
- let mut data = [1u8; 64];
- let needle = 2;
- let pos = 40;
- data[pos] = needle;
- for start in 0..16 {
- assert_eq!(Some(pos - start), memrchr(needle, &data[start..]));
- }
-}
#![feature(reverse_bits)]
#![feature(inclusive_range_methods)]
#![feature(iterator_find_map)]
+#![feature(slice_internals)]
extern crate core;
extern crate test;
mod slice;
mod str;
mod str_lossy;
+mod time;
mod tuple;
}
#[test]
+ #[cfg(not(stage0))]
fn test_reverse_bits() {
assert_eq!(A.reverse_bits().reverse_bits(), A);
assert_eq!(B.reverse_bits().reverse_bits(), B);
v.sort_unstable();
assert!(v == [0xDEADBEEF]);
}
+
+pub mod memchr {
+ use core::slice::memchr::{memchr, memrchr};
+
+ // test fallback implementations on all platforms
+ #[test]
+ fn matches_one() {
+ assert_eq!(Some(0), memchr(b'a', b"a"));
+ }
+
+ #[test]
+ fn matches_begin() {
+ assert_eq!(Some(0), memchr(b'a', b"aaaa"));
+ }
+
+ #[test]
+ fn matches_end() {
+ assert_eq!(Some(4), memchr(b'z', b"aaaaz"));
+ }
+
+ #[test]
+ fn matches_nul() {
+ assert_eq!(Some(4), memchr(b'\x00', b"aaaa\x00"));
+ }
+
+ #[test]
+ fn matches_past_nul() {
+ assert_eq!(Some(5), memchr(b'z', b"aaaa\x00z"));
+ }
+
+ #[test]
+ fn no_match_empty() {
+ assert_eq!(None, memchr(b'a', b""));
+ }
+
+ #[test]
+ fn no_match() {
+ assert_eq!(None, memchr(b'a', b"xyz"));
+ }
+
+ #[test]
+ fn matches_one_reversed() {
+ assert_eq!(Some(0), memrchr(b'a', b"a"));
+ }
+
+ #[test]
+ fn matches_begin_reversed() {
+ assert_eq!(Some(3), memrchr(b'a', b"aaaa"));
+ }
+
+ #[test]
+ fn matches_end_reversed() {
+ assert_eq!(Some(0), memrchr(b'z', b"zaaaa"));
+ }
+
+ #[test]
+ fn matches_nul_reversed() {
+ assert_eq!(Some(4), memrchr(b'\x00', b"aaaa\x00"));
+ }
+
+ #[test]
+ fn matches_past_nul_reversed() {
+ assert_eq!(Some(0), memrchr(b'z', b"z\x00aaaa"));
+ }
+
+ #[test]
+ fn no_match_empty_reversed() {
+ assert_eq!(None, memrchr(b'a', b""));
+ }
+
+ #[test]
+ fn no_match_reversed() {
+ assert_eq!(None, memrchr(b'a', b"xyz"));
+ }
+
+ #[test]
+ fn each_alignment_reversed() {
+ let mut data = [1u8; 64];
+ let needle = 2;
+ let pos = 40;
+ data[pos] = needle;
+ for start in 0..16 {
+ assert_eq!(Some(pos - start), memrchr(needle, &data[start..]));
+ }
+ }
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use core::time::Duration;
+
+#[test]
+fn creation() {
+ assert!(Duration::from_secs(1) != Duration::from_secs(0));
+ assert_eq!(Duration::from_secs(1) + Duration::from_secs(2),
+ Duration::from_secs(3));
+ assert_eq!(Duration::from_millis(10) + Duration::from_secs(4),
+ Duration::new(4, 10 * 1_000_000));
+ assert_eq!(Duration::from_millis(4000), Duration::new(4, 0));
+}
+
+#[test]
+fn secs() {
+ assert_eq!(Duration::new(0, 0).as_secs(), 0);
+ assert_eq!(Duration::from_secs(1).as_secs(), 1);
+ assert_eq!(Duration::from_millis(999).as_secs(), 0);
+ assert_eq!(Duration::from_millis(1001).as_secs(), 1);
+}
+
+#[test]
+fn nanos() {
+ assert_eq!(Duration::new(0, 0).subsec_nanos(), 0);
+ assert_eq!(Duration::new(0, 5).subsec_nanos(), 5);
+ assert_eq!(Duration::new(0, 1_000_000_001).subsec_nanos(), 1);
+ assert_eq!(Duration::from_secs(1).subsec_nanos(), 0);
+ assert_eq!(Duration::from_millis(999).subsec_nanos(), 999 * 1_000_000);
+ assert_eq!(Duration::from_millis(1001).subsec_nanos(), 1 * 1_000_000);
+}
+
+#[test]
+fn add() {
+ assert_eq!(Duration::new(0, 0) + Duration::new(0, 1),
+ Duration::new(0, 1));
+ assert_eq!(Duration::new(0, 500_000_000) + Duration::new(0, 500_000_001),
+ Duration::new(1, 1));
+}
+
+#[test]
+fn checked_add() {
+ assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)),
+ Some(Duration::new(0, 1)));
+ assert_eq!(Duration::new(0, 500_000_000).checked_add(Duration::new(0, 500_000_001)),
+ Some(Duration::new(1, 1)));
+ assert_eq!(Duration::new(1, 0).checked_add(Duration::new(::core::u64::MAX, 0)), None);
+}
+
+#[test]
+fn sub() {
+ assert_eq!(Duration::new(0, 1) - Duration::new(0, 0),
+ Duration::new(0, 1));
+ assert_eq!(Duration::new(0, 500_000_001) - Duration::new(0, 500_000_000),
+ Duration::new(0, 1));
+ assert_eq!(Duration::new(1, 0) - Duration::new(0, 1),
+ Duration::new(0, 999_999_999));
+}
+
+#[test]
+fn checked_sub() {
+ let zero = Duration::new(0, 0);
+ let one_nano = Duration::new(0, 1);
+ let one_sec = Duration::new(1, 0);
+ assert_eq!(one_nano.checked_sub(zero), Some(Duration::new(0, 1)));
+ assert_eq!(one_sec.checked_sub(one_nano),
+ Some(Duration::new(0, 999_999_999)));
+ assert_eq!(zero.checked_sub(one_nano), None);
+ assert_eq!(zero.checked_sub(one_sec), None);
+}
+
+#[test]
+#[should_panic]
+fn sub_bad1() {
+ let _ = Duration::new(0, 0) - Duration::new(0, 1);
+}
+
+#[test]
+#[should_panic]
+fn sub_bad2() {
+ let _ = Duration::new(0, 0) - Duration::new(1, 0);
+}
+
+#[test]
+fn mul() {
+ assert_eq!(Duration::new(0, 1) * 2, Duration::new(0, 2));
+ assert_eq!(Duration::new(1, 1) * 3, Duration::new(3, 3));
+ assert_eq!(Duration::new(0, 500_000_001) * 4, Duration::new(2, 4));
+ assert_eq!(Duration::new(0, 500_000_001) * 4000,
+ Duration::new(2000, 4000));
+}
+
+#[test]
+fn checked_mul() {
+ assert_eq!(Duration::new(0, 1).checked_mul(2), Some(Duration::new(0, 2)));
+ assert_eq!(Duration::new(1, 1).checked_mul(3), Some(Duration::new(3, 3)));
+ assert_eq!(Duration::new(0, 500_000_001).checked_mul(4), Some(Duration::new(2, 4)));
+ assert_eq!(Duration::new(0, 500_000_001).checked_mul(4000),
+ Some(Duration::new(2000, 4000)));
+ assert_eq!(Duration::new(::core::u64::MAX - 1, 0).checked_mul(2), None);
+}
+
+#[test]
+fn div() {
+ assert_eq!(Duration::new(0, 1) / 2, Duration::new(0, 0));
+ assert_eq!(Duration::new(1, 1) / 3, Duration::new(0, 333_333_333));
+ assert_eq!(Duration::new(99, 999_999_000) / 100,
+ Duration::new(0, 999_999_990));
+}
+
+#[test]
+fn checked_div() {
+ assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0)));
+ assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000)));
+ assert_eq!(Duration::new(2, 0).checked_div(0), None);
+}
iter.fold(Duration::new(0, 0), |a, b| a + *b)
}
}
-
-#[cfg(test)]
-mod tests {
- use super::Duration;
-
- #[test]
- fn creation() {
- assert!(Duration::from_secs(1) != Duration::from_secs(0));
- assert_eq!(Duration::from_secs(1) + Duration::from_secs(2),
- Duration::from_secs(3));
- assert_eq!(Duration::from_millis(10) + Duration::from_secs(4),
- Duration::new(4, 10 * 1_000_000));
- assert_eq!(Duration::from_millis(4000), Duration::new(4, 0));
- }
-
- #[test]
- fn secs() {
- assert_eq!(Duration::new(0, 0).as_secs(), 0);
- assert_eq!(Duration::from_secs(1).as_secs(), 1);
- assert_eq!(Duration::from_millis(999).as_secs(), 0);
- assert_eq!(Duration::from_millis(1001).as_secs(), 1);
- }
-
- #[test]
- fn nanos() {
- assert_eq!(Duration::new(0, 0).subsec_nanos(), 0);
- assert_eq!(Duration::new(0, 5).subsec_nanos(), 5);
- assert_eq!(Duration::new(0, 1_000_000_001).subsec_nanos(), 1);
- assert_eq!(Duration::from_secs(1).subsec_nanos(), 0);
- assert_eq!(Duration::from_millis(999).subsec_nanos(), 999 * 1_000_000);
- assert_eq!(Duration::from_millis(1001).subsec_nanos(), 1 * 1_000_000);
- }
-
- #[test]
- fn add() {
- assert_eq!(Duration::new(0, 0) + Duration::new(0, 1),
- Duration::new(0, 1));
- assert_eq!(Duration::new(0, 500_000_000) + Duration::new(0, 500_000_001),
- Duration::new(1, 1));
- }
-
- #[test]
- fn checked_add() {
- assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)),
- Some(Duration::new(0, 1)));
- assert_eq!(Duration::new(0, 500_000_000).checked_add(Duration::new(0, 500_000_001)),
- Some(Duration::new(1, 1)));
- assert_eq!(Duration::new(1, 0).checked_add(Duration::new(::u64::MAX, 0)), None);
- }
-
- #[test]
- fn sub() {
- assert_eq!(Duration::new(0, 1) - Duration::new(0, 0),
- Duration::new(0, 1));
- assert_eq!(Duration::new(0, 500_000_001) - Duration::new(0, 500_000_000),
- Duration::new(0, 1));
- assert_eq!(Duration::new(1, 0) - Duration::new(0, 1),
- Duration::new(0, 999_999_999));
- }
-
- #[test]
- fn checked_sub() {
- let zero = Duration::new(0, 0);
- let one_nano = Duration::new(0, 1);
- let one_sec = Duration::new(1, 0);
- assert_eq!(one_nano.checked_sub(zero), Some(Duration::new(0, 1)));
- assert_eq!(one_sec.checked_sub(one_nano),
- Some(Duration::new(0, 999_999_999)));
- assert_eq!(zero.checked_sub(one_nano), None);
- assert_eq!(zero.checked_sub(one_sec), None);
- }
-
- #[test] #[should_panic]
- fn sub_bad1() {
- Duration::new(0, 0) - Duration::new(0, 1);
- }
-
- #[test] #[should_panic]
- fn sub_bad2() {
- Duration::new(0, 0) - Duration::new(1, 0);
- }
-
- #[test]
- fn mul() {
- assert_eq!(Duration::new(0, 1) * 2, Duration::new(0, 2));
- assert_eq!(Duration::new(1, 1) * 3, Duration::new(3, 3));
- assert_eq!(Duration::new(0, 500_000_001) * 4, Duration::new(2, 4));
- assert_eq!(Duration::new(0, 500_000_001) * 4000,
- Duration::new(2000, 4000));
- }
-
- #[test]
- fn checked_mul() {
- assert_eq!(Duration::new(0, 1).checked_mul(2), Some(Duration::new(0, 2)));
- assert_eq!(Duration::new(1, 1).checked_mul(3), Some(Duration::new(3, 3)));
- assert_eq!(Duration::new(0, 500_000_001).checked_mul(4), Some(Duration::new(2, 4)));
- assert_eq!(Duration::new(0, 500_000_001).checked_mul(4000),
- Some(Duration::new(2000, 4000)));
- assert_eq!(Duration::new(::u64::MAX - 1, 0).checked_mul(2), None);
- }
-
- #[test]
- fn div() {
- assert_eq!(Duration::new(0, 1) / 2, Duration::new(0, 0));
- assert_eq!(Duration::new(1, 1) / 3, Duration::new(0, 333_333_333));
- assert_eq!(Duration::new(99, 999_999_000) / 100,
- Duration::new(0, 999_999_990));
- }
-
- #[test]
- fn checked_div() {
- assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0)));
- assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000)));
- assert_eq!(Duration::new(2, 0).checked_div(0), None);
- }
-}
#[derive(Clone)]
pub struct TokenStream(tokenstream::TokenStream);
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for TokenStream {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for TokenStream {}
+
/// Error returned from `TokenStream::from_str`.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Debug)]
_inner: (),
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for LexError {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for LexError {}
+
impl TokenStream {
/// Returns an empty `TokenStream`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone)]
pub struct Span(syntax_pos::Span);
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for Span {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for Span {}
+
macro_rules! diagnostic_method {
($name:ident, $level:expr) => (
/// Create a new `Diagnostic` with the given `message` at the span
pub column: usize
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for LineColumn {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for LineColumn {}
+
/// The source file of a given `Span`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone)]
/// Returns `true` if this source file is a real source file, and not generated by an external
/// macro's expansion.
- # [unstable(feature = "proc_macro", issue = "38356")]
+ #[unstable(feature = "proc_macro", issue = "38356")]
pub fn is_real(&self) -> bool {
// This is a hack until intercrate spans are implemented and we can have real source files
// for spans generated in external macros.
Literal(Literal),
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for TokenTree {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for TokenTree {}
+
impl TokenTree {
/// Returns the span of this token, accessing the `span` method of each of
/// the internal tokens.
span: Span,
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for Group {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for Group {}
+
/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
span: Span,
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for Op {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for Op {}
+
/// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
span: Span,
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for Term {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for Term {}
+
impl Term {
/// Creates a new `Term` with the given `string` as well as the specified
/// `span`.
span: Span,
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Send for Literal {}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl !Sync for Literal {}
+
macro_rules! suffixed_int_literals {
($($name:ident => $kind:ident,)*) => ($(
/// Creates a new suffixed integer literal with the specified value.
//! compiler code, rather than using their own custom pass. Those
//! lints are all available in `rustc_lint::builtin`.
-use errors::DiagnosticBuilder;
+use errors::{Applicability, DiagnosticBuilder};
use lint::{LintPass, LateLintPass, LintArray};
use session::Session;
use syntax::codemap::Span;
declare_lint! {
pub INCOHERENT_FUNDAMENTAL_IMPLS,
- Warn,
+ Deny,
"potentially-conflicting impls were erroneously allowed"
}
match self {
BuiltinLintDiagnostics::Normal => (),
BuiltinLintDiagnostics::BareTraitObject(span, is_global) => {
- let sugg = match sess.codemap().span_to_snippet(span) {
- Ok(ref s) if is_global => format!("dyn ({})", s),
- Ok(s) => format!("dyn {}", s),
- Err(_) => format!("dyn <type>")
+ let (sugg, app) = match sess.codemap().span_to_snippet(span) {
+ Ok(ref s) if is_global => (format!("dyn ({})", s),
+ Applicability::MachineApplicable),
+ Ok(s) => (format!("dyn {}", s), Applicability::MachineApplicable),
+ Err(_) => (format!("dyn <type>"), Applicability::HasPlaceholders)
};
- db.span_suggestion(span, "use `dyn`", sugg);
+ db.span_suggestion_with_applicability(span, "use `dyn`", sugg, app);
}
BuiltinLintDiagnostics::AbsPathWithModule(span) => {
- let sugg = match sess.codemap().span_to_snippet(span) {
+ let (sugg, app) = match sess.codemap().span_to_snippet(span) {
Ok(ref s) => {
// FIXME(Manishearth) ideally the emitting code
// can tell us whether or not this is global
"::"
};
- format!("crate{}{}", opt_colon, s)
+ (format!("crate{}{}", opt_colon, s), Applicability::MachineApplicable)
}
- Err(_) => format!("crate::<path>")
+ Err(_) => (format!("crate::<path>"), Applicability::HasPlaceholders)
};
- db.span_suggestion(span, "use `crate`", sugg);
+ db.span_suggestion_with_applicability(span, "use `crate`", sugg, app);
}
}
}
use hir::{self, PatKind};
use rustc_data_structures::sync::Lrc;
+use std::rc::Rc;
use syntax::ast;
use syntax::ptr::P;
use syntax_pos::Span;
fn consume(&mut self,
consume_id: ast::NodeId,
consume_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: ConsumeMode);
// The value found at `cmt` has been determined to match the
// called on a subpart of an input passed to `matched_pat).
fn matched_pat(&mut self,
matched_pat: &hir::Pat,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: MatchMode);
// The value found at `cmt` is either copied or moved via the
// pattern binding `consume_pat`, depending on mode.
fn consume_pat(&mut self,
consume_pat: &hir::Pat,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: ConsumeMode);
// The value found at `borrow` is being borrowed at the point
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
bk: ty::BorrowKind,
loan_cause: LoanCause);
fn mutate(&mut self,
assignment_id: ast::NodeId,
assignment_span: Span,
- assignee_cmt: mc::cmt<'tcx>,
+ assignee_cmt: &mc::cmt_<'tcx>,
mode: MutateMode);
}
let fn_body_scope_r =
self.tcx().mk_region(ty::ReScope(region::Scope::Node(body.value.hir_id.local_id)));
- let arg_cmt = self.mc.cat_rvalue(
+ let arg_cmt = Rc::new(self.mc.cat_rvalue(
arg.id,
arg.pat.span,
fn_body_scope_r, // Args live only as long as the fn body.
- arg_ty);
+ arg_ty));
self.walk_irrefutable_pat(arg_cmt, &arg.pat);
}
fn delegate_consume(&mut self,
consume_id: ast::NodeId,
consume_span: Span,
- cmt: mc::cmt<'tcx>) {
+ cmt: &mc::cmt_<'tcx>) {
debug!("delegate_consume(consume_id={}, cmt={:?})",
consume_id, cmt);
- let mode = copy_or_move(&self.mc, self.param_env, &cmt, DirectRefMove);
+ let mode = copy_or_move(&self.mc, self.param_env, cmt, DirectRefMove);
self.delegate.consume(consume_id, consume_span, cmt, mode);
}
debug!("consume_expr(expr={:?})", expr);
let cmt = return_if_err!(self.mc.cat_expr(expr));
- self.delegate_consume(expr.id, expr.span, cmt);
+ self.delegate_consume(expr.id, expr.span, &cmt);
self.walk_expr(expr);
}
expr: &hir::Expr,
mode: MutateMode) {
let cmt = return_if_err!(self.mc.cat_expr(expr));
- self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
+ self.delegate.mutate(assignment_expr.id, assignment_expr.span, &cmt, mode);
self.walk_expr(expr);
}
expr, r, bk);
let cmt = return_if_err!(self.mc.cat_expr(expr));
- self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
+ self.delegate.borrow(expr.id, expr.span, &cmt, r, bk, cause);
self.walk_expr(expr)
}
}
hir::ExprMatch(ref discr, ref arms, _) => {
- let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
+ let discr_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&discr)));
let r = self.tcx().types.re_empty;
self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant);
// "assigns", which is handled by
// `walk_pat`:
self.walk_expr(&expr);
- let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
+ let init_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&expr)));
self.walk_irrefutable_pat(init_cmt, &local.pat);
}
}
None => { return; }
};
- let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));
+ let with_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&with_expr)));
// Select just those fields of the `with`
// expression that will actually be used
with_field.name,
with_field.ty(self.tcx(), substs)
);
- self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
+ self.delegate_consume(with_expr.id, with_expr.span, &cmt_field);
}
}
}
adjustment::Adjust::Unsize => {
// Creating a closure/fn-pointer or unsizing consumes
// the input and stores it into the resulting rvalue.
- self.delegate_consume(expr.id, expr.span, cmt.clone());
+ self.delegate_consume(expr.id, expr.span, &cmt);
}
adjustment::Adjust::Deref(None) => {}
// this is an autoref of `x`.
adjustment::Adjust::Deref(Some(ref deref)) => {
let bk = ty::BorrowKind::from_mutbl(deref.mutbl);
- self.delegate.borrow(expr.id, expr.span, cmt.clone(),
- deref.region, bk, AutoRef);
+ self.delegate.borrow(expr.id, expr.span, &cmt, deref.region, bk, AutoRef);
}
adjustment::Adjust::Borrow(ref autoref) => {
- self.walk_autoref(expr, cmt.clone(), autoref);
+ self.walk_autoref(expr, &cmt, autoref);
}
}
cmt = return_if_err!(self.mc.cat_expr_adjusted(expr, cmt, &adjustment));
/// after all relevant autoderefs have occurred.
fn walk_autoref(&mut self,
expr: &hir::Expr,
- cmt_base: mc::cmt<'tcx>,
+ cmt_base: &mc::cmt_<'tcx>,
autoref: &adjustment::AutoBorrow<'tcx>) {
debug!("walk_autoref(expr.id={} cmt_base={:?} autoref={:?})",
expr.id,
// Each match binding is effectively an assignment to the
// binding being produced.
let def = Def::Local(canonical_id);
- if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty, def) {
+ if let Ok(ref binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty, def) {
delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
}
ty::BindByReference(m) => {
if let ty::TyRef(r, _) = pat_ty.sty {
let bk = ty::BorrowKind::from_mutbl(m);
- delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
+ delegate.borrow(pat.id, pat.span, &cmt_pat, r, bk, RefBinding);
}
}
ty::BindByValue(..) => {
let mode = copy_or_move(mc, param_env, &cmt_pat, PatBindingMove);
debug!("walk_pat binding consuming pat");
- delegate.consume_pat(pat, cmt_pat, mode);
+ delegate.consume_pat(pat, &cmt_pat, mode);
}
}
}
let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did);
debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
- delegate.matched_pat(pat, downcast_cmt, match_mode);
+ delegate.matched_pat(pat, &downcast_cmt, match_mode);
}
Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) |
Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => {
debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
- delegate.matched_pat(pat, cmt_pat, match_mode);
+ delegate.matched_pat(pat, &cmt_pat, match_mode);
}
_ => {}
}
self.param_env,
&cmt_var,
CaptureMove);
- self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
+ self.delegate.consume(closure_expr.id, freevar.span, &cmt_var, mode);
}
ty::UpvarCapture::ByRef(upvar_borrow) => {
self.delegate.borrow(closure_expr.id,
fn_decl_span,
- cmt_var,
+ &cmt_var,
upvar_borrow.region,
upvar_borrow.kind,
ClosureCapture(freevar.span));
closure_id: ast::NodeId,
closure_span: Span,
upvar: &hir::Freevar)
- -> mc::McResult<mc::cmt<'tcx>> {
+ -> mc::McResult<mc::cmt_<'tcx>> {
// Create the cmt for the variable being borrowed, from the
// caller's perspective
let var_hir_id = self.tcx().hir.node_to_hir_id(upvar.var_id());
fn copy_or_move<'a, 'gcx, 'tcx>(mc: &mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- cmt: &mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
move_reason: MoveReason)
-> ConsumeMode
{
Ok(ret_ty)
}
- pub fn cat_expr(&self, expr: &hir::Expr) -> McResult<cmt<'tcx>> {
+ pub fn cat_expr(&self, expr: &hir::Expr) -> McResult<cmt_<'tcx>> {
// This recursion helper avoids going through *too many*
// adjustments, since *only* non-overloaded deref recurses.
fn helper<'a, 'gcx, 'tcx>(mc: &MemCategorizationContext<'a, 'gcx, 'tcx>,
expr: &hir::Expr,
adjustments: &[adjustment::Adjustment<'tcx>])
- -> McResult<cmt<'tcx>> {
+ -> McResult<cmt_<'tcx>> {
match adjustments.split_last() {
None => mc.cat_expr_unadjusted(expr),
Some((adjustment, previous)) => {
}
pub fn cat_expr_adjusted(&self, expr: &hir::Expr,
- previous: cmt<'tcx>,
+ previous: cmt_<'tcx>,
adjustment: &adjustment::Adjustment<'tcx>)
- -> McResult<cmt<'tcx>> {
+ -> McResult<cmt_<'tcx>> {
self.cat_expr_adjusted_with(expr, || Ok(previous), adjustment)
}
fn cat_expr_adjusted_with<F>(&self, expr: &hir::Expr,
previous: F,
adjustment: &adjustment::Adjustment<'tcx>)
- -> McResult<cmt<'tcx>>
- where F: FnOnce() -> McResult<cmt<'tcx>>
+ -> McResult<cmt_<'tcx>>
+ where F: FnOnce() -> McResult<cmt_<'tcx>>
{
debug!("cat_expr_adjusted_with({:?}): {:?}", adjustment, expr);
let target = self.resolve_type_vars_if_possible(&adjustment.target);
match adjustment.kind {
adjustment::Adjust::Deref(overloaded) => {
// Equivalent to *expr or something similar.
- let base = if let Some(deref) = overloaded {
+ let base = Rc::new(if let Some(deref) = overloaded {
let ref_ty = self.tcx.mk_ref(deref.region, ty::TypeAndMut {
ty: target,
mutbl: deref.mutbl,
self.cat_rvalue_node(expr.id, expr.span, ref_ty)
} else {
previous()?
- };
+ });
self.cat_deref(expr, base, false)
}
}
}
- pub fn cat_expr_unadjusted(&self, expr: &hir::Expr) -> McResult<cmt<'tcx>> {
+ pub fn cat_expr_unadjusted(&self, expr: &hir::Expr) -> McResult<cmt_<'tcx>> {
debug!("cat_expr: id={} expr={:?}", expr.id, expr);
let expr_ty = self.expr_ty(expr)?;
if self.tables.is_method_call(expr) {
self.cat_overloaded_place(expr, e_base, false)
} else {
- let base_cmt = self.cat_expr(&e_base)?;
+ let base_cmt = Rc::new(self.cat_expr(&e_base)?);
self.cat_deref(expr, base_cmt, false)
}
}
hir::ExprField(ref base, f_name) => {
- let base_cmt = self.cat_expr(&base)?;
+ let base_cmt = Rc::new(self.cat_expr(&base)?);
debug!("cat_expr(cat_field): id={} expr={:?} base={:?}",
expr.id,
expr,
// dereferencing.
self.cat_overloaded_place(expr, base, true)
} else {
- let base_cmt = self.cat_expr(&base)?;
+ let base_cmt = Rc::new(self.cat_expr(&base)?);
self.cat_index(expr, base_cmt, expr_ty, InteriorOffsetKind::Index)
}
}
span: Span,
expr_ty: Ty<'tcx>,
def: Def)
- -> McResult<cmt<'tcx>> {
+ -> McResult<cmt_<'tcx>> {
debug!("cat_def: id={} expr={:?} def={:?}",
id, expr_ty, def);
return Ok(self.cat_rvalue_node(id, span, expr_ty));
}
}
- Ok(Rc::new(cmt_ {
+ Ok(cmt_ {
id:id,
span:span,
cat:Categorization::StaticItem,
mutbl: if mutbl { McDeclared } else { McImmutable},
ty:expr_ty,
note: NoteNone
- }))
+ })
}
Def::Upvar(var_id, _, fn_node_id) => {
}
Def::Local(vid) => {
- Ok(Rc::new(cmt_ {
+ Ok(cmt_ {
id,
span,
cat: Categorization::Local(vid),
mutbl: MutabilityCategory::from_local(self.tcx, self.tables, vid),
ty: expr_ty,
note: NoteNone
- }))
+ })
}
def => span_bug!(span, "unexpected definition in memory categorization: {:?}", def)
span: Span,
var_id: ast::NodeId,
fn_node_id: ast::NodeId)
- -> McResult<cmt<'tcx>>
+ -> McResult<cmt_<'tcx>>
{
let fn_hir_id = self.tcx.hir.node_to_hir_id(fn_node_id);
}
};
- let ret = Rc::new(cmt_result);
+ let ret = cmt_result;
debug!("cat_upvar ret={:?}", ret);
Ok(ret)
}
id: ast::NodeId,
span: Span,
expr_ty: Ty<'tcx>)
- -> cmt<'tcx> {
+ -> cmt_<'tcx> {
let hir_id = self.tcx.hir.node_to_hir_id(id);
let promotable = self.rvalue_promotable_map.as_ref().map(|m| m.contains(&hir_id.local_id))
.unwrap_or(false);
cmt_id: ast::NodeId,
span: Span,
temp_scope: ty::Region<'tcx>,
- expr_ty: Ty<'tcx>) -> cmt<'tcx> {
- let ret = Rc::new(cmt_ {
+ expr_ty: Ty<'tcx>) -> cmt_<'tcx> {
+ let ret = cmt_ {
id:cmt_id,
span:span,
cat:Categorization::Rvalue(temp_scope),
mutbl:McDeclared,
ty:expr_ty,
note: NoteNone
- });
+ };
debug!("cat_rvalue ret {:?}", ret);
ret
}
f_index: usize,
f_name: Name,
f_ty: Ty<'tcx>)
- -> cmt<'tcx> {
- let ret = Rc::new(cmt_ {
+ -> cmt_<'tcx> {
+ let ret = cmt_ {
id: node.id(),
span: node.span(),
mutbl: base_cmt.mutbl.inherit(),
cat: Categorization::Interior(base_cmt, InteriorField(FieldIndex(f_index, f_name))),
ty: f_ty,
note: NoteNone
- });
+ };
debug!("cat_field ret {:?}", ret);
ret
}
expr: &hir::Expr,
base: &hir::Expr,
implicit: bool)
- -> McResult<cmt<'tcx>> {
+ -> McResult<cmt_<'tcx>> {
debug!("cat_overloaded_place: implicit={}", implicit);
// Reconstruct the output assuming it's a reference with the
mutbl,
});
- let base_cmt = self.cat_rvalue_node(expr.id, expr.span, ref_ty);
+ let base_cmt = Rc::new(self.cat_rvalue_node(expr.id, expr.span, ref_ty));
self.cat_deref(expr, base_cmt, implicit)
}
node: &N,
base_cmt: cmt<'tcx>,
implicit: bool)
- -> McResult<cmt<'tcx>> {
+ -> McResult<cmt_<'tcx>> {
debug!("cat_deref: base_cmt={:?}", base_cmt);
let base_cmt_ty = base_cmt.ty;
}
ref ty => bug!("unexpected type in cat_deref: {:?}", ty)
};
- let ret = Rc::new(cmt_ {
+ let ret = cmt_ {
id: node.id(),
span: node.span(),
// For unique ptrs, we inherit mutability from the owning reference.
cat: Categorization::Deref(base_cmt, ptr),
ty: deref_ty,
note: NoteNone
- });
+ };
debug!("cat_deref ret {:?}", ret);
Ok(ret)
}
base_cmt: cmt<'tcx>,
element_ty: Ty<'tcx>,
context: InteriorOffsetKind)
- -> McResult<cmt<'tcx>> {
+ -> McResult<cmt_<'tcx>> {
//! Creates a cmt for an indexing operation (`[]`).
//!
//! One subtle aspect of indexing that may not be
//! - `base_cmt`: the cmt of `elt`
let interior_elem = InteriorElement(context);
- let ret =
- self.cat_imm_interior(elt, base_cmt, element_ty, interior_elem);
+ let ret = self.cat_imm_interior(elt, base_cmt, element_ty, interior_elem);
debug!("cat_index ret {:?}", ret);
return Ok(ret);
}
base_cmt: cmt<'tcx>,
interior_ty: Ty<'tcx>,
interior: InteriorKind)
- -> cmt<'tcx> {
- let ret = Rc::new(cmt_ {
+ -> cmt_<'tcx> {
+ let ret = cmt_ {
id: node.id(),
span: node.span(),
mutbl: base_cmt.mutbl.inherit(),
cat: Categorization::Interior(base_cmt, interior),
ty: interior_ty,
note: NoteNone
- });
+ };
debug!("cat_imm_interior ret={:?}", ret);
ret
}
.get(pat.hir_id)
.map(|v| v.len())
.unwrap_or(0) {
- cmt = self.cat_deref(pat, cmt, true /* implicit */)?;
+ cmt = Rc::new(self.cat_deref(pat, cmt, true /* implicit */)?);
}
let cmt = cmt; // lose mutability
for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) {
let subpat_ty = self.pat_ty(&subpat)?; // see (*2)
let interior = InteriorField(FieldIndex(i, Name::intern(&i.to_string())));
- let subcmt = self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior);
+ let subcmt = Rc::new(self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior));
self.cat_pattern_(subcmt, &subpat, op)?;
}
}
for fp in field_pats {
let field_ty = self.pat_ty(&fp.node.pat)?; // see (*2)
let f_index = self.tcx.field_index(fp.node.id, self.tables);
- let cmt_field = self.cat_field(pat, cmt.clone(), f_index, fp.node.name, field_ty);
+ let cmt_field =
+ Rc::new(self.cat_field(pat, cmt.clone(), f_index, fp.node.name, field_ty));
self.cat_pattern_(cmt_field, &fp.node.pat, op)?;
}
}
for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) {
let subpat_ty = self.pat_ty(&subpat)?; // see (*2)
let interior = InteriorField(FieldIndex(i, Name::intern(&i.to_string())));
- let subcmt = self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior);
+ let subcmt = Rc::new(self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior));
self.cat_pattern_(subcmt, &subpat, op)?;
}
}
// box p1, &p1, &mut p1. we can ignore the mutability of
// PatKind::Ref since that information is already contained
// in the type.
- let subcmt = self.cat_deref(pat, cmt, false)?;
+ let subcmt = Rc::new(self.cat_deref(pat, cmt, false)?);
self.cat_pattern_(subcmt, &subpat, op)?;
}
}
};
let context = InteriorOffsetKind::Pattern;
- let elt_cmt = self.cat_index(pat, cmt, element_ty, context)?;
+ let elt_cmt = Rc::new(self.cat_index(pat, cmt, element_ty, context)?);
for before_pat in before {
self.cat_pattern_(elt_cmt.clone(), &before_pat, op)?;
}
}
impl<'tcx> cmt_<'tcx> {
- pub fn guarantor(&self) -> cmt<'tcx> {
+ pub fn guarantor(&self) -> cmt_<'tcx> {
//! Returns `self` after stripping away any derefs or
//! interior content. The return value is basically the `cmt` which
//! determines how long the value in `self` remains live.
Categorization::Deref(_, BorrowedPtr(..)) |
Categorization::Deref(_, Implicit(..)) |
Categorization::Upvar(..) => {
- Rc::new((*self).clone())
+ (*self).clone()
}
Categorization::Downcast(ref b, _) |
Categorization::Interior(ref b, _) |
}
}
- // Digs down through one or two layers of deref and grabs the cmt
- // for the upvar if a note indicates there is one.
- pub fn upvar(&self) -> Option<cmt<'tcx>> {
+ // Digs down through one or two layers of deref and grabs the
+ // Categorization of the cmt for the upvar if a note indicates there is
+ // one.
+ pub fn upvar_cat(&self) -> Option<&Categorization<'tcx>> {
match self.note {
NoteClosureEnv(..) | NoteUpvarRef(..) => {
Some(match self.cat {
Categorization::Deref(ref inner, _) => {
match inner.cat {
- Categorization::Deref(ref inner, _) => inner.clone(),
- Categorization::Upvar(..) => inner.clone(),
+ Categorization::Deref(ref inner, _) => &inner.cat,
+ Categorization::Upvar(..) => &inner.cat,
_ => bug!()
}
}
}
}
-
pub fn descriptive_string(&self, tcx: TyCtxt) -> String {
match self.cat {
Categorization::StaticItem => {
}
}
Categorization::Deref(_, pk) => {
- let upvar = self.upvar();
- match upvar.as_ref().map(|i| &i.cat) {
+ match self.upvar_cat() {
Some(&Categorization::Upvar(ref var)) => {
var.to_string()
}
}
impl Location {
+ pub const START: Location = Location {
+ block: START_BLOCK,
+ statement_index: 0,
+ };
+
/// Returns the location immediately after this one within the enclosing block.
///
/// Note that if this location represents a terminator, then the
"choose which RELRO level to use"),
nll_subminimal_causes: bool = (false, parse_bool, [UNTRACKED],
"when tracking region error causes, accept subminimal results for faster execution."),
+ nll_facts: bool = (false, parse_bool, [UNTRACKED],
+ "dump facts from NLL analysis into side files"),
disable_nll_user_type_assert: bool = (false, parse_bool, [UNTRACKED],
"disable user provided type assertion in NLL"),
trans_time_graph: bool = (false, parse_bool, [UNTRACKED],
"make the current crate share its generic instantiations"),
chalk: bool = (false, parse_bool, [TRACKED],
"enable the experimental Chalk-based trait solving engine"),
+ cross_lang_lto: bool = (false, parse_bool, [TRACKED],
+ "generate build artifacts that are compatible with linker-based LTO."),
}
pub fn default_lib_output() -> CrateType {
}
tcx.intern_layout(LayoutDetails {
variants: Variants::Tagged {
- discr: tag,
+ tag,
variants: layout_variants,
},
fields: FieldPlacement::Arbitrary {
})
.collect();
record(adt_kind.into(), adt_packed, match layout.variants {
- Variants::Tagged { ref discr, .. } => Some(discr.value.size(self)),
+ Variants::Tagged { ref tag, .. } => Some(tag.value.size(self)),
_ => None
}, variant_infos);
}
}
// Discriminant field for enums (where applicable).
- Variants::Tagged { ref discr, .. } |
+ Variants::Tagged { tag: ref discr, .. } |
Variants::NicheFilling { niche: ref discr, .. } => {
assert_eq!(i, 0);
let layout = LayoutDetails::scalar(tcx, discr.clone());
index.hash_stable(hcx, hasher);
}
Tagged {
- ref discr,
+ ref tag,
ref variants,
} => {
- discr.hash_stable(hcx, hasher);
+ tag.hash_stable(hcx, hasher);
variants.hash_stable(hcx, hasher);
}
NicheFilling {
if backtrace {
TyCtxt::try_print_query_stack();
}
+
+ #[cfg(windows)]
+ unsafe {
+ if env::var("RUSTC_BREAK_ON_ICE").is_ok() {
+ extern "system" {
+ fn DebugBreak();
+ }
+ // Trigger a debugger if we crashed during bootstrap
+ DebugBreak();
+ }
+ }
}
}
fn consume(&mut self,
consume_id: ast::NodeId,
consume_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: euv::ConsumeMode) {
debug!("consume(consume_id={}, cmt={:?}, mode={:?})",
consume_id, cmt, mode);
fn matched_pat(&mut self,
_matched_pat: &hir::Pat,
- _cmt: mc::cmt,
+ _cmt: &mc::cmt_,
_mode: euv::MatchMode) { }
fn consume_pat(&mut self,
consume_pat: &hir::Pat,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: euv::ConsumeMode) {
debug!("consume_pat(consume_pat={:?}, cmt={:?}, mode={:?})",
consume_pat,
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause)
bk, loan_cause);
let hir_id = self.tcx().hir.node_to_hir_id(borrow_id);
- if let Some(lp) = opt_loan_path(&cmt) {
+ if let Some(lp) = opt_loan_path(cmt) {
let moved_value_use_kind = match loan_cause {
euv::ClosureCapture(_) => MovedInCapture,
_ => MovedInUse,
fn mutate(&mut self,
assignment_id: ast::NodeId,
assignment_span: Span,
- assignee_cmt: mc::cmt<'tcx>,
+ assignee_cmt: &mc::cmt_<'tcx>,
mode: euv::MutateMode)
{
debug!("mutate(assignment_id={}, assignee_cmt={:?})",
assignment_id, assignee_cmt);
- if let Some(lp) = opt_loan_path(&assignee_cmt) {
+ if let Some(lp) = opt_loan_path(assignee_cmt) {
match mode {
MutateMode::Init | MutateMode::JustWrite => {
// In a case like `path = 1`, then path does not
}
pub fn check_for_loans_across_yields(&self,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
borrow_span: Span) {
- pub fn borrow_of_local_data<'tcx>(cmt: &mc::cmt<'tcx>) -> bool {
+ pub fn borrow_of_local_data<'tcx>(cmt: &mc::cmt_<'tcx>) -> bool {
match cmt.cat {
// Borrows of static items is allowed
Categorization::StaticItem => false,
return;
}
- if !borrow_of_local_data(&cmt) {
+ if !borrow_of_local_data(cmt) {
return;
}
fn consume_common(&self,
id: hir::ItemLocalId,
span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: euv::ConsumeMode) {
- if let Some(lp) = opt_loan_path(&cmt) {
+ if let Some(lp) = opt_loan_path(cmt) {
let moved_value_use_kind = match mode {
euv::Copy => {
self.check_for_copy_of_frozen_path(id, span, &lp);
fn check_assignment(&self,
assignment_id: hir::ItemLocalId,
assignment_span: Span,
- assignee_cmt: mc::cmt<'tcx>) {
+ assignee_cmt: &mc::cmt_<'tcx>) {
debug!("check_assignment(assignee_cmt={:?})", assignee_cmt);
// Check that we don't invalidate any outstanding loans
- if let Some(loan_path) = opt_loan_path(&assignee_cmt) {
+ if let Some(loan_path) = opt_loan_path(assignee_cmt) {
let scope = region::Scope::Node(assignment_id);
self.each_in_scope_loan_affecting_path(scope, &loan_path, |loan| {
self.report_illegal_mutation(assignment_span, &loan_path, loan);
// needs to be done here instead of in check_loans because we
// depend on move data.
if let Categorization::Local(local_id) = assignee_cmt.cat {
- let lp = opt_loan_path(&assignee_cmt).unwrap();
+ let lp = opt_loan_path(assignee_cmt).unwrap();
self.move_data.each_assignment_of(assignment_id, &lp, |assign| {
if assignee_cmt.mutbl.is_mutable() {
let hir_id = self.bccx.tcx.hir.node_to_hir_id(local_id);
use rustc::hir::*;
use rustc::hir::map::Node::*;
-struct GatherMoveInfo<'tcx> {
+struct GatherMoveInfo<'c, 'tcx: 'c> {
id: hir::ItemLocalId,
kind: MoveKind,
- cmt: mc::cmt<'tcx>,
+ cmt: &'c mc::cmt_<'tcx>,
span_path_opt: Option<MovePlace<'tcx>>
}
move_data: &MoveData<'tcx>,
move_error_collector: &mut MoveErrorCollector<'tcx>,
move_expr_id: hir::ItemLocalId,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
move_reason: euv::MoveReason) {
let kind = match move_reason {
euv::DirectRefMove | euv::PatBindingMove => MoveExpr,
gather_move(bccx, move_data, move_error_collector, move_info);
}
-pub fn gather_move_from_pat<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
- move_data: &MoveData<'tcx>,
- move_error_collector: &mut MoveErrorCollector<'tcx>,
- move_pat: &hir::Pat,
- cmt: mc::cmt<'tcx>) {
+pub fn gather_move_from_pat<'a, 'c, 'tcx: 'c>(bccx: &BorrowckCtxt<'a, 'tcx>,
+ move_data: &MoveData<'tcx>,
+ move_error_collector: &mut MoveErrorCollector<'tcx>,
+ move_pat: &hir::Pat,
+ cmt: &'c mc::cmt_<'tcx>) {
let source = get_pattern_source(bccx.tcx,move_pat);
let pat_span_path_opt = match move_pat.node {
PatKind::Binding(_, _, ref path1, _) => {
gather_move(bccx, move_data, move_error_collector, move_info);
}
-fn gather_move<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
+fn gather_move<'a, 'c, 'tcx: 'c>(bccx: &BorrowckCtxt<'a, 'tcx>,
move_data: &MoveData<'tcx>,
move_error_collector: &mut MoveErrorCollector<'tcx>,
- move_info: GatherMoveInfo<'tcx>) {
+ move_info: GatherMoveInfo<'c, 'tcx>) {
debug!("gather_move(move_id={:?}, cmt={:?})",
move_info.id, move_info.cmt);
- let potentially_illegal_move =
- check_and_get_illegal_move_origin(bccx, &move_info.cmt);
+ let potentially_illegal_move = check_and_get_illegal_move_origin(bccx, move_info.cmt);
if let Some(illegal_move_origin) = potentially_illegal_move {
debug!("illegal_move_origin={:?}", illegal_move_origin);
- let error = MoveError::with_move_info(illegal_move_origin,
+ let error = MoveError::with_move_info(Rc::new(illegal_move_origin),
move_info.span_path_opt);
move_error_collector.add_error(error);
return;
// (keep in sync with move_error::report_cannot_move_out_of )
fn check_and_get_illegal_move_origin<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
- cmt: &mc::cmt<'tcx>)
- -> Option<mc::cmt<'tcx>> {
+ cmt: &mc::cmt_<'tcx>)
+ -> Option<mc::cmt_<'tcx>> {
match cmt.cat {
Categorization::Deref(_, mc::BorrowedPtr(..)) |
Categorization::Deref(_, mc::Implicit(..)) |
item_scope: region::Scope,
span: Span,
cause: euv::LoanCause,
- cmt: mc::cmt<'tcx>,
+ cmt: &'a mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
_: ty::BorrowKind)
-> Result<(),()> {
span,
cause,
loan_region,
- cmt_original: cmt.clone()};
- ctxt.check(&cmt, None)
+ cmt_original: cmt};
+ ctxt.check(cmt, None)
}
///////////////////////////////////////////////////////////////////////////
span: Span,
cause: euv::LoanCause,
loan_region: ty::Region<'tcx>,
- cmt_original: mc::cmt<'tcx>
+ cmt_original: &'a mc::cmt_<'tcx>
}
impl<'a, 'tcx> GuaranteeLifetimeContext<'a, 'tcx> {
-
- fn check(&self, cmt: &mc::cmt<'tcx>, discr_scope: Option<ast::NodeId>) -> R {
+ fn check(&self, cmt: &mc::cmt_<'tcx>, discr_scope: Option<ast::NodeId>) -> R {
//! Main routine. Walks down `cmt` until we find the
//! "guarantor". Reports an error if `self.loan_region` is
//! larger than scope of `cmt`.
}
}
- fn scope(&self, cmt: &mc::cmt<'tcx>) -> ty::Region<'tcx> {
+ fn scope(&self, cmt: &mc::cmt_<'tcx>) -> ty::Region<'tcx> {
//! Returns the maximal region scope for the which the
//! place `cmt` is guaranteed to be valid without any
//! rooting etc, and presuming `cmt` is not mutated.
}
fn report_error(&self, code: bckerr_code<'tcx>) {
- self.bccx.report(BckError { cmt: self.cmt_original.clone(),
+ self.bccx.report(BckError { cmt: self.cmt_original,
span: self.span,
cause: BorrowViolation(self.cause),
code: code });
fn consume(&mut self,
consume_id: ast::NodeId,
_consume_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: euv::ConsumeMode) {
debug!("consume(consume_id={}, cmt={:?}, mode={:?})",
consume_id, cmt, mode);
fn matched_pat(&mut self,
matched_pat: &hir::Pat,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: euv::MatchMode) {
debug!("matched_pat(matched_pat={:?}, cmt={:?}, mode={:?})",
matched_pat,
fn consume_pat(&mut self,
consume_pat: &hir::Pat,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: euv::ConsumeMode) {
debug!("consume_pat(consume_pat={:?}, cmt={:?}, mode={:?})",
consume_pat,
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause)
fn mutate(&mut self,
assignment_id: ast::NodeId,
assignment_span: Span,
- assignee_cmt: mc::cmt<'tcx>,
+ assignee_cmt: &mc::cmt_<'tcx>,
mode: euv::MutateMode)
{
self.guarantee_assignment_valid(assignment_id,
fn check_aliasability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
borrow_span: Span,
loan_cause: AliasableViolationKind,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
req_kind: ty::BorrowKind)
-> Result<(),()> {
fn check_mutability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
borrow_span: Span,
cause: AliasableViolationKind,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
req_kind: ty::BorrowKind)
-> Result<(),()> {
debug!("check_mutability(cause={:?} cmt={:?} req_kind={:?}",
fn guarantee_assignment_valid(&mut self,
assignment_id: ast::NodeId,
assignment_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: euv::MutateMode) {
- let opt_lp = opt_loan_path(&cmt);
+ let opt_lp = opt_loan_path(cmt);
debug!("guarantee_assignment_valid(assignment_id={}, cmt={:?}) opt_lp={:?}",
assignment_id, cmt, opt_lp);
} else {
// Check that we don't allow assignments to non-mutable data.
if check_mutability(self.bccx, assignment_span, MutabilityViolation,
- cmt.clone(), ty::MutBorrow).is_err() {
+ cmt, ty::MutBorrow).is_err() {
return; // reported an error, no sense in reporting more.
}
}
// Check that we don't allow assignments to aliasable data
if check_aliasability(self.bccx, assignment_span, MutabilityViolation,
- cmt.clone(), ty::MutBorrow).is_err() {
+ cmt, ty::MutBorrow).is_err() {
return; // reported an error, no sense in reporting more.
}
fn guarantee_valid(&mut self,
borrow_id: hir::ItemLocalId,
borrow_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
req_kind: ty::BorrowKind,
loan_region: ty::Region<'tcx>,
cause: euv::LoanCause) {
// Check that the lifetime of the borrow does not exceed
// the lifetime of the data being borrowed.
if lifetime::guarantee_lifetime(self.bccx, self.item_ub,
- borrow_span, cause, cmt.clone(), loan_region,
- req_kind).is_err() {
+ borrow_span, cause, cmt, loan_region, req_kind).is_err() {
return; // reported an error, no sense in reporting more.
}
// Check that we don't allow mutable borrows of non-mutable data.
if check_mutability(self.bccx, borrow_span, BorrowViolation(cause),
- cmt.clone(), req_kind).is_err() {
+ cmt, req_kind).is_err() {
return; // reported an error, no sense in reporting more.
}
// Check that we don't allow mutable borrows of aliasable data.
if check_aliasability(self.bccx, borrow_span, BorrowViolation(cause),
- cmt.clone(), req_kind).is_err() {
+ cmt, req_kind).is_err() {
return; // reported an error, no sense in reporting more.
}
// Compute the restrictions that are required to enforce the
// loan is safe.
let restr = restrictions::compute_restrictions(
- self.bccx, borrow_span, cause,
- cmt.clone(), loan_region);
+ self.bccx, borrow_span, cause, &cmt, loan_region);
debug!("guarantee_valid(): restrictions={:?}", restr);
pub fn compute_restrictions<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
span: Span,
cause: euv::LoanCause,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>)
-> RestrictionResult<'tcx> {
let ctxt = RestrictionsContext {
impl<'a, 'tcx> RestrictionsContext<'a, 'tcx> {
fn restrict(&self,
- cmt: mc::cmt<'tcx>) -> RestrictionResult<'tcx> {
+ cmt: &mc::cmt_<'tcx>) -> RestrictionResult<'tcx> {
debug!("restrict(cmt={:?})", cmt);
let new_lp = |v: LoanPathKind<'tcx>| Rc::new(LoanPath::new(v, cmt.ty));
// When we borrow the interior of an enum, we have to
// ensure the enum itself is not mutated, because that
// could cause the type of the memory to change.
- self.restrict(cmt_base)
+ self.restrict(&cmt_base)
}
Categorization::Interior(cmt_base, interior) => {
};
let interior = interior.cleaned();
let base_ty = cmt_base.ty;
- let result = self.restrict(cmt_base);
+ let result = self.restrict(&cmt_base);
// Borrowing one union field automatically borrows all its fields.
match base_ty.sty {
ty::TyAdt(adt_def, _) if adt_def.is_union() => match result {
//
// Eventually we should make these non-special and
// just rely on Deref<T> implementation.
- let result = self.restrict(cmt_base);
+ let result = self.restrict(&cmt_base);
self.extend(result, &cmt, LpDeref(pk))
}
mc::Implicit(bk, lt) | mc::BorrowedPtr(bk, lt) => {
BckError {
span: self.span,
cause: BorrowViolation(self.cause),
- cmt: cmt_base,
+ cmt: &cmt_base,
code: err_borrowed_pointer_too_short(
self.loan_region, lt)});
return RestrictionResult::Safe;
// The referent can be aliased after the
// references lifetime ends (by a newly-unfrozen
// borrow).
- let result = self.restrict(cmt_base);
+ let result = self.restrict(&cmt_base);
self.extend(result, &cmt, LpDeref(pk))
}
}
fn extend(&self,
result: RestrictionResult<'tcx>,
- cmt: &mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
elem: LoanPathElem<'tcx>) -> RestrictionResult<'tcx> {
match result {
RestrictionResult::Safe => RestrictionResult::Safe,
// Avoid "cannot borrow immutable field `self.x` as mutable" as that implies that a field *can* be
// mutable independently of the struct it belongs to. (#35937)
-pub fn opt_loan_path_is_field<'tcx>(cmt: &mc::cmt<'tcx>) -> (Option<Rc<LoanPath<'tcx>>>, bool) {
+pub fn opt_loan_path_is_field<'tcx>(cmt: &mc::cmt_<'tcx>) -> (Option<Rc<LoanPath<'tcx>>>, bool) {
let new_lp = |v: LoanPathKind<'tcx>| Rc::new(LoanPath::new(v, cmt.ty));
match cmt.cat {
/// the method `compute()` found in `gather_loans::restrictions`,
/// which allows it to share common loan path pieces as it
/// traverses the CMT.
-pub fn opt_loan_path<'tcx>(cmt: &mc::cmt<'tcx>) -> Option<Rc<LoanPath<'tcx>>> {
+pub fn opt_loan_path<'tcx>(cmt: &mc::cmt_<'tcx>) -> Option<Rc<LoanPath<'tcx>>> {
opt_loan_path_is_field(cmt).0
}
// Combination of an error code and the categorization of the expression
// that caused it
#[derive(Debug, PartialEq)]
-pub struct BckError<'tcx> {
+pub struct BckError<'c, 'tcx: 'c> {
span: Span,
cause: AliasableViolationKind,
- cmt: mc::cmt<'tcx>,
+ cmt: &'c mc::cmt_<'tcx>,
code: bckerr_code<'tcx>
}
region_rels.is_subregion_of(r_sub, r_sup)
}
- pub fn report(&self, err: BckError<'tcx>) {
+ pub fn report(&self, err: BckError<'a, 'tcx>) {
// Catch and handle some particular cases.
match (&err.code, &err.cause) {
(&err_out_of_scope(&ty::ReScope(_), &ty::ReStatic, _),
self.tcx.sess.span_err_with_code(s, msg, code);
}
- fn report_bckerr(&self, err: &BckError<'tcx>) {
+ fn report_bckerr(&self, err: &BckError<'a, 'tcx>) {
let error_span = err.span.clone();
match err.code {
db.emit();
}
err_borrowed_pointer_too_short(loan_scope, ptr_scope) => {
- let descr = self.cmt_to_path_or_string(&err.cmt);
+ let descr = self.cmt_to_path_or_string(err.cmt);
let mut db = self.lifetime_too_short_for_reborrow(error_span, &descr, Origin::Ast);
let descr = match opt_loan_path(&err.cmt) {
Some(lp) => {
span: Span,
kind: AliasableViolationKind,
cause: mc::AliasableReason,
- cmt: mc::cmt<'tcx>) {
+ cmt: &mc::cmt_<'tcx>) {
let mut is_closure = false;
let prefix = match kind {
MutabilityViolation => {
}
fn report_out_of_scope_escaping_closure_capture(&self,
- err: &BckError<'tcx>,
+ err: &BckError<'a, 'tcx>,
capture_span: Span)
{
let cmt_path_or_string = self.cmt_to_path_or_string(&err.cmt);
}
}
- fn note_and_explain_mutbl_error(&self, db: &mut DiagnosticBuilder, err: &BckError<'tcx>,
+ fn note_and_explain_mutbl_error(&self, db: &mut DiagnosticBuilder, err: &BckError<'a, 'tcx>,
error_span: &Span) {
match err.cmt.note {
mc::NoteClosureEnv(upvar_id) | mc::NoteUpvarRef(upvar_id) => {
// If this is an `Fn` closure, it simply can't mutate upvars.
// If it's an `FnMut` closure, the original variable was declared immutable.
// We need to determine which is the case here.
- let kind = match err.cmt.upvar().unwrap().cat {
+ let kind = match err.cmt.upvar_cat().unwrap() {
Categorization::Upvar(mc::Upvar { kind, .. }) => kind,
_ => bug!()
};
- if kind == ty::ClosureKind::Fn {
+ if *kind == ty::ClosureKind::Fn {
let closure_node_id =
self.tcx.hir.local_def_id_to_node_id(upvar_id.closure_expr_id);
db.span_help(self.tcx.hir.span(closure_node_id),
cmt.descriptive_string(self.tcx)
}
- pub fn cmt_to_path_or_string(&self, cmt: &mc::cmt<'tcx>) -> String {
+ pub fn cmt_to_path_or_string(&self, cmt: &mc::cmt_<'tcx>) -> String {
match opt_loan_path(cmt) {
Some(lp) => format!("`{}`", self.loan_path_to_string(&lp)),
None => self.cmt_to_string(cmt),
use syntax::feature_gate::{AttributeGate, AttributeType, Stability, deprecated_attributes};
use syntax_pos::{BytePos, Span, SyntaxContext};
use syntax::symbol::keywords;
-use syntax::errors::DiagnosticBuilder;
+use syntax::errors::{Applicability, DiagnosticBuilder};
use rustc::hir::{self, PatKind};
use rustc::hir::intravisit::FnKind;
} else {
"pub(crate)"
}.to_owned();
- err.span_suggestion(pub_span, "consider restricting its visibility", replacement);
+ let app = if span.ctxt().outer().expn_info().is_none() {
+ // even if macros aren't involved the suggestion
+ // may be incorrect -- the user may have mistakenly
+ // hidden it behind a private module and this lint is
+ // a helpful way to catch that. However, we're trying
+ // not to change the nature of the code with this lint
+ // so it's marked as machine applicable.
+ Applicability::MachineApplicable
+ } else {
+ Applicability::MaybeIncorrect
+ };
+ err.span_suggestion_with_applicability(pub_span, "consider restricting its visibility",
+ replacement, app);
if exportable {
err.help("or consider exporting it for use by other crates");
}
}
}
}
+
+declare_lint! {
+ pub UNNECESSARY_EXTERN_CRATE,
+ Allow,
+ "suggest removing `extern crate` for the 2018 edition"
+}
+
+pub struct ExternCrate(/* depth */ u32);
+
+impl ExternCrate {
+ pub fn new() -> Self {
+ ExternCrate(0)
+ }
+}
+
+impl LintPass for ExternCrate {
+ fn get_lints(&self) -> LintArray {
+ lint_array!(UNNECESSARY_EXTERN_CRATE)
+ }
+}
+
+impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ExternCrate {
+ fn check_item(&mut self, cx: &LateContext, it: &hir::Item) {
+ if let hir::ItemExternCrate(ref orig) = it.node {
+ if it.attrs.iter().any(|a| a.check_name("macro_use")) {
+ return
+ }
+ let mut err = cx.struct_span_lint(UNNECESSARY_EXTERN_CRATE,
+ it.span, "`extern crate` is unnecessary in the new edition");
+ if it.vis == hir::Visibility::Public || self.0 > 1 || orig.is_some() {
+ let pub_ = if it.vis == hir::Visibility::Public {
+ "pub "
+ } else {
+ ""
+ };
+
+ let help = format!("use `{}use`", pub_);
+
+ if let Some(orig) = orig {
+ err.span_suggestion(it.span, &help,
+ format!("{}use {} as {}", pub_, orig, it.name));
+ } else {
+ err.span_suggestion(it.span, &help,
+ format!("{}use {}", pub_, it.name));
+ }
+ } else {
+ err.span_suggestion(it.span, "remove it", "".into());
+ }
+
+ err.emit();
+ }
+ }
+
+    // Track module nesting depth: bump the counter when entering a module...
+    fn check_mod(&mut self, _: &LateContext, _: &hir::Mod,
+                 _: Span, _: ast::NodeId) {
+        self.0 += 1;
+    }
+
+    // ...and drop it back down when the module is exited. (Incrementing
+    // here as well would make the depth grow monotonically, so the
+    // `self.0 > 1` check in `check_item` would be wrong for every item
+    // visited after the first nested module was left.)
+    fn check_mod_post(&mut self, _: &LateContext, _: &hir::Mod,
+                      _: Span, _: ast::NodeId) {
+        self.0 -= 1;
+    }
+}
TypeLimits,
MissingDoc,
MissingDebugImplementations,
+ ExternCrate,
);
add_lint_group!(sess,
UNUSED_PARENS);
add_lint_group!(sess,
- "rust_2018_idioms",
+ "rust_2018_migration",
BARE_TRAIT_OBJECT,
- UNREACHABLE_PUB);
+ UNREACHABLE_PUB,
+ UNNECESSARY_EXTERN_CRATE);
// Guidelines for creating a future incompatibility lint:
//
bug!("failed to get layout for `{}`: {}", t, e)
});
- if let layout::Variants::Tagged { ref variants, ref discr, .. } = layout.variants {
- let discr_size = discr.value.size(cx.tcx).bytes();
+ if let layout::Variants::Tagged { ref variants, ref tag, .. } = layout.variants {
+ let discr_size = tag.value.size(cx.tcx).bytes();
debug!("enum `{}` is {} bytes large with layout:\n{:#?}",
t, layout.size.bytes(), layout);
if attr.check_name("must_use") {
let mut msg = format!("unused {}`{}` which must be used",
describe_path, cx.tcx.item_path_str(def_id));
- // check for #[must_use="..."]
- if let Some(s) = attr.value_str() {
- msg.push_str(": ");
- msg.push_str(&s.as_str());
+ let mut err = cx.struct_span_lint(UNUSED_MUST_USE, sp, &msg);
+ // check for #[must_use = "..."]
+ if let Some(note) = attr.value_str() {
+ err.note(¬e.as_str());
}
- cx.span_lint(UNUSED_MUST_USE, sp, &msg);
+ err.emit();
return true;
}
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::mir::{BasicBlock, Location, Mir};
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+
+/// Maps from a MIR Location, which identifies a particular
+/// statement within a basic block, to a "rich location", which
+/// identifies at a finer granularity. In particular, we distinguish
+/// the *start* of a statement and the *mid-point*. The mid-point is
+/// the point *just* before the statement takes effect; in particular,
+/// for an assignment `A = B`, it is the point where B is about to be
+/// written into A. This mid-point is a kind of hack to work around
+/// our inability to track the position information at sufficient
+/// granularity through outlives relations; however, the rich location
+/// table serves another purpose: it compresses locations from
+/// multiple words into a single u32.
+crate struct LocationTable {
+ num_points: usize,
+ statements_before_block: IndexVec<BasicBlock, usize>,
+}
+
+newtype_index!(LocationIndex { DEBUG_FORMAT = "LocationIndex({})" });
+
+#[derive(Copy, Clone, Debug)]
+crate enum RichLocation {
+ Start(Location),
+ Mid(Location),
+}
+
+impl LocationTable {
+ crate fn new(mir: &Mir<'_>) -> Self {
+ let mut num_points = 0;
+ let statements_before_block = mir.basic_blocks()
+ .iter()
+ .map(|block_data| {
+ let v = num_points;
+ num_points += (block_data.statements.len() + 1) * 2;
+ v
+ })
+ .collect();
+
+ debug!(
+ "LocationTable(statements_before_block={:#?})",
+ statements_before_block
+ );
+ debug!("LocationTable: num_points={:#?}", num_points);
+
+ Self {
+ num_points,
+ statements_before_block,
+ }
+ }
+
+ crate fn all_points(&self) -> impl Iterator<Item = LocationIndex> {
+ (0..self.num_points).map(LocationIndex::new)
+ }
+
+ crate fn start_index(&self, location: Location) -> LocationIndex {
+ let Location {
+ block,
+ statement_index,
+ } = location;
+ let start_index = self.statements_before_block[block];
+ LocationIndex::new(start_index + statement_index * 2)
+ }
+
+ crate fn mid_index(&self, location: Location) -> LocationIndex {
+ let Location {
+ block,
+ statement_index,
+ } = location;
+ let start_index = self.statements_before_block[block];
+ LocationIndex::new(start_index + statement_index * 2 + 1)
+ }
+
+ crate fn to_location(&self, index: LocationIndex) -> RichLocation {
+ let point_index = index.index();
+
+ // Find the basic block. We have a vector with the
+ // starting index of the statement in each block. Imagine
+ // we have statement #22, and we have a vector like:
+ //
+ // [0, 10, 20]
+ //
+ // In that case, this represents point_index 2 of
+ // basic block BB2. We know this because BB0 accounts for
+        // 0..10, BB1 accounts for 10..20, and BB2 accounts for
+ // 20...
+ //
+ // To compute this, we could do a binary search, but
+ // because I am lazy we instead iterate through to find
+ // the last point where the "first index" (0, 10, or 20)
+ // was less than the statement index (22). In our case, this will
+ // be (BB2, 20).
+ let (block, &first_index) = self.statements_before_block
+ .iter_enumerated()
+ .filter(|(_, first_index)| **first_index <= point_index)
+ .last()
+ .unwrap();
+
+ let statement_index = (point_index - first_index) / 2;
+ if index.is_start() {
+ RichLocation::Start(Location { block, statement_index })
+ } else {
+ RichLocation::Mid(Location { block, statement_index })
+ }
+ }
+}
+
+impl LocationIndex {
+    /// Returns true if this index is the *start* point of its
+    /// statement, false if it is the *mid* point (see `RichLocation`).
+    fn is_start(&self) -> bool {
+        // even indices are start points; odd indices are mid points
+        (self.index() % 2) == 0
+    }
+}
use self::borrow_set::{BorrowSet, BorrowData};
use self::flows::Flows;
+use self::location::LocationTable;
use self::prefixes::PrefixSet;
use self::MutateMode::{JustWrite, WriteAndRead};
crate mod borrow_set;
mod error_reporting;
mod flows;
+mod location;
crate mod place_ext;
mod prefixes;
let mut mir: Mir<'tcx> = input_mir.clone();
let free_regions = nll::replace_regions_in_mir(infcx, def_id, param_env, &mut mir);
let mir = &mir; // no further changes
+ let location_table = &LocationTable::new(mir);
let move_data: MoveData<'tcx> = match MoveData::gather_moves(mir, tcx) {
Ok(move_data) => move_data,
def_id,
free_regions,
mir,
+ location_table,
param_env,
&mut flow_inits,
&mdpe.move_data,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use borrow_check::borrow_set::BorrowSet;
+use borrow_check::location::LocationTable;
+use borrow_check::nll::facts::AllFacts;
use rustc::hir;
-use rustc::mir::{BasicBlock, BasicBlockData, Location, Place, Mir, Rvalue};
+use rustc::infer::InferCtxt;
+use rustc::mir::visit::TyContext;
use rustc::mir::visit::Visitor;
use rustc::mir::Place::Projection;
-use rustc::mir::{Local, PlaceProjection, ProjectionElem};
-use rustc::mir::visit::TyContext;
-use rustc::infer::InferCtxt;
-use rustc::ty::{self, CanonicalTy, ClosureSubsts};
-use rustc::ty::subst::Substs;
+use rustc::mir::{BasicBlock, BasicBlockData, Location, Mir, Place, Rvalue};
+use rustc::mir::{Local, PlaceProjection, ProjectionElem, Statement, Terminator};
use rustc::ty::fold::TypeFoldable;
+use rustc::ty::subst::Substs;
+use rustc::ty::{self, CanonicalTy, ClosureSubsts};
+use super::region_infer::{Cause, RegionInferenceContext};
use super::ToRegionVid;
-use super::region_infer::{RegionInferenceContext, Cause};
pub(super) fn generate_constraints<'cx, 'gcx, 'tcx>(
infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
regioncx: &mut RegionInferenceContext<'tcx>,
+ all_facts: &mut Option<AllFacts>,
+ location_table: &LocationTable,
mir: &Mir<'tcx>,
+ borrow_set: &BorrowSet<'tcx>,
) {
let mut cg = ConstraintGeneration {
+ borrow_set,
infcx,
regioncx,
+ location_table,
+ all_facts,
mir,
};
/// 'cg = the duration of the constraint generation process itself.
struct ConstraintGeneration<'cg, 'cx: 'cg, 'gcx: 'tcx, 'tcx: 'cx> {
infcx: &'cg InferCtxt<'cx, 'gcx, 'tcx>,
+ all_facts: &'cg mut Option<AllFacts>,
+ location_table: &'cg LocationTable,
regioncx: &'cg mut RegionInferenceContext<'tcx>,
mir: &'cg Mir<'tcx>,
+ borrow_set: &'cg BorrowSet<'tcx>,
}
impl<'cg, 'cx, 'gcx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'gcx, 'tcx> {
/// call. Make them live at the location where they appear.
fn visit_ty(&mut self, ty: &ty::Ty<'tcx>, ty_context: TyContext) {
match ty_context {
- TyContext::ReturnTy(source_info) |
- TyContext::YieldTy(source_info) |
- TyContext::LocalDecl { source_info, .. } => {
- span_bug!(source_info.span,
- "should not be visiting outside of the CFG: {:?}",
- ty_context);
+ TyContext::ReturnTy(source_info)
+ | TyContext::YieldTy(source_info)
+ | TyContext::LocalDecl { source_info, .. } => {
+ span_bug!(
+ source_info.span,
+ "should not be visiting outside of the CFG: {:?}",
+ ty_context
+ );
}
TyContext::Location(location) => {
self.add_regular_live_constraint(*ty, location, Cause::LiveOther(location));
self.super_closure_substs(substs);
}
+ fn visit_statement(
+ &mut self,
+ block: BasicBlock,
+ statement: &Statement<'tcx>,
+ location: Location,
+ ) {
+ if let Some(all_facts) = self.all_facts {
+ all_facts.cfg_edge.push((
+ self.location_table.start_index(location),
+ self.location_table.mid_index(location),
+ ));
+
+ all_facts.cfg_edge.push((
+ self.location_table.mid_index(location),
+ self.location_table
+ .start_index(location.successor_within_block()),
+ ));
+ }
+
+ self.super_statement(block, statement, location);
+ }
+
+ fn visit_assign(
+ &mut self,
+ block: BasicBlock,
+ place: &Place<'tcx>,
+ rvalue: &Rvalue<'tcx>,
+ location: Location,
+ ) {
+ // When we see `X = ...`, then kill borrows of
+ // `(*X).foo` and so forth.
+ if let Some(all_facts) = self.all_facts {
+ if let Place::Local(temp) = place {
+ if let Some(borrow_indices) = self.borrow_set.local_map.get(temp) {
+ for &borrow_index in borrow_indices {
+ let location_index = self.location_table.mid_index(location);
+ all_facts.killed.push((borrow_index, location_index));
+ }
+ }
+ }
+ }
+
+ self.super_assign(block, place, rvalue, location);
+ }
+
+ fn visit_terminator(
+ &mut self,
+ block: BasicBlock,
+ terminator: &Terminator<'tcx>,
+ location: Location,
+ ) {
+ if let Some(all_facts) = self.all_facts {
+ all_facts.cfg_edge.push((
+ self.location_table.start_index(location),
+ self.location_table.mid_index(location),
+ ));
+
+ for successor_block in terminator.successors() {
+ all_facts.cfg_edge.push((
+ self.location_table.mid_index(location),
+ self.location_table
+ .start_index(successor_block.start_location()),
+ ));
+ }
+ }
+
+ self.super_terminator(block, terminator, location);
+ }
+
    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        debug!("visit_rvalue(rvalue={:?}, location={:?})", rvalue, location);
-        // Look for an rvalue like:
-        //
-        //     & L
-        //
-        // where L is the path that is borrowed. In that case, we have
-        // to add the reborrow constraints (which don't fall out
-        // naturally from the type-checker).
-        if let Rvalue::Ref(region, _bk, ref borrowed_lv) = *rvalue {
-            self.add_reborrow_constraint(location, region, borrowed_lv);
+        match rvalue {
+            Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
+                // In some cases, e.g. when borrowing from an unsafe
+                // place, we don't bother to create a loan, since
+                // there are no conditions to validate.
+                if let Some(all_facts) = self.all_facts {
+                    // `location_map` only has an entry when a loan was
+                    // actually created for this borrow expression.
+                    if let Some(borrow_index) = self.borrow_set.location_map.get(&location) {
+                        let region_vid = region.to_region_vid();
+                        all_facts.borrow_region.push((
+                            region_vid,
+                            *borrow_index,
+                            self.location_table.mid_index(location),
+                        ));
+                    }
+                }
+
+                // Look for an rvalue like:
+                //
+                //     & L
+                //
+                // where L is the path that is borrowed. In that case, we have
+                // to add the reborrow constraints (which don't fall out
+                // naturally from the type-checker).
+                self.add_reborrow_constraint(location, region, borrowed_place);
+            }
+
+            // Other rvalues create no loans and need no reborrow
+            // constraints.
+            _ => { }
        }
        self.super_rvalue(rvalue, location);
    }
-    fn visit_user_assert_ty(&mut self, _c_ty: &CanonicalTy<'tcx>,
-                            _local: &Local, _location: Location) { }
+    /// Intentionally a no-op: user type assertions generate their
+    /// constraints during MIR type-checking (see `check_stmt`), not
+    /// during constraint generation.
+    fn visit_user_assert_ty(
+        &mut self,
+        _c_ty: &CanonicalTy<'tcx>,
+        _local: &Local,
+        _location: Location,
+    ) {
+    }
}
impl<'cx, 'cg, 'gcx, 'tcx> ConstraintGeneration<'cx, 'cg, 'gcx, 'tcx> {
{
debug!(
"add_regular_live_constraint(live_ty={:?}, location={:?})",
- live_ty,
- location
+ live_ty, location
);
self.infcx
) {
let mut borrowed_place = borrowed_place;
- debug!("add_reborrow_constraint({:?}, {:?}, {:?})",
- location, borrow_region, borrowed_place);
+ debug!(
+ "add_reborrow_constraint({:?}, {:?}, {:?})",
+ location, borrow_region, borrowed_place
+ );
while let Projection(box PlaceProjection { base, elem }) = borrowed_place {
debug!("add_reborrow_constraint - iteration {:?}", borrowed_place);
location.successor_within_block(),
);
+ if let Some(all_facts) = self.all_facts {
+ all_facts.outlives.push((
+ ref_region.to_region_vid(),
+ borrow_region.to_region_vid(),
+ self.location_table.mid_index(location),
+ ));
+ }
+
match mutbl {
hir::Mutability::MutImmutable => {
// Immutable reference. We don't need the base
// to be valid for the entire lifetime of
// the borrow.
- break
+ break;
}
hir::Mutability::MutMutable => {
// Mutable reference. We *do* need the base
}
ty::TyRawPtr(..) => {
// deref of raw pointer, guaranteed to be valid
- break
+ break;
}
ty::TyAdt(def, _) if def.is_box() => {
// deref of `Box`, need the base to be valid - propagate
}
- _ => bug!("unexpected deref ty {:?} in {:?}", base_ty, borrowed_place)
+ _ => bug!("unexpected deref ty {:?} in {:?}", base_ty, borrowed_place),
}
}
- ProjectionElem::Field(..) |
- ProjectionElem::Downcast(..) |
- ProjectionElem::Index(..) |
- ProjectionElem::ConstantIndex { .. } |
- ProjectionElem::Subslice { .. } => {
+ ProjectionElem::Field(..)
+ | ProjectionElem::Downcast(..)
+ | ProjectionElem::Index(..)
+ | ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Subslice { .. } => {
// other field access
}
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use borrow_check::location::{LocationIndex, LocationTable};
+use dataflow::indexes::BorrowIndex;
+use rustc::ty::RegionVid;
+use std::error::Error;
+use std::fmt::Debug;
+use std::fs::{self, File};
+use std::io::Write;
+use std::path::Path;
+
+/// The "facts" which are the basis of the NLL borrow analysis.
+/// Each field is one fact table; `write_to_dir` serializes each
+/// table to its own `.facts` file.
+#[derive(Default)]
+crate struct AllFacts {
+    // `borrow_region(R, B, P)` -- the region R may refer to data from borrow B
+    // starting at the point P (this is usually the point *after* a borrow rvalue)
+    crate borrow_region: Vec<(RegionVid, BorrowIndex, LocationIndex)>,
+
+    // `universal_region(R)` -- this is a "free region" within fn body
+    crate universal_region: Vec<RegionVid>,
+
+    // `cfg_edge(P, Q)` for each edge P -> Q in the control flow
+    crate cfg_edge: Vec<(LocationIndex, LocationIndex)>,
+
+    // `killed(B, P)` when some prefix of the path borrowed at B is assigned at point P
+    crate killed: Vec<(BorrowIndex, LocationIndex)>,
+
+    // `outlives(R1, R2, P)` when we require `R1@P: R2@P`
+    crate outlives: Vec<(RegionVid, RegionVid, LocationIndex)>,
+
+    // `region_live_at(R, P)` when the region R appears in a live variable at P
+    crate region_live_at: Vec<(RegionVid, LocationIndex)>,
+}
+
+impl AllFacts {
+    /// Writes every fact table into `dir`, one file per table, named
+    /// `<field>.facts`. Creates `dir` (and parents) if needed.
+    crate fn write_to_dir(
+        &self,
+        dir: impl AsRef<Path>,
+        location_table: &LocationTable,
+    ) -> Result<(), Box<dyn Error>> {
+        let dir: &Path = dir.as_ref();
+        fs::create_dir_all(dir)?;
+        let wr = FactWriter { location_table, dir };
+        // Local macro: for each listed field, write it to a file named
+        // after the field. Keeps the field list and the file names in
+        // sync without repeating boilerplate per table.
+        macro_rules! write_facts_to_path {
+            ($wr:ident . write_facts_to_path($this:ident . [
+                $($field:ident,)*
+            ])) => {
+                $(
+                    $wr.write_facts_to_path(
+                        &$this.$field,
+                        &format!("{}.facts", stringify!($field))
+                    )?;
+                )*
+            }
+        }
+        write_facts_to_path! {
+            wr.write_facts_to_path(self.[
+                borrow_region,
+                universal_region,
+                cfg_edge,
+                killed,
+                outlives,
+                region_live_at,
+            ])
+        }
+        Ok(())
+    }
+}
+
+/// Helper bundling the output directory with the location table
+/// needed to render `LocationIndex` values in fact rows.
+struct FactWriter<'w> {
+    location_table: &'w LocationTable,
+    dir: &'w Path,
+}
+
+impl<'w> FactWriter<'w> {
+    /// Writes one fact table to `self.dir/file_name`, one row per
+    /// line (see `FactRow::write` for the row format).
+    //
+    // Note: takes `&[T]` rather than `&Vec<T>` (clippy `ptr_arg`);
+    // callers passing `&vec` coerce automatically.
+    fn write_facts_to_path<T>(
+        &self,
+        rows: &[T],
+        file_name: &str,
+    ) -> Result<(), Box<dyn Error>>
+    where
+        T: FactRow,
+    {
+        let file = &self.dir.join(file_name);
+        let mut file = File::create(file)?;
+        for row in rows {
+            row.write(&mut file, self.location_table)?;
+        }
+        Ok(())
+    }
+}
+
+/// One row of a fact table; knows how to serialize itself as a single
+/// line of tab-separated columns.
+trait FactRow {
+    fn write(
+        &self,
+        out: &mut File,
+        location_table: &LocationTable,
+    ) -> Result<(), Box<dyn Error>>;
+}
+
+// Rows are either a bare `RegionVid` or a tuple of 2-4 cells; each
+// impl just forwards its components to `write_row`.
+impl FactRow for RegionVid {
+    fn write(
+        &self,
+        out: &mut File,
+        location_table: &LocationTable,
+    ) -> Result<(), Box<dyn Error>> {
+        write_row(out, location_table, &[self])
+    }
+}
+
+impl<A, B> FactRow for (A, B)
+where
+    A: FactCell,
+    B: FactCell,
+{
+    fn write(
+        &self,
+        out: &mut File,
+        location_table: &LocationTable,
+    ) -> Result<(), Box<dyn Error>> {
+        write_row(out, location_table, &[&self.0, &self.1])
+    }
+}
+
+impl<A, B, C> FactRow for (A, B, C)
+where
+    A: FactCell,
+    B: FactCell,
+    C: FactCell,
+{
+    fn write(
+        &self,
+        out: &mut File,
+        location_table: &LocationTable,
+    ) -> Result<(), Box<dyn Error>> {
+        write_row(out, location_table, &[&self.0, &self.1, &self.2])
+    }
+}
+
+impl<A, B, C, D> FactRow for (A, B, C, D)
+where
+    A: FactCell,
+    B: FactCell,
+    C: FactCell,
+    D: FactCell,
+{
+    fn write(
+        &self,
+        out: &mut File,
+        location_table: &LocationTable,
+    ) -> Result<(), Box<dyn Error>> {
+        write_row(out, location_table, &[&self.0, &self.1, &self.2, &self.3])
+    }
+}
+
+/// Writes a single row: each cell rendered via `FactCell::to_string`,
+/// columns separated by tabs, line terminated by a newline.
+fn write_row(
+    out: &mut dyn Write,
+    location_table: &LocationTable,
+    columns: &[&dyn FactCell],
+) -> Result<(), Box<dyn Error>> {
+    for (index, c) in columns.iter().enumerate() {
+        // Tab between columns; newline after the last column.
+        let tail = if index == columns.len() - 1 {
+            "\n"
+        } else {
+            "\t"
+        };
+        // Note: `{:?}` on the rendered `String` wraps each cell in
+        // double quotes in the output file.
+        write!(out, "{:?}{}", c.to_string(location_table), tail)?;
+    }
+    Ok(())
+}
+
+/// How one column of a fact row is rendered as text.
+trait FactCell {
+    fn to_string(&self, location_table: &LocationTable) -> String;
+}
+
+// Blanket default: any `Debug` type renders via its `Debug` impl.
+// The `default fn` relies on the `specialization` feature so the
+// `LocationIndex` impl below can override it.
+impl<A: Debug> FactCell for A {
+    default fn to_string(&self, _location_table: &LocationTable) -> String {
+        format!("{:?}", self)
+    }
+}
+
+// A raw `LocationIndex` is opaque; render the `Location` it denotes.
+impl FactCell for LocationIndex {
+    fn to_string(&self, location_table: &LocationTable) -> String {
+        format!("{:?}", location_table.to_location(*self))
+    }
+}
// except according to those terms.
use borrow_check::borrow_set::BorrowSet;
+use borrow_check::location::LocationTable;
+use dataflow::move_paths::MoveData;
+use dataflow::FlowAtLocation;
+use dataflow::MaybeInitializedPlaces;
use rustc::hir::def_id::DefId;
-use rustc::mir::{ClosureRegionRequirements, ClosureOutlivesSubject, Mir};
use rustc::infer::InferCtxt;
+use rustc::mir::{ClosureOutlivesSubject, ClosureRegionRequirements, Mir};
use rustc::ty::{self, RegionKind, RegionVid};
use rustc::util::nodemap::FxHashMap;
use std::collections::BTreeSet;
use std::fmt::Debug;
use std::io;
+use std::path::PathBuf;
use transform::MirSource;
use util::liveness::{LivenessResults, LocalSet};
-use dataflow::FlowAtLocation;
-use dataflow::MaybeInitializedPlaces;
-use dataflow::move_paths::MoveData;
+use self::mir_util::PassWhere;
use util as mir_util;
use util::pretty::{self, ALIGN};
-use self::mir_util::PassWhere;
mod constraint_generation;
pub mod explain_borrow;
-pub(crate) mod region_infer;
+mod facts;
+crate mod region_infer;
mod renumber;
mod subtype_constraint_generation;
-pub(crate) mod type_check;
+crate mod type_check;
mod universal_regions;
+use self::facts::AllFacts;
use self::region_infer::RegionInferenceContext;
use self::universal_regions::UniversalRegions;
-
/// Rewrites the regions in the MIR to use NLL variables, also
/// scraping out the set of universal regions (e.g., region parameters)
/// declared on the function. That set will need to be given to
def_id: DefId,
universal_regions: UniversalRegions<'tcx>,
mir: &Mir<'tcx>,
+ location_table: &LocationTable,
param_env: ty::ParamEnv<'gcx>,
flow_inits: &mut FlowAtLocation<MaybeInitializedPlaces<'cx, 'gcx, 'tcx>>,
move_data: &MoveData<'tcx>,
- _borrow_set: &BorrowSet<'tcx>,
+ borrow_set: &BorrowSet<'tcx>,
) -> (
RegionInferenceContext<'tcx>,
Option<ClosureRegionRequirements<'gcx>>,
move_data,
);
+ let mut all_facts = if infcx.tcx.sess.opts.debugging_opts.nll_facts {
+ Some(AllFacts::default())
+ } else {
+ None
+ };
+
+ if let Some(all_facts) = &mut all_facts {
+ all_facts
+ .universal_region
+ .extend(universal_regions.universal_regions());
+ }
+
// Create the region inference context, taking ownership of the region inference
// data that was contained in `infcx`.
let var_origins = infcx.take_region_var_origins();
- let mut regioncx = RegionInferenceContext::new(var_origins, universal_regions, mir);
- subtype_constraint_generation::generate(&mut regioncx, mir, constraint_sets);
-
+ let mut regioncx =
+ RegionInferenceContext::new(var_origins, universal_regions, mir);
+
+ // Generate various constraints.
+ subtype_constraint_generation::generate(
+ &mut regioncx,
+ &mut all_facts,
+ location_table,
+ mir,
+ constraint_sets,
+ );
+ constraint_generation::generate_constraints(
+ infcx,
+ &mut regioncx,
+ &mut all_facts,
+ location_table,
+ &mir,
+ borrow_set,
+ );
- // Generate non-subtyping constraints.
- constraint_generation::generate_constraints(infcx, &mut regioncx, &mir);
+ // Dump facts if requested.
+ if let Some(all_facts) = all_facts {
+ let def_path = infcx.tcx.hir.def_path(def_id);
+ let dir_path = PathBuf::from("nll-facts").join(def_path.to_filename_friendly_no_crate());
+ all_facts.write_to_dir(dir_path, location_table).unwrap();
+ }
// Solve the region constraints.
let closure_region_requirements = regioncx.solve(infcx, &mir, def_id);
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::mir::Mir;
+use borrow_check::location::LocationTable;
+use borrow_check::nll::facts::AllFacts;
use rustc::infer::region_constraints::Constraint;
use rustc::infer::region_constraints::RegionConstraintData;
use rustc::infer::region_constraints::{Verify, VerifyBound};
+use rustc::mir::{Location, Mir};
use rustc::ty;
+use std::iter;
use syntax::codemap::Span;
-use super::region_infer::{TypeTest, RegionInferenceContext, RegionTest};
+use super::region_infer::{RegionInferenceContext, RegionTest, TypeTest};
use super::type_check::Locations;
use super::type_check::MirTypeckRegionConstraints;
use super::type_check::OutlivesSet;
/// them into the NLL `RegionInferenceContext`.
pub(super) fn generate<'tcx>(
regioncx: &mut RegionInferenceContext<'tcx>,
+ all_facts: &mut Option<AllFacts>,
+ location_table: &LocationTable,
mir: &Mir<'tcx>,
constraints: &MirTypeckRegionConstraints<'tcx>,
) {
- SubtypeConstraintGenerator { regioncx, mir }.generate(constraints);
+ SubtypeConstraintGenerator {
+ regioncx,
+ location_table,
+ mir,
+ }.generate(constraints, all_facts);
}
struct SubtypeConstraintGenerator<'cx, 'tcx: 'cx> {
regioncx: &'cx mut RegionInferenceContext<'tcx>,
+ location_table: &'cx LocationTable,
mir: &'cx Mir<'tcx>,
}
impl<'cx, 'tcx> SubtypeConstraintGenerator<'cx, 'tcx> {
- fn generate(&mut self, constraints: &MirTypeckRegionConstraints<'tcx>) {
+ fn generate(
+ &mut self,
+ constraints: &MirTypeckRegionConstraints<'tcx>,
+ all_facts: &mut Option<AllFacts>,
+ ) {
let MirTypeckRegionConstraints {
liveness_set,
outlives_sets,
self.regioncx.add_live_point(region_vid, *location, &cause);
}
+ if let Some(all_facts) = all_facts {
+ all_facts
+ .region_live_at
+ .extend(liveness_set.into_iter().flat_map(|(region, location, _)| {
+ let r = self.to_region_vid(region);
+ let p1 = self.location_table.start_index(*location);
+ let p2 = self.location_table.mid_index(*location);
+ iter::once((r, p1)).chain(iter::once((r, p2)))
+ }));
+ }
+
for OutlivesSet { locations, data } in outlives_sets {
debug!("generate: constraints at: {:#?}", locations);
let RegionConstraintData {
givens,
} = data;
- let span = self.mir.source_info(locations.from_location).span;
+ let span = self.mir
+ .source_info(locations.from_location().unwrap_or(Location::START))
+ .span;
+
+ let at_location = locations.at_location().unwrap_or(Location::START);
for constraint in constraints.keys() {
debug!("generate: constraint: {:?}", constraint);
// reverse direction, because `regioncx` talks about
// "outlives" (`>=`) whereas the region constraints
// talk about `<=`.
- self.regioncx
- .add_outlives(span, b_vid, a_vid, locations.at_location);
+ self.regioncx.add_outlives(span, b_vid, a_vid, at_location);
+
+ // In the new analysis, all outlives relations etc
+ // "take effect" at the mid point of the statement
+ // that requires them, so ignore the `at_location`.
+ if let Some(all_facts) = all_facts {
+ if let Some(from_location) = locations.from_location() {
+ all_facts.outlives.push((
+ b_vid,
+ a_vid,
+ self.location_table.mid_index(from_location),
+ ));
+ } else {
+ for location in self.location_table.all_points() {
+ all_facts.outlives.push((b_vid, a_vid, location));
+ }
+ }
+ }
}
for verify in verifys {
let lower_bound = self.to_region_vid(verify.region);
- let point = locations.at_location;
+ let point = locations.at_location().unwrap_or(Location::START);
let test = self.verify_bound_to_region_test(&verify.bound);
}
    fn to_region_vid(&self, r: ty::Region<'tcx>) -> ty::RegionVid {
-        // Every region that we see in the constraints came from the
-        // MIR or from the parameter environment. If the former, it
-        // will be a region variable. If the latter, it will be in
-        // the set of universal regions *somewhere*.
-        if let ty::ReVar(vid) = r {
-            *vid
-        } else {
-            self.regioncx.to_region_vid(r)
-        }
+        // The `ReVar` special case is gone: `regioncx.to_region_vid`
+        // is now expected to handle both inference variables and
+        // universal regions uniformly, so delegate unconditionally.
+        self.regioncx.to_region_vid(r)
    }
}
use rustc_data_structures::indexed_vec::Idx;
-use super::{AtLocation, TypeChecker};
+use super::{Locations, TypeChecker};
impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
pub(super) fn equate_inputs_and_outputs(
} = universal_regions;
let infcx = self.infcx;
- let start_position = Location {
- block: START_BLOCK,
- statement_index: 0,
- };
-
// Equate expected input tys with those in the MIR.
let argument_locals = (1..).map(Local::new);
for (&unnormalized_input_ty, local) in unnormalized_input_tys.iter().zip(argument_locals) {
- let input_ty = self.normalize(&unnormalized_input_ty, start_position);
+ let input_ty = self.normalize(&unnormalized_input_ty, Locations::All);
let mir_input_ty = mir.local_decls[local].ty;
- self.equate_normalized_input_or_output(start_position, input_ty, mir_input_ty);
+ self.equate_normalized_input_or_output(input_ty, mir_input_ty);
}
assert!(
mir.yield_ty.is_some() && universal_regions.yield_ty.is_some() ||
mir.yield_ty.is_none() && universal_regions.yield_ty.is_none()
- );
+ );
if let Some(mir_yield_ty) = mir.yield_ty {
let ur_yield_ty = universal_regions.yield_ty.unwrap();
- self.equate_normalized_input_or_output(start_position, ur_yield_ty, mir_yield_ty);
+ self.equate_normalized_input_or_output(ur_yield_ty, mir_yield_ty);
}
// Return types are a bit more complex. They may contain existential `impl Trait`
"equate_inputs_and_outputs: unnormalized_output_ty={:?}",
unnormalized_output_ty
);
- let output_ty = self.normalize(&unnormalized_output_ty, start_position);
+ let output_ty = self.normalize(&unnormalized_output_ty, Locations::All);
debug!(
"equate_inputs_and_outputs: normalized output_ty={:?}",
output_ty
);
let mir_output_ty = mir.local_decls[RETURN_PLACE].ty;
- let anon_type_map = self.fully_perform_op(start_position.at_self(), |cx| {
+ let anon_type_map = self.fully_perform_op(Locations::All, |cx| {
let mut obligations = ObligationAccumulator::default();
let (output_ty, anon_type_map) = obligations.add(infcx.instantiate_anon_types(
let anon_defn_ty = anon_defn_ty.subst(tcx, anon_decl.substs);
let anon_defn_ty = renumber::renumber_regions(
cx.infcx,
- TyContext::Location(start_position),
+ TyContext::Location(Location::START),
&anon_defn_ty,
);
debug!(
}).unwrap_or_else(|terr| {
span_mirbug!(
self,
- start_position,
+ Location::START,
"equate_inputs_and_outputs: `{:?}=={:?}` failed with `{:?}`",
output_ty,
mir_output_ty,
// prove that `T: Iterator` where `T` is the type we
// instantiated it with).
if let Some(anon_type_map) = anon_type_map {
- self.fully_perform_op(start_position.at_self(), |_cx| {
+ self.fully_perform_op(Locations::All, |_cx| {
infcx.constrain_anon_types(&anon_type_map, universal_regions);
Ok(InferOk {
value: (),
}
}
- fn equate_normalized_input_or_output(&mut self, location: Location, a: Ty<'tcx>, b: Ty<'tcx>) {
+ fn equate_normalized_input_or_output(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) {
debug!("equate_normalized_input_or_output(a={:?}, b={:?})", a, b);
- if let Err(terr) = self.eq_types(a, b, location.at_self()) {
+ if let Err(terr) = self.eq_types(a, b, Locations::All) {
span_mirbug!(
self,
- location,
+ Location::START,
"equate_normalized_input_or_output: `{:?}=={:?}` failed with `{:?}`",
a,
b,
pub data: RegionConstraintData<'tcx>,
}
+/// The `Locations` type summarizes *where* region constraints are
+/// required to hold. Normally, this is at a particular point which
+/// created the obligation, but for constraints that the user gave, we
+/// want the constraint to hold at all points.
#[derive(Copy, Clone, Debug)]
-pub struct Locations {
- /// The location in the MIR that generated these constraints.
- /// This is intended for error reporting and diagnosis; the
- /// constraints may *take effect* at a distinct spot.
- pub from_location: Location,
-
- /// The constraints must be met at this location. In terms of the
- /// NLL RFC, when you have a constraint `R1: R2 @ P`, this field
- /// is the `P` value.
- pub at_location: Location,
+pub enum Locations {
+ /// Indicates that a type constraint should always be true. This
+ /// is particularly important in the new borrowck analysis for
+ /// things like the type of the return slot. Consider this
+ /// example:
+ ///
+ /// ```
+ /// fn foo<'a>(x: &'a u32) -> &'a u32 {
+ /// let y = 22;
+ /// return &y; // error
+ /// }
+ /// ```
+ ///
+ /// Here, we wind up with the signature from the return type being
+ /// something like `&'1 u32` where `'1` is a universal region. But
+ /// the type of the return slot `_0` is something like `&'2 u32`
+ /// where `'2` is an existential region variable. The type checker
+ /// requires that `&'2 u32 = &'1 u32` -- but at what point? In the
+ /// older NLL analysis, we required this only at the entry point
+ /// to the function. By the nature of the constraints, this wound
+ /// up propagating to all points reachable from start (because
+ /// `'1` -- as a universal region -- is live everywhere). In the
+ /// newer analysis, though, this doesn't work: `_0` is considered
+ /// dead at the start (it has no usable value) and hence this type
+ /// equality is basically a no-op. Then, later on, when we do `_0
+ /// = &'3 y`, that region `'3` never winds up related to the
+ /// universal region `'1` and hence no error occurs. Therefore, we
+ /// use Locations::All instead, which ensures that the `'1` and
+    /// `2` are equal everywhere. We also use this for other
+ /// user-given type annotations; e.g., if the user wrote `let mut
+ /// x: &'static u32 = ...`, we would ensure that all values
+ /// assigned to `x` are of `'static` lifetime.
+ All,
+
+ Pair {
+ /// The location in the MIR that generated these constraints.
+ /// This is intended for error reporting and diagnosis; the
+ /// constraints may *take effect* at a distinct spot.
+ from_location: Location,
+
+ /// The constraints must be met at this location. In terms of the
+ /// NLL RFC, when you have a constraint `R1: R2 @ P`, this field
+ /// is the `P` value.
+ at_location: Location,
+ }
+}
+
+impl Locations {
+    /// The location that *generated* the constraint (used for error
+    /// reporting), or `None` for `Locations::All`, which has no single
+    /// originating point.
+    pub fn from_location(&self) -> Option<Location> {
+        match self {
+            Locations::All => None,
+            Locations::Pair { from_location, .. } => Some(*from_location),
+        }
+    }
+
+    /// The point where the constraint must *hold* (the `P` in
+    /// `R1: R2 @ P`), or `None` for `Locations::All`.
+    pub fn at_location(&self) -> Option<Location> {
+        match self {
+            Locations::All => None,
+            Locations::Pair { at_location, .. } => Some(*at_location),
+        }
+    }
}
impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
"check_stmt: user_assert_ty ty={:?} local_ty={:?}",
ty, local_ty
);
- if let Err(terr) = self.eq_types(ty, local_ty, location.at_self()) {
+ if let Err(terr) = self.eq_types(ty, local_ty, Locations::All) {
span_mirbug!(
self,
stmt,
let place_ty = location.ty(mir, tcx).to_ty(tcx);
let rv_ty = value.ty(mir, tcx);
- let locations = Locations {
+ let locations = Locations::Pair {
from_location: term_location,
at_location: target.start_location(),
};
// *both* blocks, so we need to ensure that it holds
// at both locations.
if let Some(unwind) = unwind {
- let locations = Locations {
+ let locations = Locations::Pair {
from_location: term_location,
at_location: unwind.start_location(),
};
match *destination {
Some((ref dest, target_block)) => {
let dest_ty = dest.ty(mir, tcx).to_ty(tcx);
- let locations = Locations {
+ let locations = Locations::Pair {
from_location: term_location,
at_location: target_block.start_location(),
};
};
let operand_ty = operand.ty(mir, tcx);
if let Err(terr) =
- self.sub_types(operand_ty, field_ty, location.at_successor_within_block())
+ self.sub_types(operand_ty, field_ty, location.at_self())
{
span_mirbug!(
self,
}
}
- fn normalize<T>(&mut self, value: &T, location: Location) -> T
+ fn normalize<T>(&mut self, value: &T, location: impl ToLocations) -> T
where
T: fmt::Debug + TypeFoldable<'tcx>,
{
debug!("normalize(value={:?}, location={:?})", value, location);
- self.fully_perform_op(location.at_self(), |this| {
+ self.fully_perform_op(location.to_locations(), |this| {
let Normalized { value, obligations } = this.infcx
.at(&this.misc(this.last_span), this.param_env)
.normalize(value)
impl AtLocation for Location {
fn at_self(self) -> Locations {
- Locations {
+ Locations::Pair {
from_location: self,
at_location: self,
}
}
fn at_successor_within_block(self) -> Locations {
- Locations {
+ Locations::Pair {
from_location: self,
at_location: self.successor_within_block(),
}
}
}
+
+/// Conversion into `Locations`, letting callers such as `normalize`
+/// accept either a full `Locations` value or a bare `Location`.
+trait ToLocations: fmt::Debug + Copy {
+    fn to_locations(self) -> Locations;
+}
+
+// A `Locations` value converts to itself.
+impl ToLocations for Locations {
+    fn to_locations(self) -> Locations {
+        self
+    }
+}
+
+// A bare `Location` means "from and at this same point".
+impl ToLocations for Location {
+    fn to_locations(self) -> Locations {
+        self.at_self()
+    }
+}
use rustc::middle::expr_use_visitor::{ConsumeMode, Delegate, ExprUseVisitor};
use rustc::middle::expr_use_visitor::{LoanCause, MutateMode};
use rustc::middle::expr_use_visitor as euv;
-use rustc::middle::mem_categorization::{cmt};
+use rustc::middle::mem_categorization::cmt_;
use rustc::middle::region;
use rustc::session::Session;
use rustc::ty::{self, Ty, TyCtxt};
}
impl<'a, 'tcx> Delegate<'tcx> for MutationChecker<'a, 'tcx> {
- fn matched_pat(&mut self, _: &Pat, _: cmt, _: euv::MatchMode) {}
- fn consume(&mut self, _: ast::NodeId, _: Span, _: cmt, _: ConsumeMode) {}
- fn consume_pat(&mut self, _: &Pat, _: cmt, _: ConsumeMode) {}
+ fn matched_pat(&mut self, _: &Pat, _: &cmt_, _: euv::MatchMode) {}
+ fn consume(&mut self, _: ast::NodeId, _: Span, _: &cmt_, _: ConsumeMode) {}
+ fn consume_pat(&mut self, _: &Pat, _: &cmt_, _: ConsumeMode) {}
fn borrow(&mut self,
_: ast::NodeId,
span: Span,
- _: cmt,
+ _: &cmt_,
_: ty::Region<'tcx>,
kind:ty:: BorrowKind,
_: LoanCause) {
}
}
fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
- fn mutate(&mut self, _: ast::NodeId, span: Span, _: cmt, mode: MutateMode) {
+ fn mutate(&mut self, _: ast::NodeId, span: Span, _: &cmt_, mode: MutateMode) {
match mode {
MutateMode::JustWrite | MutateMode::WriteAndRead => {
struct_span_err!(self.cx.tcx.sess, span, E0302, "cannot assign in a pattern guard")
if self.type_is_fat_ptr(src.ty) {
match (src.value, self.type_is_fat_ptr(dest_ty)) {
(Value::ByRef { .. }, _) |
+ // pointers to extern types
+ (Value::ByVal(_),_) |
+ // slices and trait objects to other slices/trait objects
(Value::ByValPair(..), true) => {
let valty = ValTy {
value: src.value,
};
self.write_value(valty, dest)?;
}
+ // slices and trait objects to thin pointers (dropping the metadata)
(Value::ByValPair(data, _), false) => {
let valty = ValTy {
value: Value::ByVal(data),
};
self.write_value(valty, dest)?;
}
- (Value::ByVal(_), _) => bug!("expected fat ptr"),
}
} else {
let src_layout = self.layout_of(src.ty)?;
layout::Abi::Uninhabited);
}
}
- layout::Variants::Tagged { ref discr, .. } => {
+ layout::Variants::Tagged { ref tag, .. } => {
let discr_val = dest_ty.ty_adt_def().unwrap()
.discriminant_for_variant(*self.tcx, variant_index)
.val;
// raw discriminants for enums are isize or bigger during
// their computation, but the in-memory tag is the smallest possible
// representation
- let size = discr.value.size(self.tcx.tcx).bits();
+ let size = tag.value.size(self.tcx.tcx).bits();
let amt = 128 - size;
let discr_val = (discr_val << amt) >> amt;
- let (discr_dest, discr) = self.place_field(dest, mir::Field::new(0), layout)?;
- self.write_primval(discr_dest, PrimVal::Bytes(discr_val), discr.ty)?;
+ let (discr_dest, tag) = self.place_field(dest, mir::Field::new(0), layout)?;
+ self.write_primval(discr_dest, PrimVal::Bytes(discr_val), tag.ty)?;
}
layout::Variants::NicheFilling {
dataful_variant,
#![feature(inclusive_range_methods)]
#![feature(crate_visibility_modifier)]
#![feature(never_type)]
+#![feature(specialization)]
#![cfg_attr(stage0, feature(try_trait))]
extern crate arena;
}
fn drop_flags_on_init(&mut self) {
- let loc = Location { block: START_BLOCK, statement_index: 0 };
+ let loc = Location::START;
let span = self.patch.source_info_for_location(self.mir, loc).span;
let false_ = self.constant_bool(span, false);
for flag in self.drop_flags.values() {
}
fn drop_flags_for_args(&mut self) {
- let loc = Location { block: START_BLOCK, statement_index: 0 };
+ let loc = Location::START;
dataflow::drop_flag_effects_for_function_entry(
self.tcx, self.mir, self.env, |path, ds| {
self.set_drop_flag(loc, path, ds);
fn consume(&mut self,
_consume_id: ast::NodeId,
_consume_span: Span,
- _cmt: mc::cmt,
+ _cmt: &mc::cmt_,
_mode: euv::ConsumeMode) {}
fn borrow(&mut self,
borrow_id: ast::NodeId,
_borrow_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
_loan_region: ty::Region<'tcx>,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause) {
_ => {}
}
- let mut cur = &cmt;
+ let mut cur = cmt;
loop {
match cur.cat {
Categorization::Rvalue(..) => {
fn mutate(&mut self,
_assignment_id: ast::NodeId,
_assignment_span: Span,
- _assignee_cmt: mc::cmt,
+ _assignee_cmt: &mc::cmt_,
_mode: euv::MutateMode) {
}
- fn matched_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::MatchMode) {}
+ fn matched_pat(&mut self, _: &hir::Pat, _: &mc::cmt_, _: euv::MatchMode) {}
- fn consume_pat(&mut self, _consume_pat: &hir::Pat, _cmt: mc::cmt, _mode: euv::ConsumeMode) {}
+ fn consume_pat(&mut self, _consume_pat: &hir::Pat, _cmt: &mc::cmt_, _mode: euv::ConsumeMode) {}
}
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(rustc_diagnostic_macros)]
-#![feature(staged_api)]
#[macro_use] extern crate syntax;
/// This can be used in place of `register_syntax_extension` to register legacy custom derives
/// (i.e. attribute syntax extensions whose name begins with `derive_`). Legacy custom
/// derives defined by this function do not trigger deprecation warnings when used.
- #[unstable(feature = "rustc_private", issue = "27812")]
- #[rustc_deprecated(since = "1.15.0", reason = "replaced by macros 1.1 (RFC 1861)")]
pub fn register_custom_derive(&mut self, name: ast::Name, extension: SyntaxExtension) {
assert!(name.as_str().starts_with("derive_"));
self.whitelisted_custom_derives.push(name);
let prev_name = path[0].name;
if prev_name == keywords::Extern.name() ||
prev_name == keywords::CrateRoot.name() &&
- // Note: When this feature stabilizes, this should
- // be gated on sess.rust_2018()
- self.session.features_untracked().extern_absolute_paths {
+ self.session.features_untracked().extern_absolute_paths &&
+ self.session.rust_2018() {
// `::extern_crate::a::b`
let crate_id = self.crate_loader.process_path_extern(name, ident.span);
let crate_root =
if module_path.len() == 1 && (module_path[0].name == keywords::CrateRoot.name() ||
module_path[0].name == keywords::Extern.name()) {
let is_extern = module_path[0].name == keywords::Extern.name() ||
- self.session.features_untracked().extern_absolute_paths;
+ (self.session.features_untracked().extern_absolute_paths &&
+ self.session.rust_2018());
match directive.subclass {
GlobImport { .. } if is_extern => {
return Some((directive.span,
use rustc::hir;
use rustc::hir::def::Def as HirDef;
-use rustc::hir::map::{Node, NodeItem};
+use rustc::hir::map::{Node, NodeTraitItem, NodeImplItem};
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::middle::cstore::ExternCrate;
use rustc::session::config::CrateType::CrateTypeExecutable;
Some(impl_id) => match self.tcx.hir.get_if_local(impl_id) {
Some(Node::NodeItem(item)) => match item.node {
hir::ItemImpl(.., ref ty, _) => {
- let mut result = String::from("<");
- result.push_str(&self.tcx.hir.node_to_pretty_string(ty.id));
+ let mut qualname = String::from("<");
+ qualname.push_str(&self.tcx.hir.node_to_pretty_string(ty.id));
let mut trait_id = self.tcx.trait_id_of_impl(impl_id);
let mut decl_id = None;
+ let mut docs = String::new();
+ let mut attrs = vec![];
+ if let Some(NodeImplItem(item)) = self.tcx.hir.find(id) {
+ docs = self.docs_for_attrs(&item.attrs);
+ attrs = item.attrs.to_vec();
+ }
+
if let Some(def_id) = trait_id {
- result.push_str(" as ");
- result.push_str(&self.tcx.item_path_str(def_id));
+ // A method in a trait impl.
+ qualname.push_str(" as ");
+ qualname.push_str(&self.tcx.item_path_str(def_id));
self.tcx
.associated_items(def_id)
.find(|item| item.name == name)
.map(|item| decl_id = Some(item.def_id));
- } else {
- if let Some(NodeItem(item)) = self.tcx.hir.find(id) {
- if let hir::ItemImpl(_, _, _, _, _, ref ty, _) = item.node {
- trait_id = self.lookup_ref_id(ty.id);
- }
- }
}
- result.push_str(">");
-
- (
- result,
- trait_id,
- decl_id,
- self.docs_for_attrs(&item.attrs),
- item.attrs.to_vec(),
- )
+ qualname.push_str(">");
+
+ (qualname, trait_id, decl_id, docs, attrs)
}
_ => {
span_bug!(
}
},
None => match self.tcx.trait_of_item(self.tcx.hir.local_def_id(id)) {
- Some(def_id) => match self.tcx.hir.get_if_local(def_id) {
- Some(Node::NodeItem(item)) => (
+ Some(def_id) => {
+ let mut docs = String::new();
+ let mut attrs = vec![];
+
+ if let Some(NodeTraitItem(item)) = self.tcx.hir.find(id) {
+ docs = self.docs_for_attrs(&item.attrs);
+ attrs = item.attrs.to_vec();
+ }
+
+ (
format!("::{}", self.tcx.item_path_str(def_id)),
Some(def_id),
None,
- self.docs_for_attrs(&item.attrs),
- item.attrs.to_vec(),
- ),
- r => {
- span_bug!(
- span,
- "Could not find container {:?} for \
- method {}, got {:?}",
- def_id,
- id,
- r
- );
- }
- },
+ docs,
+ attrs,
+ )
+ }
None => {
debug!("Could not find container for method {} at {:?}", id, span);
// This is not necessarily a bug, if there was a compilation error,
},
/// General-case enums: for each case there is a struct, and they all have
- /// all space reserved for the discriminant, and their first field starts
- /// at a non-0 offset, after where the discriminant would go.
+ /// all space reserved for the tag, and their first field starts
+ /// at a non-0 offset, after where the tag would go.
Tagged {
- discr: Scalar,
+ tag: Scalar,
variants: Vec<LayoutDetails>,
},
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use spec::{LinkerFlavor, Target, TargetOptions, TargetResult};
+
+pub fn target() -> TargetResult {
+ let base = super::linux_musl_base::opts();
+ Ok(Target {
+ // It's important we use "gnueabi" and not "musleabi" here. LLVM uses it
+ // to determine the calling convention and float ABI, and LLVM doesn't
+ // support the "musleabi" value.
+ llvm_target: "armv5te-unknown-linux-gnueabi".to_string(),
+ target_endian: "little".to_string(),
+ target_pointer_width: "32".to_string(),
+ target_c_int_width: "32".to_string(),
+ data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(),
+ arch: "arm".to_string(),
+ target_os: "linux".to_string(),
+ target_env: "musl".to_string(),
+ target_vendor: "unknown".to_string(),
+ linker_flavor: LinkerFlavor::Gcc,
+
+ options: TargetOptions {
+ features: "+soft-float,+strict-align".to_string(),
+ // Atomic operations provided by compiler-builtins
+ max_atomic_width: Some(32),
+ abi_blacklist: super::arm_base::abi_blacklist(),
+ .. base
+ }
+ })
+}
("arm-unknown-linux-musleabihf", arm_unknown_linux_musleabihf),
("armv4t-unknown-linux-gnueabi", armv4t_unknown_linux_gnueabi),
("armv5te-unknown-linux-gnueabi", armv5te_unknown_linux_gnueabi),
+ ("armv5te-unknown-linux-musleabi", armv5te_unknown_linux_musleabi),
("armv7-unknown-linux-gnueabihf", armv7_unknown_linux_gnueabihf),
("armv7-unknown-linux-musleabihf", armv7_unknown_linux_musleabihf),
("aarch64-unknown-linux-gnu", aarch64_unknown_linux_gnu),
self.inline_threshold = sess.opts.cg.inline_threshold;
self.obj_is_bitcode = sess.target.target.options.obj_is_bitcode;
let embed_bitcode = sess.target.target.options.embed_bitcode ||
- sess.opts.debugging_opts.embed_bitcode;
+ sess.opts.debugging_opts.embed_bitcode ||
+ sess.opts.debugging_opts.cross_lang_lto;
if embed_bitcode {
match sess.opts.optimize {
config::OptLevel::No |
"rustc.embedded.module\0".as_ptr() as *const _,
);
llvm::LLVMSetInitializer(llglobal, llconst);
- let section = if cgcx.opts.target_triple.triple().contains("-ios") {
+
+ let is_apple = cgcx.opts.target_triple.triple().contains("-ios") ||
+ cgcx.opts.target_triple.triple().contains("-darwin");
+
+ let section = if is_apple {
"__LLVM,__bitcode\0"
} else {
".llvmbc\0"
};
llvm::LLVMSetSection(llglobal, section.as_ptr() as *const _);
llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage);
+ llvm::LLVMSetGlobalConstant(llglobal, llvm::True);
let llconst = C_bytes_in_context(llcx, &[]);
let llglobal = llvm::LLVMAddGlobal(
"rustc.embedded.cmdline\0".as_ptr() as *const _,
);
llvm::LLVMSetInitializer(llglobal, llconst);
- let section = if cgcx.opts.target_triple.triple().contains("-ios") {
+ let section = if is_apple {
"__LLVM,__cmdline\0"
} else {
".llvmcmd\0"
// settings.
let needs_lto = needs_lto && mtrans.kind != ModuleKind::Metadata;
+ // Don't run LTO passes when cross-lang LTO is enabled. The linker
+ // will do that for us in this case.
+ let needs_lto = needs_lto && !cgcx.opts.debugging_opts.cross_lang_lto;
+
if needs_lto {
Ok(WorkItemResult::NeedsLTO(mtrans))
} else {
let discriminant_type_metadata = match layout.variants {
layout::Variants::Single { .. } |
layout::Variants::NicheFilling { .. } => None,
- layout::Variants::Tagged { ref discr, .. } => {
- Some(discriminant_type_metadata(discr.value))
+ layout::Variants::Tagged { ref tag, .. } => {
+ Some(discriminant_type_metadata(tag.value))
}
};
let lldiscr = discr.load(bx).immediate();
match self.layout.variants {
layout::Variants::Single { .. } => bug!(),
- layout::Variants::Tagged { ref discr, .. } => {
- let signed = match discr.value {
+ layout::Variants::Tagged { ref tag, .. } => {
+ let signed = match tag.value {
layout::Int(_, signed) => signed,
_ => false
};
use std::mem;
use std::ops::Deref;
+use std::rc::Rc;
use rustc_data_structures::sync::Lrc;
use syntax::ast;
use syntax_pos::Span;
// the adjusted form if there is an adjustment.
match cmt_result {
Ok(head_cmt) => {
- self.check_safety_of_rvalue_destructor_if_necessary(head_cmt, expr.span);
+ self.check_safety_of_rvalue_destructor_if_necessary(&head_cmt, expr.span);
}
Err(..) => {
self.tcx.sess.delay_span_bug(expr.span, "cat_expr Errd");
/// Invoked on any adjustments that occur. Checks that if this is a region pointer being
/// dereferenced, the lifetime of the pointer includes the deref expr.
- fn constrain_adjustments(&mut self, expr: &hir::Expr) -> mc::McResult<mc::cmt<'tcx>> {
+ fn constrain_adjustments(&mut self, expr: &hir::Expr) -> mc::McResult<mc::cmt_<'tcx>> {
debug!("constrain_adjustments(expr={:?})", expr);
let mut cmt = self.with_mc(|mc| mc.cat_expr_unadjusted(expr))?;
// If necessary, constrain destructors in the unadjusted form of this
// expression.
- self.check_safety_of_rvalue_destructor_if_necessary(cmt.clone(), expr.span);
+ self.check_safety_of_rvalue_destructor_if_necessary(&cmt, expr.span);
let expr_region = self.tcx.mk_region(ty::ReScope(
region::Scope::Node(expr.hir_id.local_id)));
});
self.link_region(expr.span, deref.region,
- ty::BorrowKind::from_mutbl(deref.mutbl), cmt.clone());
+ ty::BorrowKind::from_mutbl(deref.mutbl), &cmt);
// Specialized version of constrain_call.
self.type_must_outlive(infer::CallRcvr(expr.span),
}
if let adjustment::Adjust::Borrow(ref autoref) = adjustment.kind {
- self.link_autoref(expr, cmt.clone(), autoref);
+ self.link_autoref(expr, &cmt, autoref);
// Require that the resulting region encompasses
// the current node.
}
fn check_safety_of_rvalue_destructor_if_necessary(&mut self,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
span: Span) {
match cmt.cat {
Categorization::Rvalue(region) => {
debug!("link_addr_of: cmt={:?}", cmt);
- self.link_region_from_node_type(expr.span, expr.hir_id, mutability, cmt);
+ self.link_region_from_node_type(expr.span, expr.hir_id, mutability, &cmt);
}
/// Computes the guarantors for any ref bindings in a `let` and
None => { return; }
Some(ref expr) => &**expr,
};
- let discr_cmt = ignore_err!(self.with_mc(|mc| mc.cat_expr(init_expr)));
+ let discr_cmt = Rc::new(ignore_err!(self.with_mc(|mc| mc.cat_expr(init_expr))));
self.link_pattern(discr_cmt, &local.pat);
}
/// linked to the lifetime of its guarantor (if any).
fn link_match(&self, discr: &hir::Expr, arms: &[hir::Arm]) {
debug!("regionck::for_match()");
- let discr_cmt = ignore_err!(self.with_mc(|mc| mc.cat_expr(discr)));
+ let discr_cmt = Rc::new(ignore_err!(self.with_mc(|mc| mc.cat_expr(discr))));
debug!("discr_cmt={:?}", discr_cmt);
for arm in arms {
for root_pat in &arm.pats {
let arg_ty = self.node_ty(arg.hir_id);
let re_scope = self.tcx.mk_region(ty::ReScope(body_scope));
let arg_cmt = self.with_mc(|mc| {
- mc.cat_rvalue(arg.id, arg.pat.span, re_scope, arg_ty)
+ Rc::new(mc.cat_rvalue(arg.id, arg.pat.span, re_scope, arg_ty))
});
debug!("arg_ty={:?} arg_cmt={:?} arg={:?}",
arg_ty,
.expect("missing binding mode");
if let ty::BindByReference(mutbl) = bm {
self.link_region_from_node_type(sub_pat.span, sub_pat.hir_id,
- mutbl, sub_cmt);
+ mutbl, &sub_cmt);
}
}
_ => {}
/// autoref'd.
fn link_autoref(&self,
expr: &hir::Expr,
- expr_cmt: mc::cmt<'tcx>,
+ expr_cmt: &mc::cmt_<'tcx>,
autoref: &adjustment::AutoBorrow<'tcx>)
{
debug!("link_autoref(autoref={:?}, expr_cmt={:?})", autoref, expr_cmt);
match *autoref {
adjustment::AutoBorrow::Ref(r, m) => {
- self.link_region(expr.span, r,
- ty::BorrowKind::from_mutbl(m.into()), expr_cmt);
+ self.link_region(expr.span, r, ty::BorrowKind::from_mutbl(m.into()), expr_cmt);
}
adjustment::AutoBorrow::RawPtr(m) => {
span: Span,
id: hir::HirId,
mutbl: hir::Mutability,
- cmt_borrowed: mc::cmt<'tcx>) {
+ cmt_borrowed: &mc::cmt_<'tcx>) {
debug!("link_region_from_node_type(id={:?}, mutbl={:?}, cmt_borrowed={:?})",
id, mutbl, cmt_borrowed);
let rptr_ty = self.resolve_node_type(id);
if let ty::TyRef(r, _) = rptr_ty.sty {
debug!("rptr_ty={}", rptr_ty);
- self.link_region(span, r, ty::BorrowKind::from_mutbl(mutbl),
- cmt_borrowed);
+ self.link_region(span, r, ty::BorrowKind::from_mutbl(mutbl), cmt_borrowed);
}
}
span: Span,
borrow_region: ty::Region<'tcx>,
borrow_kind: ty::BorrowKind,
- borrow_cmt: mc::cmt<'tcx>) {
- let mut borrow_cmt = borrow_cmt;
- let mut borrow_kind = borrow_kind;
-
+ borrow_cmt: &mc::cmt_<'tcx>) {
let origin = infer::DataBorrowed(borrow_cmt.ty, span);
self.type_must_outlive(origin, borrow_cmt.ty, borrow_region);
+ let mut borrow_kind = borrow_kind;
+ let mut borrow_cmt_cat = borrow_cmt.cat.clone();
+
loop {
debug!("link_region(borrow_region={:?}, borrow_kind={:?}, borrow_cmt={:?})",
borrow_region,
borrow_kind,
borrow_cmt);
- match borrow_cmt.cat.clone() {
+ match borrow_cmt_cat {
Categorization::Deref(ref_cmt, mc::Implicit(ref_kind, ref_region)) |
Categorization::Deref(ref_cmt, mc::BorrowedPtr(ref_kind, ref_region)) => {
match self.link_reborrowed_region(span,
ref_cmt, ref_region, ref_kind,
borrow_cmt.note) {
Some((c, k)) => {
- borrow_cmt = c;
+ borrow_cmt_cat = c.cat.clone();
borrow_kind = k;
}
None => {
Categorization::Interior(cmt_base, _) => {
// Borrowing interior or owned data requires the base
// to be valid and borrowable in the same fashion.
- borrow_cmt = cmt_base;
+ borrow_cmt_cat = cmt_base.cat.clone();
borrow_kind = borrow_kind;
}
}
impl<'a, 'gcx, 'tcx> InferBorrowKind<'a, 'gcx, 'tcx> {
- fn adjust_upvar_borrow_kind_for_consume(&mut self, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) {
+ fn adjust_upvar_borrow_kind_for_consume(&mut self, cmt: &mc::cmt_<'tcx>,
+ mode: euv::ConsumeMode) {
debug!(
"adjust_upvar_borrow_kind_for_consume(cmt={:?}, mode={:?})",
cmt,
/// Indicates that `cmt` is being directly mutated (e.g., assigned
/// to). If cmt contains any by-ref upvars, this implies that
/// those upvars must be borrowed using an `&mut` borrow.
- fn adjust_upvar_borrow_kind_for_mut(&mut self, cmt: mc::cmt<'tcx>) {
+ fn adjust_upvar_borrow_kind_for_mut(&mut self, cmt: &mc::cmt_<'tcx>) {
debug!("adjust_upvar_borrow_kind_for_mut(cmt={:?})", cmt);
match cmt.cat.clone() {
Categorization::Downcast(base, _) => {
// Interior or owned data is mutable if base is
// mutable, so iterate to the base.
- self.adjust_upvar_borrow_kind_for_mut(base);
+ self.adjust_upvar_borrow_kind_for_mut(&base);
}
Categorization::Deref(base, mc::BorrowedPtr(..)) |
// borrowed pointer implies that the
// pointer itself must be unique, but not
// necessarily *mutable*
- self.adjust_upvar_borrow_kind_for_unique(base);
+ self.adjust_upvar_borrow_kind_for_unique(&base);
}
}
}
}
- fn adjust_upvar_borrow_kind_for_unique(&mut self, cmt: mc::cmt<'tcx>) {
+ fn adjust_upvar_borrow_kind_for_unique(&mut self, cmt: &mc::cmt_<'tcx>) {
debug!("adjust_upvar_borrow_kind_for_unique(cmt={:?})", cmt);
match cmt.cat.clone() {
Categorization::Downcast(base, _) => {
// Interior or owned data is unique if base is
// unique.
- self.adjust_upvar_borrow_kind_for_unique(base);
+ self.adjust_upvar_borrow_kind_for_unique(&base);
}
Categorization::Deref(base, mc::BorrowedPtr(..)) |
if !self.try_adjust_upvar_deref(cmt, ty::UniqueImmBorrow) {
// for a borrowed pointer to be unique, its
// base must be unique
- self.adjust_upvar_borrow_kind_for_unique(base);
+ self.adjust_upvar_borrow_kind_for_unique(&base);
}
}
}
}
- fn try_adjust_upvar_deref(&mut self, cmt: mc::cmt<'tcx>, borrow_kind: ty::BorrowKind) -> bool {
+ fn try_adjust_upvar_deref(&mut self, cmt: &mc::cmt_<'tcx>, borrow_kind: ty::BorrowKind)
+ -> bool
+ {
assert!(match borrow_kind {
ty::MutBorrow => true,
ty::UniqueImmBorrow => true,
&mut self,
_consume_id: ast::NodeId,
_consume_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
mode: euv::ConsumeMode,
) {
debug!("consume(cmt={:?},mode={:?})", cmt, mode);
self.adjust_upvar_borrow_kind_for_consume(cmt, mode);
}
- fn matched_pat(&mut self, _matched_pat: &hir::Pat, _cmt: mc::cmt<'tcx>, _mode: euv::MatchMode) {
+ fn matched_pat(&mut self, _matched_pat: &hir::Pat, _cmt: &mc::cmt_<'tcx>,
+ _mode: euv::MatchMode) {
}
- fn consume_pat(&mut self, _consume_pat: &hir::Pat, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) {
+ fn consume_pat(&mut self, _consume_pat: &hir::Pat, cmt: &mc::cmt_<'tcx>,
+ mode: euv::ConsumeMode) {
debug!("consume_pat(cmt={:?},mode={:?})", cmt, mode);
self.adjust_upvar_borrow_kind_for_consume(cmt, mode);
}
&mut self,
borrow_id: ast::NodeId,
_borrow_span: Span,
- cmt: mc::cmt<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
_loan_region: ty::Region<'tcx>,
bk: ty::BorrowKind,
_loan_cause: euv::LoanCause,
&mut self,
_assignment_id: ast::NodeId,
_assignment_span: Span,
- assignee_cmt: mc::cmt<'tcx>,
+ assignee_cmt: &mc::cmt_<'tcx>,
_mode: euv::MutateMode,
) {
debug!("mutate(assignee_cmt={:?})", assignee_cmt);
///
/// The returned value is `None` if the definition could not be inlined,
/// and `Some` of a vector of items if it was successfully expanded.
-pub fn try_inline(cx: &DocContext, def: Def, name: ast::Name)
+pub fn try_inline(cx: &DocContext, def: Def, name: ast::Name, visited: &mut FxHashSet<DefId>)
-> Option<Vec<clean::Item>> {
if def == Def::Err { return None }
let did = def.def_id();
Def::StructCtor(..) => return Some(Vec::new()),
Def::Mod(did) => {
record_extern_fqn(cx, did, clean::TypeKind::Module);
- clean::ModuleItem(build_module(cx, did))
+ clean::ModuleItem(build_module(cx, did, visited))
}
Def::Static(did, mtbl) => {
record_extern_fqn(cx, did, clean::TypeKind::Static);
});
}
-fn build_module(cx: &DocContext, did: DefId) -> clean::Module {
+fn build_module(cx: &DocContext, did: DefId, visited: &mut FxHashSet<DefId>) -> clean::Module {
let mut items = Vec::new();
- fill_in(cx, did, &mut items);
+ fill_in(cx, did, &mut items, visited);
return clean::Module {
items,
is_crate: false,
};
- fn fill_in(cx: &DocContext, did: DefId, items: &mut Vec<clean::Item>) {
+ fn fill_in(cx: &DocContext, did: DefId, items: &mut Vec<clean::Item>,
+ visited: &mut FxHashSet<DefId>) {
// If we're re-exporting a re-export it may actually re-export something in
// two namespaces, so the target may be listed twice. Make sure we only
// visit each node at most once.
- let mut visited = FxHashSet();
for &item in cx.tcx.item_children(did).iter() {
let def_id = item.def.def_id();
if item.vis == ty::Visibility::Public {
- if !visited.insert(def_id) { continue }
- if let Some(i) = try_inline(cx, item.def, item.ident.name) {
+ if did == def_id || !visited.insert(def_id) { continue }
+ if let Some(i) = try_inline(cx, item.def, item.ident.name, visited) {
items.extend(i)
}
}
impl Clean<Generics> for hir::Generics {
fn clean(&self, cx: &DocContext) -> Generics {
+ let mut params = Vec::with_capacity(self.params.len());
+ for p in &self.params {
+ let p = p.clean(cx);
+ if let GenericParam::Type(ref tp) = p {
+ if tp.synthetic == Some(hir::SyntheticTyParamKind::ImplTrait) {
+ cx.impl_trait_bounds.borrow_mut().insert(tp.did, tp.bounds.clone());
+ }
+ }
+ params.push(p);
+ }
let mut g = Generics {
- params: self.params.clean(cx),
+ params,
where_predicates: self.where_clause.predicates.clean(cx)
};
impl<'a> Clean<Method> for (&'a hir::MethodSig, &'a hir::Generics, hir::BodyId) {
fn clean(&self, cx: &DocContext) -> Method {
- let generics = self.1.clean(cx);
+ let (generics, decl) = enter_impl_trait(cx, || {
+ (self.1.clean(cx), (&*self.0.decl, self.2).clean(cx))
+ });
Method {
- decl: enter_impl_trait(cx, &generics.params, || (&*self.0.decl, self.2).clean(cx)),
+ decl,
generics,
unsafety: self.0.unsafety,
constness: self.0.constness,
impl Clean<Item> for doctree::Function {
fn clean(&self, cx: &DocContext) -> Item {
- let generics = self.generics.clean(cx);
- let decl = enter_impl_trait(cx, &generics.params, || (&self.decl, self.body).clean(cx));
+ let (generics, decl) = enter_impl_trait(cx, || {
+ (self.generics.clean(cx), (&self.decl, self.body).clean(cx))
+ });
Item {
name: Some(self.name.clean(cx)),
attrs: self.attrs.clean(cx),
MethodItem((sig, &self.generics, body).clean(cx))
}
hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref names)) => {
- let generics = self.generics.clean(cx);
+ let (generics, decl) = enter_impl_trait(cx, || {
+ (self.generics.clean(cx), (&*sig.decl, &names[..]).clean(cx))
+ });
TyMethodItem(TyMethod {
unsafety: sig.unsafety.clone(),
- decl: enter_impl_trait(cx, &generics.params, || {
- (&*sig.decl, &names[..]).clean(cx)
- }),
+ decl,
generics,
abi: sig.abi
})
impl Clean<BareFunctionDecl> for hir::BareFnTy {
fn clean(&self, cx: &DocContext) -> BareFunctionDecl {
- let generic_params = self.generic_params.clean(cx);
+ let (generic_params, decl) = enter_impl_trait(cx, || {
+ (self.generic_params.clean(cx), (&*self.decl, &self.arg_names[..]).clean(cx))
+ });
BareFunctionDecl {
unsafety: self.unsafety,
- decl: enter_impl_trait(cx, &generic_params, || {
- (&*self.decl, &self.arg_names[..]).clean(cx)
- }),
+ decl,
generic_params,
abi: self.abi,
}
} else {
let name = self.name;
if !denied {
- if let Some(items) = inline::try_inline(cx, path.def, name) {
+ let mut visited = FxHashSet();
+ if let Some(items) = inline::try_inline(cx, path.def, name, &mut visited) {
return items;
}
}
fn clean(&self, cx: &DocContext) -> Item {
let inner = match self.node {
hir::ForeignItemFn(ref decl, ref names, ref generics) => {
- let generics = generics.clean(cx);
+ let (generics, decl) = enter_impl_trait(cx, || {
+ (generics.clean(cx), (&**decl, &names[..]).clean(cx))
+ });
ForeignFunctionItem(Function {
- decl: enter_impl_trait(cx, &generics.params, || {
- (&**decl, &names[..]).clean(cx)
- }),
+ decl,
generics,
unsafety: hir::Unsafety::Unsafe,
abi: Abi::Rust,
once(crate_name).chain(relative).collect()
}
-pub fn enter_impl_trait<F, R>(cx: &DocContext, gps: &[GenericParam], f: F) -> R
+pub fn enter_impl_trait<F, R>(cx: &DocContext, f: F) -> R
where
F: FnOnce() -> R,
{
- let bounds = gps.iter()
- .filter_map(|p| {
- if let GenericParam::Type(ref tp) = *p {
- if tp.synthetic == Some(hir::SyntheticTyParamKind::ImplTrait) {
- return Some((tp.did, tp.bounds.clone()));
- }
- }
-
- None
- })
- .collect::<FxHashMap<DefId, Vec<TyParamBound>>>();
-
- let old_bounds = mem::replace(&mut *cx.impl_trait_bounds.borrow_mut(), bounds);
+ let old_bounds = mem::replace(&mut *cx.impl_trait_bounds.borrow_mut(), Default::default());
let r = f();
assert!(cx.impl_trait_bounds.borrow().is_empty());
*cx.impl_trait_bounds.borrow_mut() = old_bounds;
// with a rustc without jemalloc.
// FIXME(#44236) shouldn't need MSVC logic
#![cfg_attr(all(not(target_env = "msvc"),
- any(stage0, feature = "force_alloc_system")),
+ any(all(stage0, not(test)), feature = "force_alloc_system")),
feature(global_allocator))]
#[cfg(all(not(target_env = "msvc"),
- any(stage0, feature = "force_alloc_system")))]
+ any(all(stage0, not(test)), feature = "force_alloc_system")))]
#[global_allocator]
static ALLOC: alloc_system::System = alloc_system::System;
///
/// # `!` and generics
///
+/// ## Infallible errors
+///
/// The main place you'll see `!` used explicitly is in generic code. Consider the [`FromStr`]
/// trait:
///
/// [`Ok`] variant. This illustrates another behaviour of `!` - it can be used to "delete" certain
/// enum variants from generic types like `Result`.
///
+/// ## Infinite loops
+///
+/// While [`Result<T, !>`] is very useful for removing errors, `!` can also be used to remove
+/// successes. If we think of [`Result<T, !>`] as "if this function returns, it has not
+/// errored," we get a very intuitive idea of [`Result<!, E>`] as well: if the function returns, it
+/// *has* errored.
+///
+/// For example, consider the case of a simple web server, which can be simplified to:
+///
+/// ```ignore (hypothetical-example)
+/// loop {
+/// let (client, request) = get_request().expect("disconnected");
+/// let response = request.process();
+/// response.send(client);
+/// }
+/// ```
+///
+/// Currently, this isn't ideal, because we simply panic whenever we fail to get a new connection.
+/// Instead, we'd like to keep track of this error, like this:
+///
+/// ```ignore (hypothetical-example)
+/// loop {
+/// match get_request() {
+/// Err(err) => break err,
+/// Ok((client, request)) => {
+/// let response = request.process();
+/// response.send(client);
+/// },
+/// }
+/// }
+/// ```
+///
+/// Now, when the server disconnects, we exit the loop with an error instead of panicking. While it
+/// might be intuitive to simply return the error, we might want to wrap it in a [`Result<!, E>`]
+/// instead:
+///
+/// ```ignore (hypothetical-example)
+/// fn server_loop() -> Result<!, ConnectionError> {
+/// loop {
+/// let (client, request) = get_request()?;
+/// let response = request.process();
+/// response.send(client);
+/// }
+/// }
+/// ```
+///
+/// Now, we can use `?` instead of `match`, and the return type makes a lot more sense: if the loop
+/// ever stops, it means that an error occurred. We don't even have to wrap the loop in an `Ok`
+/// because `!` coerces to `Result<!, ConnectionError>` automatically.
+///
/// [`String::from_str`]: str/trait.FromStr.html#tymethod.from_str
/// [`Result<String, !>`]: result/enum.Result.html
/// [`Result<T, !>`]: result/enum.Result.html
+/// [`Result<!, E>`]: result/enum.Result.html
/// [`Ok`]: result/enum.Result.html#variant.Ok
/// [`String`]: string/struct.String.html
/// [`Err`]: result/enum.Result.html#variant.Err
impl Edition {
pub fn lint_name(&self) -> &'static str {
match *self {
- Edition::Edition2015 => "edition_2015",
- Edition::Edition2018 => "edition_2018",
+ Edition::Edition2015 => "rust_2015_breakage",
+ Edition::Edition2018 => "rust_2018_breakage",
}
}
(active, abi_unadjusted, "1.16.0", None, None),
// Procedural macros 2.0.
- (active, proc_macro, "1.16.0", Some(38356), None),
+ (active, proc_macro, "1.16.0", Some(38356), Some(Edition::Edition2018)),
// Declarative macros 2.0 (`macro`).
(active, decl_macro, "1.17.0", Some(39412), None),
// Allows the `catch {...}` expression
- (active, catch_expr, "1.17.0", Some(31436), None),
+ (active, catch_expr, "1.17.0", Some(31436), Some(Edition::Edition2018)),
// Used to preserve symbols (see llvm.used)
(active, used, "1.18.0", Some(40289), None),
let mut feature_checker = FeatureChecker::default();
+ for &(.., f_edition, set) in ACTIVE_FEATURES.iter() {
+ if let Some(f_edition) = f_edition {
+ if f_edition <= crate_edition {
+ set(&mut features, DUMMY_SP);
+ }
+ }
+ }
+
for attr in krate_attrs {
if !attr.check_name("feature") {
continue
token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)),
token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)),
- token::Str_(s) => {
- let s = Symbol::intern(&str_lit(&s.as_str(), diag));
- (true, Some(LitKind::Str(s, ast::StrStyle::Cooked)))
+ token::Str_(mut sym) => {
+ // If there are no characters requiring special treatment we can
+ // reuse the symbol from the Token. Otherwise, we must generate a
+ // new symbol because the string in the LitKind is different to the
+ // string in the Token.
+ let s = &sym.as_str();
+ if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
+ sym = Symbol::intern(&str_lit(s, diag));
+ }
+ (true, Some(LitKind::Str(sym, ast::StrStyle::Cooked)))
}
- token::StrRaw(s, n) => {
- let s = Symbol::intern(&raw_str_lit(&s.as_str()));
- (true, Some(LitKind::Str(s, ast::StrStyle::Raw(n))))
+ token::StrRaw(mut sym, n) => {
+ // Ditto.
+ let s = &sym.as_str();
+ if s.contains('\r') {
+ sym = Symbol::intern(&raw_str_lit(s));
+ }
+ (true, Some(LitKind::Str(sym, ast::StrStyle::Raw(n))))
}
token::ByteStr(i) => {
(true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str()))))
style: ast::StrStyle) -> io::Result<()> {
let st = match style {
ast::StrStyle::Cooked => {
- (format!("\"{}\"", st.escape_default()))
+ (format!("\"{}\"", st.escape_debug()))
}
ast::StrStyle::Raw(n) => {
(format!("r{delim}\"{string}\"{delim}",
tts: if let Some(ts) = custom_msg_args {
ts.into()
} else {
- // `expr_to_string` escapes the string literals with `.escape_default()`
- // which escapes all non-ASCII characters with `\u`.
- let escaped_expr = escape_format_string(&unescape_printable_unicode(
- &pprust::expr_to_string(&cond_expr),
- ));
-
TokenStream::from(TokenTree::Token(
DUMMY_SP,
token::Literal(
- token::Lit::Str_(Name::intern(&format!("assertion failed: {}", escaped_expr))),
+ token::Lit::Str_(Name::intern(&format!(
+ "assertion failed: {}",
+ pprust::expr_to_string(&cond_expr).escape_debug()
+ ))),
None,
),
)).into()
);
MacEager::expr(if_expr)
}
-
-/// Escapes a string for use as a formatting string.
-fn escape_format_string(s: &str) -> String {
- let mut res = String::with_capacity(s.len());
- for c in s.chars() {
- res.extend(c.escape_debug());
- match c {
- '{' | '}' => res.push(c),
- _ => {}
- }
- }
- res
-}
-
-#[test]
-fn test_escape_format_string() {
- assert!(escape_format_string(r"foo{}\") == r"foo{{}}\\");
-}
-
-/// Unescapes the escaped unicodes (`\u{...}`) that are printable.
-fn unescape_printable_unicode(mut s: &str) -> String {
- use std::{char, u32};
-
- let mut res = String::with_capacity(s.len());
-
- loop {
- if let Some(start) = s.find(r"\u{") {
- res.push_str(&s[0..start]);
- s = &s[start..];
- s.find('}')
- .and_then(|end| {
- let v = u32::from_str_radix(&s[3..end], 16).ok()?;
- let c = char::from_u32(v)?;
- // Escape unprintable characters.
- res.extend(c.escape_debug());
- s = &s[end + 1..];
- Some(())
- })
- .expect("lexer should have rejected invalid escape sequences");
- } else {
- res.push_str(s);
- return res;
- }
- }
-}
-
-#[test]
-fn test_unescape_printable_unicode() {
- assert!(unescape_printable_unicode(r"\u{2603}\n\u{0}") == r"☃\n\u{0}");
-}
-Subproject commit 1ea18a5cb431e24aa838b652ac305acc5e394d6b
+Subproject commit 2f86c75a2479cf051b92fc98273daaf7f151e7a1
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(incoherent_fundamental_impls)]
-
pub trait Trait1<X> {
type Output;
}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+fn main() {
+ assert!({false});
+
+ assert!(r"\u{41}" == "A");
+
+ assert!(r"\u{".is_empty());
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// compile-flags: --edition=2018 -Zunstable-options
+
#![feature(extern_absolute_paths)]
use xcrate::S; //~ ERROR can't find crate for `xcrate`
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// compile-flags: --edition=2018 -Zunstable-options
+
#![feature(extern_absolute_paths)]
fn main() {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// compile-flags: --edition=2018 -Zunstable-options
+
#![feature(extern_absolute_paths)]
use ycrate; //~ ERROR can't find crate for `ycrate`
// except according to those terms.
// aux-build:xcrate.rs
+// compile-flags: --edition=2018 -Zunstable-options
#![feature(crate_in_paths)]
#![feature(extern_absolute_paths)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(unused_results, unused_must_use)]
#![allow(dead_code)]
+#![deny(unused_results, unused_must_use)]
+//~^ NOTE: lint level defined here
+//~| NOTE: lint level defined here
#[must_use]
enum MustUse { Test }
fn test() {
foo::<isize>();
foo::<MustUse>(); //~ ERROR: unused `MustUse` which must be used
- foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` which must be used: some message
+ foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` which must be used
+ //~^ NOTE: some message
}
#[allow(unused_results, unused_must_use)]
fn main() {
foo::<isize>(); //~ ERROR: unused result
foo::<MustUse>(); //~ ERROR: unused `MustUse` which must be used
- foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` which must be used: some message
+ foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` which must be used
+ //~^ NOTE: some message
let _ = foo::<isize>();
let _ = foo::<MustUse>();
all:
$(RUSTC) --test f.rs
- $(call RUN,f) -Z unstable-options --test-threads=1 --format=json > $(OUTPUT_FILE) || true
+ RUST_BACKTRACE=0 $(call RUN,f) -Z unstable-options --test-threads=1 --format=json > $(OUTPUT_FILE) || true
cat $(OUTPUT_FILE) | "$(PYTHON)" validate_json.py
-include ../tools.mk
all: extern_absolute_paths.rs extern_in_paths.rs krate2
- $(RUSTC) extern_absolute_paths.rs -Zsave-analysis
+ $(RUSTC) extern_absolute_paths.rs -Zsave-analysis --edition=2018
cat $(TMPDIR)/save-analysis/extern_absolute_paths.json | "$(PYTHON)" validate_json.py
- $(RUSTC) extern_in_paths.rs -Zsave-analysis
+ $(RUSTC) extern_in_paths.rs -Zsave-analysis --edition=2018
cat $(TMPDIR)/save-analysis/extern_in_paths.json | "$(PYTHON)" validate_json.py
krate2: krate2.rs
--- /dev/null
+
+# min-llvm-version 4.0
+# ignore-mingw
+
+-include ../../run-make-fulldeps/tools.mk
+
+# This test makes sure that the expected .llvmbc sections for use by
+# linker-based LTO are available in object files when compiling with
+# -Z cross-lang-lto
+
+LLVMBC_SECTION_NAME=\\.llvmbc
+
+ifeq ($(UNAME),Darwin)
+ LLVMBC_SECTION_NAME=__bitcode
+endif
+
+
+OBJDUMP=llvm-objdump
+SECTION_HEADERS=$(OBJDUMP) -section-headers
+
+BUILD_LIB=$(RUSTC) lib.rs -Copt-level=2 -Z cross-lang-lto -Ccodegen-units=1
+
+BUILD_EXE=$(RUSTC) main.rs -Copt-level=2 -Z cross-lang-lto -Ccodegen-units=1 --emit=obj
+
+all: staticlib staticlib-fat-lto staticlib-thin-lto rlib exe cdylib rdylib
+
+staticlib: lib.rs
+ $(BUILD_LIB) --crate-type=staticlib -o $(TMPDIR)/liblib.a
+ [ "$$($(SECTION_HEADERS) $(TMPDIR)/liblib.a | grep -c $(LLVMBC_SECTION_NAME))" -ne "0" ]
+
+staticlib-fat-lto: lib.rs
+ $(BUILD_LIB) --crate-type=staticlib -o $(TMPDIR)/liblib-fat-lto.a -Clto=fat
+ [ "$$($(SECTION_HEADERS) $(TMPDIR)/liblib-fat-lto.a | grep -c $(LLVMBC_SECTION_NAME))" -ne "0" ]
+
+staticlib-thin-lto: lib.rs
+ $(BUILD_LIB) --crate-type=staticlib -o $(TMPDIR)/liblib-thin-lto.a -Clto=thin
+ [ "$$($(SECTION_HEADERS) $(TMPDIR)/liblib-thin-lto.a | grep -c $(LLVMBC_SECTION_NAME))" -ne "0" ]
+
+rlib: lib.rs
+ $(BUILD_LIB) --crate-type=rlib -o $(TMPDIR)/liblib.rlib
+ [ "$$($(SECTION_HEADERS) $(TMPDIR)/liblib.rlib | grep -c $(LLVMBC_SECTION_NAME))" -ne "0" ]
+
+cdylib: lib.rs
+ $(BUILD_LIB) --crate-type=cdylib --emit=obj -o $(TMPDIR)/cdylib.o
+ [ "$$($(SECTION_HEADERS) $(TMPDIR)/cdylib.o | grep -c $(LLVMBC_SECTION_NAME))" -ne "0" ]
+
+rdylib: lib.rs
+ $(BUILD_LIB) --crate-type=dylib --emit=obj -o $(TMPDIR)/rdylib.o
+ [ "$$($(SECTION_HEADERS) $(TMPDIR)/rdylib.o | grep -c $(LLVMBC_SECTION_NAME))" -ne "0" ]
+
+exe: lib.rs
+ $(BUILD_EXE) -o $(TMPDIR)/exe.o
+ [ "$$($(SECTION_HEADERS) $(TMPDIR)/exe.o | grep -c $(LLVMBC_SECTION_NAME))" -ne "0" ]
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[no_mangle]
+pub extern "C" fn foo() {
+ println!("abc");
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ println!("Hello World");
+}
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Check that the code for issue #43355 can run without an ICE, please remove
-// this test when it becomes an hard error.
-
-pub trait Trait1<X> {
- type Output;
-}
-pub trait Trait2<X> {}
-
-impl<X, T> Trait1<X> for T where T: Trait2<X> {
- type Output = ();
-}
-impl<X> Trait1<Box<X>> for A {
- type Output = i32;
-}
-
-pub struct A;
-
-fn f<X, T: Trait1<Box<X>>>() {
- println!("k: {}", ::std::mem::size_of::<<T as Trait1<Box<X>>>::Output>());
-}
-
-pub fn g<X, T: Trait2<Box<X>>>() {
- f::<X, T>();
-}
-
-fn main() {}
// except according to those terms.
// aux-build:xcrate.rs
+// compile-flags: --edition=2018 -Zunstable-options
#![feature(extern_absolute_paths)]
//
// Regression test for #47075.
-// compile-flags: --test
+// compile-flags: --test --edition=2018 -Zunstable-options
#![feature(extern_absolute_paths)]
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Cmetadata=aux
+
+pub mod tree {
+ pub use tree;
+}
+
+pub mod tree2 {
+ pub mod prelude {
+ pub use tree2;
+ }
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:mod-stackoverflow.rs
+// ignore-cross-compile
+
+extern crate mod_stackoverflow;
+pub use mod_stackoverflow::tree;
+pub use mod_stackoverflow::tree2;
#![feature(universal_impl_trait)]
#![crate_name = "foo"]
+use std::io::Read;
+
// @has foo/fn.foo.html
// @has - //pre 'foo('
// @matches - '_x: impl <a class="trait" href="[^"]+/trait\.Clone\.html"'
// @matches - '_baz:.+struct\.S\.html.+impl .+trait\.Clone\.html'
pub fn baz(_baz: S<impl Clone>) {
}
+
+ // @has - 'qux</a>('
+ // @matches - 'trait\.Read\.html'
+ pub fn qux(_qux: impl IntoIterator<Item = S<impl Read>>) {
+ }
}
// @has - 'method</a>('
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(unnecessary_extern_crate)]
+#![feature(alloc, test, libc)]
+
+extern crate alloc;
+//~^ ERROR `extern crate` is unnecessary in the new edition
+//~| HELP remove
+extern crate alloc as x;
+//~^ ERROR `extern crate` is unnecessary in the new edition
+//~| HELP use `use`
+
+#[macro_use]
+extern crate test;
+pub extern crate test as y;
+//~^ ERROR `extern crate` is unnecessary in the new edition
+//~| HELP use `pub use`
+pub extern crate libc;
+//~^ ERROR `extern crate` is unnecessary in the new edition
+//~| HELP use `pub use`
+
+
+mod foo {
+ extern crate alloc;
+ //~^ ERROR `extern crate` is unnecessary in the new edition
+ //~| HELP use `use`
+ extern crate alloc as x;
+ //~^ ERROR `extern crate` is unnecessary in the new edition
+ //~| HELP use `use`
+ pub extern crate test;
+ //~^ ERROR `extern crate` is unnecessary in the new edition
+ //~| HELP use `pub use`
+ pub extern crate test as y;
+ //~^ ERROR `extern crate` is unnecessary in the new edition
+ //~| HELP use `pub use`
+ mod bar {
+ extern crate alloc;
+ //~^ ERROR `extern crate` is unnecessary in the new edition
+ //~| HELP use `use`
+ extern crate alloc as x;
+ //~^ ERROR `extern crate` is unnecessary in the new edition
+ //~| HELP use `use`
+ }
+}
+
+
+fn main() {}
--- /dev/null
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:14:1
+ |
+LL | extern crate alloc;
+ | ^^^^^^^^^^^^^^^^^^^ help: remove it
+ |
+note: lint level defined here
+ --> $DIR/unnecessary-extern-crate.rs:11:9
+ |
+LL | #![deny(unnecessary_extern_crate)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:17:1
+ |
+LL | extern crate alloc as x;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use `use`: `use alloc as x`
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:23:1
+ |
+LL | pub extern crate test as y;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `pub use`: `pub use test as y`
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:26:1
+ |
+LL | pub extern crate libc;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: use `pub use`: `pub use libc`
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:32:5
+ |
+LL | extern crate alloc;
+ | ^^^^^^^^^^^^^^^^^^^ help: use `use`: `use alloc`
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:35:5
+ |
+LL | extern crate alloc as x;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use `use`: `use alloc as x`
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:38:5
+ |
+LL | pub extern crate test;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: use `pub use`: `pub use test`
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:41:5
+ |
+LL | pub extern crate test as y;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `pub use`: `pub use test as y`
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:45:9
+ |
+LL | extern crate alloc;
+ | ^^^^^^^^^^^^^^^^^^^ help: use `use`: `use alloc`
+
+error: `extern crate` is unnecessary in the new edition
+ --> $DIR/unnecessary-extern-crate.rs:48:9
+ |
+LL | extern crate alloc as x;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use `use`: `use alloc as x`
+
+error: aborting due to 10 previous errors
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+#![feature(extern_types)]
+
+extern {
+ type Opaque;
+}
+
+const FOO: *const u8 = &42 as *const _ as *const Opaque as *const u8;
+
+fn main() {
+ let _foo = FOO;
+}
-warning: unused return value of `need_to_use_this_value` which must be used: it's important
+warning: unused return value of `need_to_use_this_value` which must be used
--> $DIR/fn_must_use.rs:60:5
|
LL | need_to_use_this_value(); //~ WARN unused return value
|
LL | #![warn(unused_must_use)]
| ^^^^^^^^^^^^^^^
+ = note: it's important
warning: unused return value of `MyStruct::need_to_use_this_method_value` which must be used
--> $DIR/fn_must_use.rs:65:5
LL | m.need_to_use_this_method_value(); //~ WARN unused return value
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-warning: unused return value of `EvenNature::is_even` which must be used: no side effects
+warning: unused return value of `EvenNature::is_even` which must be used
--> $DIR/fn_must_use.rs:66:5
|
LL | m.is_even(); // trait method!
| ^^^^^^^^^^^^
+ |
+ = note: no side effects
warning: unused return value of `std::cmp::PartialEq::eq` which must be used
--> $DIR/fn_must_use.rs:72:5
--> $DIR/no-debug.rs:20:27
|
LL | println!("{:?} {:?}", Foo, Bar);
- | ^^^ `Foo` cannot be formatted using `:?`; add `#[derive(Debug)]` or manually implement `std::fmt::Debug`
+ | ^^^ `Foo` cannot be formatted using `{:?}`
|
= help: the trait `std::fmt::Debug` is not implemented for `Foo`
+ = note: add `#[derive(Debug)]` or manually implement `std::fmt::Debug`
= note: required by `std::fmt::Debug::fmt`
error[E0277]: `no_debug::Bar` doesn't implement `std::fmt::Debug`
--> $DIR/no-debug.rs:20:32
|
LL | println!("{:?} {:?}", Foo, Bar);
- | ^^^ `no_debug::Bar` cannot be formatted using `:?` because it doesn't implement `std::fmt::Debug`
+ | ^^^ `no_debug::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug`
|
= help: the trait `std::fmt::Debug` is not implemented for `no_debug::Bar`
= note: required by `std::fmt::Debug::fmt`
--> $DIR/no-debug.rs:21:23
|
LL | println!("{} {}", Foo, Bar);
- | ^^^ `Foo` cannot be formatted with the default formatter; try using `:?` instead if you are using a format string
+ | ^^^ `Foo` cannot be formatted with the default formatter
|
= help: the trait `std::fmt::Display` is not implemented for `Foo`
+ = note: in format strings you may be able to use `{:?}` (or {:#?} for pretty-print) instead
= note: required by `std::fmt::Display::fmt`
error[E0277]: `no_debug::Bar` doesn't implement `std::fmt::Display`
--> $DIR/no-debug.rs:21:28
|
LL | println!("{} {}", Foo, Bar);
- | ^^^ `no_debug::Bar` cannot be formatted with the default formatter; try using `:?` instead if you are using a format string
+ | ^^^ `no_debug::Bar` cannot be formatted with the default formatter
|
= help: the trait `std::fmt::Display` is not implemented for `no_debug::Bar`
+ = note: in format strings you may be able to use `{:?}` (or {:#?} for pretty-print) instead
= note: required by `std::fmt::Display::fmt`
error: aborting due to 4 previous errors
"arm-unknown-linux-musleabi",
"arm-unknown-linux-musleabihf",
"armv5te-unknown-linux-gnueabi",
+ "armv5te-unknown-linux-musleabi",
"armv7-apple-ios",
"armv7-linux-androideabi",
"armv7-unknown-cloudabi-eabihf",
-Subproject commit af3f1cd29bc872b932a13083e531255aab233a7e
+Subproject commit 9e53ac6e6525da914cb05a85e5e8eff7b5dca81f
-Subproject commit 1742229ebb7843a65c05ee495d8de5366fcc5567
+Subproject commit e456241f18227c7eb8d78a45daa66c756a9b65e7
libc = "0.2"
[target.'cfg(windows)'.dependencies]
+lazy_static = "1.0"
miow = "0.3"
winapi = { version = "0.3", features = ["winerror"] }
if testfile.is_dir() {
return;
}
+
+ let comment = if testfile.to_string_lossy().ends_with(".rs") {
+ "//"
+ } else {
+ "#"
+ };
+
+ let comment_with_brace = comment.to_string() + "[";
+
let rdr = BufReader::new(File::open(testfile).unwrap());
for ln in rdr.lines() {
// Assume that any directives will be found before the first
let ln = ln.trim();
if ln.starts_with("fn") || ln.starts_with("mod") {
return;
- } else if ln.starts_with("//[") {
+ } else if ln.starts_with(&comment_with_brace) {
// A comment like `//[foo]` is specific to revision `foo`
if let Some(close_brace) = ln.find(']') {
- let lncfg = &ln[3..close_brace];
+ let open_brace = ln.find('[').unwrap();
+ let lncfg = &ln[open_brace + 1 .. close_brace];
let matches = match cfg {
Some(s) => s == &lncfg[..],
None => false,
it(ln[(close_brace + 1) ..].trim_left());
}
} else {
- panic!("malformed condition directive: expected `//[foo]`, found `{}`",
- ln)
+ panic!("malformed condition directive: expected `{}foo]`, found `{}`",
+ comment_with_brace, ln)
}
- } else if ln.starts_with("//") {
- it(ln[2..].trim_left());
+ } else if ln.starts_with(comment) {
+ it(ln[comment.len() ..].trim_left());
}
}
return;
extern crate log;
extern crate regex;
#[macro_use]
+#[cfg(windows)]
+extern crate lazy_static;
+#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate test;
}
pub fn make_test(config: &Config, testpaths: &TestPaths) -> test::TestDescAndFn {
- let early_props = EarlyProps::from_file(config, &testpaths.file);
+
+ let early_props = if config.mode == Mode::RunMake {
+ EarlyProps::from_file(config, &testpaths.file.join("Makefile"))
+ } else {
+ EarlyProps::from_file(config, &testpaths.file)
+ };
// The `should-fail` annotation doesn't apply to pretty tests,
// since we run the pretty printer across all tests by default.
use extract_gdb_version;
+#[cfg(windows)]
+fn disable_error_reporting<F: FnOnce() -> R, R>(f: F) -> R {
+ use std::sync::Mutex;
+ const SEM_NOGPFAULTERRORBOX: u32 = 0x0002;
+ extern "system" {
+ fn SetErrorMode(mode: u32) -> u32;
+ }
+
+ lazy_static! {
+ static ref LOCK: Mutex<()> = {
+ Mutex::new(())
+ };
+ }
+ // Error mode is a global variable, so lock it so only one thread will change it
+ let _lock = LOCK.lock().unwrap();
+
+ // Tell Windows to not show any UI on errors (such as terminating abnormally).
+ // This is important for running tests, since some of them use abnormal
+ // termination by design. This mode is inherited by all child processes.
+ unsafe {
+ let old_mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
+ SetErrorMode(old_mode | SEM_NOGPFAULTERRORBOX);
+ let r = f();
+ SetErrorMode(old_mode);
+ r
+ }
+}
+
+#[cfg(not(windows))]
+fn disable_error_reporting<F: FnOnce() -> R, R>(f: F) -> R {
+ f()
+}
+
/// The name of the environment variable that holds dynamic library locations.
pub fn dylib_env_var() -> &'static str {
if cfg!(windows) {
let newpath = env::join_paths(&path).unwrap();
command.env(dylib_env_var(), newpath);
- let mut child = command
- .spawn()
+ let mut child = disable_error_reporting(|| command.spawn())
.expect(&format!("failed to exec `{:?}`", &command));
if let Some(input) = input {
child
-Subproject commit f48fed70d4447445b586a35c4ae88683542ffc72
+Subproject commit e0e1bd7ff778e5913b566c9e03224faecc0eb486
.format(tool, os, old, new)
elif new < old:
changed = True
- message += '💔 {} on {}: {} → {} (cc {}).\n' \
+ message += '💔 {} on {}: {} → {} (cc {}, @rust-lang/infra).\n' \
.format(tool, os, old, new, MAINTAINERS.get(tool))
if changed:
-Subproject commit d2f44357fef6d61f316abc403e0a5d917f2771c6
+Subproject commit d2ade31a52a417257742de72c5936a8a342a34b5
-Subproject commit b6cd17f28ae314f2484ff05d3ce57652d51c5e85
+Subproject commit 0f8029f251b569a010cb5cfc5a8bff8bf3c949ac
Crate("flate2"),
Crate("fuchsia-zircon"),
Crate("fuchsia-zircon-sys"),
+ Crate("getopts"),
Crate("humantime"),
Crate("jobserver"),
Crate("kernel32-sys"),
pub mod deps;
pub mod ui_tests;
pub mod unstable_book;
+pub mod libcoretest;
fn filter_dirs(path: &Path) -> bool {
let skip = [
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Tidy check to ensure `#[test]` is not used directly inside `libcore`.
+//!
+//! `#![no_core]` libraries cannot be tested directly due to duplicating lang
+//! item. All tests must be written externally in `libcore/tests`.
+
+use std::path::Path;
+use std::fs::read_to_string;
+
+pub fn check(path: &Path, bad: &mut bool) {
+ let libcore_path = path.join("libcore");
+ super::walk(
+ &libcore_path,
+ &mut |subpath| t!(subpath.strip_prefix(&libcore_path)).starts_with("tests"),
+ &mut |subpath| {
+ if t!(read_to_string(subpath)).contains("#[test]") {
+ tidy_error!(
+ bad,
+ "{} contains #[test]; libcore tests must be placed inside `src/libcore/tests/`",
+ subpath.display()
+ );
+ }
+ },
+ );
+}
features::check(&path, &mut bad, quiet);
pal::check(&path, &mut bad);
unstable_book::check(&path, &mut bad);
+ libcoretest::check(&path, &mut bad);
if !args.iter().any(|s| *s == "--no-vendor") {
deps::check(&path, &mut bad);
}