From: Mark Simulacrum
Date: Fri, 19 May 2017 20:16:14 +0000 (-0600)
Subject: Rollup merge of #41971 - japaric:pre-link-args, r=alexcrichton
X-Git-Url: https://git.lizzy.rs/?a=commitdiff_plain;h=8292136fb0fd77aab00f28df7f6b1ae06c5167e9;hp=94d2c43300148d5c0ea8c93cedb94151953dcf83;p=rust.git

Rollup merge of #41971 - japaric:pre-link-args, r=alexcrichton

add -Z pre-link-arg{,s} to rustc

This PR adds two unstable flags to `rustc`: `-Z pre-link-arg` and `-Z pre-link-args`. These are the counterparts of the existing `-C link-arg{,s}` flags and can be used to pass extra arguments at the *beginning* of the linker invocation, before the Rust object files are passed.

I have [started] a discussion on the rust-embedded RFCs repo about settling on a convention for passing extra arguments to the linker, and there are two options under discussion: `.cargo/config`'s `target.$T.rustflags` and custom target specification files (`{pre,,post}-link-args` fields). However, to compare these two options on an equal footing, this `-Z pre-link-arg` feature is required.

[started]: https://github.com/rust-embedded/rfcs/pull/24

Therefore I'm requesting that this `-Z pre-link-arg` flag land as an experimental feature to evaluate these two options.

cc @brson

r? @alexcrichton

--- diff --git a/.gitmodules b/.gitmodules index 7cd896b763f..1ef3c086a1c 100644 --- a/.gitmodules +++ b/.gitmodules @@ -13,7 +13,7 @@ path = src/jemalloc url = https://github.com/rust-lang/jemalloc.git [submodule "src/rust-installer"] - path = src/rust-installer + path = src/tools/rust-installer url = https://github.com/rust-lang/rust-installer.git [submodule "src/liblibc"] path = src/liblibc @@ -23,7 +23,7 @@ url = https://github.com/rust-lang-nursery/nomicon.git [submodule "src/tools/cargo"] path = src/tools/cargo - url = https://github.com/rust-lang/cargo + url = https://github.com/rust-lang/cargo.git [submodule "reference"] path = src/doc/reference url = https://github.com/rust-lang-nursery/reference.git @@ -32,4 +32,4 @@ url = https://github.com/rust-lang/book.git [submodule "src/tools/rls"] path = src/tools/rls - url = https://github.com/rust-lang-nursery/rls + url = https://github.com/rust-lang-nursery/rls.git diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 0314a5dfd8d..8f121f8d6ed 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -177,7 +177,7 @@ python x.py test src/test/rustdoc python x.py build src/libcore --stage 0 ``` -You can explore the build system throught the various `--help` pages for each +You can explore the build system through the various `--help` pages for each subcommand. 
For example to learn more about a command you can run: ``` diff --git a/appveyor.yml b/appveyor.yml index 42c6256a95a..96de1d90f25 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -141,9 +141,9 @@ install: - set PATH="C:\Program Files (x86)\Inno Setup 5";%PATH% # Help debug some handle issues on AppVeyor - - ps: Invoke-WebRequest -Uri https://download.sysinternals.com/files/Handle.zip -OutFile handle.zip + - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-15-Handle.zip - mkdir handle - - ps: Expand-Archive handle.zip -dest handle + - 7z x -ohandle 2017-05-15-Handle.zip - set PATH=%PATH%;%CD%\handle - handle.exe -accepteula -help diff --git a/configure b/configure index db41f0dfb94..af59d5b0bb8 100755 --- a/configure +++ b/configure @@ -519,6 +519,7 @@ valopt_nosave host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples" valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples" valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH" valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH" +valopt_nosave bindir "${CFG_PREFIX}/bin" "install binaries" # On Windows this determines root of the subtree for target libraries. # Host runtime libs always go to 'bin'. @@ -710,6 +711,7 @@ envopt LDFLAGS CFG_PREFIX=${CFG_PREFIX%/} CFG_MANDIR=${CFG_MANDIR%/} CFG_DOCDIR=${CFG_DOCDIR%/} +CFG_BINDIR=${CFG_BINDIR%/} CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')" CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')" @@ -750,6 +752,7 @@ putvar CFG_X86_64_LINUX_ANDROID_NDK putvar CFG_NACL_CROSS_PATH putvar CFG_MANDIR putvar CFG_DOCDIR +putvar CFG_BINDIR putvar CFG_USING_LIBCPP msg diff --git a/src/Cargo.lock b/src/Cargo.lock index 3dd8231a49f..804fd580773 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -6,6 +6,23 @@ dependencies = [ "libc 0.0.0", ] +[[package]] +name = "advapi32-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "aho-corasick" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "aho-corasick" version = "0.6.3" @@ -27,7 +44,7 @@ version = "0.0.0" dependencies = [ "build_helper 0.1.0", "core 0.0.0", - "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.0.0", ] @@ -49,20 +66,38 @@ name = "arena" version = "0.0.0" [[package]] -name = "atty" -version = "0.2.2" +name = "backtrace" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ + "backtrace-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = 
"backtrace-sys" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "bitflags" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "bitflags" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "bitflags" version = "0.8.2" @@ -73,21 +108,26 @@ name = "bootstrap" version = "0.0.0" dependencies = [ "build_helper 0.1.0", - "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)", "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "bufstream" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "build-manifest" version = "0.1.0" dependencies = [ - "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -98,31 +138,118 @@ dependencies = [ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "cargo" +version = "0.20.0" +source = "git+https://github.com/rust-lang/cargo#2b32084293d8da63b48de56363a0f2e986ec3367" +replace = "cargo 0.20.0" + +[[package]] +name = "cargo" +version = "0.20.0" +dependencies = [ + "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "cargotest 0.1.0", + "chrono 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "crates-io 0.9.0", + "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", + "fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 
(registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)", + "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "cargotest" +version = "0.1.0" +dependencies = [ + "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "cargo 0.20.0", + "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "cargotest2" version = "0.1.0" +[[package]] +name = "cfg-if" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "chrono" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "clap" -version = "2.22.1" +version = "2.19.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 
0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", - "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "strsim 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-segmentation 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cmake" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -140,7 +267,7 @@ version = "0.0.0" dependencies = [ "build_helper 0.1.0", "core 0.0.0", - "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -151,23 +278,115 @@ dependencies = [ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "core" version = "0.0.0" +[[package]] +name = "crates-io" +version = "0.9.0" +dependencies = [ + "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "curl" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "curl-sys" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 
(registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "dbghelp-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "derive-new" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "diff" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "docopt" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "dtoa" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "either" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "enum_primitive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "env_logger" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "env_logger" version = "0.4.2" @@ -177,6 +396,14 @@ dependencies = [ "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "error-chain" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "backtrace 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "error_index_generator" version = "0.0.0" @@ -186,7 +413,7 @@ name = "filetime" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -194,17 +421,50 @@ name = "flate" version = "0.0.0" dependencies = [ "build_helper 0.1.0", - "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "flate2" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "fmt_macros" version = "0.0.0" +[[package]] +name = "foreign-types" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "fs2" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "gcc" -version = "0.3.45" +version = "0.3.46" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "gdi32-sys" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "getopts" @@ -215,22 +475,93 @@ name = "getopts" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "git2" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "git2-curl" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "glob" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "graphviz" version = "0.0.0" +[[package]] +name = "hamcrest" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "handlebars" -version = "0.25.2" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "idna" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "matches 0.1.4 
(registry+https://github.com/rust-lang/crates.io-index)", + "unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "installer" +version = "0.0.0" +dependencies = [ + "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)", + "error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "itertools" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -247,9 +578,22 @@ dependencies = [ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "languageserver-types" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "lazy_static" -version = "0.2.5" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -261,8 +605,45 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.21" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libgit2-sys" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "libssh2-sys" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] 
+name = "libz-sys" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "linkchecker" @@ -273,21 +654,44 @@ name = "log" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "lzma-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "matches" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "mdbook" version = "0.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)", + "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "handlebars 0.25.2 (registry+https://github.com/rust-lang/crates.io-index)", + "handlebars 0.25.3 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "memchr" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -295,7 +699,105 @@ name = "memchr" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "miniz-sys" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "miow" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "multimap" +version = "0.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "net2" +version = "0.2.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "num-complex 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", + "num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)", + "num-rational 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-bigint" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-complex" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-integer" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-iter" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-rational" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -308,7 +810,7 @@ name = "num_cpus" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -316,6 +818,43 @@ name = "open" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "openssl" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.8.2 
(registry+https://github.com/rust-lang/crates.io-index)", + "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "openssl-probe" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "openssl-sys" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", + "gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "owning_ref" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "panic_abort" version = "0.0.0" @@ -339,6 +878,11 @@ name = "pest" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "pkg-config" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "proc_macro" version = "0.0.0" @@ -355,6 +899,15 @@ dependencies = [ "syntax_pos 0.0.0", ] +[[package]] +name = "psapi-sys" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "pulldown-cmark" version = "0.0.8" @@ -374,8 +927,32 @@ dependencies = [ [[package]] name = "quick-error" -version = "1.1.0" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "quote" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "quote" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "racer" +version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "typed-arena 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "rand" @@ -384,6 +961,31 @@ dependencies = [ "core 0.0.0", ] +[[package]] +name = "rand" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "redox_syscall" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "regex" +version = "0.1.80" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "regex" version = "0.2.1" @@ -396,6 +998,11 @@ dependencies = [ "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "regex-syntax" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "regex-syntax" version = "0.4.0" @@ -409,13 +1016,48 @@ version = "0.1.0" name = "remote-test-server" version = "0.1.0" +[[package]] +name = "rls" +version = "0.1.0" +dependencies = [ + "cargo 0.20.0 (git+https://github.com/rust-lang/cargo)", + "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-analysis 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-vfs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)", + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rls-analysis" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rls-data" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -423,14 +1065,25 @@ name = "rls-span" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 
1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rls-vfs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rustbook" version = "0.1.0" dependencies = [ - "clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)", + "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)", "mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -442,17 +1095,22 @@ dependencies = [ "fmt_macros 0.0.0", "graphviz 0.0.0", "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_back 0.0.0", "rustc_bitflags 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", - "rustc_llvm 0.0.0", "serialize 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", ] +[[package]] +name = "rustc-demangle" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "rustc-main" version = "0.0.0" @@ -464,7 +1122,7 @@ dependencies = [ [[package]] name = "rustc-serialize" -version = "0.3.23" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -473,7 +1131,7 @@ version = "0.0.0" dependencies = [ "alloc_system 0.0.0", "build_helper 0.1.0", - "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)", "core 0.0.0", ] @@ -552,7 +1210,6 @@ dependencies = [ "rustc_errors 0.0.0", "rustc_incremental 0.0.0", "rustc_lint 0.0.0", - "rustc_llvm 0.0.0", "rustc_metadata 0.0.0", "rustc_mir 0.0.0", "rustc_passes 0.0.0", @@ -606,7 +1263,7 @@ name = "rustc_llvm" version = "0.0.0" dependencies = [ "build_helper 0.1.0", - "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_bitflags 0.0.0", ] @@ -616,7 +1273,7 @@ version = "0.0.0" dependencies = [ "alloc_system 0.0.0", "build_helper 0.1.0", - "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)", "core 0.0.0", ] @@ -626,13 +1283,13 @@ version = "0.0.0" dependencies = [ "flate 0.0.0", "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "proc_macro 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", - "rustc_llvm 0.0.0", "serialize 0.0.0", "syntax 0.0.0", "syntax_ext 0.0.0", @@ -660,7 +1317,7 @@ version = "0.0.0" dependencies = [ "alloc_system 0.0.0", "build_helper 0.1.0", - "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)", "core 0.0.0", ] @@ -722,7 +1379,7 @@ dependencies = [ "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", - "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_typeck 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", @@ 
-734,6 +1391,7 @@ version = "0.0.0" dependencies = [ "flate 0.0.0", "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_bitflags 0.0.0", @@ -754,7 +1412,7 @@ version = "0.0.0" dependencies = [ "alloc_system 0.0.0", "build_helper 0.1.0", - "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)", "core 0.0.0", ] @@ -782,7 +1440,7 @@ dependencies = [ "arena 0.0.0", "build_helper 0.1.0", "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", @@ -800,26 +1458,128 @@ dependencies = [ "syntax_pos 0.0.0", ] +[[package]] +name = "rustfmt" +version = "0.8.4" +source = "git+https://github.com/rust-lang-nursery/rustfmt#bf9b3fa1d7cab2f7bd541539d397a92b4954ec96" +dependencies = [ + "diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "multimap 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "strings 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "same-file" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "serde" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "serde" 
-version = "0.9.11" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "serde_derive" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive_internals 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_derive_internals" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", + "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_ignored" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_json" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "serde_json" -version = "0.9.9" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serialize" version = "0.0.0" +[[package]] +name = "shell-escape" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "stable_deref_trait" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "std" version = "0.0.0" @@ -831,7 +1591,7 @@ dependencies = [ "collections 0.0.0", "compiler_builtins 0.0.0", "core 0.0.0", - "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.0.0", "panic_abort 0.0.0", "panic_unwind 0.0.0", @@ -851,11 +1611,51 @@ dependencies = [ "core 0.0.0", ] +[[package]] +name = "strings" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "strsim" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "strsim" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "syn" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syn" +version = "0.11.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.3.15 
(registry+https://github.com/rust-lang/crates.io-index)", + "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "synom" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "syntax" version = "0.0.0" @@ -887,17 +1687,113 @@ dependencies = [ "serialize 0.0.0", ] +[[package]] +name = "syntex_errors" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syntex_errors" +version = "0.58.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syntex_pos" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syntex_pos" +version = "0.58.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syntex_syntax" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syntex_syntax" +version = "0.58.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tar" +version = 
"0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tempdir" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "term" version = "0.0.0" +[[package]] +name = "term" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "term_size" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -911,11 +1807,28 @@ dependencies = [ [[package]] name = "thread-id" -version = "3.0.0" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "thread-id" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "thread_local" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -923,7 +1836,7 @@ name = "thread_local" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -931,25 +1844,75 @@ dependencies = [ name = "tidy" version = "0.1.0" +[[package]] +name = "time" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "toml" version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "toml" -version = "0.3.1" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "toml" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "toml" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "typed-arena" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unicode-bidi" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "unicode-normalization" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "unicode-segmentation" -version = "1.1.0" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unicode-segmentation" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -957,6 +1920,16 @@ name = "unicode-width" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "unicode-xid" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unicode-xid" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "unreachable" version = "0.1.1" @@ -965,6 +1938,38 @@ dependencies = [ "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "url" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "idna 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "url_serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "user32-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "utf8-ranges" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "utf8-ranges" version = "1.0.0" @@ -972,7 +1977,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "vec_map" -version = "0.7.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -980,6 +1985,16 @@ name = "void" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "walkdir" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + 
"same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "winapi" version = "0.2.8" @@ -990,53 +2005,189 @@ name = "winapi-build" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "ws2_32-sys" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "xattr" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "xz2" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "yaml-rust" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + [metadata] +"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a" +"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66" "checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699" "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6" -"checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159" +"checksum backtrace 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f551bc2ddd53aea015d453ef0b635af89444afa5ed2405dd0b2062ad5d600d80" +"checksum backtrace-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d192fd129132fbc97497c1f2ec2c2c5174e376b95f535199ef4fe0a293d33842" "checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23" +"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" "checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4" -"checksum clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e17a4a72ffea176f77d6e2db609c6c919ef221f23862c9915e687fb54d833485" -"checksum cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "d18d68987ed4c516dcc3e7913659bfa4076f5182eea4a7e0038bb060953e76ac" +"checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32" +"checksum cargo 0.20.0 (git+https://github.com/rust-lang/cargo)" = "" +"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c" +"checksum chrono 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d9123be86fd2a8f627836c235ecdf331fdd067ecf7ac05aa1a68fbcf2429f056" +"checksum clap 
2.19.3 (registry+https://github.com/rust-lang/crates.io-index)" = "95b78f3fe0fc94c13c731714363260e04b557a637166f33a4570d3189d642374" +"checksum cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)" = "92278eb79412c8f75cfc89e707a1bb3a6490b68f7f2e78d15c774f30fe701122" +"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97" +"checksum curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c90e1240ef340dd4027ade439e5c7c2064dd9dc652682117bd50d1486a3add7b" +"checksum curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "23e7e544dc5e1ba42c4a4a678bd47985e84b9c3f4d3404c29700622a029db9c3" +"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850" +"checksum derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "41be6ca3b99e0c0483fb2389685448f650459c3ecbe4e18d7705d8010ec4ab8e" "checksum diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0a515461b6c8c08419850ced27bc29e86166dcdcde8fbe76f8b1f0589bb49472" +"checksum docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab32ea6e284d87987066f21a9e809a73c14720571ef34516f0890b3d355ccfd8" "checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90" +"checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a" +"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180" +"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f" "checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83" +"checksum error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9435d864e017c3c6afeac1654189b06cdb491cf2ff73dbf0d73b0f292f42ff8" "checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922" -"checksum gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)" = "40899336fb50db0c78710f53e87afc54d8c7266fb76262fecc78ca1a7f09deae" +"checksum flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)" = "36df0166e856739905cd3d7e0b210fe818592211a008862599845e012d8d304c" +"checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d" +"checksum fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34edaee07555859dc13ca387e6ae05686bb4d0364c95d649b6dab959511f4baf" +"checksum gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)" = "181e3cebba1d663bd92eb90e2da787e10597e027eb00de8d742b260a7850948f" +"checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518" "checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685" -"checksum handlebars 0.25.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "663e1728d8037fb0d4e13bcd1b1909fb5d913690a9929eb385922df157c2ff8f" +"checksum git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "9de9df4358c17e448a778d90cd0272e1dab5eae30244502333fa2001c4e24357" +"checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e" +"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" +"checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4" +"checksum handlebars 0.25.3 (registry+https://github.com/rust-lang/crates.io-index)" = "15bdf598fc3c2de40c6b340213028301c0d225eea55a2294e6cc148074e557a1" +"checksum idna 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6ac85ec3f80c8e4e99d9325521337e14ec7555c458a14e377d189659a427f375" +"checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc" "checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4732c563b9a21a406565c4747daa7b46742f082911ae4753f390dc9ec7ee1a97" -"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135" +"checksum languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97c2985bfcbbcb0189cfa25e1c10c1ac7111df2b6214b652c690127aefdf4e5b" +"checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf" +"checksum libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)" = "babb8281da88cba992fa1f4ddec7d63ed96280a1a53ec9b919fd37b53d71e502" +"checksum libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "dd89dd7196d5fa35b659c3eaf3c1b14b9bd961bfd1a07dfca49adeb8a6aa3763" +"checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75" +"checksum libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e5ee912a45d686d393d5ac87fac15ba0ba18daae14e8e7543c63ebf7fb7e970c" "checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad" +"checksum lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "fedff6a5cbb24494ec6ee4784e9ac5c187161fede04c7767d49bf87544013afa" +"checksum matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efd7622e3022e1a6eaa602c4cea8912254e5582c9c692e9167714182244801b1" "checksum mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "f1e2e9d848514dcfad4195788d0d42ae5153a477c191d75d5b84fab10f222fbd" +"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" "checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" 
= "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4" +"checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726" +"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" +"checksum multimap 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9223f4774d08e06185e44e555b9a7561243d387bac49c78a6205c42d6975fbf2" +"checksum net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)" = "bc01404e7568680f1259aa5729539f221cb1e6d047a0d9053cab4be8a73b5d67" +"checksum num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "98b15ba84e910ea7a1973bccd3df7b31ae282bf9d8bd2897779950c9b8303d40" +"checksum num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ba6d838b16e56da1b6c383d065ff1ec3c7d7797f65a3e8f6ba7092fd87820bac" +"checksum num-complex 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "148eb324ca772230853418731ffdf13531738b50f89b30692a01fcdcb0a64677" +"checksum num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "ef1a4bf6f9174aa5783a9b4cc892cacd11aebad6c69ad027a0b65c6ca5f8aa37" +"checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e" +"checksum num-rational 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "c2dc5ea04020a8f18318ae485c751f8cfa1c0e69dcf465c29ddaaa64a313cc44" "checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99" "checksum num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca313f1862c7ec3e0dfe8ace9fa91b1d9cb5c84ace3d00f5ec4216238e93c167" "checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842" +"checksum openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)" = "bb5d1663b73d10c6a3eda53e2e9d0346f822394e7b858d7257718f65f61dfbe2" +"checksum openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d98df0270d404ccd3c050a41d579c52d1db15375168bb3471e04ec0f5f378daf" +"checksum openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)" = "3a5886d87d3e2a0d890bf62dc8944f5e3769a405f7e1e9ef6e517e47fd7a0897" +"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" "checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8" +"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903" +"checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478" "checksum pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9ab1e588ef8efd702c7ed9d2bd774db5e6f4d878bb5a1a9f371828fbdff6973" "checksum pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1058d7bb927ca067656537eec4e02c2b4b70eaaa129664c5b90c111e20326f41" -"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c" +"checksum quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c36987d4978eb1be2e422b1e0423a557923a5c3e7e6f31d5699e9aafaefa469" +"checksum quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5cf478fe1006dbcc72567121d23dbdae5f1632386068c5c86ff4f645628504" +"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" +"checksum racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b0d72b3afd67882adfca61d609fafb8d7aa5f9e814f12c32fcc6e171995920e8" +"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d" +"checksum redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "29dbdfd4b9df8ab31dec47c6087b7b13cbf4a776f335e4de8efba8288dda075b" +"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f" "checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01" +"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957" "checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457" +"checksum rls-analysis 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a62d88c341375c6f3f8b2e18b9b364896e7d3e7aa916907de717d0267e116506" "checksum rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fc4277ce3c57f456b11fe3145b181a844a25201bab5cbaa1978457e6e2f27d47" "checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a" -"checksum rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "684ce48436d6465300c9ea783b6b14c4361d6b8dcbb1375b486a69cc19e2dfb0" -"checksum serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)" = "a702319c807c016e51f672e5c77d6f0b46afddd744b5e437d6b8436b888b458f" -"checksum serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)" = "dbc45439552eb8fb86907a2c41c1fd0ef97458efb87ff7f878db466eb581824e" +"checksum rls-vfs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "986eada111517bcb5a7a75205b3f2b70c82e7766653cca61a23f5afce79bdb94" +"checksum rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3058a43ada2c2d0b92b3ae38007a2d0fa5e9db971be260e0171408a4ff471c95" +"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" +"checksum rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)" = "" +"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7" +"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537" +"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" +"checksum serde 0.9.15 
(registry+https://github.com/rust-lang/crates.io-index)" = "34b623917345a631dc9608d5194cc206b3fe6c3554cd1c75b937e55e285254af" +"checksum serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "38a3db3a5757f68069aba764b793823ea9fb9717c42c016f8903f8add50f508a" +"checksum serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e46ef71ee001a4279a4513e79a6ebbb59da3a4987bf77a6df2e5534cd6f21d82" +"checksum serde_derive_internals 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "021c338d22c7e30f957a6ab7e388cb6098499dda9fd4ba1661ee074ca7a180d1" +"checksum serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c10e798e4405d7dcec3658989e35ee6706f730a9ed7c1184d5ebd84317e82f46" +"checksum serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ad8bcf487be7d2e15d3d543f04312de991d631cfe1b43ea0ade69e6a8a5b16a1" +"checksum serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "48b04779552e92037212c3615370f6bd57a40ebba7f20e554ff9f55e41a69a7b" +"checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8" +"checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b" +"checksum strings 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "54f86446ab480b4f60782188f4f78886465c5793aee248cbb48b7fdc0d022420" +"checksum strsim 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "67f84c44fbb2f91db7fef94554e6b2ac05909c9c0b0bc23bb98d3a1aebfe7f7c" "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694" +"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" +"checksum syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6ae6fb0dcc9bd85f89a1a4adc0df2fd90c90c98849d61433983dd7a9df6363f7" +"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" +"checksum syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9e52bffe6202cfb67587784cf23e0ec5bf26d331eef4922a16d5c42e12aa1e9b" +"checksum syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "867cc5c2d7140ae7eaad2ae9e8bf39cb18a67ca651b7834f88d46ca98faadb9c" +"checksum syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "955ef4b16af4c468e4680d1497f873ff288f557d338180649e18f915af5e15ac" +"checksum syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047" +"checksum syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "76a302e717e348aa372ff577791c3832395650073b8d8432f8b3cb170b34afde" +"checksum syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791" +"checksum tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ab0ef9ead2fe0aa9e18475a96a207bfd5143f4124779ef7429503a8665416ce8" +"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6" +"checksum term 0.4.5 
(registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989" "checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a" -"checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a" +"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03" +"checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773" +"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5" "checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7" +"checksum time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ffd7ccbf969a892bf83f1e441126968a07a3941c24ff522a26af9f9f4585d1a3" "checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796" -"checksum toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3474f3c6eaf32eedb4f4a66a26214f020f828a6d96c37e38a35e3a379bbcfd11" -"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3" +"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4" +"checksum toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bd86ad9ebee246fdedd610e0f6d0587b754a3d81438db930a244d0480ed7878f" +"checksum toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc5dbfb20a481e64b99eb7ae280859ec76730c7191570ba5edaa962394edb0a" +"checksum typed-arena 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8e2f9dc90da4f9d66ffc9ad3ead2c7d57582a26f4a3292d2ce7011bd29965100" +"checksum unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3a078ebdd62c0e71a709c3d53d2af693fe09fe93fbff8344aebe289b78f9032" +"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff" +"checksum unicode-segmentation 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c3bc443ded17b11305ffffe6b37e2076f328a5a8cb6aa877b1b98f77699e98b5" +"checksum unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a8083c594e02b8ae1654ae26f0ade5158b119bd88ad0e8227a5d8fcd72407946" "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f" +"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb" +"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" "checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91" +"checksum url 1.4.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "f5ba8a749fb4479b043733416c244fa9d1d3af3d7c23804944651c8a448cb87e" +"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea" +"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47" +"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f" "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" -"checksum vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8cdc8b93bd0198ed872357fb2e667f7125646b1762f16d60b2c96350d361897" +"checksum vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cac5efe5cb0fa14ec2f84f83c701c562ee63f6dcc680861b21d65c682adfb05f" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +"checksum walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "bb08f9e670fab86099470b97cd2b252d6527f0b3cc1401acdb595ffc9dd288ff" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" +"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" +"checksum xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "5f04de8a1346489a2f9e9bd8526b73d135ec554227b17568456e86aa35b6f3fc" +"checksum xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e9510bdf100731599107c61f77daf46713a69a568f75458999c1f9dbf6ba25b0" +"checksum yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e66366e18dc58b46801afbf2ca7661a9f59cc8c5962c29892b6039b4f86fa992" diff --git a/src/Cargo.toml b/src/Cargo.toml index 9aca3e134d6..85a6df3573a 100644 --- a/src/Cargo.toml +++ b/src/Cargo.toml @@ -13,10 +13,7 @@ members = [ "tools/build-manifest", "tools/remote-test-client", "tools/remote-test-server", -] - -# These projects have their own Cargo.lock -exclude = [ + "tools/rust-installer", "tools/cargo", "tools/rls", ] @@ -37,3 +34,6 @@ debug-assertions = false [profile.test] debug = false debug-assertions = false + +[replace] +"https://github.com/rust-lang/cargo#0.20.0" = { path = "tools/cargo" } diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index 1088067c2de..cc560e0172e 100644 --- a/src/bootstrap/Cargo.toml +++ b/src/bootstrap/Cargo.toml @@ -30,11 +30,11 @@ test = false [dependencies] build_helper = { path = "../build_helper" } -cmake = "0.1.17" +cmake = "0.1.23" filetime = "0.1" num_cpus = "1.0" toml = "0.1" getopts = "0.2" rustc-serialize = "0.3" -gcc = "0.3.38" +gcc = "0.3.46" libc = "0.2" diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index 906c468241a..eb2cef133a3 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -38,7 +38,24 @@ use std::process::{Command, ExitStatus}; fn main() { - let args = env::args_os().skip(1).collect::>(); + let mut args = 
env::args_os().skip(1).collect::>(); + + // Append metadata suffix for internal crates. See the corresponding entry + // in bootstrap/lib.rs for details. + if let Ok(s) = env::var("RUSTC_METADATA_SUFFIX") { + for i in 1..args.len() { + // Dirty code for borrowing issues + let mut new = None; + if let Some(current_as_str) = args[i].to_str() { + if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) || + current_as_str.starts_with("-Cmetadata") { + new = Some(format!("{}-{}", current_as_str, s)); + } + } + if let Some(new) = new { args[i] = new.into(); } + } + } + // Detect whether or not we're a build script depending on whether --target // is passed (a bit janky...) let target = args.windows(2) diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index ad3cf31c1b9..e15304a7e6e 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -14,6 +14,7 @@ import contextlib import datetime import hashlib import os +import re import shutil import subprocess import sys @@ -126,13 +127,13 @@ def unpack(tarball, dst, verbose=False, match=None): shutil.move(tp, fp) shutil.rmtree(os.path.join(dst, fname)) -def run(args, verbose=False, exception=False): +def run(args, verbose=False, exception=False, cwd=None): if verbose: print("running: " + ' '.join(args)) sys.stdout.flush() # Use Popen here instead of call() as it apparently allows powershell on # Windows to not lock up waiting for input presumably. - ret = subprocess.Popen(args) + ret = subprocess.Popen(args, cwd=cwd) code = ret.wait() if code != 0: err = "failed to run: " + ' '.join(args) @@ -297,8 +298,10 @@ class RustBuild(object): def get_toml(self, key): for line in self.config_toml.splitlines(): - if line.startswith(key + ' ='): - return self.get_string(line) + match = re.match(r'^{}\s*=(.*)$'.format(key), line) + if match is not None: + value = match.group(1) + return self.get_string(value) or value.strip() return None def get_mk(self, key): @@ -329,6 +332,8 @@ class RustBuild(object): def get_string(self, line): start = line.find('"') + if start == -1: + return None end = start + 1 + line[start + 1:].find('"') return line[start + 1:end] @@ -386,12 +391,21 @@ class RustBuild(object): args.append("--frozen") self.run(args, env) - def run(self, args, env): - proc = subprocess.Popen(args, env=env) + def run(self, args, env=None, cwd=None): + proc = subprocess.Popen(args, env=env, cwd=cwd) ret = proc.wait() if ret != 0: sys.exit(ret) + def output(self, args, env=None, cwd=None): + proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env, cwd=cwd) + (out, err) = proc.communicate() + ret = proc.wait() + if ret != 0: + print(out) + sys.exit(ret) + return out + def build_triple(self): default_encoding = sys.getdefaultencoding() config = self.get_toml('build') @@ -529,6 +543,54 @@ class RustBuild(object): return "{}-{}".format(cputype, ostype) + def update_submodules(self): + if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \ + self.get_toml('submodules') == "false" or \ + self.get_mk('CFG_DISABLE_MANAGE_SUBMODULES') == "1": + return + + print('Updating submodules') + output = self.output(["git", "submodule", "status"], cwd=self.rust_root) + submodules = [] + for line in output.splitlines(): + # NOTE `git submodule status` output looks like this: + # + # -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc + # +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..) 
+ # e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6) + # + # The first character can be '-', '+' or ' ' and denotes the + # `State` of the submodule Right next to this character is the + # SHA-1 of the submodule HEAD And after that comes the path to the + # submodule + path = line[1:].split(' ')[1] + submodules.append([path, line[0]]) + + self.run(["git", "submodule", "sync"], cwd=self.rust_root) + + for submod in submodules: + path, status = submod + if path.endswith(b"llvm") and \ + (self.get_toml('llvm-config') or self.get_mk('CFG_LLVM_ROOT')): + continue + if path.endswith(b"jemalloc") and \ + (self.get_toml('jemalloc') or self.get_mk('CFG_JEMALLOC_ROOT')): + continue + submod_path = os.path.join(self.rust_root, path) + + if status == ' ': + self.run(["git", "reset", "--hard"], cwd=submod_path) + self.run(["git", "clean", "-fdx"], cwd=submod_path) + elif status == '+': + self.run(["git", "submodule", "update", path], cwd=self.rust_root) + self.run(["git", "reset", "--hard"], cwd=submod_path) + self.run(["git", "clean", "-fdx"], cwd=submod_path) + elif status == '-': + self.run(["git", "submodule", "init", path], cwd=self.rust_root) + self.run(["git", "submodule", "update", path], cwd=self.rust_root) + else: + raise ValueError('unknown submodule status: ' + status) + def bootstrap(): parser = argparse.ArgumentParser(description='Build rust') parser.add_argument('--config') @@ -597,6 +659,8 @@ def bootstrap(): else: rb._download_url = 'https://static.rust-lang.org' + rb.update_submodules() + # Fetch/build the bootstrap rb.build = rb.build_triple() rb.download_stage0() diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 9c536111811..0fb597564e3 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -99,7 +99,9 @@ pub struct Config { // Fallback musl-root for all targets pub musl_root: Option, pub prefix: Option, + pub sysconfdir: Option, pub docdir: Option, + pub bindir: Option, pub libdir: Option, pub libdir_relative: Option, pub mandir: Option, @@ -165,9 +167,11 @@ struct Build { #[derive(RustcDecodable, Default, Clone)] struct Install { prefix: Option, - mandir: Option, + sysconfdir: Option, docdir: Option, + bindir: Option, libdir: Option, + mandir: Option, } /// TOML representation of how the LLVM build is configured. @@ -315,9 +319,11 @@ pub fn parse(build: &str, file: Option) -> Config { if let Some(ref install) = toml.install { config.prefix = install.prefix.clone().map(PathBuf::from); - config.mandir = install.mandir.clone().map(PathBuf::from); + config.sysconfdir = install.sysconfdir.clone().map(PathBuf::from); config.docdir = install.docdir.clone().map(PathBuf::from); + config.bindir = install.bindir.clone().map(PathBuf::from); config.libdir = install.libdir.clone().map(PathBuf::from); + config.mandir = install.mandir.clone().map(PathBuf::from); } if let Some(ref llvm) = toml.llvm { @@ -523,9 +529,15 @@ macro_rules! 
check { "CFG_PREFIX" => { self.prefix = Some(PathBuf::from(value)); } + "CFG_SYSCONFDIR" => { + self.sysconfdir = Some(PathBuf::from(value)); + } "CFG_DOCDIR" => { self.docdir = Some(PathBuf::from(value)); } + "CFG_BINDIR" => { + self.bindir = Some(PathBuf::from(value)); + } "CFG_LIBDIR" => { self.libdir = Some(PathBuf::from(value)); } diff --git a/src/bootstrap/config.toml.example b/src/bootstrap/config.toml.example index 25da976a555..df180be4e27 100644 --- a/src/bootstrap/config.toml.example +++ b/src/bootstrap/config.toml.example @@ -160,15 +160,22 @@ # Instead of installing to /usr/local, install to this path instead. #prefix = "/usr/local" +# Where to install system configuration files +# If this is a relative path, it will get installed in `prefix` above +#sysconfdir = "/etc" + +# Where to install documentation in `prefix` above +#docdir = "share/doc/rust" + +# Where to install binaries in `prefix` above +#bindir = "bin" + # Where to install libraries in `prefix` above #libdir = "lib" # Where to install man pages in `prefix` above #mandir = "share/man" -# Where to install documentation in `prefix` above -#docdir = "share/doc/rust" - # ============================================================================= # Options for compiling Rust code itself # ============================================================================= diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 1b69f7413b5..511f2c9e80e 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -26,12 +26,6 @@ use build_helper::output; -#[cfg(not(target_os = "solaris"))] -const SH_CMD: &'static str = "sh"; -// On Solaris, sh is the historical bourne shell, not a POSIX shell, or bash. -#[cfg(target_os = "solaris")] -const SH_CMD: &'static str = "bash"; - use {Build, Compiler, Mode}; use channel; use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe}; @@ -55,6 +49,10 @@ pub fn tmpdir(build: &Build) -> PathBuf { build.out.join("tmp/dist") } +fn rust_installer(build: &Build) -> Command { + build.tool_cmd(&Compiler::new(0, &build.config.build), "rust-installer") +} + /// Builds the `rust-docs` installer component. /// /// Slurps up documentation from the `stage`'s `host`. 
@@ -74,14 +72,14 @@ pub fn docs(build: &Build, stage: u32, host: &str) { let src = build.out.join(host).join("doc"); cp_r(&src, &dst); - let mut cmd = Command::new(SH_CMD); - cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("generate") .arg("--product-name=Rust-Documentation") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=Rust-documentation-is-installed.") - .arg(format!("--image-dir={}", sanitize_sh(&image))) - .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) .arg(format!("--package-name={}-{}", name, host)) .arg("--component-name=rust-docs") .arg("--legacy-manifest-dirs=rustlib,cargo") @@ -124,14 +122,14 @@ pub fn mingw(build: &Build, host: &str) { .arg(host); build.run(&mut cmd); - let mut cmd = Command::new(SH_CMD); - cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("generate") .arg("--product-name=Rust-MinGW") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=Rust-MinGW-is-installed.") - .arg(format!("--image-dir={}", sanitize_sh(&image))) - .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) .arg(format!("--package-name={}-{}", name, host)) .arg("--component-name=rust-mingw") .arg("--legacy-manifest-dirs=rustlib,cargo"); @@ -190,15 +188,15 @@ pub fn rustc(build: &Build, stage: u32, host: &str) { } // Finally, wrap everything up in a nice tarball! 
- let mut cmd = Command::new(SH_CMD); - cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("generate") .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=Rust-is-ready-to-roll.") - .arg(format!("--image-dir={}", sanitize_sh(&image))) - .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) - .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay))) + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg("--non-installed-overlay").arg(&overlay) .arg(format!("--package-name={}-{}", name, host)) .arg("--component-name=rustc") .arg("--legacy-manifest-dirs=rustlib,cargo"); @@ -300,14 +298,14 @@ pub fn std(build: &Build, compiler: &Compiler, target: &str) { let src = build.sysroot(compiler).join("lib/rustlib"); cp_r(&src.join(target), &dst); - let mut cmd = Command::new(SH_CMD); - cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("generate") .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=std-is-standing-at-the-ready.") - .arg(format!("--image-dir={}", sanitize_sh(&image))) - .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) .arg(format!("--package-name={}-{}", name, target)) .arg(format!("--component-name=rust-std-{}", target)) .arg("--legacy-manifest-dirs=rustlib,cargo"); @@ -356,14 +354,14 @@ pub fn analysis(build: &Build, compiler: &Compiler, target: &str) { println!("image_src: {:?}, dst: {:?}", image_src, dst); cp_r(&image_src, &dst); - let mut cmd = Command::new(SH_CMD); - cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("generate") .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=save-analysis-saved.") - .arg(format!("--image-dir={}", sanitize_sh(&image))) - .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) .arg(format!("--package-name={}-{}", name, target)) .arg(format!("--component-name=rust-analysis-{}", target)) .arg("--legacy-manifest-dirs=rustlib,cargo"); @@ -471,13 +469,17 @@ pub fn rust_src(build: &Build) { write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes()); // Create plain source tarball - let tarball = rust_src_location(build); + let mut tarball = rust_src_location(build); + tarball.set_extension(""); // strip .gz + tarball.set_extension(""); // strip .tar if let Some(dir) = tarball.parent() { t!(fs::create_dir_all(dir)); } - let mut cmd = Command::new("tar"); - cmd.arg("-czf").arg(sanitize_sh(&tarball)) - .arg(&plain_name) + let mut cmd = rust_installer(build); + cmd.arg("tarball") + .arg("--input").arg(&plain_name) + .arg("--output").arg(&tarball) + .arg("--work-dir=.") .current_dir(tmpdir(build)); build.run(&mut cmd); @@ -521,14 +523,14 @@ pub fn rust_src(build: &Build) { } // Create source tarball in rust-installer format - let mut cmd = Command::new(SH_CMD); - 
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("generate") .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=Awesome-Source.") - .arg(format!("--image-dir={}", sanitize_sh(&image))) - .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) .arg(format!("--package-name={}", name)) .arg("--component-name=rust-src") .arg("--legacy-manifest-dirs=rustlib,cargo"); @@ -594,7 +596,7 @@ pub fn cargo(build: &Build, stage: u32, target: &str) { // Prepare the image directory t!(fs::create_dir_all(image.join("share/zsh/site-functions"))); - t!(fs::create_dir_all(image.join("etc/bash_completions.d"))); + t!(fs::create_dir_all(image.join("etc/bash_completion.d"))); let cargo = build.cargo_out(&compiler, Mode::Tool, target) .join(exe("cargo", target)); install(&cargo, &image.join("bin"), 0o755); @@ -604,7 +606,7 @@ pub fn cargo(build: &Build, stage: u32, target: &str) { } install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644); copy(&etc.join("cargo.bashcomp.sh"), - &image.join("etc/bash_completions.d/cargo")); + &image.join("etc/bash_completion.d/cargo")); let doc = image.join("share/doc/cargo"); install(&src.join("README.md"), &doc, 0o644); install(&src.join("LICENSE-MIT"), &doc, 0o644); @@ -622,15 +624,15 @@ pub fn cargo(build: &Build, stage: u32, target: &str) { t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); // Generate the installer tarball - let mut cmd = Command::new("sh"); - cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("generate") .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=Rust-is-ready-to-roll.") - .arg(format!("--image-dir={}", sanitize_sh(&image))) - .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) - .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay))) + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg("--non-installed-overlay").arg(&overlay) .arg(format!("--package-name={}-{}", name, target)) .arg("--component-name=cargo") .arg("--legacy-manifest-dirs=rustlib,cargo"); @@ -671,15 +673,15 @@ pub fn rls(build: &Build, stage: u32, target: &str) { t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); // Generate the installer tarball - let mut cmd = Command::new("sh"); - cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("generate") .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=RLS-ready-to-serve.") - .arg(format!("--image-dir={}", sanitize_sh(&image))) - .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) - .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay))) + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg("--non-installed-overlay").arg(&overlay) .arg(format!("--package-name={}-{}", name, target)) .arg("--component-name=rls") 
.arg("--legacy-manifest-dirs=rustlib,cargo"); @@ -730,29 +732,28 @@ pub fn extended(build: &Build, stage: u32, target: &str) { // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering // the std files during uninstall. To do this ensure that rustc comes // before rust-std in the list below. - let mut input_tarballs = format!("{},{},{},{},{},{}", - sanitize_sh(&rustc_installer), - sanitize_sh(&cargo_installer), - sanitize_sh(&rls_installer), - sanitize_sh(&analysis_installer), - sanitize_sh(&docs_installer), - sanitize_sh(&std_installer)); + let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer, + analysis_installer, docs_installer, std_installer]; if target.contains("pc-windows-gnu") { - input_tarballs.push_str(","); - input_tarballs.push_str(&sanitize_sh(&mingw_installer)); + tarballs.push(mingw_installer); + } + let mut input_tarballs = tarballs[0].as_os_str().to_owned(); + for tarball in &tarballs[1..] { + input_tarballs.push(","); + input_tarballs.push(tarball); } - let mut cmd = Command::new(SH_CMD); - cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/combine-installers.sh"))) + let mut cmd = rust_installer(build); + cmd.arg("combine") .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=Rust-is-ready-to-roll.") - .arg(format!("--work-dir={}", sanitize_sh(&work))) - .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) + .arg("--work-dir").arg(&work) + .arg("--output-dir").arg(&distdir(build)) .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target)) .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg(format!("--input-tarballs={}", input_tarballs)) - .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay))); + .arg("--input-tarballs").arg(input_tarballs) + .arg("--non-installed-overlay").arg(&overlay); build.run(&mut cmd); let mut license = String::new(); diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs index c805522fbf5..386b001971b 100644 --- a/src/bootstrap/install.rs +++ b/src/bootstrap/install.rs @@ -24,22 +24,30 @@ /// Installs everything. 
pub fn install(build: &Build, stage: u32, host: &str) { let prefix_default = PathBuf::from("/usr/local"); + let sysconfdir_default = PathBuf::from("/etc"); let docdir_default = PathBuf::from("share/doc/rust"); - let mandir_default = PathBuf::from("share/man"); + let bindir_default = PathBuf::from("bin"); let libdir_default = PathBuf::from("lib"); + let mandir_default = PathBuf::from("share/man"); let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default); + let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default); let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default); + let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default); let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default); let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default); + let sysconfdir = prefix.join(sysconfdir); let docdir = prefix.join(docdir); + let bindir = prefix.join(bindir); let libdir = prefix.join(libdir); let mandir = prefix.join(mandir); let destdir = env::var_os("DESTDIR").map(PathBuf::from); let prefix = add_destdir(&prefix, &destdir); + let sysconfdir = add_destdir(&sysconfdir, &destdir); let docdir = add_destdir(&docdir, &destdir); + let bindir = add_destdir(&bindir, &destdir); let libdir = add_destdir(&libdir, &destdir); let mandir = add_destdir(&mandir, &destdir); @@ -47,29 +55,35 @@ pub fn install(build: &Build, stage: u32, host: &str) { t!(fs::create_dir_all(&empty_dir)); if build.config.docs { install_sh(&build, "docs", "rust-docs", &build.rust_package_vers(), - stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir); + stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir, + &mandir, &empty_dir); } for target in build.config.target.iter() { install_sh(&build, "std", "rust-std", &build.rust_package_vers(), - stage, target, &prefix, &docdir, &libdir, &mandir, &empty_dir); + stage, target, &prefix, &sysconfdir, &docdir, &bindir, &libdir, + &mandir, &empty_dir); } if build.config.extended { install_sh(&build, "cargo", "cargo", &build.cargo_package_vers(), - stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir); + stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir, + &mandir, &empty_dir); install_sh(&build, "rls", "rls", &build.rls_package_vers(), - stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir); + stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir, + &mandir, &empty_dir); } install_sh(&build, "rustc", "rustc", &build.rust_package_vers(), - stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir); + stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir, + &mandir, &empty_dir); t!(fs::remove_dir_all(&empty_dir)); } fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u32, host: &str, - prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) { + prefix: &Path, sysconfdir: &Path, docdir: &Path, bindir: &Path, libdir: &Path, + mandir: &Path, empty_dir: &Path) { println!("Install {} stage{} ({})", package, stage, host); let package_name = format!("{}-{}-{}", name, version, host); @@ -77,7 +91,9 @@ fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u3 cmd.current_dir(empty_dir) .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh"))) .arg(format!("--prefix={}", sanitize_sh(prefix))) + .arg(format!("--sysconfdir={}", sanitize_sh(sysconfdir))) .arg(format!("--docdir={}", sanitize_sh(docdir))) + .arg(format!("--bindir={}", sanitize_sh(bindir))) .arg(format!("--libdir={}", 
sanitize_sh(libdir))) .arg(format!("--mandir={}", sanitize_sh(mandir))) .arg("--disable-ldconfig"); diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index ea0b521a2ce..ca9de43f542 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -82,7 +82,7 @@ use std::ffi::OsString; use std::fs::{self, File}; use std::io::Read; -use std::path::{Component, PathBuf, Path}; +use std::path::{PathBuf, Path}; use std::process::Command; use build_helper::{run_silent, run_suppressed, output, mtime}; @@ -285,129 +285,12 @@ pub fn build(&mut self) { self.verbose(&format!("auto-detected local-rebuild {}", local_release)); self.local_rebuild = true; } - self.verbose("updating submodules"); - self.update_submodules(); self.verbose("learning about cargo"); metadata::build(self); step::run(self); } - /// Updates all git submodules that we have. - /// - /// This will detect if any submodules are out of date an run the necessary - /// commands to sync them all with upstream. - fn update_submodules(&self) { - struct Submodule<'a> { - path: &'a Path, - state: State, - } - - enum State { - // The submodule may have staged/unstaged changes - MaybeDirty, - // Or could be initialized but never updated - NotInitialized, - // The submodule, itself, has extra commits but those changes haven't been commited to - // the (outer) git repository - OutOfSync, - } - - if !self.src_is_git || !self.config.submodules { - return - } - let git = || { - let mut cmd = Command::new("git"); - cmd.current_dir(&self.src); - return cmd - }; - let git_submodule = || { - let mut cmd = Command::new("git"); - cmd.current_dir(&self.src).arg("submodule"); - return cmd - }; - - // FIXME: this takes a seriously long time to execute on Windows and a - // nontrivial amount of time on Unix, we should have a better way - // of detecting whether we need to run all the submodule commands - // below. - let out = output(git_submodule().arg("status")); - let mut submodules = vec![]; - for line in out.lines() { - // NOTE `git submodule status` output looks like this: - // - // -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc - // +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..) - // e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6) - // - // The first character can be '-', '+' or ' ' and denotes the `State` of the submodule - // Right next to this character is the SHA-1 of the submodule HEAD - // And after that comes the path to the submodule - let path = Path::new(line[1..].split(' ').skip(1).next().unwrap()); - let state = if line.starts_with('-') { - State::NotInitialized - } else if line.starts_with('+') { - State::OutOfSync - } else if line.starts_with(' ') { - State::MaybeDirty - } else { - panic!("unexpected git submodule state: {:?}", line.chars().next()); - }; - - submodules.push(Submodule { path: path, state: state }) - } - - self.run(git_submodule().arg("sync")); - - for submodule in submodules { - // If using llvm-root then don't touch the llvm submodule. 
- if submodule.path.components().any(|c| c == Component::Normal("llvm".as_ref())) && - self.config.target_config.get(&self.config.build) - .and_then(|c| c.llvm_config.as_ref()).is_some() - { - continue - } - - if submodule.path.components().any(|c| c == Component::Normal("jemalloc".as_ref())) && - !self.config.use_jemalloc - { - continue - } - - // `submodule.path` is the relative path to a submodule (from the repository root) - // `submodule_path` is the path to a submodule from the cwd - - // use `submodule.path` when e.g. executing a submodule specific command from the - // repository root - // use `submodule_path` when e.g. executing a normal git command for the submodule - // (set via `current_dir`) - let submodule_path = self.src.join(submodule.path); - - match submodule.state { - State::MaybeDirty => { - // drop staged changes - self.run(git().current_dir(&submodule_path) - .args(&["reset", "--hard"])); - // drops unstaged changes - self.run(git().current_dir(&submodule_path) - .args(&["clean", "-fdx"])); - }, - State::NotInitialized => { - self.run(git_submodule().arg("init").arg(submodule.path)); - self.run(git_submodule().arg("update").arg(submodule.path)); - }, - State::OutOfSync => { - // drops submodule commits that weren't reported to the (outer) git repository - self.run(git_submodule().arg("update").arg(submodule.path)); - self.run(git().current_dir(&submodule_path) - .args(&["reset", "--hard"])); - self.run(git().current_dir(&submodule_path) - .args(&["clean", "-fdx"])); - }, - } - } - } - /// Clear out `dir` if `input` is newer. /// /// After this executes, it will also ensure that `dir` exists. @@ -475,12 +358,30 @@ fn cargo(&self, .env("RUSTDOC_REAL", self.rustdoc(compiler)) .env("RUSTC_FLAGS", self.rustc_flags(target).join(" ")); - // Tools don't get debuginfo right now, e.g. cargo and rls don't get - // compiled with debuginfo. if mode != Mode::Tool { - cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string()) - .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string()) - .env("RUSTC_FORCE_UNSTABLE", "1"); + // Tools don't get debuginfo right now, e.g. cargo and rls don't + // get compiled with debuginfo. + cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string()) + .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string()) + .env("RUSTC_FORCE_UNSTABLE", "1"); + + // Currently the compiler depends on crates from crates.io, and + // then other crates can depend on the compiler (e.g. proc-macro + // crates). Let's say, for example that rustc itself depends on the + // bitflags crate. If an external crate then depends on the + // bitflags crate as well, we need to make sure they don't + // conflict, even if they pick the same verison of bitflags. We'll + // want to make sure that e.g. a plugin and rustc each get their + // own copy of bitflags. + + // Cargo ensures that this works in general through the -C metadata + // flag. This flag will frob the symbols in the binary to make sure + // they're different, even though the source code is the exact + // same. To solve this problem for the compiler we extend Cargo's + // already-passed -C metadata flag with our own. Our rustc.rs + // wrapper around the actual rustc will detect -C metadata being + // passed and frob it with this extra string we're passing in. 
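The comment above describes how the bootstrap's `rustc.rs` wrapper extends an already-passed `-C metadata` flag with an extra suffix (the `RUSTC_METADATA_SUFFIX` value set just below). As a rough, standalone illustration of that idea only (this is not the wrapper code from this patch; the helper name and the exact argument forms handled are assumptions), such a rewrite might look like:

```rust
// Sketch of extending an already-passed `-C metadata=...` argument with an
// extra suffix, so two builds of the same source end up with distinct symbols.
// Hypothetical helper; per the comment above, the real wrapper detects the
// flag and frobs it with the suffix the bootstrap passes in.
fn append_metadata_suffix(args: &mut [String], suffix: &str) {
    for i in 0..args.len() {
        if args[i] == "-C" && i + 1 < args.len() && args[i + 1].starts_with("metadata=") {
            // `-C metadata=<value>` passed as two separate arguments.
            let extended = format!("{}-{}", args[i + 1], suffix);
            args[i + 1] = extended;
        } else if args[i].starts_with("-Cmetadata=") {
            // Fused `-Cmetadata=<value>` form.
            let extended = format!("{}-{}", args[i], suffix);
            args[i] = extended;
        }
    }
}

fn main() {
    let mut args = vec![
        "--crate-name".to_string(),
        "bitflags".to_string(),
        "-C".to_string(),
        "metadata=abc123".to_string(),
    ];
    append_metadata_suffix(&mut args, "rustc");
    assert_eq!(args[3], "metadata=abc123-rustc");
}
```

With an argument list such as `--crate-name bitflags -C metadata=abc123`, the suffixed value keeps a plugin's copy of a crate like `bitflags` from colliding at the symbol level with the compiler's own copy, which is the situation the comment above is guarding against.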
+ cargo.env("RUSTC_METADATA_SUFFIX", "rustc"); } // Enable usage of unstable features diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 67edd70a156..6cb1d1fc4bf 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -108,6 +108,7 @@ pub fn llvm(build: &Build, target: &str) { cfg.define("LLVM_USE_CRT_DEBUG", "MT"); cfg.define("LLVM_USE_CRT_RELEASE", "MT"); cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT"); + cfg.static_crt(true); } if target.starts_with("i686") { diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs index 970c0bc565d..92666e8e639 100644 --- a/src/bootstrap/step.rs +++ b/src/bootstrap/step.rs @@ -574,6 +574,10 @@ fn crate_rule<'a, 'b>(build: &'a Build, .dep(|s| s.name("maybe-clean-tools")) .dep(|s| s.name("libstd-tool")) .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client")); + rules.build("tool-rust-installer", "src/tools/rust-installer") + .dep(|s| s.name("maybe-clean-tools")) + .dep(|s| s.name("libstd-tool")) + .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer")); rules.build("tool-cargo", "src/tools/cargo") .host(true) .default(build.config.extended) @@ -704,6 +708,7 @@ fn crate_rule<'a, 'b>(build: &'a Build, .host(true) .only_host_build(true) .default(true) + .dep(move |s| tool_rust_installer(build, s)) .run(move |s| dist::rustc(build, s.stage, s.target)); rules.dist("dist-std", "src/libstd") .dep(move |s| { @@ -718,10 +723,12 @@ fn crate_rule<'a, 'b>(build: &'a Build, }) .default(true) .only_host_build(true) + .dep(move |s| tool_rust_installer(build, s)) .run(move |s| dist::std(build, &s.compiler(), s.target)); rules.dist("dist-mingw", "path/to/nowhere") .default(true) .only_host_build(true) + .dep(move |s| tool_rust_installer(build, s)) .run(move |s| { if s.target.contains("pc-windows-gnu") { dist::mingw(build, s.target) @@ -732,21 +739,25 @@ fn crate_rule<'a, 'b>(build: &'a Build, .host(true) .only_build(true) .only_host_build(true) + .dep(move |s| tool_rust_installer(build, s)) .run(move |_| dist::rust_src(build)); rules.dist("dist-docs", "src/doc") .default(true) .only_host_build(true) .dep(|s| s.name("default:doc")) + .dep(move |s| tool_rust_installer(build, s)) .run(move |s| dist::docs(build, s.stage, s.target)); rules.dist("dist-analysis", "analysis") .default(build.config.extended) .dep(|s| s.name("dist-std")) .only_host_build(true) + .dep(move |s| tool_rust_installer(build, s)) .run(move |s| dist::analysis(build, &s.compiler(), s.target)); rules.dist("dist-rls", "rls") .host(true) .only_host_build(true) .dep(|s| s.name("tool-rls")) + .dep(move |s| tool_rust_installer(build, s)) .run(move |s| dist::rls(build, s.stage, s.target)); rules.dist("install", "path/to/nowhere") .dep(|s| s.name("default:dist")) @@ -755,6 +766,7 @@ fn crate_rule<'a, 'b>(build: &'a Build, .host(true) .only_host_build(true) .dep(|s| s.name("tool-cargo")) + .dep(move |s| tool_rust_installer(build, s)) .run(move |s| dist::cargo(build, s.stage, s.target)); rules.dist("dist-extended", "extended") .default(build.config.extended) @@ -767,6 +779,7 @@ fn crate_rule<'a, 'b>(build: &'a Build, .dep(|d| d.name("dist-cargo")) .dep(|d| d.name("dist-rls")) .dep(|d| d.name("dist-analysis")) + .dep(move |s| tool_rust_installer(build, s)) .run(move |s| dist::extended(build, s.stage, s.target)); rules.dist("dist-sign", "hash-and-sign") @@ -778,6 +791,14 @@ fn crate_rule<'a, 'b>(build: &'a Build, rules.verify(); return rules; + + /// Helper to depend on a stage0 build-only rust-installer tool. 
+ fn tool_rust_installer<'a>(build: &'a Build, step: &Step<'a>) -> Step<'a> { + step.name("tool-rust-installer") + .host(&build.config.build) + .target(&build.config.build) + .stage(0) + } } #[derive(PartialEq, Eq, Hash, Clone, Debug)] diff --git a/src/ci/docker/README.md b/src/ci/docker/README.md index 6f3a7e091e1..627b5062df3 100644 --- a/src/ci/docker/README.md +++ b/src/ci/docker/README.md @@ -16,6 +16,12 @@ for example: Images will output artifacts in an `obj` dir at the root of a repository. +## Filesystem layout + +- Each directory, excluding `scripts` and `disabled`, corresponds to a docker image +- `scripts` contains files shared by docker images +- `disabled` contains images that are not build travis + ## Cross toolchains A number of these images take quite a long time to compile as they're building diff --git a/src/ci/docker/android-ndk.sh b/src/ci/docker/android-ndk.sh deleted file mode 100644 index 4849f843007..00000000000 --- a/src/ci/docker/android-ndk.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -# Copyright 2017 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -set -ex - -URL=https://dl.google.com/android/repository - -download_ndk() { - mkdir -p /android/ndk - cd /android/ndk - curl -O $URL/$1 - unzip -q $1 - rm $1 - mv android-ndk-* ndk -} - -make_standalone_toolchain() { - # See https://developer.android.com/ndk/guides/standalone_toolchain.htm - python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \ - --install-dir /android/ndk/$1-$2 \ - --arch $1 \ - --api $2 -} - -remove_ndk() { - rm -rf /android/ndk/ndk -} diff --git a/src/ci/docker/arm-android/Dockerfile b/src/ci/docker/arm-android/Dockerfile index 93f15baf55e..2a928c5ec7e 100644 --- a/src/ci/docker/arm-android/Dockerfile +++ b/src/ci/docker/arm-android/Dockerfile @@ -2,52 +2,44 @@ FROM ubuntu:16.04 RUN apt-get update && \ apt-get install -y --no-install-recommends \ + ca-certificates \ + cmake \ + curl \ + file \ g++ \ + git \ + libssl-dev \ make \ - file \ - curl \ - ca-certificates \ + pkg-config \ python2.7 \ - git \ - cmake \ - unzip \ sudo \ - xz-utils \ - libssl-dev \ - pkg-config - -RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ - dpkg -i dumb-init_*.deb && \ - rm dumb-init_*.deb + unzip \ + xz-utils -RUN curl -o /usr/local/bin/sccache \ - https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \ - chmod +x /usr/local/bin/sccache +# dumb-init +COPY scripts/dumb-init.sh /scripts/ +RUN sh /scripts/dumb-init.sh -# Install NDK -COPY install-ndk.sh /tmp -RUN . /tmp/install-ndk.sh && \ - download_ndk android-ndk-r13b-linux-x86_64.zip && \ - make_standalone_toolchain arm 9 && \ - remove_ndk +# ndk +COPY scripts/android-ndk.sh /scripts/ +RUN . /scripts/android-ndk.sh && \ + download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm 9 -# Install SDK +# sdk RUN dpkg --add-architecture i386 && \ apt-get update && \ apt-get install -y --no-install-recommends \ - openjdk-9-jre-headless \ - tzdata \ - libstdc++6:i386 \ libgl1-mesa-glx \ - libpulse0 + libpulse0 \ + libstdc++6:i386 \ + openjdk-9-jre-headless \ + tzdata -COPY install-sdk.sh /tmp -RUN . 
/tmp/install-sdk.sh && \ - download_sdk tools_r25.2.5-linux.zip && \ - download_sysimage armeabi-v7a 18 && \ - create_avd armeabi-v7a 18 +COPY scripts/android-sdk.sh /scripts/ +RUN . /scripts/android-sdk.sh && \ + download_and_create_avd tools_r25.2.5-linux.zip armeabi-v7a 18 -# Setup env +# env ENV PATH=$PATH:/android/sdk/tools ENV PATH=$PATH:/android/sdk/platform-tools @@ -57,8 +49,12 @@ ENV RUST_CONFIGURE_ARGS \ --target=$TARGETS \ --arm-linux-androideabi-ndk=/android/ndk/arm-9 -ENV SCRIPT python2.7 ../x.py test --target $TARGETS --verbose +ENV SCRIPT python2.7 ../x.py test --target $TARGETS + +# sccache +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh -# Entrypoint -COPY start-emulator.sh /android/ -ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"] +# init +COPY scripts/android-start-emulator.sh /scripts/ +ENTRYPOINT ["/usr/bin/dumb-init", "--", "/scripts/android-start-emulator.sh"] diff --git a/src/ci/docker/arm-android/install-ndk.sh b/src/ci/docker/arm-android/install-ndk.sh deleted file mode 100644 index 80818721199..00000000000 --- a/src/ci/docker/arm-android/install-ndk.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -# Copyright 2016 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -set -ex - -URL=https://dl.google.com/android/repository - -download_ndk() { - mkdir -p /android/ndk - cd /android/ndk - curl -O $URL/$1 - unzip -q $1 - rm $1 - mv android-ndk-* ndk -} - -make_standalone_toolchain() { - # See https://developer.android.com/ndk/guides/standalone_toolchain.html - python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \ - --install-dir /android/ndk/$1-$2 \ - --arch $1 \ - --api $2 -} - -remove_ndk() { - rm -rf /android/ndk/ndk -} diff --git a/src/ci/docker/arm-android/install-sdk.sh b/src/ci/docker/arm-android/install-sdk.sh deleted file mode 100644 index 258fc47a7a6..00000000000 --- a/src/ci/docker/arm-android/install-sdk.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# Copyright 2017 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. 
- -set -ex - -URL=https://dl.google.com/android/repository - -download_sdk() { - mkdir -p /android/sdk - cd /android/sdk - curl -O $URL/$1 - unzip -q $1 - rm -rf $1 -} - -download_sysimage() { - # See https://developer.android.com/studio/tools/help/android.html - abi=$1 - api=$2 - - filter="platform-tools,android-$api" - filter="$filter,sys-img-$abi-android-$api" - - # Keep printing yes to accept the licenses - while true; do echo yes; sleep 10; done | \ - /android/sdk/tools/android update sdk -a --no-ui \ - --filter "$filter" -} - -create_avd() { - # See https://developer.android.com/studio/tools/help/android.html - abi=$1 - api=$2 - - echo no | \ - /android/sdk/tools/android create avd \ - --name $abi-$api \ - --target android-$api \ - --abi $abi -} - diff --git a/src/ci/docker/arm-android/start-emulator.sh b/src/ci/docker/arm-android/start-emulator.sh deleted file mode 100755 index cd3369d5ead..00000000000 --- a/src/ci/docker/arm-android/start-emulator.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -# Copyright 2016 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -set -ex - -# Setting SHELL to a file instead on a symlink helps android -# emulator identify the system -export SHELL=/bin/bash - -# Using the default qemu2 engine makes time::tests::since_epoch fails because -# the emulator date is set to unix epoch (in armeabi-v7a-18 image). Using -# classic engine the emulator starts with the current date and the tests run -# fine. If another image is used, this need to be evaluated again. -nohup nohup emulator @armeabi-v7a-18 \ - -engine classic -no-window -partition-size 2047 0<&- &>/dev/null & - -exec "$@" diff --git a/src/ci/docker/armhf-gnu/Dockerfile b/src/ci/docker/armhf-gnu/Dockerfile index 801de69a63d..03e0b78ba89 100644 --- a/src/ci/docker/armhf-gnu/Dockerfile +++ b/src/ci/docker/armhf-gnu/Dockerfile @@ -31,7 +31,7 @@ WORKDIR /build # The `vexpress_config` config file was a previously generated config file for # the kernel. This file was generated by running `make vexpress_defconfig` # followed by `make menuconfig` and then enabling the IPv6 protocol page. -COPY vexpress_config /build/.config +COPY armhf-gnu/vexpress_config /build/.config RUN curl https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-4.4.42.tar.xz | \ tar xJf - && \ cd /build/linux-4.4.42 && \ @@ -63,11 +63,11 @@ RUN curl http://cdimage.ubuntu.com/ubuntu-base/releases/16.04/release/ubuntu-bas # Copy over our init script, which starts up our test server and also a few # other misc tasks. -COPY rcS rootfs/etc/init.d/rcS +COPY armhf-gnu/rcS rootfs/etc/init.d/rcS RUN chmod +x rootfs/etc/init.d/rcS # Helper to quickly fill the entropy pool in the kernel. -COPY addentropy.c /tmp/ +COPY armhf-gnu/addentropy.c /tmp/ RUN arm-linux-gnueabihf-gcc addentropy.c -o rootfs/addentropy -static # TODO: What is this?! 
diff --git a/src/ci/docker/cross/Dockerfile b/src/ci/docker/cross/Dockerfile index 30a699c3ba2..7759d91e1bb 100644 --- a/src/ci/docker/cross/Dockerfile +++ b/src/ci/docker/cross/Dockerfile @@ -32,10 +32,10 @@ ENTRYPOINT ["/usr/bin/dumb-init", "--"] WORKDIR /tmp -COPY build-rumprun.sh /tmp/ +COPY cross/build-rumprun.sh /tmp/ RUN ./build-rumprun.sh -COPY build-arm-musl.sh /tmp/ +COPY cross/build-arm-musl.sh /tmp/ RUN ./build-arm-musl.sh # originally from diff --git a/src/ci/docker/disabled/dist-aarch64-android/Dockerfile b/src/ci/docker/disabled/dist-aarch64-android/Dockerfile index e15876edbd8..918d2911ae2 100644 --- a/src/ci/docker/disabled/dist-aarch64-android/Dockerfile +++ b/src/ci/docker/disabled/dist-aarch64-android/Dockerfile @@ -2,36 +2,30 @@ FROM ubuntu:16.04 RUN apt-get update && \ apt-get install -y --no-install-recommends \ + ca-certificates \ + cmake \ + curl \ + file \ g++ \ + git \ + libssl-dev \ make \ - file \ - curl \ - ca-certificates \ + pkg-config \ python2.7 \ - git \ - cmake \ - unzip \ sudo \ - xz-utils \ - libssl-dev \ - pkg-config - -RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ - dpkg -i dumb-init_*.deb && \ - rm dumb-init_*.deb - -RUN curl -o /usr/local/bin/sccache \ - https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \ - chmod +x /usr/local/bin/sccache + unzip \ + xz-utils -ENTRYPOINT ["/usr/bin/dumb-init", "--"] +# dumb-init +COPY scripts/dumb-init.sh /scripts/ +RUN sh /scripts/dumb-init.sh -COPY android-ndk.sh / -RUN . /android-ndk.sh && \ - download_ndk android-ndk-r13b-linux-x86_64.zip && \ - make_standalone_toolchain arm64 21 && \ - remove_ndk +# ndk +COPY scripts/android-ndk.sh /scripts/ +RUN . /scripts/android-ndk.sh && \ + download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm64 21 +# env ENV PATH=$PATH:/android/ndk/arm64-21/bin ENV DEP_Z_ROOT=/android/ndk/arm64-21/sysroot/usr/ @@ -47,3 +41,10 @@ ENV RUST_CONFIGURE_ARGS \ --enable-cargo-openssl-static ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS + +# sccache +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +# init +ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/disabled/dist-armv7-android/Dockerfile b/src/ci/docker/disabled/dist-armv7-android/Dockerfile index 0d81e404b5c..aed82e6c138 100644 --- a/src/ci/docker/disabled/dist-armv7-android/Dockerfile +++ b/src/ci/docker/disabled/dist-armv7-android/Dockerfile @@ -2,37 +2,36 @@ FROM ubuntu:16.04 RUN apt-get update && \ apt-get install -y --no-install-recommends \ + ca-certificates \ + cmake \ + curl \ + file \ g++ \ + git \ + libssl-dev \ make \ - file \ - curl \ - ca-certificates \ + pkg-config \ python2.7 \ - git \ - cmake \ - unzip \ sudo \ - xz-utils \ - libssl-dev \ - pkg-config - -RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ - dpkg -i dumb-init_*.deb && \ - rm dumb-init_*.deb - -RUN curl -o /usr/local/bin/sccache \ - https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \ - chmod +x /usr/local/bin/sccache + unzip \ + xz-utils -ENTRYPOINT ["/usr/bin/dumb-init", "--"] +# dumb-init +COPY scripts/dumb-init.sh /scripts/ +RUN sh /scripts/dumb-init.sh -COPY android-ndk.sh / -RUN . /android-ndk.sh && \ +# ndk +COPY scripts/android-ndk.sh /scripts/ +RUN . 
/scripts/android-ndk.sh && \ download_ndk android-ndk-r13b-linux-x86_64.zip && \ make_standalone_toolchain arm 9 && \ make_standalone_toolchain arm 21 && \ remove_ndk +RUN chmod 777 /android/ndk && \ + ln -s /android/ndk/arm-21 /android/ndk/arm + +# env ENV PATH=$PATH:/android/ndk/arm-9/bin ENV DEP_Z_ROOT=/android/ndk/arm-9/sysroot/usr/ @@ -54,12 +53,16 @@ ENV RUST_CONFIGURE_ARGS \ # level 9), the default linker behavior is to generate an error, to allow the # build to finish we use --warn-unresolved-symbols. Note that the missing # symbols does not affect std, only the compiler (llvm) and cargo (openssl). -RUN chmod 777 /android/ndk && \ - ln -s /android/ndk/arm-21 /android/ndk/arm - ENV SCRIPT \ python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \ (export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \ rm /android/ndk/arm && \ ln -s /android/ndk/arm-9 /android/ndk/arm && \ python2.7 ../x.py dist --host $HOSTS --target $HOSTS) + +# sccache +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +# init +ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/disabled/dist-i686-android/Dockerfile b/src/ci/docker/disabled/dist-i686-android/Dockerfile index 37930639b8a..f012e869e78 100644 --- a/src/ci/docker/disabled/dist-i686-android/Dockerfile +++ b/src/ci/docker/disabled/dist-i686-android/Dockerfile @@ -2,37 +2,36 @@ FROM ubuntu:16.04 RUN apt-get update && \ apt-get install -y --no-install-recommends \ + ca-certificates \ + cmake \ + curl \ + file \ g++ \ + git \ + libssl-dev \ make \ - file \ - curl \ - ca-certificates \ + pkg-config \ python2.7 \ - git \ - cmake \ - unzip \ sudo \ - xz-utils \ - libssl-dev \ - pkg-config - -RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ - dpkg -i dumb-init_*.deb && \ - rm dumb-init_*.deb - -RUN curl -o /usr/local/bin/sccache \ - https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \ - chmod +x /usr/local/bin/sccache + unzip \ + xz-utils -ENTRYPOINT ["/usr/bin/dumb-init", "--"] +# dumb-init +COPY scripts/dumb-init.sh /scripts/ +RUN sh /scripts/dumb-init.sh -COPY android-ndk.sh / -RUN . /android-ndk.sh && \ +# ndk +COPY scripts/android-ndk.sh /scripts/ +RUN . /scripts/android-ndk.sh && \ download_ndk android-ndk-r13b-linux-x86_64.zip && \ make_standalone_toolchain x86 9 && \ make_standalone_toolchain x86 21 && \ remove_ndk +RUN chmod 777 /android/ndk && \ + ln -s /android/ndk/x86-21 /android/ndk/x86 + +# env ENV PATH=$PATH:/android/ndk/x86-9/bin ENV DEP_Z_ROOT=/android/ndk/x86-9/sysroot/usr/ @@ -54,12 +53,16 @@ ENV RUST_CONFIGURE_ARGS \ # level 9), the default linker behavior is to generate an error, to allow the # build to finish we use --warn-unresolved-symbols. Note that the missing # symbols does not affect std, only the compiler (llvm) and cargo (openssl). 
-RUN chmod 777 /android/ndk && \ - ln -s /android/ndk/x86-21 /android/ndk/x86 - ENV SCRIPT \ python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \ (export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \ rm /android/ndk/x86 && \ ln -s /android/ndk/x86-9 /android/ndk/x86 && \ python2.7 ../x.py dist --host $HOSTS --target $HOSTS) + +# sccache +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +# init +ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/disabled/dist-x86_64-android/Dockerfile b/src/ci/docker/disabled/dist-x86_64-android/Dockerfile index a642d8ed6ec..0c586452840 100644 --- a/src/ci/docker/disabled/dist-x86_64-android/Dockerfile +++ b/src/ci/docker/disabled/dist-x86_64-android/Dockerfile @@ -2,36 +2,30 @@ FROM ubuntu:16.04 RUN apt-get update && \ apt-get install -y --no-install-recommends \ + ca-certificates \ + cmake \ + curl \ + file \ g++ \ + git \ + libssl-dev \ make \ - file \ - curl \ - ca-certificates \ + pkg-config \ python2.7 \ - git \ - cmake \ - unzip \ sudo \ - xz-utils \ - libssl-dev \ - pkg-config - -RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ - dpkg -i dumb-init_*.deb && \ - rm dumb-init_*.deb - -RUN curl -o /usr/local/bin/sccache \ - https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \ - chmod +x /usr/local/bin/sccache + unzip \ + xz-utils -ENTRYPOINT ["/usr/bin/dumb-init", "--"] +# dumb-init +COPY scripts/dumb-init.sh /scripts/ +RUN sh /scripts/dumb-init.sh -COPY android-ndk.sh / -RUN . /android-ndk.sh && \ - download_ndk android-ndk-r13b-linux-x86_64.zip && \ - make_standalone_toolchain x86_64 21 && \ - remove_ndk +# ndk +COPY scripts/android-ndk.sh /scripts/ +RUN . 
/scripts/android-ndk.sh && \ + download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip x86_64 21 +# env ENV PATH=$PATH:/android/ndk/x86_64-21/bin ENV DEP_Z_ROOT=/android/ndk/x86_64-21/sysroot/usr/ @@ -47,3 +41,10 @@ ENV RUST_CONFIGURE_ARGS \ --enable-cargo-openssl-static ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS + +# sccache +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +# init +ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/dist-aarch64-linux/Dockerfile b/src/ci/docker/dist-aarch64-linux/Dockerfile index c8257c05acd..0134a540793 100644 --- a/src/ci/docker/dist-aarch64-linux/Dockerfile +++ b/src/ci/docker/dist-aarch64-linux/Dockerfile @@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools USER rustbuild WORKDIR /tmp -COPY aarch64-linux-gnu.config build-toolchains.sh /tmp/ +COPY dist-aarch64-linux/aarch64-linux-gnu.config dist-aarch64-linux/build-toolchains.sh /tmp/ RUN ./build-toolchains.sh USER root diff --git a/src/ci/docker/dist-android/Dockerfile b/src/ci/docker/dist-android/Dockerfile index 711c0ee5747..31389dd148a 100644 --- a/src/ci/docker/dist-android/Dockerfile +++ b/src/ci/docker/dist-android/Dockerfile @@ -2,33 +2,27 @@ FROM ubuntu:16.04 RUN apt-get update && \ apt-get install -y --no-install-recommends \ + ca-certificates \ + cmake \ + curl \ + file \ g++ \ + git \ + libssl-dev \ make \ - file \ - curl \ - ca-certificates \ + pkg-config \ python2.7 \ - git \ - cmake \ - unzip \ sudo \ - xz-utils \ - libssl-dev \ - pkg-config - -RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ - dpkg -i dumb-init_*.deb && \ - rm dumb-init_*.deb - -RUN curl -o /usr/local/bin/sccache \ - https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \ - chmod +x /usr/local/bin/sccache + unzip \ + xz-utils -ENTRYPOINT ["/usr/bin/dumb-init", "--"] +# dumb-init +COPY scripts/dumb-init.sh /scripts/ +RUN sh /scripts/dumb-init.sh -# Install NDK -COPY install-ndk.sh /tmp -RUN . /tmp/install-ndk.sh && \ +# ndk +COPY scripts/android-ndk.sh /scripts/ +RUN . /scripts/android-ndk.sh && \ download_ndk android-ndk-r13b-linux-x86_64.zip && \ make_standalone_toolchain arm 9 && \ make_standalone_toolchain x86 9 && \ @@ -36,6 +30,7 @@ RUN . /tmp/install-ndk.sh && \ make_standalone_toolchain x86_64 21 && \ remove_ndk +# env ENV TARGETS=arm-linux-androideabi ENV TARGETS=$TARGETS,armv7-linux-androideabi ENV TARGETS=$TARGETS,i686-linux-android @@ -52,3 +47,10 @@ ENV RUST_CONFIGURE_ARGS \ --x86_64-linux-android-ndk=/android/ndk/x86_64-21 ENV SCRIPT python2.7 ../x.py dist --target $TARGETS + +# cache +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +# init +ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/dist-android/install-ndk.sh b/src/ci/docker/dist-android/install-ndk.sh deleted file mode 100644 index 80818721199..00000000000 --- a/src/ci/docker/dist-android/install-ndk.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -# Copyright 2016 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. 
- -set -ex - -URL=https://dl.google.com/android/repository - -download_ndk() { - mkdir -p /android/ndk - cd /android/ndk - curl -O $URL/$1 - unzip -q $1 - rm $1 - mv android-ndk-* ndk -} - -make_standalone_toolchain() { - # See https://developer.android.com/ndk/guides/standalone_toolchain.html - python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \ - --install-dir /android/ndk/$1-$2 \ - --arch $1 \ - --api $2 -} - -remove_ndk() { - rm -rf /android/ndk/ndk -} diff --git a/src/ci/docker/dist-arm-linux/Dockerfile b/src/ci/docker/dist-arm-linux/Dockerfile index af2b58f7d6b..862818a7c91 100644 --- a/src/ci/docker/dist-arm-linux/Dockerfile +++ b/src/ci/docker/dist-arm-linux/Dockerfile @@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools USER rustbuild WORKDIR /tmp -COPY arm-linux-gnueabi.config build-toolchains.sh /tmp/ +COPY dist-arm-linux/arm-linux-gnueabi.config dist-arm-linux/build-toolchains.sh /tmp/ RUN ./build-toolchains.sh USER root diff --git a/src/ci/docker/dist-armhf-linux/Dockerfile b/src/ci/docker/dist-armhf-linux/Dockerfile index 076bc50946c..7f1f91f844c 100644 --- a/src/ci/docker/dist-armhf-linux/Dockerfile +++ b/src/ci/docker/dist-armhf-linux/Dockerfile @@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools USER rustbuild WORKDIR /tmp -COPY arm-linux-gnueabihf.config build-toolchains.sh /tmp/ +COPY dist-armhf-linux/arm-linux-gnueabihf.config dist-armhf-linux/build-toolchains.sh /tmp/ RUN ./build-toolchains.sh USER root diff --git a/src/ci/docker/dist-armv7-linux/Dockerfile b/src/ci/docker/dist-armv7-linux/Dockerfile index 9367a5a6270..030fd24ebcd 100644 --- a/src/ci/docker/dist-armv7-linux/Dockerfile +++ b/src/ci/docker/dist-armv7-linux/Dockerfile @@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools USER rustbuild WORKDIR /tmp -COPY build-toolchains.sh armv7-linux-gnueabihf.config /tmp/ +COPY dist-armv7-linux/build-toolchains.sh dist-armv7-linux/armv7-linux-gnueabihf.config /tmp/ RUN ./build-toolchains.sh USER root diff --git a/src/ci/docker/dist-fuchsia/Dockerfile b/src/ci/docker/dist-fuchsia/Dockerfile index 8699e0d87d7..d1d9767d35e 100644 --- a/src/ci/docker/dist-fuchsia/Dockerfile +++ b/src/ci/docker/dist-fuchsia/Dockerfile @@ -21,7 +21,7 @@ RUN curl -L https://cmake.org/files/v3.8/cmake-3.8.0-rc1-Linux-x86_64.tar.gz | \ tar xzf - -C /usr/local --strip-components=1 WORKDIR /tmp -COPY shared.sh build-toolchain.sh compiler-rt-dso-handle.patch /tmp/ +COPY dist-fuchsia/shared.sh dist-fuchsia/build-toolchain.sh dist-fuchsia/compiler-rt-dso-handle.patch /tmp/ RUN /tmp/build-toolchain.sh RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ diff --git a/src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile b/src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile index 3e823339eaa..805d238de1f 100644 --- a/src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile +++ b/src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile @@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ pkg-config WORKDIR /build/ -COPY musl-libunwind-patch.patch build-musl.sh /build/ +COPY dist-i586-gnu-i686-musl/musl-libunwind-patch.patch dist-i586-gnu-i686-musl/build-musl.sh /build/ RUN sh /build/build-musl.sh && rm -rf /build RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ diff --git a/src/ci/docker/dist-i686-freebsd/Dockerfile b/src/ci/docker/dist-i686-freebsd/Dockerfile index a1f36257f96..9c4d43bfa92 100644 --- 
a/src/ci/docker/dist-i686-freebsd/Dockerfile +++ b/src/ci/docker/dist-i686-freebsd/Dockerfile @@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libssl-dev \ pkg-config -COPY build-toolchain.sh /tmp/ +COPY dist-i686-freebsd/build-toolchain.sh /tmp/ RUN /tmp/build-toolchain.sh i686 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ diff --git a/src/ci/docker/dist-i686-linux/Dockerfile b/src/ci/docker/dist-i686-linux/Dockerfile index 8335147de60..a3c08e93ed1 100644 --- a/src/ci/docker/dist-i686-linux/Dockerfile +++ b/src/ci/docker/dist-i686-linux/Dockerfile @@ -29,13 +29,13 @@ ENV PATH=/rustroot/bin:$PATH ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig WORKDIR /tmp -COPY shared.sh build-binutils.sh /tmp/ +COPY dist-i686-linux/shared.sh dist-i686-linux/build-binutils.sh /tmp/ # We need a build of openssl which supports SNI to download artifacts from # static.rust-lang.org. This'll be used to link into libcurl below (and used # later as well), so build a copy of OpenSSL with dynamic libraries into our # generic root. -COPY build-openssl.sh /tmp/ +COPY dist-i686-linux/build-openssl.sh /tmp/ RUN ./build-openssl.sh # The `curl` binary on CentOS doesn't support SNI which is needed for fetching @@ -44,7 +44,7 @@ RUN ./build-openssl.sh # # Note that we also disable a bunch of optional features of curl that we don't # really need. -COPY build-curl.sh /tmp/ +COPY dist-i686-linux/build-curl.sh /tmp/ RUN ./build-curl.sh # binutils < 2.22 has a bug where the 32-bit executables it generates @@ -54,26 +54,26 @@ RUN ./build-curl.sh RUN ./build-binutils.sh # Need a newer version of gcc than centos has to compile LLVM nowadays -COPY build-gcc.sh /tmp/ +COPY dist-i686-linux/build-gcc.sh /tmp/ RUN ./build-gcc.sh # CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+ -COPY build-python.sh /tmp/ +COPY dist-i686-linux/build-python.sh /tmp/ RUN ./build-python.sh # Apparently CentOS 5.5 desn't have `git` in yum, but we're gonna need it for # cloning, so download and build it here. 
-COPY build-git.sh /tmp/ +COPY dist-i686-linux/build-git.sh /tmp/ RUN ./build-git.sh # libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS # only has 2.6.4, so build our own -COPY build-cmake.sh /tmp/ +COPY dist-i686-linux/build-cmake.sh /tmp/ RUN ./build-cmake.sh # for sanitizers, we need kernel headers files newer than the ones CentOS ships # with so we install newer ones here -COPY build-headers.sh /tmp/ +COPY dist-i686-linux/build-headers.sh /tmp/ RUN ./build-headers.sh RUN curl -Lo /rustroot/dumb-init \ diff --git a/src/ci/docker/dist-powerpc-linux/Dockerfile b/src/ci/docker/dist-powerpc-linux/Dockerfile index bff6504749e..0074665f34f 100644 --- a/src/ci/docker/dist-powerpc-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc-linux/Dockerfile @@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools USER rustbuild WORKDIR /tmp -COPY patches/ /tmp/patches/ -COPY powerpc-linux-gnu.config build-powerpc-toolchain.sh /tmp/ +COPY dist-powerpc-linux/patches/ /tmp/patches/ +COPY dist-powerpc-linux/powerpc-linux-gnu.config dist-powerpc-linux/build-powerpc-toolchain.sh /tmp/ RUN ./build-powerpc-toolchain.sh USER root diff --git a/src/ci/docker/dist-powerpc64-linux/Dockerfile b/src/ci/docker/dist-powerpc64-linux/Dockerfile index 58b09fd0fa7..bd38ee0c111 100644 --- a/src/ci/docker/dist-powerpc64-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc64-linux/Dockerfile @@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools USER rustbuild WORKDIR /tmp -COPY patches/ /tmp/patches/ -COPY shared.sh powerpc64-linux-gnu.config build-powerpc64-toolchain.sh /tmp/ +COPY dist-powerpc64-linux/patches/ /tmp/patches/ +COPY dist-powerpc64-linux/shared.sh dist-powerpc64-linux/powerpc64-linux-gnu.config dist-powerpc64-linux/build-powerpc64-toolchain.sh /tmp/ RUN ./build-powerpc64-toolchain.sh USER root diff --git a/src/ci/docker/dist-powerpc64le-linux/Dockerfile b/src/ci/docker/dist-powerpc64le-linux/Dockerfile index 08f1d1d7ed5..cbded156b4c 100644 --- a/src/ci/docker/dist-powerpc64le-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc64le-linux/Dockerfile @@ -59,7 +59,7 @@ WORKDIR /tmp USER root RUN apt-get install -y --no-install-recommends rpm2cpio cpio -COPY shared.sh build-powerpc64le-toolchain.sh /tmp/ +COPY dist-powerpc64le-linux/shared.sh dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /tmp/ RUN ./build-powerpc64le-toolchain.sh RUN curl -o /usr/local/bin/sccache \ diff --git a/src/ci/docker/dist-s390x-linux/Dockerfile b/src/ci/docker/dist-s390x-linux/Dockerfile index 5eb238fa887..5c00287107a 100644 --- a/src/ci/docker/dist-s390x-linux/Dockerfile +++ b/src/ci/docker/dist-s390x-linux/Dockerfile @@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools USER rustbuild WORKDIR /tmp -COPY patches/ /tmp/patches/ -COPY s390x-linux-gnu.config build-s390x-toolchain.sh /tmp/ +COPY dist-s390x-linux/patches/ /tmp/patches/ +COPY dist-s390x-linux/s390x-linux-gnu.config dist-s390x-linux/build-s390x-toolchain.sh /tmp/ RUN ./build-s390x-toolchain.sh USER root diff --git a/src/ci/docker/dist-x86_64-freebsd/Dockerfile b/src/ci/docker/dist-x86_64-freebsd/Dockerfile index 0ac58468147..a6c4eee5e81 100644 --- a/src/ci/docker/dist-x86_64-freebsd/Dockerfile +++ b/src/ci/docker/dist-x86_64-freebsd/Dockerfile @@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libssl-dev \ pkg-config -COPY build-toolchain.sh /tmp/ +COPY dist-x86_64-freebsd/build-toolchain.sh /tmp/ RUN /tmp/build-toolchain.sh x86_64 RUN curl -OL 
https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ diff --git a/src/ci/docker/dist-x86_64-linux/Dockerfile b/src/ci/docker/dist-x86_64-linux/Dockerfile index d688bb7f8a4..e2e42836dcd 100644 --- a/src/ci/docker/dist-x86_64-linux/Dockerfile +++ b/src/ci/docker/dist-x86_64-linux/Dockerfile @@ -29,13 +29,13 @@ ENV PATH=/rustroot/bin:$PATH ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig WORKDIR /tmp -COPY shared.sh build-binutils.sh /tmp/ +COPY dist-x86_64-linux/shared.sh dist-x86_64-linux/build-binutils.sh /tmp/ # We need a build of openssl which supports SNI to download artifacts from # static.rust-lang.org. This'll be used to link into libcurl below (and used # later as well), so build a copy of OpenSSL with dynamic libraries into our # generic root. -COPY build-openssl.sh /tmp/ +COPY dist-x86_64-linux/build-openssl.sh /tmp/ RUN ./build-openssl.sh # The `curl` binary on CentOS doesn't support SNI which is needed for fetching @@ -44,7 +44,7 @@ RUN ./build-openssl.sh # # Note that we also disable a bunch of optional features of curl that we don't # really need. -COPY build-curl.sh /tmp/ +COPY dist-x86_64-linux/build-curl.sh /tmp/ RUN ./build-curl.sh # binutils < 2.22 has a bug where the 32-bit executables it generates @@ -54,26 +54,26 @@ RUN ./build-curl.sh RUN ./build-binutils.sh # Need a newer version of gcc than centos has to compile LLVM nowadays -COPY build-gcc.sh /tmp/ +COPY dist-x86_64-linux/build-gcc.sh /tmp/ RUN ./build-gcc.sh # CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+ -COPY build-python.sh /tmp/ +COPY dist-x86_64-linux/build-python.sh /tmp/ RUN ./build-python.sh # Apparently CentOS 5.5 desn't have `git` in yum, but we're gonna need it for # cloning, so download and build it here. 
-COPY build-git.sh /tmp/ +COPY dist-x86_64-linux/build-git.sh /tmp/ RUN ./build-git.sh # libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS # only has 2.6.4, so build our own -COPY build-cmake.sh /tmp/ +COPY dist-x86_64-linux/build-cmake.sh /tmp/ RUN ./build-cmake.sh # for sanitizers, we need kernel headers files newer than the ones CentOS ships # with so we install newer ones here -COPY build-headers.sh /tmp/ +COPY dist-x86_64-linux/build-headers.sh /tmp/ RUN ./build-headers.sh RUN curl -Lo /rustroot/dumb-init \ diff --git a/src/ci/docker/dist-x86_64-musl/Dockerfile b/src/ci/docker/dist-x86_64-musl/Dockerfile index 87550641bc6..2eea5ab1469 100644 --- a/src/ci/docker/dist-x86_64-musl/Dockerfile +++ b/src/ci/docker/dist-x86_64-musl/Dockerfile @@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ pkg-config WORKDIR /build/ -COPY build-musl.sh /build/ +COPY dist-x86_64-musl/build-musl.sh /build/ RUN sh /build/build-musl.sh && rm -rf /build RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ diff --git a/src/ci/docker/dist-x86_64-netbsd/Dockerfile b/src/ci/docker/dist-x86_64-netbsd/Dockerfile index b6d9c221c1c..f76e6271f4c 100644 --- a/src/ci/docker/dist-x86_64-netbsd/Dockerfile +++ b/src/ci/docker/dist-x86_64-netbsd/Dockerfile @@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools USER rustbuild WORKDIR /tmp -COPY build-netbsd-toolchain.sh /tmp/ +COPY dist-x86_64-netbsd/build-netbsd-toolchain.sh /tmp/ RUN ./build-netbsd-toolchain.sh USER root diff --git a/src/ci/docker/emscripten/Dockerfile b/src/ci/docker/emscripten/Dockerfile index 09657d2f892..0f0e5b69c32 100644 --- a/src/ci/docker/emscripten/Dockerfile +++ b/src/ci/docker/emscripten/Dockerfile @@ -24,7 +24,7 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini ENTRYPOINT ["/usr/bin/dumb-init", "--"] WORKDIR /tmp -COPY build-emscripten.sh /tmp/ +COPY emscripten/build-emscripten.sh /tmp/ RUN ./build-emscripten.sh ENV PATH=$PATH:/tmp/emsdk_portable ENV PATH=$PATH:/tmp/emsdk_portable/clang/tag-e1.37.10/build_tag-e1.37.10_32/bin diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index 6abbf0530af..bb9a860574d 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -26,7 +26,8 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then build \ --rm \ -t rust-ci \ - "$docker_dir/$image" + -f "$docker_dir/$image/Dockerfile" \ + "$docker_dir" elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then if [ -n "$TRAVIS_OS_NAME" ]; then echo Cannot run disabled images on travis! diff --git a/src/ci/docker/scripts/android-ndk.sh b/src/ci/docker/scripts/android-ndk.sh new file mode 100644 index 00000000000..c3d83c087e5 --- /dev/null +++ b/src/ci/docker/scripts/android-ndk.sh @@ -0,0 +1,40 @@ +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. 
+ +set -ex + +URL=https://dl.google.com/android/repository + +download_ndk() { + mkdir -p /android/ndk + cd /android/ndk + curl -O $URL/$1 + unzip -q $1 + rm $1 + mv android-ndk-* ndk +} + +make_standalone_toolchain() { + # See https://developer.android.com/ndk/guides/standalone_toolchain.htm + python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \ + --install-dir /android/ndk/$1-$2 \ + --arch $1 \ + --api $2 +} + +remove_ndk() { + rm -rf /android/ndk/ndk +} + +download_and_make_toolchain() { + download_ndk $1 && \ + make_standalone_toolchain $2 $3 && \ + remove_ndk +} diff --git a/src/ci/docker/scripts/android-sdk.sh b/src/ci/docker/scripts/android-sdk.sh new file mode 100644 index 00000000000..7d8110efede --- /dev/null +++ b/src/ci/docker/scripts/android-sdk.sh @@ -0,0 +1,53 @@ +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +URL=https://dl.google.com/android/repository + +download_sdk() { + mkdir -p /android/sdk + cd /android/sdk + curl -O $URL/$1 + unzip -q $1 + rm -rf $1 +} + +download_sysimage() { + # See https://developer.android.com/studio/tools/help/android.html + abi=$1 + api=$2 + + filter="platform-tools,android-$api" + filter="$filter,sys-img-$abi-android-$api" + + # Keep printing yes to accept the licenses + while true; do echo yes; sleep 10; done | \ + /android/sdk/tools/android update sdk -a --no-ui \ + --filter "$filter" +} + +create_avd() { + # See https://developer.android.com/studio/tools/help/android.html + abi=$1 + api=$2 + + echo no | \ + /android/sdk/tools/android create avd \ + --name $abi-$api \ + --target android-$api \ + --abi $abi +} + +download_and_create_avd() { + download_sdk $1 + download_sysimage $2 $3 + create_avd $2 $3 +} diff --git a/src/ci/docker/scripts/android-start-emulator.sh b/src/ci/docker/scripts/android-start-emulator.sh new file mode 100755 index 00000000000..cd3369d5ead --- /dev/null +++ b/src/ci/docker/scripts/android-start-emulator.sh @@ -0,0 +1,25 @@ +#!/bin/sh +# Copyright 2016 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +# Setting SHELL to a file instead on a symlink helps android +# emulator identify the system +export SHELL=/bin/bash + +# Using the default qemu2 engine makes time::tests::since_epoch fails because +# the emulator date is set to unix epoch (in armeabi-v7a-18 image). Using +# classic engine the emulator starts with the current date and the tests run +# fine. If another image is used, this need to be evaluated again. +nohup nohup emulator @armeabi-v7a-18 \ + -engine classic -no-window -partition-size 2047 0<&- &>/dev/null & + +exec "$@" diff --git a/src/ci/docker/scripts/dumb-init.sh b/src/ci/docker/scripts/dumb-init.sh new file mode 100644 index 00000000000..839c3907992 --- /dev/null +++ b/src/ci/docker/scripts/dumb-init.sh @@ -0,0 +1,15 @@ +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. 
+# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb +dpkg -i dumb-init_*.deb +rm dumb-init_*.deb diff --git a/src/ci/docker/scripts/sccache.sh b/src/ci/docker/scripts/sccache.sh new file mode 100644 index 00000000000..7a2befaf671 --- /dev/null +++ b/src/ci/docker/scripts/sccache.sh @@ -0,0 +1,16 @@ +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +curl -o /usr/local/bin/sccache \ + https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl + +chmod +x /usr/local/bin/sccache diff --git a/src/doc/book b/src/doc/book index ad7de198561..97422981c53 160000 --- a/src/doc/book +++ b/src/doc/book @@ -1 +1 @@ -Subproject commit ad7de198561b3a12217ea2da76d796d9c7fc0ed3 +Subproject commit 97422981c53a00f7c3d6584d363443117f179fff diff --git a/src/doc/reference b/src/doc/reference index 6b0de90d87d..f7a108dfa9e 160000 --- a/src/doc/reference +++ b/src/doc/reference @@ -1 +1 @@ -Subproject commit 6b0de90d87dda15e323ef24cdf7ed873ac5cf4d3 +Subproject commit f7a108dfa9e90b07821700c55d01f08a9adf005c diff --git a/src/doc/unstable-book/src/SUMMARY.md b/src/doc/unstable-book/src/SUMMARY.md index 8f26e4d36cd..39f80059148 100644 --- a/src/doc/unstable-book/src/SUMMARY.md +++ b/src/doc/unstable-book/src/SUMMARY.md @@ -217,6 +217,7 @@ - [unique](library-features/unique.md) - [unsize](library-features/unsize.md) - [utf8_error_error_len](library-features/utf8-error-error-len.md) + - [vec_resize_default](library-features/vec-resize-default.md) - [vec_remove_item](library-features/vec-remove-item.md) - [windows_c](library-features/windows-c.md) - [windows_handle](library-features/windows-handle.md) diff --git a/src/doc/unstable-book/src/language-features/advanced-slice-patterns.md b/src/doc/unstable-book/src/language-features/advanced-slice-patterns.md index 30d22ca8208..e8256469b14 100644 --- a/src/doc/unstable-book/src/language-features/advanced-slice-patterns.md +++ b/src/doc/unstable-book/src/language-features/advanced-slice-patterns.md @@ -4,7 +4,7 @@ The tracking issue for this feature is: [#23121] [#23121]: https://github.com/rust-lang/rust/issues/23121 -See also [`slice_patterns`](slice-patterns.html). +See also [`slice_patterns`](language-features/slice-patterns.html). ------------------------ diff --git a/src/doc/unstable-book/src/language-features/asm.md b/src/doc/unstable-book/src/language-features/asm.md index 5e68be633e7..8deb8f46256 100644 --- a/src/doc/unstable-book/src/language-features/asm.md +++ b/src/doc/unstable-book/src/language-features/asm.md @@ -190,4 +190,4 @@ constraints, etc. [llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions If you need more power and don't mind losing some of the niceties of -`asm!`, check out [global_asm](global_asm.html). +`asm!`, check out [global_asm](language-features/global_asm.html). 
diff --git a/src/doc/unstable-book/src/language-features/box-patterns.md b/src/doc/unstable-book/src/language-features/box-patterns.md index 86346364a71..0896627acae 100644 --- a/src/doc/unstable-book/src/language-features/box-patterns.md +++ b/src/doc/unstable-book/src/language-features/box-patterns.md @@ -4,7 +4,7 @@ The tracking issue for this feature is: [#29641] [#29641]: https://github.com/rust-lang/rust/issues/29641 -See also [`box_syntax`](box-syntax.html) +See also [`box_syntax`](language-features/box-syntax.html) ------------------------ diff --git a/src/doc/unstable-book/src/language-features/box-syntax.md b/src/doc/unstable-book/src/language-features/box-syntax.md index 47aade0d045..50e59231a4d 100644 --- a/src/doc/unstable-book/src/language-features/box-syntax.md +++ b/src/doc/unstable-book/src/language-features/box-syntax.md @@ -4,7 +4,7 @@ The tracking issue for this feature is: [#27779] [#27779]: https://github.com/rust-lang/rust/issues/27779 -See also [`box_patterns`](box-patterns.html) +See also [`box_patterns`](language-features/box-patterns.html) ------------------------ diff --git a/src/doc/unstable-book/src/language-features/global_asm.md b/src/doc/unstable-book/src/language-features/global_asm.md index 44921aa309f..f1ef74a63b5 100644 --- a/src/doc/unstable-book/src/language-features/global_asm.md +++ b/src/doc/unstable-book/src/language-features/global_asm.md @@ -74,5 +74,5 @@ usages and placed the larger, single usage in the crate root. If you don't need quite as much power and flexibility as `global_asm!` provides, and you don't mind restricting your inline -assembly to `fn` bodies only, you might try the [asm](asm.html) -feature instead. +assembly to `fn` bodies only, you might try the +[asm](language-features/asm.html) feature instead. diff --git a/src/doc/unstable-book/src/language-features/loop-break-value.md b/src/doc/unstable-book/src/language-features/loop-break-value.md index 54d6e62ce4c..e8fefe3b733 100644 --- a/src/doc/unstable-book/src/language-features/loop-break-value.md +++ b/src/doc/unstable-book/src/language-features/loop-break-value.md @@ -4,7 +4,80 @@ The tracking issue for this feature is: [#37339] [#37339]: https://github.com/rust-lang/rust/issues/37339 +Documentation to be appended to section G of the book. + ------------------------ +### Loops as expressions + +Like most things in Rust, loops are expressions, and have a value; normally `()` unless the loop +never exits. +A `loop` can instead evaluate to a useful value via *break with value*: + +```rust +#![feature(loop_break_value)] + +// Find the first square number over 1000: +let mut n = 1; +let square = loop { + if n * n > 1000 { + break n * n; + } + n += 1; +}; +``` + +The evaluation type may be specified externally: + +```rust +#![feature(loop_break_value)] + +// Declare that value returned is unsigned 64-bit: +let n: u64 = loop { + break 1; +}; +``` + +It is an error if types do not agree, either between a "break" value and an external requirement, +or between multiple "break" values: + +```no_compile +#![feature(loop_break_value)] + +loop { + if true { + break 1u32; + } else { + break 0u8; // error: types do not agree + } +}; + +let n: i32 = loop { + break 0u32; // error: type does not agree with external requirement +}; +``` + +#### Break: label, value + +Four forms of `break` are available, where EXPR is some expression which evaluates to a value: + +1. `break;` +2. `break 'label;` +3. `break EXPR;` +4. 
`break 'label EXPR;` + +When no value is given, the value `()` is assumed, thus `break;` is equivalent to `break ();`. + +Using a label allows returning a value from an inner loop: +```rust +#![feature(loop_break_value)] +let result = 'outer: loop { + for n in 1..10 { + if n > 4 { + break 'outer n; + } + } +}; +``` diff --git a/src/doc/unstable-book/src/language-features/plugin-registrar.md b/src/doc/unstable-book/src/language-features/plugin-registrar.md index ca3738bd93f..b16e2ac2d22 100644 --- a/src/doc/unstable-book/src/language-features/plugin-registrar.md +++ b/src/doc/unstable-book/src/language-features/plugin-registrar.md @@ -8,6 +8,6 @@ This feature is part of "compiler plugins." It will often be used with the [`plugin`] and `rustc_private` features as well. For more details, see their docs. -[`plugin`]: plugin.html +[`plugin`]: language-features/plugin.html ------------------------ diff --git a/src/doc/unstable-book/src/language-features/plugin.md b/src/doc/unstable-book/src/language-features/plugin.md index 3a1872e18dd..4b8603e3c44 100644 --- a/src/doc/unstable-book/src/language-features/plugin.md +++ b/src/doc/unstable-book/src/language-features/plugin.md @@ -8,7 +8,7 @@ The tracking issue for this feature is: [#29597] This feature is part of "compiler plugins." It will often be used with the [`plugin_registrar`] and `rustc_private` features. -[`plugin_registrar`]: plugin-registrar.html +[`plugin_registrar`]: language-features/plugin-registrar.html ------------------------ diff --git a/src/doc/unstable-book/src/language-features/proc-macro.md b/src/doc/unstable-book/src/language-features/proc-macro.md index f8b53bd5a2f..1bd8c41629e 100644 --- a/src/doc/unstable-book/src/language-features/proc-macro.md +++ b/src/doc/unstable-book/src/language-features/proc-macro.md @@ -6,5 +6,236 @@ The tracking issue for this feature is: [#38356] ------------------------ +This feature flag guards the new procedural macro features as laid out by [RFC 1566], which alongside the now-stable +[custom derives], provide stabilizable alternatives to the compiler plugin API (which requires the use of +perma-unstable internal APIs) for programmatically modifying Rust code at compile-time. +The two new procedural macro kinds are: + +* Function-like procedural macros which are invoked like regular declarative macros, and: +* Attribute-like procedural macros which can be applied to any item which built-in attributes can +be applied to, and which can take arguments in their invocation as well. + +Additionally, this feature flag implicitly enables the [`use_extern_macros`](language-features/use-extern-macros.html) feature, +which allows macros to be imported like any other item with `use` statements, as compared to +applying `#[macro_use]` to an `extern crate` declaration. It is important to note that procedural macros may +**only** be imported in this manner, and will throw an error otherwise. + +You **must** declare the `proc_macro` feature in both the crate declaring these new procedural macro kinds as well as +in any crates that use them. + +### Common Concepts + +As with custom derives, procedural macros may only be declared in crates of the `proc-macro` type, and must be public +functions. No other public items may be declared in `proc-macro` crates, but private items are fine. + +To declare your crate as a `proc-macro` crate, simply add: + +```toml +[lib] +proc-macro = true +``` + +to your `Cargo.toml`. 
+ +Unlike custom derives, however, the name of the function implementing the procedural macro is used directly as the +procedural macro's name, so choose carefully. + +Additionally, both new kinds of procedural macros return a `TokenStream` which *wholly* replaces the original +invocation and its input. + +#### Importing + +As referenced above, the new procedural macros are not meant to be imported via `#[macro_use]` and will throw an +error if they are. Instead, they are meant to be imported like any other item in Rust, with `use` statements: + +```rust,ignore +#![feature(proc_macro)] + +// Where `my_proc_macros` is some crate of type `proc_macro` +extern crate my_proc_macros; + +// And declares a `#[proc_macro] pub fn my_bang_macro()` at its root. +use my_proc_macros::my_bang_macro; + +fn main() { + println!("{}", my_bang_macro!()); +} +``` + +#### Error Reporting + +Any panics in a procedural macro implementation will be caught by the compiler and turned into an error message pointing +to the problematic invocation. Thus, it is important to make your panic messages as informative as possible: use +`Option::expect` instead of `Option::unwrap` and `Result::expect` instead of `Result::unwrap`, and inform the user of +the error condition as unambiguously as you can. + +#### `TokenStream` + +The `proc_macro::TokenStream` type is hardcoded into the signatures of procedural macro functions for both input and +output. It is a wrapper around the compiler's internal representation for a given chunk of Rust code. + +### Function-like Procedural Macros + +These are procedural macros that are invoked like regular declarative macros. They are declared as public functions in +crates of the `proc_macro` type and using the `#[proc_macro]` attribute. The name of the declared function becomes the +name of the macro as it is to be imported and used. The function must be of the kind `fn(TokenStream) -> TokenStream` +where the sole argument is the input to the macro and the return type is the macro's output. + +This kind of macro can expand to anything that is valid for the context it is invoked in, including expressions and +statements, as well as items. + +**Note**: invocations of this kind of macro require a wrapping `[]`, `{}` or `()` like regular macros, but these do not +appear in the input, only the tokens between them. The tokens between the braces do not need to be valid Rust syntax. + +my_macro_crate/src/lib.rs + +```rust,ignore +#![feature(proc_macro)] + +// This is always necessary to get the `TokenStream` typedef. +extern crate proc_macro; + +use proc_macro::TokenStream; + +#[proc_macro] +pub fn say_hello(_input: TokenStream) -> TokenStream { + // This macro will accept any input because it ignores it. + // To enforce correctness in macros which don't take input, + // you may want to add `assert!(_input.to_string().is_empty());`. + "println!(\"Hello, world!\")".parse().unwrap() +} +``` + +my_macro_user/Cargo.toml + +```toml +[dependencies] +my_macro_crate = { path = "" } +``` + +my_macro_user/src/lib.rs + +```rust,ignore +#![feature(proc_macro)] + +extern crate my_macro_crate; + +use my_macro_crate::say_hello; + +fn main() { + say_hello!(); +} +``` + +As expected, this prints `Hello, world!`. + +### Attribute-like Procedural Macros + +These are arguably the most powerful flavor of procedural macro as they can be applied anywhere attributes are allowed. + +They are declared as public functions in crates of the `proc-macro` type, using the `#[proc_macro_attribute]` attribute. 
+The name of the function becomes the name of the attribute as it is to be imported and used. The function must be of the +kind `fn(TokenStream, TokenStream) -> TokenStream` where: + +The first argument represents any metadata for the attribute (see [the reference chapter on attributes][refr-attr]). +Only the metadata itself will appear in this argument, for example: + + * `#[my_macro]` will get an empty string. + * `#[my_macro = "string"]` will get `= "string"`. + * `#[my_macro(ident)]` will get `(ident)`. + * etc. + +The second argument is the item that the attribute is applied to. It can be a function, a type definition, +an impl block, an `extern` block, or a module—attribute invocations can take the inner form (`#![my_attr]`) +or outer form (`#[my_attr]`). + +The return type is the output of the macro which *wholly* replaces the item it was applied to. Thus, if your intention +is to merely modify an item, it *must* be copied to the output. The output must be an item; expressions, statements +and bare blocks are not allowed. + +There is no restriction on how many items an attribute-like procedural macro can emit as long as they are valid in +the given context. + +my_macro_crate/src/lib.rs + +```rust,ignore +#![feature(proc_macro)] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +/// Adds a `/// ### Panics` docstring to the end of the input's documentation +/// +/// Does not assert that its receiver is a function or method. +#[proc_macro_attribute] +pub fn panics_note(args: TokenStream, input: TokenStream) -> TokenStream { + let args = args.to_string(); + let mut input = input.to_string(); + + assert!(args.starts_with("= \""), "`#[panics_note]` requires an argument of the form \ + `#[panics_note = \"panic note here\"]`"); + + // Get just the bare note string + let panics_note = args.trim_matches(&['=', ' ', '"'][..]); + + // The input will include all docstrings regardless of where the attribute is placed, + // so we need to find the last index before the start of the item + let insert_idx = idx_after_last_docstring(&input); + + // And insert our `### Panics` note there so it always appears at the end of an item's docs + input.insert_str(insert_idx, &format!("/// # Panics \n/// {}\n", panics_note)); + + input.parse().unwrap() +} + +// `proc-macro` crates can contain any kind of private item still +fn idx_after_last_docstring(input: &str) -> usize { + // Skip docstring lines to find the start of the item proper + input.lines().skip_while(|line| line.trim_left().starts_with("///")).next() + // Find the index of the first non-docstring line in the input + // Note: assumes this exact line is unique in the input + .and_then(|line_after| input.find(line_after)) + // No docstrings in the input + .unwrap_or(0) +} +``` + +my_macro_user/Cargo.toml + +```toml +[dependencies] +my_macro_crate = { path = "" } +``` + +my_macro_user/src/lib.rs + +```rust,ignore +#![feature(proc_macro)] + +extern crate my_macro_crate; + +use my_macro_crate::panics_note; + +/// Do the `foo` thing. +#[panics_note = "Always."] +pub fn foo() { + panic!() +} +``` + +Then the rendered documentation for `pub fn foo` will look like this: + +> `pub fn foo()` +> +> ---- +> Do the `foo` thing. +> # Panics +> Always. 
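
As a complement to the `panics_note` example, here is a minimal, hypothetical sketch (the `show_streams` name and crate layout are assumptions, not part of the original text) of an attribute macro that only prints the two token streams it receives and passes the item through unchanged. It can be handy for seeing exactly which metadata string, per the list above, a given invocation produces.

```rust,ignore
#![feature(proc_macro)]

extern crate proc_macro;

use proc_macro::TokenStream;

/// Prints the attribute's metadata and the item it decorates at compile
/// time, then returns the item unchanged.
#[proc_macro_attribute]
pub fn show_streams(args: TokenStream, input: TokenStream) -> TokenStream {
    // For `#[show_streams]` this prints an empty string,
    // for `#[show_streams(ident)]` it prints `(ident)`,
    // and for `#[show_streams = "note"]` it prints `= "note"`,
    // matching the forms described above.
    println!("args: {}", args.to_string());
    println!("item: {}", input.to_string());

    // The output wholly replaces the item, so return it unmodified
    // to keep the annotated code as-is.
    input
}
```

Applied as `#[show_streams]` to any item in a crate that imports it (with the `proc_macro` feature enabled), both streams are printed during compilation while the annotated item is left untouched.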
+ +[RFC 1566]: https://github.com/rust-lang/rfcs/blob/master/text/1566-proc-macros.md +[custom derives]: https://doc.rust-lang.org/book/procedural-macros.html +[rust-lang/rust#41430]: https://github.com/rust-lang/rust/issues/41430 +[refr-attr]: https://doc.rust-lang.org/reference/attributes.html diff --git a/src/doc/unstable-book/src/language-features/slice-patterns.md b/src/doc/unstable-book/src/language-features/slice-patterns.md index 1e9e1eaafda..69857297582 100644 --- a/src/doc/unstable-book/src/language-features/slice-patterns.md +++ b/src/doc/unstable-book/src/language-features/slice-patterns.md @@ -4,7 +4,8 @@ The tracking issue for this feature is: [#23121] [#23121]: https://github.com/rust-lang/rust/issues/23121 -See also [`advanced_slice_patterns`](advanced-slice-patterns.html). +See also +[`advanced_slice_patterns`](language-features/advanced-slice-patterns.html). ------------------------ diff --git a/src/doc/unstable-book/src/library-features/alloc-jemalloc.md b/src/doc/unstable-book/src/library-features/alloc-jemalloc.md index 9bffa2ff99b..18ff838dd32 100644 --- a/src/doc/unstable-book/src/library-features/alloc-jemalloc.md +++ b/src/doc/unstable-book/src/library-features/alloc-jemalloc.md @@ -4,7 +4,7 @@ The tracking issue for this feature is: [#33082] [#33082]: https://github.com/rust-lang/rust/issues/33082 -See also [`alloc_system`](alloc-system.html). +See also [`alloc_system`](library-features/alloc-system.html). ------------------------ diff --git a/src/doc/unstable-book/src/library-features/alloc-system.md b/src/doc/unstable-book/src/library-features/alloc-system.md index 6fa89179d8e..1d261db6ba1 100644 --- a/src/doc/unstable-book/src/library-features/alloc-system.md +++ b/src/doc/unstable-book/src/library-features/alloc-system.md @@ -4,7 +4,7 @@ The tracking issue for this feature is: [#33082] [#33082]: https://github.com/rust-lang/rust/issues/33082 -See also [`alloc_jemalloc`](alloc-jemalloc.html). +See also [`alloc_jemalloc`](library-features/alloc-jemalloc.html). ------------------------ diff --git a/src/doc/unstable-book/src/library-features/vec-resize-default.md b/src/doc/unstable-book/src/library-features/vec-resize-default.md new file mode 100644 index 00000000000..5803d3215a5 --- /dev/null +++ b/src/doc/unstable-book/src/library-features/vec-resize-default.md @@ -0,0 +1,7 @@ +# `vec_resize_default` + +The tracking issue for this feature is: [#41758] + +[#41758]: https://github.com/rust-lang/rust/issues/41758 + +------------------------ diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index 7ec5c29de6b..1cf713290d8 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -1220,11 +1220,14 @@ pub fn split_off(&mut self, at: usize) -> Self { } impl Vec { - /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`. + /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. /// - /// If `new_len` is greater than `len()`, the `Vec` is extended by the + /// If `new_len` is greater than `len`, the `Vec` is extended by the /// difference, with each additional slot filled with `value`. - /// If `new_len` is less than `len()`, the `Vec` is simply truncated. + /// If `new_len` is less than `len`, the `Vec` is simply truncated. + /// + /// This method requires `Clone` to clone the passed value. If you'd + /// rather create a value with `Default` instead, see [`resize_default`]. 
/// /// # Examples /// @@ -1237,19 +1240,100 @@ impl Vec { /// vec.resize(2, 0); /// assert_eq!(vec, [1, 2]); /// ``` + /// + /// [`resize_default`]: #method.resize_default #[stable(feature = "vec_resize", since = "1.5.0")] pub fn resize(&mut self, new_len: usize, value: T) { let len = self.len(); if new_len > len { - self.extend_with_element(new_len - len, value); + self.extend_with(new_len - len, ExtendElement(value)) + } else { + self.truncate(new_len); + } + } + + /// Clones and appends all elements in a slice to the `Vec`. + /// + /// Iterates over the slice `other`, clones each element, and then appends + /// it to this `Vec`. The `other` vector is traversed in-order. + /// + /// Note that this function is same as `extend` except that it is + /// specialized to work with slices instead. If and when Rust gets + /// specialization this function will likely be deprecated (but still + /// available). + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1]; + /// vec.extend_from_slice(&[2, 3, 4]); + /// assert_eq!(vec, [1, 2, 3, 4]); + /// ``` + #[stable(feature = "vec_extend_from_slice", since = "1.6.0")] + pub fn extend_from_slice(&mut self, other: &[T]) { + self.spec_extend(other.iter()) + } +} + +impl Vec { + /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. + /// + /// If `new_len` is greater than `len`, the `Vec` is extended by the + /// difference, with each additional slot filled with `Default::default()`. + /// If `new_len` is less than `len`, the `Vec` is simply truncated. + /// + /// This method uses `Default` to create new values on every push. If + /// you'd rather `Clone` a given value, use [`resize`]. + /// + /// + /// # Examples + /// + /// ``` + /// #![feature(vec_resize_default)] + /// + /// let mut vec = vec![1, 2, 3]; + /// vec.resize_default(5); + /// assert_eq!(vec, [1, 2, 3, 0, 0]); + /// + /// let mut vec = vec![1, 2, 3, 4]; + /// vec.resize_default(2); + /// assert_eq!(vec, [1, 2]); + /// ``` + /// + /// [`resize`]: #method.resize + #[unstable(feature = "vec_resize_default", issue = "41758")] + pub fn resize_default(&mut self, new_len: usize) { + let len = self.len(); + + if new_len > len { + self.extend_with(new_len - len, ExtendDefault); } else { self.truncate(new_len); } } +} - /// Extend the vector by `n` additional clones of `value`. - fn extend_with_element(&mut self, n: usize, value: T) { +// This code generalises `extend_with_{element,default}`. +trait ExtendWith { + fn next(&self) -> T; + fn last(self) -> T; +} + +struct ExtendElement(T); +impl ExtendWith for ExtendElement { + fn next(&self) -> T { self.0.clone() } + fn last(self) -> T { self.0 } +} + +struct ExtendDefault; +impl ExtendWith for ExtendDefault { + fn next(&self) -> T { Default::default() } + fn last(self) -> T { Default::default() } +} +impl Vec { + /// Extend the vector by `n` values, using the given generator. 
+ fn extend_with>(&mut self, n: usize, value: E) { self.reserve(n); unsafe { @@ -1261,43 +1345,21 @@ fn extend_with_element(&mut self, n: usize, value: T) { // Write all elements except the last one for _ in 1..n { - ptr::write(ptr, value.clone()); + ptr::write(ptr, value.next()); ptr = ptr.offset(1); - // Increment the length in every step in case clone() panics + // Increment the length in every step in case next() panics local_len.increment_len(1); } if n > 0 { // We can write the last element directly without cloning needlessly - ptr::write(ptr, value); + ptr::write(ptr, value.last()); local_len.increment_len(1); } // len set by scope guard } } - - /// Clones and appends all elements in a slice to the `Vec`. - /// - /// Iterates over the slice `other`, clones each element, and then appends - /// it to this `Vec`. The `other` vector is traversed in-order. - /// - /// Note that this function is same as `extend` except that it is - /// specialized to work with slices instead. If and when Rust gets - /// specialization this function will likely be deprecated (but still - /// available). - /// - /// # Examples - /// - /// ``` - /// let mut vec = vec![1]; - /// vec.extend_from_slice(&[2, 3, 4]); - /// assert_eq!(vec, [1, 2, 3, 4]); - /// ``` - #[stable(feature = "vec_extend_from_slice", since = "1.6.0")] - pub fn extend_from_slice(&mut self, other: &[T]) { - self.spec_extend(other.iter()) - } } // Set the length of the vec when the `SetLenOnDrop` value goes out of scope. @@ -1389,7 +1451,7 @@ trait SpecFromElem: Sized { impl SpecFromElem for T { default fn from_elem(elem: Self, n: usize) -> Vec { let mut v = Vec::with_capacity(n); - v.extend_with_element(n, elem); + v.extend_with(n, ExtendElement(elem)); v } } @@ -1424,7 +1486,7 @@ fn from_elem(elem: $t, n: usize) -> Vec<$t> { } } let mut v = Vec::with_capacity(n); - v.extend_with_element(n, elem); + v.extend_with(n, ExtendElement(elem)); v } } diff --git a/src/libcore/fmt/float.rs b/src/libcore/fmt/float.rs index 87def375b20..4825c2aa132 100644 --- a/src/libcore/fmt/float.rs +++ b/src/libcore/fmt/float.rs @@ -21,7 +21,7 @@ fn float_to_decimal_common_exact(fmt: &mut Formatter, num: &T, { unsafe { let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64 - let mut parts: [flt2dec::Part; 5] = mem::uninitialized(); + let mut parts: [flt2dec::Part; 4] = mem::uninitialized(); let formatted = flt2dec::to_exact_fixed_str(flt2dec::strategy::grisu::format_exact, *num, sign, precision, false, &mut buf, &mut parts); @@ -39,7 +39,7 @@ fn float_to_decimal_common_shortest(fmt: &mut Formatter, unsafe { // enough for f32 and f64 let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized(); - let mut parts: [flt2dec::Part; 5] = mem::uninitialized(); + let mut parts: [flt2dec::Part; 4] = mem::uninitialized(); let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest, *num, sign, 0, false, &mut buf, &mut parts); fmt.pad_formatted_parts(&formatted) @@ -75,7 +75,7 @@ fn float_to_exponential_common_exact(fmt: &mut Formatter, num: &T, { unsafe { let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64 - let mut parts: [flt2dec::Part; 7] = mem::uninitialized(); + let mut parts: [flt2dec::Part; 6] = mem::uninitialized(); let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact, *num, sign, precision, upper, &mut buf, &mut parts); @@ -94,7 +94,7 @@ fn float_to_exponential_common_shortest(fmt: &mut Formatter, unsafe { // enough for f32 and f64 let mut buf: [u8; 
flt2dec::MAX_SIG_DIGITS] = mem::uninitialized(); - let mut parts: [flt2dec::Part; 7] = mem::uninitialized(); + let mut parts: [flt2dec::Part; 6] = mem::uninitialized(); let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest, *num, sign, (0, 0), upper, &mut buf, &mut parts); diff --git a/src/libcore/num/flt2dec/mod.rs b/src/libcore/num/flt2dec/mod.rs index 5123e42df61..74b9e7bf37d 100644 --- a/src/libcore/num/flt2dec/mod.rs +++ b/src/libcore/num/flt2dec/mod.rs @@ -410,8 +410,8 @@ fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static /// it will only print given digits and nothing else. /// /// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long. -/// There should be at least 5 parts available, due to the worst case like -/// `[+][0.][0000][45][0000]` with `frac_digits = 10`. +/// There should be at least 4 parts available, due to the worst case like +/// `[+][0.][0000][2][0000]` with `frac_digits = 10`. pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T, sign: Sign, frac_digits: usize, _upper: bool, buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a> @@ -465,8 +465,8 @@ pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T, /// cannot be in this range, avoiding any confusion. /// /// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long. -/// There should be at least 7 parts available, due to the worst case like -/// `[+][1][.][2345][e][-][67]`. +/// There should be at least 6 parts available, due to the worst case like +/// `[+][1][.][2345][e][-][6]`. pub fn to_shortest_exp_str<'a, T, F>(mut format_shortest: F, v: T, sign: Sign, dec_bounds: (i16, i16), upper: bool, buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a> @@ -544,8 +544,8 @@ fn estimate_max_buf_len(exp: i16) -> usize { /// The byte buffer should be at least `ndigits` bytes long unless `ndigits` is /// so large that only the fixed number of digits will be ever written. /// (The tipping point for `f64` is about 800, so 1000 bytes should be enough.) -/// There should be at least 7 parts available, due to the worst case like -/// `[+][1][.][2345][e][-][67]`. +/// There should be at least 6 parts available, due to the worst case like +/// `[+][1][.][2345][e][-][6]`. pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T, sign: Sign, ndigits: usize, upper: bool, buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a> @@ -600,8 +600,8 @@ pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T, /// The byte buffer should be enough for the output unless `frac_digits` is /// so large that only the fixed number of digits will be ever written. /// (The tipping point for `f64` is about 800, and 1000 bytes should be enough.) -/// There should be at least 5 parts available, due to the worst case like -/// `[+][0.][0000][45][0000]` with `frac_digits = 10`. +/// There should be at least 4 parts available, due to the worst case like +/// `[+][0.][0000][2][0000]` with `frac_digits = 10`. pub fn to_exact_fixed_str<'a, T, F>(mut format_exact: F, v: T, sign: Sign, frac_digits: usize, _upper: bool, buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a> diff --git a/src/libcore/num/wrapping.rs b/src/libcore/num/wrapping.rs index 013f02685ba..6cc374b13b7 100644 --- a/src/libcore/num/wrapping.rs +++ b/src/libcore/num/wrapping.rs @@ -12,6 +12,7 @@ use ops::*; +#[allow(unused_macros)] macro_rules! 
sh_impl_signed { ($t:ident, $f:ident) => ( #[stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libcore/ops.rs b/src/libcore/ops.rs index 391b606f613..fc3af096b18 100644 --- a/src/libcore/ops.rs +++ b/src/libcore/ops.rs @@ -235,6 +235,42 @@ pub trait Drop { /// } /// ``` /// +/// Here is an example of the same `Point` struct implementing the `Add` trait +/// using generics. +/// +/// ``` +/// use std::ops::Add; +/// +/// #[derive(Debug)] +/// struct Point { +/// x: T, +/// y: T, +/// } +/// +/// // Notice that the implementation uses the `Output` associated type +/// impl> Add for Point { +/// type Output = Point; +/// +/// fn add(self, other: Point) -> Point { +/// Point { +/// x: self.x + other.x, +/// y: self.y + other.y, +/// } +/// } +/// } +/// +/// impl PartialEq for Point { +/// fn eq(&self, other: &Self) -> bool { +/// self.x == other.x && self.y == other.y +/// } +/// } +/// +/// fn main() { +/// assert_eq!(Point { x: 1, y: 0 } + Point { x: 2, y: 3 }, +/// Point { x: 3, y: 3 }); +/// } +/// ``` +/// /// Note that `RHS = Self` by default, but this is not mandatory. For example, /// [std::time::SystemTime] implements `Add`, which permits /// operations of the form `SystemTime = SystemTime + Duration`. @@ -763,6 +799,7 @@ macro_rules! neg_impl_numeric { ($($t:ty)*) => { neg_impl_core!{ x => -x, $($t)*} } } +#[allow(unused_macros)] macro_rules! neg_impl_unsigned { ($($t:ty)*) => { neg_impl_core!{ x => { diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index 5f189d473be..f89f86e18a1 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -56,7 +56,6 @@ /// invalid pointers, types, and double drops. #[stable(feature = "drop_in_place", since = "1.8.0")] #[lang="drop_in_place"] -#[inline] #[allow(unconditional_recursion)] pub unsafe fn drop_in_place(to_drop: *mut T) { // Code here does not matter - this is replaced by the diff --git a/src/libproc_macro_plugin/quote.rs b/src/libproc_macro_plugin/quote.rs index ad71584b61a..09675564291 100644 --- a/src/libproc_macro_plugin/quote.rs +++ b/src/libproc_macro_plugin/quote.rs @@ -133,6 +133,14 @@ fn quote(&self) -> TokenStream { } } +impl Quote for usize { + fn quote(&self) -> TokenStream { + let integer_symbol = Symbol::intern(&self.to_string()); + TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None)) + .into() + } +} + impl Quote for Ident { fn quote(&self) -> TokenStream { // FIXME(jseyfried) quote hygiene @@ -193,15 +201,17 @@ macro_rules! gen_match { impl Quote for Lit { fn quote(&self) -> TokenStream { macro_rules! 
gen_match { - ($($i:ident),*) => { + ($($i:ident),*; $($raw:ident),*) => { match *self { $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )* - _ => panic!("Unsupported literal"), + $( Lit::$raw(lit, n) => { + quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n))) + })* } } } - gen_match!(Byte, Char, Float, Str_, Integer, ByteStr) + gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw) } } diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index fa217acd9f9..9d64f511914 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -13,12 +13,12 @@ arena = { path = "../libarena" } fmt_macros = { path = "../libfmt_macros" } graphviz = { path = "../libgraphviz" } log = "0.3" +owning_ref = "0.3.3" rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } rustc_errors = { path = "../librustc_errors" } -rustc_llvm = { path = "../librustc_llvm" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs index a8ad49c6582..c1c195852f9 100644 --- a/src/librustc/cfg/construct.rs +++ b/src/librustc/cfg/construct.rs @@ -10,6 +10,7 @@ use rustc_data_structures::graph; use cfg::*; +use middle::region::CodeExtent; use ty::{self, TyCtxt}; use syntax::ast; use syntax::ptr::P; @@ -586,8 +587,8 @@ fn add_exiting_edge(&mut self, scope_id: ast::NodeId, to_index: CFGIndex) { let mut data = CFGEdgeData { exiting_scopes: vec![] }; - let mut scope = self.tcx.node_extent(from_expr.id); - let target_scope = self.tcx.node_extent(scope_id); + let mut scope = CodeExtent::Misc(from_expr.id); + let target_scope = CodeExtent::Misc(scope_id); let region_maps = self.tcx.region_maps(self.owner_def_id); while scope != target_scope { data.exiting_scopes.push(scope.node_id()); diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 25fc5b7a4f6..15c4469b746 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -106,6 +106,8 @@ pub enum DepNode { UsedTraitImports(D), ConstEval(D), SymbolName(D), + SpecializationGraph(D), + ObjectSafety(D), // The set of impls for a given trait. Ultimately, it would be // nice to get more fine-grained here (e.g., to include a @@ -116,6 +118,8 @@ pub enum DepNode { // than changes in the impl body. TraitImpls(D), + AllLocalTraitImpls, + // Nodes representing caches. To properly handle a true cache, we // don't use a DepTrackingMap, but rather we push a task node. 
// Otherwise the write into the map would be incorrectly @@ -262,7 +266,10 @@ pub fn map_def(&self, mut op: OP) -> Option> UsedTraitImports(ref d) => op(d).map(UsedTraitImports), ConstEval(ref d) => op(d).map(ConstEval), SymbolName(ref d) => op(d).map(SymbolName), + SpecializationGraph(ref d) => op(d).map(SpecializationGraph), + ObjectSafety(ref d) => op(d).map(ObjectSafety), TraitImpls(ref d) => op(d).map(TraitImpls), + AllLocalTraitImpls => Some(AllLocalTraitImpls), TraitItems(ref d) => op(d).map(TraitItems), ReprHints(ref d) => op(d).map(ReprHints), TraitSelect { ref trait_def_id, ref input_def_id } => { diff --git a/src/librustc/diagnostics.rs b/src/librustc/diagnostics.rs index 8ef42826fac..470dcb4bd61 100644 --- a/src/librustc/diagnostics.rs +++ b/src/librustc/diagnostics.rs @@ -409,6 +409,67 @@ impl Quux for Foo { } [iss15872]: https://github.com/rust-lang/rust/issues/15872 "##, +E0119: r##" +There are conflicting trait implementations for the same type. +Example of erroneous code: + +```compile_fail,E0119 +trait MyTrait { + fn get(&self) -> usize; +} + +impl MyTrait for T { + fn get(&self) -> usize { 0 } +} + +struct Foo { + value: usize +} + +impl MyTrait for Foo { // error: conflicting implementations of trait + // `MyTrait` for type `Foo` + fn get(&self) -> usize { self.value } +} +``` + +When looking for the implementation for the trait, the compiler finds +both the `impl MyTrait for T` where T is all types and the `impl +MyTrait for Foo`. Since a trait cannot be implemented multiple times, +this is an error. So, when you write: + +``` +trait MyTrait { + fn get(&self) -> usize; +} + +impl MyTrait for T { + fn get(&self) -> usize { 0 } +} +``` + +This makes the trait implemented on all types in the scope. So if you +try to implement it on another one after that, the implementations will +conflict. Example: + +``` +trait MyTrait { + fn get(&self) -> usize; +} + +impl MyTrait for T { + fn get(&self) -> usize { 0 } +} + +struct Foo; + +fn main() { + let f = Foo; + + f.get(); // the trait is implemented so we can use it +} +``` +"##, + E0133: r##" Unsafe code was used outside of an unsafe function or block. 
diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index c715484a934..868730edfed 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -497,7 +497,7 @@ pub fn ty_param_name(&self, id: NodeId) -> Name { } pub fn trait_impls(&self, trait_did: DefId) -> &'hir [NodeId] { - self.dep_graph.read(DepNode::TraitImpls(trait_did)); + self.dep_graph.read(DepNode::AllLocalTraitImpls); // NB: intentionally bypass `self.forest.krate()` so that we // do not trigger a read of the whole krate here @@ -505,7 +505,7 @@ pub fn trait_impls(&self, trait_did: DefId) -> &'hir [NodeId] { } pub fn trait_default_impl(&self, trait_did: DefId) -> Option { - self.dep_graph.read(DepNode::TraitImpls(trait_did)); + self.dep_graph.read(DepNode::AllLocalTraitImpls); // NB: intentionally bypass `self.forest.krate()` so that we // do not trigger a read of the whole krate here diff --git a/src/librustc/ich/fingerprint.rs b/src/librustc/ich/fingerprint.rs index e760f7efc93..ccdbab88b8b 100644 --- a/src/librustc/ich/fingerprint.rs +++ b/src/librustc/ich/fingerprint.rs @@ -94,3 +94,11 @@ fn finish(mut hasher: stable_hasher::StableHasher) -> Self { fingerprint } } + +impl stable_hasher::HashStable for Fingerprint { + fn hash_stable(&self, + _: &mut CTX, + hasher: &mut stable_hasher::StableHasher) { + ::std::hash::Hash::hash(&self.0, hasher); + } +} diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 3a6367c353c..786d1c5035d 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -16,7 +16,7 @@ use util::nodemap::NodeMap; use std::hash as std_hash; -use std::collections::{HashMap, HashSet}; +use std::collections::{HashMap, HashSet, BTreeMap}; use syntax::ast; use syntax::attr; @@ -348,3 +348,25 @@ pub fn hash_stable_nodemap<'a, 'tcx, V, W>(hcx: &mut StableHashingContext<'a, 't hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id }); } + + +pub fn hash_stable_btreemap<'a, 'tcx, K, V, SK, F, W>(hcx: &mut StableHashingContext<'a, 'tcx>, + hasher: &mut StableHasher, + map: &BTreeMap, + extract_stable_key: F) + where K: Eq + Ord, + V: HashStable>, + SK: HashStable> + Ord + Clone, + F: Fn(&mut StableHashingContext<'a, 'tcx>, &K) -> SK, + W: StableHasherResult, +{ + let mut keys: Vec<_> = map.keys() + .map(|k| (extract_stable_key(hcx, k), k)) + .collect(); + keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone()); + keys.len().hash_stable(hcx, hasher); + for (stable_key, key) in keys { + stable_key.hash_stable(hcx, hasher); + map[key].hash_stable(hcx, hasher); + } +} diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs index 52bdb5d0240..3bbac8d6a64 100644 --- a/src/librustc/ich/impls_ty.rs +++ b/src/librustc/ich/impls_ty.rs @@ -39,7 +39,7 @@ fn hash_stable(&self, } } -impl<'a, 'tcx> HashStable> for ty::RegionKind<'tcx> { +impl<'a, 'tcx> HashStable> for ty::RegionKind { fn hash_stable(&self, hcx: &mut StableHashingContext<'a, 'tcx>, hasher: &mut StableHasher) { @@ -54,7 +54,8 @@ fn hash_stable(&self, db.depth.hash_stable(hcx, hasher); i.hash_stable(hcx, hasher); } - ty::ReEarlyBound(ty::EarlyBoundRegion { index, name }) => { + ty::ReEarlyBound(ty::EarlyBoundRegion { def_id, index, name }) => { + def_id.hash_stable(hcx, hasher); index.hash_stable(hcx, hasher); name.hash_stable(hcx, hasher); } @@ -409,11 +410,6 @@ fn hash_stable(&self, Free(call_site_scope_data, decl) }); -impl_stable_hash_for!(struct ::middle::region::CallSiteScopeData { - fn_id, - body_id -}); - impl_stable_hash_for!(struct ty::DebruijnIndex { depth }); @@ 
-432,25 +428,24 @@ fn hash_stable(&self, FnPtrAddrCast }); -impl<'a, 'tcx> HashStable> for ::middle::region::CodeExtentData +impl<'a, 'tcx> HashStable> for ::middle::region::CodeExtent { fn hash_stable(&self, hcx: &mut StableHashingContext<'a, 'tcx>, hasher: &mut StableHasher) { - use middle::region::CodeExtentData; + use middle::region::CodeExtent; mem::discriminant(self).hash_stable(hcx, hasher); match *self { - CodeExtentData::Misc(node_id) | - CodeExtentData::DestructionScope(node_id) => { + CodeExtent::Misc(node_id) | + CodeExtent::DestructionScope(node_id) => { node_id.hash_stable(hcx, hasher); } - CodeExtentData::CallSiteScope { fn_id, body_id } | - CodeExtentData::ParameterScope { fn_id, body_id } => { - fn_id.hash_stable(hcx, hasher); + CodeExtent::CallSiteScope(body_id) | + CodeExtent::ParameterScope(body_id) => { body_id.hash_stable(hcx, hasher); } - CodeExtentData::Remainder(block_remainder) => { + CodeExtent::Remainder(block_remainder) => { block_remainder.hash_stable(hcx, hasher); } } @@ -466,7 +461,7 @@ fn hash_stable(&self, custom_kind }); -impl_stable_hash_for!(struct ty::FreeRegion<'tcx> { +impl_stable_hash_for!(struct ty::FreeRegion { scope, bound_region }); diff --git a/src/librustc/ich/mod.rs b/src/librustc/ich/mod.rs index d881a1cc45a..5b238090850 100644 --- a/src/librustc/ich/mod.rs +++ b/src/librustc/ich/mod.rs @@ -13,7 +13,8 @@ pub use self::fingerprint::Fingerprint; pub use self::caching_codemap_view::CachingCodemapView; pub use self::hcx::{StableHashingContext, NodeIdHashingMode, hash_stable_hashmap, - hash_stable_hashset, hash_stable_nodemap}; + hash_stable_hashset, hash_stable_nodemap, + hash_stable_btreemap}; mod fingerprint; mod caching_codemap_view; mod hcx; diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs index 82578f6aa61..aabb6aff551 100644 --- a/src/librustc/infer/combine.rs +++ b/src/librustc/infer/combine.rs @@ -39,10 +39,12 @@ use super::InferCtxt; use super::{MiscVariable, TypeTrace}; +use hir::def_id::DefId; use ty::{IntType, UintType}; use ty::{self, Ty, TyCtxt}; use ty::error::TypeError; use ty::relate::{self, Relate, RelateResult, TypeRelation}; +use ty::subst::Substs; use traits::{Obligation, PredicateObligations}; use syntax::ast; @@ -336,6 +338,23 @@ fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?)) } + fn relate_item_substs(&mut self, + item_def_id: DefId, + a_subst: &'tcx Substs<'tcx>, + b_subst: &'tcx Substs<'tcx>) + -> RelateResult<'tcx, &'tcx Substs<'tcx>> + { + if self.ambient_variance == ty::Variance::Invariant { + // Avoid fetching the variance if we are in an invariant + // context; no need, and it can induce dependency cycles + // (e.g. #41849). + relate::relate_substs(self, None, a_subst, b_subst) + } else { + let opt_variances = self.tcx().variances_of(item_def_id); + relate::relate_substs(self, Some(&opt_variances), a_subst, b_subst) + } + } + fn relate_with_variance>(&mut self, variance: ty::Variance, a: &T, @@ -423,15 +442,6 @@ fn regions(&mut self, r: ty::Region<'tcx>, r2: ty::Region<'tcx>) return Ok(r); } - // Early-bound regions should really have been substituted away before - // we get to this point. - ty::ReEarlyBound(..) => { - span_bug!( - self.span, - "Encountered early bound region when generalizing: {:?}", - r); - } - // Always make a fresh region variable for skolemized regions; // the higher-ranked decision procedures rely on this. ty::ReSkolemized(..) 
=> { } @@ -442,6 +452,7 @@ fn regions(&mut self, r: ty::Region<'tcx>, r2: ty::Region<'tcx>) ty::ReStatic | ty::ReScope(..) | ty::ReVar(..) | + ty::ReEarlyBound(..) | ty::ReFree(..) => { match self.ambient_variance { ty::Invariant => return Ok(r), diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs index 4c27bade0f7..c07b3b3c4be 100644 --- a/src/librustc/infer/error_reporting/mod.rs +++ b/src/librustc/infer/error_reporting/mod.rs @@ -151,19 +151,19 @@ fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, return; } }; - let scope_decorated_tag = match *scope { - region::CodeExtentData::Misc(_) => tag, - region::CodeExtentData::CallSiteScope { .. } => { + let scope_decorated_tag = match scope { + region::CodeExtent::Misc(_) => tag, + region::CodeExtent::CallSiteScope(_) => { "scope of call-site for function" } - region::CodeExtentData::ParameterScope { .. } => { + region::CodeExtent::ParameterScope(_) => { "scope of function body" } - region::CodeExtentData::DestructionScope(_) => { + region::CodeExtent::DestructionScope(_) => { new_string = format!("destruction scope surrounding {}", tag); &new_string[..] } - region::CodeExtentData::Remainder(r) => { + region::CodeExtent::Remainder(r) => { new_string = format!("block suffix following statement {}", r.first_statement_index); &new_string[..] @@ -172,19 +172,35 @@ fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, explain_span(self, scope_decorated_tag, span) } - ty::ReFree(ref fr) => { - let prefix = match fr.bound_region { - ty::BrAnon(idx) => { - format!("the anonymous lifetime #{} defined on", idx + 1) + ty::ReEarlyBound(_) | + ty::ReFree(_) => { + let scope = match *region { + ty::ReEarlyBound(ref br) => { + self.parent_def_id(br.def_id).unwrap() } - ty::BrFresh(_) => "an anonymous lifetime defined on".to_owned(), - _ => { - format!("the lifetime {} as defined on", - fr.bound_region) + ty::ReFree(ref fr) => fr.scope, + _ => bug!() + }; + let prefix = match *region { + ty::ReEarlyBound(ref br) => { + format!("the lifetime {} as defined on", br.name) + } + ty::ReFree(ref fr) => { + match fr.bound_region { + ty::BrAnon(idx) => { + format!("the anonymous lifetime #{} defined on", idx + 1) + } + ty::BrFresh(_) => "an anonymous lifetime defined on".to_owned(), + _ => { + format!("the lifetime {} as defined on", + fr.bound_region) + } + } } + _ => bug!() }; - let node = fr.scope.map(|s| s.node_id()) + let node = self.hir.as_local_node_id(scope) .unwrap_or(DUMMY_NODE_ID); let unknown; let tag = match self.hir.find(node) { @@ -199,12 +215,12 @@ fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, Some(_) => { unknown = format!("unexpected node ({}) for scope {:?}. \ Please report a bug.", - self.hir.node_to_string(node), fr.scope); + self.hir.node_to_string(node), scope); &unknown } None => { unknown = format!("unknown node for scope {:?}. \ - Please report a bug.", fr.scope); + Please report a bug.", scope); &unknown } }; @@ -216,8 +232,6 @@ fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, ty::ReEmpty => ("the empty lifetime".to_owned(), None), - ty::ReEarlyBound(ref data) => (data.name.to_string(), None), - // FIXME(#13998) ReSkolemized should probably print like // ReFree rather than dumping Debug output on the user. // @@ -797,6 +811,7 @@ fn report_generic_bound_failure(&self, } let mut err = match *sub { + ty::ReEarlyBound(_) | ty::ReFree(ty::FreeRegion {bound_region: ty::BrNamed(..), ..}) => { // Does the required lifetime have a nice name we can print? 
let mut err = struct_span_err!(self.tcx.sess, diff --git a/src/librustc/infer/freshen.rs b/src/librustc/infer/freshen.rs index ad67ef9a127..a0ef1f65f52 100644 --- a/src/librustc/infer/freshen.rs +++ b/src/librustc/infer/freshen.rs @@ -85,13 +85,13 @@ fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { match *r { - ty::ReEarlyBound(..) | ty::ReLateBound(..) => { // leave bound regions alone r } ty::ReStatic | + ty::ReEarlyBound(..) | ty::ReFree(_) | ty::ReScope(_) | ty::ReVar(_) | diff --git a/src/librustc/infer/higher_ranked/mod.rs b/src/librustc/infer/higher_ranked/mod.rs index 09f909ef399..dbbcc6cfbec 100644 --- a/src/librustc/infer/higher_ranked/mod.rs +++ b/src/librustc/infer/higher_ranked/mod.rs @@ -274,7 +274,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, -> ty::Region<'tcx> { // Regions that pre-dated the LUB computation stay as they are. if !is_var_in_set(new_vars, r0) { - assert!(!r0.is_bound()); + assert!(!r0.is_late_bound()); debug!("generalize_region(r0={:?}): not new variable", r0); return r0; } @@ -288,7 +288,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, debug!("generalize_region(r0={:?}): \ non-new-variables found in {:?}", r0, tainted); - assert!(!r0.is_bound()); + assert!(!r0.is_late_bound()); return r0; } @@ -371,7 +371,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, r0: ty::Region<'tcx>) -> ty::Region<'tcx> { if !is_var_in_set(new_vars, r0) { - assert!(!r0.is_bound()); + assert!(!r0.is_late_bound()); return r0; } @@ -424,7 +424,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, return rev_lookup(infcx, span, a_map, a_r.unwrap()); } else if a_r.is_none() && b_r.is_none() { // Not related to bound variables from either fn: - assert!(!r0.is_bound()); + assert!(!r0.is_late_bound()); return r0; } else { // Other: diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index e91af21c6db..1ecc277c7ca 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -450,10 +450,10 @@ fn to_parts(self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> (Option<&'a ty::TypeckTables<'tcx>>, Option>, Option>) { - let item_id = tcx.hir.body_owner(self); - (Some(tcx.typeck_tables_of(tcx.hir.local_def_id(item_id))), + let def_id = tcx.hir.body_owner_def_id(self); + (Some(tcx.typeck_tables_of(def_id)), None, - Some(ty::ParameterEnvironment::for_item(tcx, item_id))) + Some(tcx.parameter_environment(def_id))) } } @@ -1009,7 +1009,7 @@ pub fn probe(&self, f: F) -> R where } pub fn add_given(&self, - sub: ty::FreeRegion<'tcx>, + sub: ty::Region<'tcx>, sup: ty::RegionVid) { self.region_vars.add_given(sub, sup); @@ -1324,7 +1324,7 @@ pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> { pub fn resolve_regions_and_report_errors(&self, region_context: DefId, - region_map: &RegionMaps<'tcx>, + region_map: &RegionMaps, free_regions: &FreeRegionMap<'tcx>) { let region_rels = RegionRelations::new(self.tcx, region_context, diff --git a/src/librustc/infer/region_inference/graphviz.rs b/src/librustc/infer/region_inference/graphviz.rs index c48b8f610a2..cce253c1a1a 100644 --- a/src/librustc/infer/region_inference/graphviz.rs +++ b/src/librustc/infer/region_inference/graphviz.rs @@ -124,20 +124,20 @@ struct ConstraintGraph<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { graph_name: String, region_rels: &'a RegionRelations<'a, 'gcx, 'tcx>, map: &'a FxHashMap, SubregionOrigin<'tcx>>, - node_ids: FxHashMap, usize>, + node_ids: FxHashMap, } 
#[derive(Clone, Hash, PartialEq, Eq, Debug, Copy)] -enum Node<'tcx> { +enum Node { RegionVid(ty::RegionVid), - Region(ty::RegionKind<'tcx>), + Region(ty::RegionKind), } // type Edge = Constraint; #[derive(Clone, PartialEq, Eq, Debug, Copy)] enum Edge<'tcx> { Constraint(Constraint<'tcx>), - EnclScope(CodeExtent<'tcx>, CodeExtent<'tcx>), + EnclScope(CodeExtent, CodeExtent), } impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> { @@ -176,7 +176,7 @@ fn new(name: String, } impl<'a, 'gcx, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { - type Node = Node<'tcx>; + type Node = Node; type Edge = Edge<'tcx>; fn graph_id(&self) -> dot::Id { dot::Id::new(&*self.graph_name).unwrap() @@ -209,7 +209,7 @@ fn edge_label(&self, e: &Edge) -> dot::LabelText { } } -fn constraint_to_nodes<'tcx>(c: &Constraint<'tcx>) -> (Node<'tcx>, Node<'tcx>) { +fn constraint_to_nodes(c: &Constraint) -> (Node, Node) { match *c { Constraint::ConstrainVarSubVar(rv_1, rv_2) => (Node::RegionVid(rv_1), Node::RegionVid(rv_2)), @@ -222,7 +222,7 @@ fn constraint_to_nodes<'tcx>(c: &Constraint<'tcx>) -> (Node<'tcx>, Node<'tcx>) { } } -fn edge_to_nodes<'tcx>(e: &Edge<'tcx>) -> (Node<'tcx>, Node<'tcx>) { +fn edge_to_nodes(e: &Edge) -> (Node, Node) { match *e { Edge::Constraint(ref c) => constraint_to_nodes(c), Edge::EnclScope(sub, sup) => { @@ -233,9 +233,9 @@ fn edge_to_nodes<'tcx>(e: &Edge<'tcx>) -> (Node<'tcx>, Node<'tcx>) { } impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { - type Node = Node<'tcx>; + type Node = Node; type Edge = Edge<'tcx>; - fn nodes(&self) -> dot::Nodes> { + fn nodes(&self) -> dot::Nodes { let mut set = FxHashSet(); for node in self.node_ids.keys() { set.insert(*node); @@ -250,12 +250,12 @@ fn edges(&self) -> dot::Edges> { debug!("region graph has {} edges", v.len()); Cow::Owned(v) } - fn source(&self, edge: &Edge<'tcx>) -> Node<'tcx> { + fn source(&self, edge: &Edge<'tcx>) -> Node { let (n1, _) = edge_to_nodes(edge); debug!("edge {:?} has source {:?}", edge, n1); n1 } - fn target(&self, edge: &Edge<'tcx>) -> Node<'tcx> { + fn target(&self, edge: &Edge<'tcx>) -> Node { let (_, n2) = edge_to_nodes(edge); debug!("edge {:?} has target {:?}", edge, n2); n2 diff --git a/src/librustc/infer/region_inference/mod.rs b/src/librustc/infer/region_inference/mod.rs index 39554d1fa3a..2e3c2443544 100644 --- a/src/librustc/infer/region_inference/mod.rs +++ b/src/librustc/infer/region_inference/mod.rs @@ -29,7 +29,6 @@ use ty::{ReLateBound, ReScope, ReVar, ReSkolemized, BrFresh}; use std::cell::{Cell, RefCell}; -use std::cmp::Ordering::{self, Less, Greater, Equal}; use std::fmt; use std::mem; use std::u32; @@ -127,7 +126,7 @@ pub enum UndoLogEntry<'tcx> { AddVerify(usize), /// We added the given `given` - AddGiven(ty::FreeRegion<'tcx>, ty::RegionVid), + AddGiven(Region<'tcx>, ty::RegionVid), /// We added a GLB/LUB "combinaton variable" AddCombination(CombineMapType, TwoRegions<'tcx>), @@ -213,7 +212,7 @@ pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { // record the fact that `'a <= 'b` is implied by the fn signature, // and then ignore the constraint when solving equations. This is // a bit of a hack but seems to work. 
- givens: RefCell, ty::RegionVid)>>, + givens: RefCell, ty::RegionVid)>>, lubs: RefCell>, glbs: RefCell>, @@ -309,8 +308,7 @@ fn fixed_point(&mut self, self.add_edge(a, b); } &AddGiven(a, b) => { - self.add_edge(tcx.mk_region(ReFree(a)), - tcx.mk_region(ReVar(b))); + self.add_edge(a, tcx.mk_region(ReVar(b))); } &AddVerify(i) => { verifys[i].bound.for_each_region(&mut |b| { @@ -661,7 +659,7 @@ fn add_verify(&self, verify: Verify<'tcx>) { } } - pub fn add_given(&self, sub: ty::FreeRegion<'tcx>, sup: ty::RegionVid) { + pub fn add_given(&self, sub: Region<'tcx>, sup: ty::RegionVid) { // cannot add givens once regions are resolved assert!(self.values_are_none()); @@ -702,9 +700,7 @@ pub fn make_subregion(&self, origin); match (sub, sup) { - (&ReEarlyBound(..), _) | (&ReLateBound(..), _) | - (_, &ReEarlyBound(..)) | (_, &ReLateBound(..)) => { span_bug!(origin.span(), "cannot relate bound region: {:?} <= {:?}", @@ -908,8 +904,6 @@ fn lub_concrete_regions(&self, match (a, b) { (&ReLateBound(..), _) | (_, &ReLateBound(..)) | - (&ReEarlyBound(..), _) | - (_, &ReEarlyBound(..)) | (&ReErased, _) | (_, &ReErased) => { bug!("cannot relate region: LUB({:?}, {:?})", a, b); @@ -931,18 +925,31 @@ fn lub_concrete_regions(&self, b); } - (&ReFree(fr), &ReScope(s_id)) | - (&ReScope(s_id), &ReFree(fr)) => { + (&ReEarlyBound(_), &ReScope(s_id)) | + (&ReScope(s_id), &ReEarlyBound(_)) | + (&ReFree(_), &ReScope(s_id)) | + (&ReScope(s_id), &ReFree(_)) => { // A "free" region can be interpreted as "some region - // at least as big as the block fr.scope_id". So, we can + // at least as big as fr.scope". So, we can // reasonably compare free regions and scopes: - if let Some(fr_scope) = fr.scope { - let r_id = region_rels.region_maps.nearest_common_ancestor(fr_scope, s_id); - if r_id == fr_scope { - // if the free region's scope `fr.scope_id` is bigger than - // the scope region `s_id`, then the LUB is the free - // region itself: - return self.tcx.mk_region(ReFree(fr)); + let fr_scope = match (a, b) { + (&ReEarlyBound(ref br), _) | (_, &ReEarlyBound(ref br)) => { + region_rels.region_maps.early_free_extent(self.tcx, br) + } + (&ReFree(ref fr), _) | (_, &ReFree(ref fr)) => { + region_rels.region_maps.free_extent(self.tcx, fr) + } + _ => bug!() + }; + let r_id = region_rels.region_maps.nearest_common_ancestor(fr_scope, s_id); + if r_id == fr_scope { + // if the free region's scope `fr.scope` is bigger than + // the scope region `s_id`, then the LUB is the free + // region itself: + match (a, b) { + (_, &ReScope(_)) => return a, + (&ReScope(_), _) => return b, + _ => bug!() } } @@ -959,6 +966,9 @@ fn lub_concrete_regions(&self, self.tcx.mk_region(ReScope(lub)) } + (&ReEarlyBound(_), &ReEarlyBound(_)) | + (&ReFree(_), &ReEarlyBound(_)) | + (&ReEarlyBound(_), &ReFree(_)) | (&ReFree(_), &ReFree(_)) => { region_rels.lub_free_regions(a, b) } @@ -1041,13 +1051,13 @@ fn expand_givens(&self, graph: &RegionGraph) { let mut givens = self.givens.borrow_mut(); let seeds: Vec<_> = givens.iter().cloned().collect(); - for (fr, vid) in seeds { + for (r, vid) in seeds { let seed_index = NodeIndex(vid.index as usize); for succ_index in graph.depth_traverse(seed_index, OUTGOING) { let succ_index = succ_index.0 as u32; if succ_index < self.num_vars() { let succ_vid = RegionVid { index: succ_index }; - givens.insert((fr, succ_vid)); + givens.insert((r, succ_vid)); } } } @@ -1096,8 +1106,9 @@ fn expand_node(&self, // Check if this relationship is implied by a given. 
match *a_region { - ty::ReFree(fr) => { - if self.givens.borrow().contains(&(fr, b_vid)) { + ty::ReEarlyBound(_) | + ty::ReFree(_) => { + if self.givens.borrow().contains(&(a_region, b_vid)) { debug!("given"); return false; } @@ -1333,16 +1344,15 @@ fn collect_error_for_expanding_node(&self, // We place free regions first because we are special casing // SubSupConflict(ReFree, ReFree) when reporting error, and so // the user will more likely get a specific suggestion. - fn free_regions_first(a: &RegionAndOrigin, b: &RegionAndOrigin) -> Ordering { - match (a.region, b.region) { - (&ReFree(..), &ReFree(..)) => Equal, - (&ReFree(..), _) => Less, - (_, &ReFree(..)) => Greater, - (..) => Equal, + fn region_order_key(x: &RegionAndOrigin) -> u8 { + match *x.region { + ReEarlyBound(_) => 0, + ReFree(_) => 1, + _ => 2 } } - lower_bounds.sort_by(|a, b| free_regions_first(a, b)); - upper_bounds.sort_by(|a, b| free_regions_first(a, b)); + lower_bounds.sort_by_key(region_order_key); + upper_bounds.sort_by_key(region_order_key); for lower_bound in &lower_bounds { for upper_bound in &upper_bounds { diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 5cf26ea8bfc..f32ee790064 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -54,7 +54,7 @@ extern crate getopts; extern crate graphviz; extern crate libc; -extern crate rustc_llvm as llvm; +extern crate owning_ref; extern crate rustc_back; extern crate rustc_data_structures; extern crate serialize; diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index e681d55cf94..07140f71aeb 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -76,6 +76,12 @@ "detects unreachable patterns" } +declare_lint! { + pub UNUSED_MACROS, + Warn, + "detects macros that were not used" +} + declare_lint! { pub WARNINGS, Warn, @@ -259,6 +265,7 @@ fn get_lints(&self) -> LintArray { DEAD_CODE, UNREACHABLE_CODE, UNREACHABLE_PATTERNS, + UNUSED_MACROS, WARNINGS, UNUSED_FEATURES, STABLE_FEATURES, diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 6f3e84247f7..9d5ba2c8f95 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -49,6 +49,7 @@ use hir::def_id::LOCAL_CRATE; use hir::intravisit as hir_visit; use syntax::visit as ast_visit; +use syntax::tokenstream::ThinTokenStream; /// Information about the registered lints. 
/// @@ -1055,7 +1056,7 @@ fn visit_ident(&mut self, sp: Span, id: ast::Ident) { run_lints!(self, check_ident, early_passes, sp, id); } - fn visit_mod(&mut self, m: &'a ast::Mod, s: Span, n: ast::NodeId) { + fn visit_mod(&mut self, m: &'a ast::Mod, s: Span, _a: &[ast::Attribute], n: ast::NodeId) { run_lints!(self, check_mod, early_passes, m, s, n); ast_visit::walk_mod(self, m); run_lints!(self, check_mod_post, early_passes, m, s, n); @@ -1125,6 +1126,13 @@ fn visit_path_list_item(&mut self, prefix: &'a ast::Path, item: &'a ast::PathLis fn visit_attribute(&mut self, attr: &'a ast::Attribute) { run_lints!(self, check_attribute, early_passes, attr); } + + fn visit_mac_def(&mut self, _mac: &'a ThinTokenStream, id: ast::NodeId) { + let lints = self.sess.lints.borrow_mut().take(id); + for early_lint in lints { + self.early_lint(&early_lint); + } + } } enum CheckLintNameResult { diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index 569b1aeeb09..a68aca46000 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -36,8 +36,9 @@ use util::nodemap::{NodeSet, DefIdMap}; use std::any::Any; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::rc::Rc; +use owning_ref::ErasedBoxRef; use syntax::ast; use syntax::ext::base::SyntaxExtension; use syntax::symbol::Symbol; @@ -201,11 +202,33 @@ pub fn new() -> EncodedMetadataHashes { } } +/// The backend's way to give the crate store access to the metadata in a library. +/// Note that it returns the raw metadata bytes stored in the library file, whether +/// it is compressed, uncompressed, some weird mix, etc. +/// rmeta files are backend independent and not handled here. +/// +/// At the time of this writing, there is only one backend and one way to store +/// metadata in library -- this trait just serves to decouple rustc_metadata from +/// the archive reader, which depends on LLVM. +pub trait MetadataLoader { + fn get_rlib_metadata(&self, + target: &Target, + filename: &Path) + -> Result, String>; + fn get_dylib_metadata(&self, + target: &Target, + filename: &Path) + -> Result, String>; +} + /// A store of Rust crates, through with their metadata /// can be accessed. 
pub trait CrateStore { fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc; + // access to the metadata loader + fn metadata_loader(&self) -> &MetadataLoader; + // item info fn visibility(&self, def: DefId) -> ty::Visibility; fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap>; @@ -275,8 +298,6 @@ fn item_body<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) fn used_link_args(&self) -> Vec; // utility functions - fn metadata_filename(&self) -> &str; - fn metadata_section_name(&self, target: &Target) -> &str; fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>; fn used_crate_source(&self, cnum: CrateNum) -> CrateSource; fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option; @@ -413,8 +434,6 @@ fn used_libraries(&self) -> Vec { vec![] } fn used_link_args(&self) -> Vec { vec![] } // utility functions - fn metadata_filename(&self) -> &str { bug!("metadata_filename") } - fn metadata_section_name(&self, target: &Target) -> &str { bug!("metadata_section_name") } fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)> { vec![] } fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") } @@ -427,6 +446,9 @@ fn encode_metadata<'a, 'tcx>(&self, bug!("encode_metadata") } fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") } + + // access to the metadata loader + fn metadata_loader(&self) -> &MetadataLoader { bug!("metadata_loader") } } pub trait CrateLoader { diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 41f9311dd80..99b140f690a 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -271,7 +271,7 @@ enum PassArgs { impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { pub fn new(delegate: &'a mut (Delegate<'tcx>+'a), - region_maps: &'a RegionMaps<'tcx>, + region_maps: &'a RegionMaps, infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self { @@ -283,7 +283,7 @@ pub fn new(delegate: &'a mut (Delegate<'tcx>+'a), pub fn with_options(delegate: &'a mut (Delegate<'tcx>+'a), infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - region_maps: &'a RegionMaps<'tcx>, + region_maps: &'a RegionMaps, options: mc::MemCategorizationOptions) -> Self { diff --git a/src/librustc/middle/free_region.rs b/src/librustc/middle/free_region.rs index 2dc7aac04ae..6a21bdc19e0 100644 --- a/src/librustc/middle/free_region.rs +++ b/src/librustc/middle/free_region.rs @@ -35,7 +35,7 @@ pub struct RegionRelations<'a, 'gcx: 'tcx, 'tcx: 'a> { pub context: DefId, /// region maps for the given context - pub region_maps: &'a RegionMaps<'tcx>, + pub region_maps: &'a RegionMaps, /// free-region relationships pub free_regions: &'a FreeRegionMap<'tcx>, @@ -45,7 +45,7 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> { pub fn new( tcx: TyCtxt<'a, 'gcx, 'tcx>, context: DefId, - region_maps: &'a RegionMaps<'tcx>, + region_maps: &'a RegionMaps, free_regions: &'a FreeRegionMap<'tcx>, ) -> Self { Self { @@ -71,26 +71,27 @@ pub fn is_subregion_of(&self, (&ty::ReScope(sub_scope), &ty::ReScope(super_scope)) => self.region_maps.is_subscope_of(sub_scope, super_scope), - (&ty::ReScope(sub_scope), &ty::ReFree(fr)) => { - // 1. It is safe to unwrap `fr.scope` because we - // should only ever wind up comparing against - // `ReScope` in the context of a method or - // body, where `fr.scope` should be `Some`. 
- self.region_maps.is_subscope_of(sub_scope, fr.scope.unwrap() /*1*/) || - self.is_static(super_region) + (&ty::ReScope(sub_scope), &ty::ReEarlyBound(ref br)) => { + let fr_scope = self.region_maps.early_free_extent(self.tcx, br); + self.region_maps.is_subscope_of(sub_scope, fr_scope) } - (&ty::ReFree(_), &ty::ReFree(_)) => - self.free_regions.relation.contains(&sub_region, &super_region) || - self.is_static(super_region), + (&ty::ReScope(sub_scope), &ty::ReFree(ref fr)) => { + let fr_scope = self.region_maps.free_extent(self.tcx, fr); + self.region_maps.is_subscope_of(sub_scope, fr_scope) + } - (&ty::ReStatic, &ty::ReFree(_)) => - self.is_static(super_region), + (&ty::ReEarlyBound(_), &ty::ReEarlyBound(_)) | + (&ty::ReFree(_), &ty::ReEarlyBound(_)) | + (&ty::ReEarlyBound(_), &ty::ReFree(_)) | + (&ty::ReFree(_), &ty::ReFree(_)) => + self.free_regions.relation.contains(&sub_region, &super_region), _ => false, } }; + let result = result || self.is_static(super_region); debug!("is_subregion_of(sub_region={:?}, super_region={:?}) = {:?}", sub_region, super_region, result); result @@ -101,11 +102,11 @@ fn is_static(&self, super_region: ty::Region<'tcx>) -> bool { debug!("is_static(super_region={:?})", super_region); match *super_region { ty::ReStatic => true, - ty::ReFree(_) => { + ty::ReEarlyBound(_) | ty::ReFree(_) => { let re_static = self.tcx.mk_region(ty::ReStatic); self.free_regions.relation.contains(&re_static, &super_region) } - _ => bug!("only free regions should be given to `is_static`") + _ => false } } @@ -142,11 +143,9 @@ pub fn relate_free_regions_from_implied_bounds(&mut self, for implied_bound in implied_bounds { debug!("implied bound: {:?}", implied_bound); match *implied_bound { - ImpliedBound::RegionSubRegion(a @ &ty::ReFree(_), b @ &ty::ReFree(_)) | - ImpliedBound::RegionSubRegion(a @ &ty::ReStatic, b @ &ty::ReFree(_)) => { + ImpliedBound::RegionSubRegion(a, b) => { self.relate_regions(a, b); } - ImpliedBound::RegionSubRegion(..) | ImpliedBound::RegionSubParam(..) | ImpliedBound::RegionSubProjection(..) => { } @@ -170,32 +169,18 @@ pub fn relate_free_regions_from_predicates(&mut self, // No region bounds here } ty::Predicate::RegionOutlives(ty::Binder(ty::OutlivesPredicate(r_a, r_b))) => { - match (r_a, r_b) { - // `'static: 'x` is not notable - (&ty::ReStatic, &ty::ReFree(_)) => {}, - - (&ty::ReFree(_), &ty::ReStatic) | - (&ty::ReFree(_), &ty::ReFree(_)) => { - // Record that `'a:'b`. Or, put another way, `'b <= 'a`. - self.relate_regions(r_b, r_a); - } - - _ => { - // All named regions are instantiated with free regions. - bug!("record_region_bounds: non free region: {:?} / {:?}", - r_a, - r_b); - } - } + self.relate_regions(r_b, r_a); } } } } + // Record that `'sup:'sub`. Or, put another way, `'sub <= 'sup`. 
+ // (with the exception that `'static: 'x` is not notable) fn relate_regions(&mut self, sub: Region<'tcx>, sup: Region<'tcx>) { - assert!(match *sub { ty::ReFree(_) | ty::ReStatic => true, _ => false }); - assert!(match *sup { ty::ReFree(_) | ty::ReStatic => true, _ => false }); - self.relation.add(sub, sup) + if (is_free(sub) || *sub == ty::ReStatic) && is_free(sup) { + self.relation.add(sub, sup) + } } pub fn lub_free_regions<'a, 'gcx>(&self, @@ -203,8 +188,8 @@ pub fn lub_free_regions<'a, 'gcx>(&self, r_a: Region<'tcx>, r_b: Region<'tcx>) -> Region<'tcx> { - assert!(match *r_a { ty::ReFree(_) => true, _ => false }); - assert!(match *r_b { ty::ReFree(_) => true, _ => false }); + assert!(is_free(r_a)); + assert!(is_free(r_b)); let result = if r_a == r_b { r_a } else { match self.relation.postdom_upper_bound(&r_a, &r_b) { None => tcx.mk_region(ty::ReStatic), @@ -216,6 +201,13 @@ pub fn lub_free_regions<'a, 'gcx>(&self, } } +fn is_free(r: Region) -> bool { + match *r { + ty::ReEarlyBound(_) | ty::ReFree(_) => true, + _ => false + } +} + impl_stable_hash_for!(struct FreeRegionMap<'tcx> { relation }); diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 1ea87cc0a45..ecd350d1273 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -96,9 +96,6 @@ //! //! - `fallthrough_ln`: a live node that represents a fallthrough //! -//! - `no_ret_var`: a synthetic variable that is only 'read' from, the -//! fallthrough node. This allows us to detect functions where we fail -//! to return explicitly. //! - `clean_exit_var`: a synthetic variable that is only 'read' from the //! fallthrough node. It is only live if the function could converge //! via means other than an explicit `return` expression. That is, it is @@ -110,9 +107,7 @@ use self::VarKind::*; use hir::def::*; -use ty::{self, TyCtxt, ParameterEnvironment}; -use traits::{self, Reveal}; -use ty::subst::Subst; +use ty::{self, TyCtxt}; use lint; use util::nodemap::NodeMap; @@ -256,7 +251,6 @@ struct LocalInfo { enum VarKind { Arg(NodeId, ast::Name), Local(LocalInfo), - ImplicitRet, CleanExit } @@ -313,7 +307,7 @@ fn add_variable(&mut self, vk: VarKind) -> Variable { Local(LocalInfo { id: node_id, .. }) | Arg(node_id, _) => { self.variable_map.insert(node_id, v); }, - ImplicitRet | CleanExit => {} + CleanExit => {} } debug!("{:?} is {:?}", v, vk); @@ -335,7 +329,6 @@ fn variable_name(&self, var: Variable) -> String { Local(LocalInfo { name, .. 
}) | Arg(_, name) => { name.to_string() }, - ImplicitRet => "".to_string(), CleanExit => "".to_string() } } @@ -382,7 +375,6 @@ fn visit_fn<'a, 'tcx: 'a>(ir: &mut IrMaps<'a, 'tcx>, // check for various error conditions lsets.visit_body(body); - lsets.check_ret(id, sp, entry_ln, body); lsets.warn_about_unused_args(body, entry_ln); } @@ -500,7 +492,6 @@ fn invalid_users() -> Users { struct Specials { exit_ln: LiveNode, fallthrough_ln: LiveNode, - no_ret_var: Variable, clean_exit_var: Variable } @@ -534,7 +525,6 @@ fn new(ir: &'a mut IrMaps<'a, 'tcx>, body: hir::BodyId) -> Liveness<'a, 'tcx> { let specials = Specials { exit_ln: ir.add_live_node(ExitNode), fallthrough_ln: ir.add_live_node(ExitNode), - no_ret_var: ir.add_variable(ImplicitRet), clean_exit_var: ir.add_variable(CleanExit) }; @@ -1420,45 +1410,6 @@ fn check_expr<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, expr: &'tcx Expr) { } impl<'a, 'tcx> Liveness<'a, 'tcx> { - fn check_ret(&self, - id: NodeId, - sp: Span, - entry_ln: LiveNode, - body: &hir::Body) - { - let fn_ty = self.ir.tcx.type_of(self.ir.tcx.hir.local_def_id(id)); - let fn_sig = match fn_ty.sty { - ty::TyClosure(closure_def_id, substs) => { - self.ir.tcx.closure_type(closure_def_id) - .subst(self.ir.tcx, substs.substs) - } - _ => fn_ty.fn_sig() - }; - - let fn_ret = fn_sig.output(); - - // within the fn body, late-bound regions are liberated - // and must outlive the *call-site* of the function. - let fn_ret = - self.ir.tcx.liberate_late_bound_regions( - Some(self.ir.tcx.call_site_extent(id, body.value.id)), - &fn_ret); - - if !fn_ret.is_never() && self.live_on_entry(entry_ln, self.s.no_ret_var).is_some() { - let param_env = ParameterEnvironment::for_item(self.ir.tcx, id); - let t_ret_subst = fn_ret.subst(self.ir.tcx, ¶m_env.free_substs); - let is_nil = self.ir.tcx.infer_ctxt(param_env, Reveal::All).enter(|infcx| { - let cause = traits::ObligationCause::dummy(); - traits::fully_normalize(&infcx, cause, &t_ret_subst).unwrap().is_nil() - }); - - // for nil return types, it is ok to not return a value expl. - if !is_nil { - span_bug!(sp, "not all control paths return a value"); - } - } - } - fn check_lvalue(&mut self, expr: &'tcx Expr) { match expr.node { hir::ExprPath(hir::QPath::Resolved(_, ref path)) => { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 11a364f92c3..d0adf51d79e 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -290,7 +290,7 @@ fn span(&self) -> Span { self.span } #[derive(Clone)] pub struct MemCategorizationContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { pub infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - pub region_maps: &'a RegionMaps<'tcx>, + pub region_maps: &'a RegionMaps, options: MemCategorizationOptions, } @@ -406,7 +406,7 @@ pub fn to_user_str(&self) -> &'static str { impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { /// Context should be the `DefId` we use to fetch region-maps. 
pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - region_maps: &'a RegionMaps<'tcx>) + region_maps: &'a RegionMaps) -> MemCategorizationContext<'a, 'gcx, 'tcx> { MemCategorizationContext::with_options(infcx, region_maps, @@ -414,7 +414,7 @@ pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, } pub fn with_options(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - region_maps: &'a RegionMaps<'tcx>, + region_maps: &'a RegionMaps, options: MemCategorizationOptions) -> MemCategorizationContext<'a, 'gcx, 'tcx> { MemCategorizationContext { @@ -785,26 +785,12 @@ fn env_deref(&self, cmt_result: cmt_<'tcx>) -> cmt_<'tcx> { - // Look up the node ID of the closure body so we can construct - // a free region within it - let fn_body_id = { - let fn_expr = match self.tcx().hir.find(upvar_id.closure_expr_id) { - Some(hir_map::NodeExpr(e)) => e, - _ => bug!() - }; - - match fn_expr.node { - hir::ExprClosure(.., body_id, _) => body_id, - _ => bug!() - } - }; - // Region of environment pointer let env_region = self.tcx().mk_region(ty::ReFree(ty::FreeRegion { // The environment of a closure is guaranteed to // outlive any bindings introduced in the body of the // closure itself. - scope: Some(self.tcx().item_extent(fn_body_id.node_id)), + scope: self.tcx().hir.local_def_id(upvar_id.closure_expr_id), bound_region: ty::BrEnv })); @@ -853,7 +839,7 @@ fn env_deref(&self, pub fn temporary_scope(&self, id: ast::NodeId) -> (ty::Region<'tcx>, ty::Region<'tcx>) { let (scope, old_scope) = - self.region_maps.old_and_new_temporary_scope(self.tcx(), id); + self.region_maps.old_and_new_temporary_scope(id); (self.tcx().mk_region(match scope { Some(scope) => ty::ReScope(scope), None => ty::ReStatic diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 087ab4b94da..2d632e3feb5 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -22,7 +22,6 @@ use std::mem; use std::rc::Rc; -use serialize; use syntax::codemap; use syntax::ast; use syntax_pos::Span; @@ -35,11 +34,6 @@ use hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local}; use mir::transform::MirSource; -pub type CodeExtent<'tcx> = &'tcx CodeExtentData; - -impl<'tcx> serialize::UseSpecializedEncodable for CodeExtent<'tcx> {} -impl<'tcx> serialize::UseSpecializedDecodable for CodeExtent<'tcx> {} - /// CodeExtent represents a statically-describable extent that can be /// used to bound the lifetime/region for values. /// @@ -102,16 +96,16 @@ impl<'tcx> serialize::UseSpecializedDecodable for CodeExtent<'tcx> {} /// actually attach a more meaningful ordering to scopes than the one /// generated via deriving here. #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, RustcEncodable, RustcDecodable)] -pub enum CodeExtentData { +pub enum CodeExtent { Misc(ast::NodeId), // extent of the call-site for a function or closure (outlives // the parameters as well as the body). - CallSiteScope { fn_id: ast::NodeId, body_id: ast::NodeId }, + CallSiteScope(hir::BodyId), // extent of parameters passed to a function or closure (they // outlive its body) - ParameterScope { fn_id: ast::NodeId, body_id: ast::NodeId }, + ParameterScope(hir::BodyId), // extent of destructors for temporaries of node-id DestructionScope(ast::NodeId), @@ -120,23 +114,6 @@ pub enum CodeExtentData { Remainder(BlockRemainder) } -/// extent of call-site for a function/method. 
-#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, - RustcDecodable, Debug, Copy)] -pub struct CallSiteScopeData { - pub fn_id: ast::NodeId, pub body_id: ast::NodeId, -} - -impl CallSiteScopeData { - pub fn to_code_extent<'a, 'tcx, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> CodeExtent<'tcx> { - tcx.intern_code_extent( - match *self { - CallSiteScopeData { fn_id, body_id } => - CodeExtentData::CallSiteScope { fn_id: fn_id, body_id: body_id }, - }) - } -} - /// Represents a subscope of `block` for a binding that is introduced /// by `block.stmts[first_statement_index]`. Such subscopes represent /// a suffix of the block. Note that each subscope does not include @@ -148,9 +125,9 @@ pub fn to_code_extent<'a, 'tcx, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Cod /// * the subscope with `first_statement_index == 0` is scope of both /// `a` and `b`; it does not include EXPR_1, but does include /// everything after that first `let`. (If you want a scope that -/// includes EXPR_1 as well, then do not use `CodeExtentData::Remainder`, +/// includes EXPR_1 as well, then do not use `CodeExtent::Remainder`, /// but instead another `CodeExtent` that encompasses the whole block, -/// e.g. `CodeExtentData::Misc`. +/// e.g. `CodeExtent::Misc`. /// /// * the subscope with `first_statement_index == 1` is scope of `c`, /// and thus does not include EXPR_2, but covers the `...`. @@ -161,21 +138,21 @@ pub struct BlockRemainder { pub first_statement_index: u32, } -impl CodeExtentData { +impl CodeExtent { /// Returns a node id associated with this scope. /// /// NB: likely to be replaced as API is refined; e.g. pnkfelix /// anticipates `fn entry_node_id` and `fn each_exit_node_id`. pub fn node_id(&self) -> ast::NodeId { match *self { - CodeExtentData::Misc(node_id) => node_id, + CodeExtent::Misc(node_id) => node_id, // These cases all return rough approximations to the // precise extent denoted by `self`. - CodeExtentData::Remainder(br) => br.block, - CodeExtentData::DestructionScope(node_id) => node_id, - CodeExtentData::CallSiteScope { fn_id: _, body_id } | - CodeExtentData::ParameterScope { fn_id: _, body_id } => body_id, + CodeExtent::Remainder(br) => br.block, + CodeExtent::DestructionScope(node_id) => node_id, + CodeExtent::CallSiteScope(body_id) | + CodeExtent::ParameterScope(body_id) => body_id.node_id, } } @@ -186,12 +163,12 @@ pub fn span(&self, hir_map: &hir_map::Map) -> Option { match hir_map.find(self.node_id()) { Some(hir_map::NodeBlock(ref blk)) => { match *self { - CodeExtentData::CallSiteScope { .. } | - CodeExtentData::ParameterScope { .. } | - CodeExtentData::Misc(_) | - CodeExtentData::DestructionScope(_) => Some(blk.span), + CodeExtent::CallSiteScope(_) | + CodeExtent::ParameterScope(_) | + CodeExtent::Misc(_) | + CodeExtent::DestructionScope(_) => Some(blk.span), - CodeExtentData::Remainder(r) => { + CodeExtent::Remainder(r) => { assert_eq!(r.block, blk.id); // Want span for extent starting after the // indexed statement and ending at end of @@ -214,21 +191,29 @@ pub fn span(&self, hir_map: &hir_map::Map) -> Option { } /// The region maps encode information about region relationships. -pub struct RegionMaps<'tcx> { +pub struct RegionMaps { + /// If not empty, this body is the root of this region hierarchy. + root_body: Option, + + /// The parent of the root body owner, if the latter is an + /// an associated const or method, as impls/traits can also + /// have lifetime parameters free in this body. 
+ root_parent: Option, + /// `scope_map` maps from a scope id to the enclosing scope id; /// this is usually corresponding to the lexical nesting, though /// in the case of closures the parent scope is the innermost /// conditional expression or repeating block. (Note that the /// enclosing scope id for the block associated with a closure is /// the closure itself.) - scope_map: FxHashMap, CodeExtent<'tcx>>, + scope_map: FxHashMap, /// `var_map` maps from a variable or binding id to the block in /// which that variable is declared. - var_map: NodeMap>, + var_map: NodeMap, /// maps from a node-id to the associated destruction scope (if any) - destruction_scopes: NodeMap>, + destruction_scopes: NodeMap, /// `rvalue_scopes` includes entries for those expressions whose cleanup scope is /// larger than the default. The map goes from the expression id @@ -236,14 +221,14 @@ pub struct RegionMaps<'tcx> { /// table, the appropriate cleanup scope is the innermost /// enclosing statement, conditional expression, or repeating /// block (see `terminating_scopes`). - rvalue_scopes: NodeMap>, + rvalue_scopes: NodeMap, /// Records the value of rvalue scopes before they were shrunk by /// #36082, for error reporting. /// /// FIXME: this should be temporary. Remove this by 1.18.0 or /// so. - shrunk_rvalue_scopes: NodeMap>, + shrunk_rvalue_scopes: NodeMap, /// Encodes the hierarchy of fn bodies. Every fn body (including /// closures) forms its own distinct region hierarchy, rooted in @@ -259,7 +244,7 @@ pub struct RegionMaps<'tcx> { } #[derive(Debug, Copy, Clone)] -pub struct Context<'tcx> { +pub struct Context { /// the root of the current region tree. This is typically the id /// of the innermost fn body. Each fn forms its own disjoint tree /// in the region hierarchy. These fn bodies are themselves @@ -269,21 +254,19 @@ pub struct Context<'tcx> { root_id: Option, /// the scope that contains any new variables declared - var_parent: Option>, + var_parent: Option, /// region parent of expressions etc - parent: Option>, + parent: Option, } struct RegionResolutionVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, // Generated maps: - region_maps: &'a mut RegionMaps<'tcx>, - - cx: Context<'tcx>, + region_maps: RegionMaps, - map: &'a hir_map::Map<'tcx>, + cx: Context, /// `terminating_scopes` is a set containing the ids of each /// statement, or conditional/repeating expression. 
These scopes @@ -309,9 +292,11 @@ struct RegionResolutionVisitor<'a, 'tcx: 'a> { } -impl<'tcx> RegionMaps<'tcx> { +impl<'tcx> RegionMaps { pub fn new() -> Self { RegionMaps { + root_body: None, + root_parent: None, scope_map: FxHashMap(), destruction_scopes: FxHashMap(), var_map: NodeMap(), @@ -322,8 +307,8 @@ pub fn new() -> Self { } pub fn record_code_extent(&mut self, - child: CodeExtent<'tcx>, - parent: Option>) { + child: CodeExtent, + parent: Option) { debug!("{:?}.parent = {:?}", child, parent); if let Some(p) = parent { @@ -332,24 +317,24 @@ pub fn record_code_extent(&mut self, } // record the destruction scopes for later so we can query them - if let &CodeExtentData::DestructionScope(n) = child { + if let CodeExtent::DestructionScope(n) = child { self.destruction_scopes.insert(n, child); } } - pub fn each_encl_scope(&self, mut e:E) where E: FnMut(CodeExtent<'tcx>, CodeExtent<'tcx>) { + pub fn each_encl_scope(&self, mut e:E) where E: FnMut(CodeExtent, CodeExtent) { for (&child, &parent) in &self.scope_map { e(child, parent) } } - pub fn each_var_scope(&self, mut e:E) where E: FnMut(&ast::NodeId, CodeExtent<'tcx>) { - for (child, parent) in self.var_map.iter() { + pub fn each_var_scope(&self, mut e:E) where E: FnMut(&ast::NodeId, CodeExtent) { + for (child, &parent) in self.var_map.iter() { e(child, parent) } } - pub fn opt_destruction_extent(&self, n: ast::NodeId) -> Option> { + pub fn opt_destruction_extent(&self, n: ast::NodeId) -> Option { self.destruction_scopes.get(&n).cloned() } @@ -373,48 +358,46 @@ fn fn_is_enclosed_by(&self, mut sub_fn: ast::NodeId, sup_fn: ast::NodeId) -> boo } } - fn record_var_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent<'tcx>) { + fn record_var_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) { debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime); assert!(var != lifetime.node_id()); self.var_map.insert(var, lifetime); } - fn record_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent<'tcx>) { + fn record_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) { debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime); assert!(var != lifetime.node_id()); self.rvalue_scopes.insert(var, lifetime); } - fn record_shrunk_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent<'tcx>) { + fn record_shrunk_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) { debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime); assert!(var != lifetime.node_id()); self.shrunk_rvalue_scopes.insert(var, lifetime); } - pub fn opt_encl_scope(&self, id: CodeExtent<'tcx>) -> Option> { + pub fn opt_encl_scope(&self, id: CodeExtent) -> Option { //! Returns the narrowest scope that encloses `id`, if any. self.scope_map.get(&id).cloned() } #[allow(dead_code)] // used in cfg - pub fn encl_scope(&self, id: CodeExtent<'tcx>) -> CodeExtent<'tcx> { + pub fn encl_scope(&self, id: CodeExtent) -> CodeExtent { //! Returns the narrowest scope that encloses `id`, if any. 
self.opt_encl_scope(id).unwrap() } /// Returns the lifetime of the local variable `var_id` - pub fn var_scope(&self, var_id: ast::NodeId) -> CodeExtent<'tcx> { + pub fn var_scope(&self, var_id: ast::NodeId) -> CodeExtent { match self.var_map.get(&var_id) { Some(&r) => r, None => { bug!("no enclosing scope for id {:?}", var_id); } } } - pub fn temporary_scope2<'a, 'gcx: 'tcx>(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - expr_id: ast::NodeId) - -> (Option>, bool) { - let temporary_scope = self.temporary_scope(tcx, expr_id); + pub fn temporary_scope2(&self, expr_id: ast::NodeId) + -> (Option, bool) { + let temporary_scope = self.temporary_scope(expr_id); let was_shrunk = match self.shrunk_rvalue_scopes.get(&expr_id) { Some(&s) => { info!("temporary_scope2({:?}, scope={:?}, shrunk={:?})", @@ -427,23 +410,18 @@ pub fn temporary_scope2<'a, 'gcx: 'tcx>(&self, (temporary_scope, was_shrunk) } - pub fn old_and_new_temporary_scope<'a, 'gcx: 'tcx>(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - expr_id: ast::NodeId) - -> (Option>, - Option>) + pub fn old_and_new_temporary_scope(&self, expr_id: ast::NodeId) + -> (Option, + Option) { - let temporary_scope = self.temporary_scope(tcx, expr_id); + let temporary_scope = self.temporary_scope(expr_id); (temporary_scope, self.shrunk_rvalue_scopes .get(&expr_id).cloned() .or(temporary_scope)) } - pub fn temporary_scope<'a, 'gcx: 'tcx>(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - expr_id: ast::NodeId) - -> Option> { + pub fn temporary_scope(&self, expr_id: ast::NodeId) -> Option { //! Returns the scope when temp created by expr_id will be cleaned up // check for a designated rvalue scope @@ -456,11 +434,11 @@ pub fn temporary_scope<'a, 'gcx: 'tcx>(&self, // if there's one. Static items, for instance, won't // have an enclosing scope, hence no scope will be // returned. - let mut id = tcx.node_extent(expr_id); + let mut id = CodeExtent::Misc(expr_id); - while let Some(&p) = self.scope_map.get(id) { - match *p { - CodeExtentData::DestructionScope(..) => { + while let Some(&p) = self.scope_map.get(&id) { + match p { + CodeExtent::DestructionScope(..) => { debug!("temporary_scope({:?}) = {:?} [enclosing]", expr_id, id); return Some(id); @@ -473,7 +451,7 @@ pub fn temporary_scope<'a, 'gcx: 'tcx>(&self, return None; } - pub fn var_region(&self, id: ast::NodeId) -> ty::RegionKind<'tcx> { + pub fn var_region(&self, id: ast::NodeId) -> ty::RegionKind { //! Returns the lifetime of the variable `id`. let scope = ty::ReScope(self.var_scope(id)); @@ -515,9 +493,9 @@ pub fn is_subscope_of(&self, /// Finds the nearest common ancestor (if any) of two scopes. That is, finds the smallest /// scope which is greater than or equal to both `scope_a` and `scope_b`. pub fn nearest_common_ancestor(&self, - scope_a: CodeExtent<'tcx>, - scope_b: CodeExtent<'tcx>) - -> CodeExtent<'tcx> { + scope_a: CodeExtent, + scope_b: CodeExtent) + -> CodeExtent { if scope_a == scope_b { return scope_a; } /// [1] The initial values for `a_buf` and `b_buf` are not used. @@ -525,9 +503,9 @@ pub fn nearest_common_ancestor(&self, /// is re-initialized with new values (or else fallback to a /// heap-allocated vector). 
let mut a_buf: [CodeExtent; 32] = [scope_a /* [1] */; 32]; - let mut a_vec: Vec> = vec![]; + let mut a_vec: Vec = vec![]; let mut b_buf: [CodeExtent; 32] = [scope_b /* [1] */; 32]; - let mut b_vec: Vec> = vec![]; + let mut b_vec: Vec = vec![]; let scope_map = &self.scope_map; let a_ancestors = ancestors_of(scope_map, scope_a, &mut a_buf, &mut a_vec); let b_ancestors = ancestors_of(scope_map, scope_b, &mut b_buf, &mut b_vec); @@ -551,8 +529,8 @@ pub fn nearest_common_ancestor(&self, let a_root_scope = a_ancestors[a_index]; let b_root_scope = a_ancestors[a_index]; return match (a_root_scope, b_root_scope) { - (&CodeExtentData::DestructionScope(a_root_id), - &CodeExtentData::DestructionScope(b_root_id)) => { + (CodeExtent::DestructionScope(a_root_id), + CodeExtent::DestructionScope(b_root_id)) => { if self.fn_is_enclosed_by(a_root_id, b_root_id) { // `a` is enclosed by `b`, hence `b` is the ancestor of everything in `a` scope_b @@ -583,11 +561,11 @@ pub fn nearest_common_ancestor(&self, } } - fn ancestors_of<'a, 'tcx>(scope_map: &FxHashMap, CodeExtent<'tcx>>, - scope: CodeExtent<'tcx>, - buf: &'a mut [CodeExtent<'tcx>; 32], - vec: &'a mut Vec>) - -> &'a [CodeExtent<'tcx>] { + fn ancestors_of<'a, 'tcx>(scope_map: &FxHashMap, + scope: CodeExtent, + buf: &'a mut [CodeExtent; 32], + vec: &'a mut Vec) + -> &'a [CodeExtent] { // debug!("ancestors_of(scope={:?})", scope); let mut scope = scope; @@ -595,7 +573,7 @@ fn ancestors_of<'a, 'tcx>(scope_map: &FxHashMap, CodeExtent<'tc while i < 32 { buf[i] = scope; match scope_map.get(&scope) { - Some(superscope) => scope = superscope, + Some(&superscope) => scope = superscope, _ => return &buf[..i+1] } i += 1; @@ -606,12 +584,55 @@ fn ancestors_of<'a, 'tcx>(scope_map: &FxHashMap, CodeExtent<'tc loop { vec.push(scope); match scope_map.get(&scope) { - Some(superscope) => scope = superscope, + Some(&superscope) => scope = superscope, _ => return &*vec } } } } + + /// Assuming that the provided region was defined within this `RegionMaps`, + /// returns the outermost `CodeExtent` that the region outlives. + pub fn early_free_extent<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + br: &ty::EarlyBoundRegion) + -> CodeExtent { + let param_owner = tcx.parent_def_id(br.def_id).unwrap(); + + let param_owner_id = tcx.hir.as_local_node_id(param_owner).unwrap(); + let body_id = tcx.hir.maybe_body_owned_by(param_owner_id).unwrap_or_else(|| { + // The lifetime was defined on node that doesn't own a body, + // which in practice can only mean a trait or an impl, that + // is the parent of a method, and that is enforced below. + assert_eq!(Some(param_owner_id), self.root_parent, + "free_extent: {:?} not recognized by the region maps for {:?}", + param_owner, + self.root_body.map(|body| tcx.hir.body_owner_def_id(body))); + + // The trait/impl lifetime is in scope for the method's body. + self.root_body.unwrap() + }); + + CodeExtent::CallSiteScope(body_id) + } + + /// Assuming that the provided region was defined within this `RegionMaps`, + /// returns the outermost `CodeExtent` that the region outlives. + pub fn free_extent<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, fr: &ty::FreeRegion) + -> CodeExtent { + let param_owner = match fr.bound_region { + ty::BoundRegion::BrNamed(def_id, _) => { + tcx.parent_def_id(def_id).unwrap() + } + _ => fr.scope + }; + + // Ensure that the named late-bound lifetimes were defined + // on the same function that they ended up being freed in. 
+ assert_eq!(param_owner, fr.scope); + + let param_owner_id = tcx.hir.as_local_node_id(param_owner).unwrap(); + CodeExtent::CallSiteScope(tcx.hir.body_owned_by(param_owner_id)) + } } /// Records the lifetime of a local variable as `cx.var_parent` @@ -633,7 +654,6 @@ fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: debug!("resolve_block(blk.id={:?})", blk.id); let prev_cx = visitor.cx; - let block_extent = visitor.new_node_extent_with_dtor(blk.id); // We treat the tail expression in the block (if any) somewhat // differently from the statements. The issue has to do with @@ -660,11 +680,8 @@ fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: // `other_argument()` has run and also the call to `quux(..)` // itself has returned. - visitor.cx = Context { - root_id: prev_cx.root_id, - var_parent: Some(block_extent), - parent: Some(block_extent), - }; + visitor.enter_node_extent_with_dtor(blk.id); + visitor.cx.var_parent = visitor.cx.parent; { // This block should be kept approximately in sync with @@ -680,17 +697,13 @@ fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: // has the previous subscope in the block as a parent, // except for the first such subscope, which has the // block itself as a parent. - let stmt_extent = visitor.new_code_extent( - CodeExtentData::Remainder(BlockRemainder { + visitor.enter_code_extent( + CodeExtent::Remainder(BlockRemainder { block: blk.id, first_statement_index: i as u32 }) ); - visitor.cx = Context { - root_id: prev_cx.root_id, - var_parent: Some(stmt_extent), - parent: Some(stmt_extent), - }; + visitor.cx.var_parent = visitor.cx.parent; } visitor.visit_stmt(statement) } @@ -711,7 +724,7 @@ fn resolve_arm<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, arm: & } fn resolve_pat<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, pat: &'tcx hir::Pat) { - visitor.new_node_extent(pat.id); + visitor.record_code_extent(CodeExtent::Misc(pat.id)); // If this is a binding then record the lifetime of that binding. if let PatKind::Binding(..) = pat.node { @@ -731,20 +744,20 @@ fn resolve_stmt<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, stmt: // statement plus its destructors, and thus the extent for which // regions referenced by the destructors need to survive. visitor.terminating_scopes.insert(stmt_id); - let stmt_extent = visitor.new_node_extent_with_dtor(stmt_id); let prev_parent = visitor.cx.parent; - visitor.cx.parent = Some(stmt_extent); + visitor.enter_node_extent_with_dtor(stmt_id); + intravisit::walk_stmt(visitor, stmt); + visitor.cx.parent = prev_parent; } fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &'tcx hir::Expr) { debug!("resolve_expr(expr.id={:?})", expr.id); - let expr_extent = visitor.new_node_extent_with_dtor(expr.id); let prev_cx = visitor.cx; - visitor.cx.parent = Some(expr_extent); + visitor.enter_node_extent_with_dtor(expr.id); { let terminating_scopes = &mut visitor.terminating_scopes; @@ -784,7 +797,7 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: } hir::ExprMatch(..) => { - visitor.cx.var_parent = Some(expr_extent); + visitor.cx.var_parent = visitor.cx.parent; } hir::ExprAssignOp(..) | hir::ExprIndex(..) 
| @@ -971,7 +984,7 @@ fn is_borrowed_ty(ty: &hir::Ty) -> bool { fn record_rvalue_scope_if_borrow_expr<'a, 'tcx>( visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &hir::Expr, - blk_id: CodeExtent<'tcx>) + blk_id: CodeExtent) { match expr.node { hir::ExprAddrOf(_, ref subexpr) => { @@ -1021,7 +1034,7 @@ fn record_rvalue_scope_if_borrow_expr<'a, 'tcx>( /// Note: ET is intended to match "rvalues or lvalues based on rvalues". fn record_rvalue_scope<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &hir::Expr, - blk_scope: CodeExtent<'tcx>, + blk_scope: CodeExtent, is_shrunk: bool) { let mut expr = expr; loop { @@ -1054,43 +1067,28 @@ fn record_rvalue_scope<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx> } impl<'a, 'tcx> RegionResolutionVisitor<'a, 'tcx> { - pub fn intern_code_extent(&mut self, - data: CodeExtentData, - parent: Option>) - -> CodeExtent<'tcx> { - let code_extent = self.tcx.intern_code_extent(data); - self.region_maps.record_code_extent(code_extent, parent); - code_extent - } - - pub fn intern_node(&mut self, - n: ast::NodeId, - parent: Option>) -> CodeExtent<'tcx> { - self.intern_code_extent(CodeExtentData::Misc(n), parent) - } - /// Records the current parent (if any) as the parent of `child_scope`. - fn new_code_extent(&mut self, child_scope: CodeExtentData) -> CodeExtent<'tcx> { + fn record_code_extent(&mut self, child_scope: CodeExtent) { let parent = self.cx.parent; - self.intern_code_extent(child_scope, parent) + self.region_maps.record_code_extent(child_scope, parent); } - fn new_node_extent(&mut self, child_scope: ast::NodeId) -> CodeExtent<'tcx> { - self.new_code_extent(CodeExtentData::Misc(child_scope)) + /// Records the current parent (if any) as the parent of `child_scope`, + /// and sets `child_scope` as the new current parent. + fn enter_code_extent(&mut self, child_scope: CodeExtent) { + self.record_code_extent(child_scope); + self.cx.parent = Some(child_scope); } - fn new_node_extent_with_dtor(&mut self, id: ast::NodeId) -> CodeExtent<'tcx> { + fn enter_node_extent_with_dtor(&mut self, id: ast::NodeId) { // If node was previously marked as a terminating scope during the // recursive visit of its parent node in the AST, then we need to // account for the destruction scope representing the extent of // the destructors that run immediately after it completes. if self.terminating_scopes.contains(&id) { - let ds = self.new_code_extent( - CodeExtentData::DestructionScope(id)); - self.intern_node(id, Some(ds)) - } else { - self.new_node_extent(id) + self.enter_code_extent(CodeExtent::DestructionScope(id)); } + self.enter_code_extent(CodeExtent::Misc(id)); } } @@ -1105,7 +1103,7 @@ fn visit_block(&mut self, b: &'tcx Block) { fn visit_body(&mut self, body: &'tcx hir::Body) { let body_id = body.id(); - let owner_id = self.map.body_owner(body_id); + let owner_id = self.tcx.hir.body_owner(body_id); debug!("visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})", owner_id, @@ -1127,10 +1125,8 @@ fn visit_body(&mut self, body: &'tcx hir::Body) { } self.cx.root_id = Some(body_id.node_id); - self.cx.parent = Some(self.new_code_extent( - CodeExtentData::CallSiteScope { fn_id: owner_id, body_id: body_id.node_id })); - self.cx.parent = Some(self.new_code_extent( - CodeExtentData::ParameterScope { fn_id: owner_id, body_id: body_id.node_id })); + self.enter_code_extent(CodeExtent::CallSiteScope(body_id)); + self.enter_code_extent(CodeExtent::ParameterScope(body_id)); // The arguments and `self` are parented to the fn. 
self.cx.var_parent = self.cx.parent.take(); @@ -1165,21 +1161,18 @@ fn visit_local(&mut self, l: &'tcx Local) { } fn region_maps<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> Rc> + -> Rc { let closure_base_def_id = tcx.closure_base_def_id(def_id); if closure_base_def_id != def_id { return tcx.region_maps(closure_base_def_id); } - let mut maps = RegionMaps::new(); - let id = tcx.hir.as_local_node_id(def_id).unwrap(); - if let Some(body) = tcx.hir.maybe_body_owned_by(id) { + let maps = if let Some(body) = tcx.hir.maybe_body_owned_by(id) { let mut visitor = RegionResolutionVisitor { - tcx: tcx, - region_maps: &mut maps, - map: &tcx.hir, + tcx, + region_maps: RegionMaps::new(), cx: Context { root_id: None, parent: None, @@ -1188,8 +1181,25 @@ fn region_maps<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) terminating_scopes: NodeSet(), }; + visitor.region_maps.root_body = Some(body); + + // If the item is an associated const or a method, + // record its impl/trait parent, as it can also have + // lifetime parameters free in this body. + match tcx.hir.get(id) { + hir::map::NodeImplItem(_) | + hir::map::NodeTraitItem(_) => { + visitor.region_maps.root_parent = Some(tcx.hir.get_parent(id)); + } + _ => {} + } + visitor.visit_body(tcx.hir.body(body)); - } + + visitor.region_maps + } else { + RegionMaps::new() + }; Rc::new(maps) } diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 67b8dfb2d8e..7d7308d73bb 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -19,7 +19,6 @@ use session::Session; use hir::def::Def; use hir::def_id::DefId; -use middle::region; use ty; use std::cell::Cell; @@ -42,7 +41,7 @@ pub enum Region { EarlyBound(/* index */ u32, /* lifetime decl */ ast::NodeId), LateBound(ty::DebruijnIndex, /* lifetime decl */ ast::NodeId), LateBoundAnon(ty::DebruijnIndex, /* anon index */ u32), - Free(region::CallSiteScopeData, /* lifetime decl */ ast::NodeId), + Free(DefId, /* lifetime decl */ ast::NodeId), } impl Region { @@ -895,11 +894,10 @@ fn resolve_lifetime_ref(&mut self, lifetime_ref: &hir::Lifetime) { }; if let Some(mut def) = result { - if let Some(body_id) = outermost_body { + if let Region::EarlyBound(..) = def { + // Do not free early-bound regions, only late-bound ones. + } else if let Some(body_id) = outermost_body { let fn_id = self.hir_map.body_owner(body_id); - let scope_data = region::CallSiteScopeData { - fn_id: fn_id, body_id: body_id.node_id - }; match self.hir_map.get(fn_id) { hir::map::NodeItem(&hir::Item { node: hir::ItemFn(..), .. @@ -910,7 +908,8 @@ fn resolve_lifetime_ref(&mut self, lifetime_ref: &hir::Lifetime) { hir::map::NodeImplItem(&hir::ImplItem { node: hir::ImplItemKind::Method(..), .. 
}) => { - def = Region::Free(scope_data, def.id().unwrap()); + let scope = self.hir_map.local_def_id(fn_id); + def = Region::Free(scope, def.id().unwrap()); } _ => {} } diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 79bb20599ca..7cb5f2510d5 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -328,7 +328,7 @@ pub struct Options { } ); -#[derive(Clone, PartialEq, Eq)] +#[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum PrintRequest { FileNames, Sysroot, @@ -963,7 +963,7 @@ fn parse_optimization_fuel(slot: &mut Option<(String, u64)>, v: Option<&str>) -> "attempt to recover from parse errors (experimental)"), incremental: Option = (None, parse_opt_string, [UNTRACKED], "enable incremental compilation (experimental)"), - incremental_cc: bool = (false, parse_bool, [UNTRACKED], + incremental_cc: bool = (true, parse_bool, [UNTRACKED], "enable cross-crate incremental compilation (even more experimental)"), incremental_info: bool = (false, parse_bool, [UNTRACKED], "print high-level information about incremental reuse (or the lack thereof)"), diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 2e2d5a6bd4d..814246330a4 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -37,19 +37,16 @@ use rustc_back::{LinkerFlavor, PanicStrategy}; use rustc_back::target::Target; use rustc_data_structures::flock; -use llvm; use std::path::{Path, PathBuf}; use std::cell::{self, Cell, RefCell}; use std::collections::HashMap; use std::env; -use std::ffi::CString; use std::io::Write; use std::rc::Rc; use std::fmt; use std::time::Duration; use std::sync::Arc; -use libc::c_int; mod code_stats; pub mod config; @@ -713,8 +710,6 @@ pub fn build_session_(sopts: config::Options, out_of_fuel: Cell::new(false), }; - init_llvm(&sess); - sess } @@ -743,55 +738,6 @@ pub enum IncrCompSession { } } -fn init_llvm(sess: &Session) { - unsafe { - // Before we touch LLVM, make sure that multithreading is enabled. - use std::sync::Once; - static INIT: Once = Once::new(); - static mut POISONED: bool = false; - INIT.call_once(|| { - if llvm::LLVMStartMultithreaded() != 1 { - // use an extra bool to make sure that all future usage of LLVM - // cannot proceed despite the Once not running more than once. - POISONED = true; - } - - configure_llvm(sess); - }); - - if POISONED { - bug!("couldn't enable multi-threaded LLVM"); - } - } -} - -unsafe fn configure_llvm(sess: &Session) { - let mut llvm_c_strs = Vec::new(); - let mut llvm_args = Vec::new(); - - { - let mut add = |arg: &str| { - let s = CString::new(arg).unwrap(); - llvm_args.push(s.as_ptr()); - llvm_c_strs.push(s); - }; - add("rustc"); // fake program name - if sess.time_llvm_passes() { add("-time-passes"); } - if sess.print_llvm_passes() { add("-debug-pass=Structure"); } - - for arg in &sess.opts.cg.llvm_args { - add(&(*arg)); - } - } - - llvm::LLVMInitializePasses(); - - llvm::initialize_available_targets(); - - llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int, - llvm_args.as_ptr()); -} - pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! 
{ let emitter: Box = match output { config::ErrorOutputType::HumanReadable(color_config) => { diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 2f525e1b8b4..1823373348b 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -619,8 +619,6 @@ pub fn get_vtable_methods<'a, 'tcx>( debug!("get_vtable_methods({:?})", trait_ref); supertraits(tcx, trait_ref).flat_map(move |trait_ref| { - tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id()); - let trait_methods = tcx.associated_items(trait_ref.def_id()) .filter(|item| item.kind == ty::AssociatedKind::Method); @@ -782,3 +780,19 @@ fn self_ty(&self) -> ty::Binder> { ty::Binder(self.predicate.skip_binder().self_ty()) } } + +pub fn provide(providers: &mut ty::maps::Providers) { + *providers = ty::maps::Providers { + is_object_safe: object_safety::is_object_safe_provider, + specialization_graph_of: specialize::specialization_graph_provider, + ..*providers + }; +} + +pub fn provide_extern(providers: &mut ty::maps::Providers) { + *providers = ty::maps::Providers { + is_object_safe: object_safety::is_object_safe_provider, + specialization_graph_of: specialize::specialization_graph_provider, + ..*providers + }; +} diff --git a/src/librustc/traits/object_safety.rs b/src/librustc/traits/object_safety.rs index ea1a2f9a982..0e3a53129d1 100644 --- a/src/librustc/traits/object_safety.rs +++ b/src/librustc/traits/object_safety.rs @@ -77,25 +77,6 @@ pub enum MethodViolationCode { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - pub fn is_object_safe(self, trait_def_id: DefId) -> bool { - // Because we query yes/no results frequently, we keep a cache: - let def = self.trait_def(trait_def_id); - - let result = def.object_safety().unwrap_or_else(|| { - let result = self.object_safety_violations(trait_def_id).is_empty(); - - // Record just a yes/no result in the cache; this is what is - // queried most frequently. Note that this may overwrite a - // previous result, but always with the same thing. - def.set_object_safety(result); - - result - }); - - debug!("is_object_safe({:?}) = {}", trait_def_id, result); - - result - } /// Returns the object safety violations that affect /// astconv - currently, Self in supertraits. This is needed @@ -206,9 +187,8 @@ fn generics_require_sized_self(self, def_id: DefId) -> bool { }; // Search for a predicate like `Self : Sized` amongst the trait bounds. - let free_substs = self.construct_free_substs(def_id, None); let predicates = self.predicates_of(def_id); - let predicates = predicates.instantiate(self, free_substs).predicates; + let predicates = predicates.instantiate_identity(self).predicates; elaborate_predicates(self, predicates) .any(|predicate| { match predicate { @@ -392,3 +372,9 @@ fn contains_illegal_self_type_reference(self, error } } + +pub(super) fn is_object_safe_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + trait_def_id: DefId) + -> bool { + tcx.object_safety_violations(trait_def_id).is_empty() +} diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index e01f97eb1f3..d7911870f39 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -900,96 +900,50 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>( // In either case, we handle this by not adding a // candidate for an impl if it contains a `default` // type. 
- let opt_node_item = assoc_ty_def(selcx, - impl_data.impl_def_id, - obligation.predicate.item_name); - let new_candidate = if let Some(node_item) = opt_node_item { - let is_default = if node_item.node.is_from_trait() { - // If true, the impl inherited a `type Foo = Bar` - // given in the trait, which is implicitly default. - // Otherwise, the impl did not specify `type` and - // neither did the trait: - // - // ```rust - // trait Foo { type T; } - // impl Foo for Bar { } - // ``` - // - // This is an error, but it will be - // reported in `check_impl_items_against_trait`. - // We accept it here but will flag it as - // an error when we confirm the candidate - // (which will ultimately lead to `normalize_to_error` - // being invoked). - node_item.item.defaultness.has_value() - } else { - node_item.item.defaultness.is_default() || - selcx.tcx().impl_is_default(node_item.node.def_id()) - }; - - // Only reveal a specializable default if we're past type-checking - // and the obligations is monomorphic, otherwise passes such as - // transmute checking and polymorphic MIR optimizations could - // get a result which isn't correct for all monomorphizations. - if !is_default { + let node_item = assoc_ty_def(selcx, + impl_data.impl_def_id, + obligation.predicate.item_name); + + let is_default = if node_item.node.is_from_trait() { + // If true, the impl inherited a `type Foo = Bar` + // given in the trait, which is implicitly default. + // Otherwise, the impl did not specify `type` and + // neither did the trait: + // + // ```rust + // trait Foo { type T; } + // impl Foo for Bar { } + // ``` + // + // This is an error, but it will be + // reported in `check_impl_items_against_trait`. + // We accept it here but will flag it as + // an error when we confirm the candidate + // (which will ultimately lead to `normalize_to_error` + // being invoked). + node_item.item.defaultness.has_value() + } else { + node_item.item.defaultness.is_default() || + selcx.tcx().impl_is_default(node_item.node.def_id()) + }; + + // Only reveal a specializable default if we're past type-checking + // and the obligations is monomorphic, otherwise passes such as + // transmute checking and polymorphic MIR optimizations could + // get a result which isn't correct for all monomorphizations. + let new_candidate = if !is_default { + Some(ProjectionTyCandidate::Select) + } else if selcx.projection_mode() == Reveal::All { + assert!(!poly_trait_ref.needs_infer()); + if !poly_trait_ref.needs_subst() { Some(ProjectionTyCandidate::Select) - } else if selcx.projection_mode() == Reveal::All { - assert!(!poly_trait_ref.needs_infer()); - if !poly_trait_ref.needs_subst() { - Some(ProjectionTyCandidate::Select) - } else { - None - } } else { None } } else { - // This is saying that neither the trait nor - // the impl contain a definition for this - // associated type. Normally this situation - // could only arise through a compiler bug -- - // if the user wrote a bad item name, it - // should have failed in astconv. **However**, - // at coherence-checking time, we only look at - // the topmost impl (we don't even consider - // the trait itself) for the definition -- and - // so in that case it may be that the trait - // *DOES* have a declaration, but we don't see - // it, and we end up in this branch. - // - // This is kind of tricky to handle actually. 
- // For now, we just unconditionally ICE, - // because otherwise, examples like the - // following will succeed: - // - // ``` - // trait Assoc { - // type Output; - // } - // - // impl Assoc for T { - // default type Output = bool; - // } - // - // impl Assoc for u8 {} - // impl Assoc for u16 {} - // - // trait Foo {} - // impl Foo for ::Output {} - // impl Foo for ::Output {} - // return None; - // } - // ``` - // - // The essential problem here is that the - // projection fails, leaving two unnormalized - // types, which appear not to unify -- so the - // overlap check succeeds, when it should - // fail. - span_bug!(obligation.cause.span, - "Tried to project an inherited associated type during \ - coherence checking, which is currently not supported."); + None }; + candidate_set.vec.extend(new_candidate); } super::VtableParam(..) => { @@ -1274,35 +1228,25 @@ fn confirm_impl_candidate<'cx, 'gcx, 'tcx>( let VtableImplData { substs, nested, impl_def_id } = impl_vtable; let tcx = selcx.tcx(); - let trait_ref = obligation.predicate.trait_ref; let assoc_ty = assoc_ty_def(selcx, impl_def_id, obligation.predicate.item_name); - match assoc_ty { - Some(node_item) => { - let ty = if !node_item.item.defaultness.has_value() { - // This means that the impl is missing a definition for the - // associated type. This error will be reported by the type - // checker method `check_impl_items_against_trait`, so here we - // just return TyError. - debug!("confirm_impl_candidate: no associated type {:?} for {:?}", - node_item.item.name, - obligation.predicate.trait_ref); - tcx.types.err - } else { - tcx.type_of(node_item.item.def_id) - }; - let substs = translate_substs(selcx.infcx(), impl_def_id, substs, node_item.node); - Progress { - ty: ty.subst(tcx, substs), - obligations: nested, - cacheable: true - } - } - None => { - span_bug!(obligation.cause.span, - "No associated type for {:?}", - trait_ref); - } + let ty = if !assoc_ty.item.defaultness.has_value() { + // This means that the impl is missing a definition for the + // associated type. This error will be reported by the type + // checker method `check_impl_items_against_trait`, so here we + // just return TyError. 
+ debug!("confirm_impl_candidate: no associated type {:?} for {:?}", + assoc_ty.item.name, + obligation.predicate.trait_ref); + tcx.types.err + } else { + tcx.type_of(assoc_ty.item.def_id) + }; + let substs = translate_substs(selcx.infcx(), impl_def_id, substs, assoc_ty.node); + Progress { + ty: ty.subst(tcx, substs), + obligations: nested, + cacheable: true } } @@ -1315,27 +1259,43 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>( selcx: &SelectionContext<'cx, 'gcx, 'tcx>, impl_def_id: DefId, assoc_ty_name: ast::Name) - -> Option> + -> specialization_graph::NodeItem { - let trait_def_id = selcx.tcx().impl_trait_ref(impl_def_id).unwrap().def_id; - let trait_def = selcx.tcx().trait_def(trait_def_id); - - if !trait_def.is_complete(selcx.tcx()) { - let impl_node = specialization_graph::Node::Impl(impl_def_id); - for item in impl_node.items(selcx.tcx()) { - if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name { - return Some(specialization_graph::NodeItem { - node: specialization_graph::Node::Impl(impl_def_id), - item: item, - }); - } + let tcx = selcx.tcx(); + let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id; + let trait_def = tcx.trait_def(trait_def_id); + + // This function may be called while we are still building the + // specialization graph that is queried below (via TraidDef::ancestors()), + // so, in order to avoid unnecessary infinite recursion, we manually look + // for the associated item at the given impl. + // If there is no such item in that impl, this function will fail with a + // cycle error if the specialization graph is currently being built. + let impl_node = specialization_graph::Node::Impl(impl_def_id); + for item in impl_node.items(tcx) { + if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name { + return specialization_graph::NodeItem { + node: specialization_graph::Node::Impl(impl_def_id), + item: item, + }; } - None + } + + if let Some(assoc_item) = trait_def + .ancestors(tcx, impl_def_id) + .defs(tcx, assoc_ty_name, ty::AssociatedKind::Type) + .next() { + assoc_item } else { - trait_def - .ancestors(impl_def_id) - .defs(selcx.tcx(), assoc_ty_name, ty::AssociatedKind::Type) - .next() + // This is saying that neither the trait nor + // the impl contain a definition for this + // associated type. Normally this situation + // could only arise through a compiler bug -- + // if the user wrote a bad item name, it + // should have failed in astconv. 
+ bug!("No associated type `{}` for {}", + assoc_ty_name, + tcx.item_path_str(impl_def_id)) } } diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index d5d17e3c812..0e5779f9d17 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -27,6 +27,7 @@ use traits::{self, Reveal, ObligationCause}; use ty::{self, TyCtxt, TypeFoldable}; use syntax_pos::DUMMY_SP; +use std::rc::Rc; pub mod specialization_graph; @@ -118,7 +119,7 @@ pub fn find_associated_item<'a, 'tcx>( let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap(); let trait_def = tcx.trait_def(trait_def_id); - let ancestors = trait_def.ancestors(impl_data.impl_def_id); + let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id); match ancestors.defs(tcx, item.name, item.kind).next() { Some(node_item) => { let substs = tcx.infer_ctxt((), Reveal::All).enter(|infcx| { @@ -179,12 +180,8 @@ pub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } // create a parameter environment corresponding to a (skolemized) instantiation of impl1 - let penv = tcx.construct_parameter_environment(DUMMY_SP, - impl1_def_id, - None); - let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id) - .unwrap() - .subst(tcx, &penv.free_substs); + let penv = tcx.parameter_environment(impl1_def_id); + let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id).unwrap(); // Create a infcx, taking the predicates of impl1 as assumptions: let result = tcx.infer_ctxt(penv, Reveal::UserFacing).enter(|infcx| { @@ -289,3 +286,62 @@ pub fn insert(&mut self, a: DefId, b: DefId, result: bool) { self.map.insert((a, b), result); } } + +// Query provider for `specialization_graph_of`. +pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + trait_id: DefId) + -> Rc { + let mut sg = specialization_graph::Graph::new(); + + let mut trait_impls: Vec = tcx.trait_impls_of(trait_id).iter().collect(); + + // The coherence checking implementation seems to rely on impls being + // iterated over (roughly) in definition order, so we are sorting by + // negated CrateNum (so remote definitions are visited first) and then + // by a flattend version of the DefIndex. + trait_impls.sort_unstable_by_key(|def_id| { + (-(def_id.krate.as_u32() as i64), + def_id.index.address_space().index(), + def_id.index.as_array_index()) + }); + + for impl_def_id in trait_impls { + if impl_def_id.is_local() { + // This is where impl overlap checking happens: + let insert_result = sg.insert(tcx, impl_def_id); + // Report error if there was one. 
+ if let Err(overlap) = insert_result { + let mut err = struct_span_err!(tcx.sess, + tcx.span_of_impl(impl_def_id).unwrap(), + E0119, + "conflicting implementations of trait `{}`{}:", + overlap.trait_desc, + overlap.self_desc.clone().map_or(String::new(), + |ty| { + format!(" for type `{}`", ty) + })); + + match tcx.span_of_impl(overlap.with_impl) { + Ok(span) => { + err.span_label(span, format!("first implementation here")); + err.span_label(tcx.span_of_impl(impl_def_id).unwrap(), + format!("conflicting implementation{}", + overlap.self_desc + .map_or(String::new(), + |ty| format!(" for `{}`", ty)))); + } + Err(cname) => { + err.note(&format!("conflicting implementation in crate `{}`", cname)); + } + } + + err.emit(); + } + } else { + let parent = tcx.impl_parent(impl_def_id).unwrap_or(trait_id); + sg.record_impl_from_cstore(tcx, parent, impl_def_id) + } + } + + Rc::new(sg) +} diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs index 6e2c16c82ae..87c98a0ef0e 100644 --- a/src/librustc/traits/specialize/specialization_graph.rs +++ b/src/librustc/traits/specialize/specialization_graph.rs @@ -12,8 +12,9 @@ use hir::def_id::DefId; use traits::{self, Reveal}; -use ty::{self, TyCtxt, TraitDef, TypeFoldable}; +use ty::{self, TyCtxt, TypeFoldable}; use ty::fast_reject::{self, SimplifiedType}; +use std::rc::Rc; use syntax::ast::Name; use util::nodemap::{DefIdMap, FxHashMap}; @@ -301,18 +302,19 @@ pub fn def_id(&self) -> DefId { } } -pub struct Ancestors<'a> { - trait_def: &'a TraitDef, +pub struct Ancestors { + trait_def_id: DefId, + specialization_graph: Rc, current_source: Option, } -impl<'a> Iterator for Ancestors<'a> { +impl Iterator for Ancestors { type Item = Node; fn next(&mut self) -> Option { let cur = self.current_source.take(); if let Some(Node::Impl(cur_impl)) = cur { - let parent = self.trait_def.specialization_graph.borrow().parent(cur_impl); - if parent == self.trait_def.def_id { + let parent = self.specialization_graph.parent(cur_impl); + if parent == self.trait_def_id { self.current_source = Some(Node::Trait(parent)); } else { self.current_source = Some(Node::Impl(parent)); @@ -336,7 +338,7 @@ pub fn map U>(self, f: F) -> NodeItem { } } -impl<'a, 'gcx, 'tcx> Ancestors<'a> { +impl<'a, 'gcx, 'tcx> Ancestors { /// Search the items from the given ancestors, returning each definition /// with the given name and the given kind. #[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait. @@ -351,9 +353,14 @@ pub fn defs(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, name: Name, kind: ty::AssociatedK /// Walk up the specialization ancestors of a given impl, starting with that /// impl itself. -pub fn ancestors<'a>(trait_def: &'a TraitDef, start_from_impl: DefId) -> Ancestors<'a> { +pub fn ancestors(tcx: TyCtxt, + trait_def_id: DefId, + start_from_impl: DefId) + -> Ancestors { + let specialization_graph = tcx.specialization_graph_of(trait_def_id); Ancestors { - trait_def: trait_def, + trait_def_id, + specialization_graph, current_source: Some(Node::Impl(start_from_impl)), } } diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs index 1d10c3a9695..3f5cf7eca53 100644 --- a/src/librustc/traits/util.rs +++ b/src/librustc/traits/util.rs @@ -197,7 +197,7 @@ fn push(&mut self, predicate: &ty::Predicate<'tcx>) { // I want to be conservative. 
--nmatsakis let ty_max = data.skip_binder().0; let r_min = data.skip_binder().1; - if r_min.is_bound() { + if r_min.is_late_bound() { return; } @@ -206,7 +206,7 @@ fn push(&mut self, predicate: &ty::Predicate<'tcx>) { tcx.outlives_components(ty_max) .into_iter() .filter_map(|component| match component { - Component::Region(r) => if r.is_bound() { + Component::Region(r) => if r.is_late_bound() { None } else { Some(ty::Predicate::RegionOutlives( diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 74aac7b788b..b9355c264b3 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -21,7 +21,6 @@ use hir::map::DisambiguatedDefPathData; use middle::free_region::FreeRegionMap; use middle::lang_items; -use middle::region::{CodeExtent, CodeExtentData}; use middle::resolve_lifetime; use middle::stability; use mir::Mir; @@ -99,7 +98,7 @@ pub struct CtxtInterners<'tcx> { type_: RefCell>>>, type_list: RefCell>>>>, substs: RefCell>>>, - region: RefCell>>>, + region: RefCell>>, existential_predicates: RefCell>>>>, predicates: RefCell>>>>, } @@ -548,8 +547,6 @@ pub struct GlobalCtxt<'tcx> { layout_interner: RefCell>, - code_extent_interner: RefCell>>, - /// A vector of every trait accessible in the whole crate /// (i.e. including those from subcrates). This is used only for /// error reporting, and so is lazily initialised and generally @@ -651,32 +648,6 @@ pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability { interned } - pub fn node_extent(self, n: ast::NodeId) -> CodeExtent<'gcx> { - self.intern_code_extent(CodeExtentData::Misc(n)) - } - - // Returns the code extent for an item - the destruction scope. - pub fn item_extent(self, n: ast::NodeId) -> CodeExtent<'gcx> { - self.intern_code_extent(CodeExtentData::DestructionScope(n)) - } - - pub fn call_site_extent(self, fn_id: ast::NodeId, body_id: ast::NodeId) -> CodeExtent<'gcx> { - assert!(fn_id != body_id); - self.intern_code_extent(CodeExtentData::CallSiteScope { fn_id: fn_id, body_id: body_id }) - } - - pub fn intern_code_extent(self, data: CodeExtentData) -> CodeExtent<'gcx> { - if let Some(st) = self.code_extent_interner.borrow().get(&data) { - return st; - } - - let interned = self.global_interners.arena.alloc(data); - if let Some(prev) = self.code_extent_interner.borrow_mut().replace(interned) { - bug!("Tried to overwrite interned code-extent: {:?}", prev) - } - interned - } - pub fn intern_layout(self, layout: Layout) -> &'gcx Layout { if let Some(layout) = self.layout_interner.borrow().get(&layout) { return layout; @@ -764,7 +735,6 @@ pub fn create_and_enter(s: &'tcx Session, data_layout: data_layout, layout_cache: RefCell::new(FxHashMap()), layout_interner: RefCell::new(FxHashSet()), - code_extent_interner: RefCell::new(FxHashSet()), layout_depth: Cell::new(0), derive_macros: RefCell::new(NodeMap()), stability_interner: RefCell::new(FxHashSet()), @@ -843,15 +813,6 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Sub } } -impl<'a, 'tcx> Lift<'tcx> for ty::FreeRegion<'a> { - type Lifted = ty::FreeRegion<'tcx>; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - let scope = self.scope.map(|code_extent| tcx.intern_code_extent(*code_extent)); - let bound_region = self.bound_region; - Some(ty::FreeRegion { scope, bound_region }) - } -} - impl<'a, 'tcx> Lift<'tcx> for Region<'a> { type Lifted = Region<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option> { @@ -1115,8 +1076,8 @@ fn borrow<'a>(&'a self) -> 
&'a [Kind<'lcx>] { } } -impl<'tcx> Borrow> for Interned<'tcx, RegionKind<'tcx>> { - fn borrow<'a>(&'a self) -> &'a RegionKind<'tcx> { +impl<'tcx> Borrow for Interned<'tcx, RegionKind> { + fn borrow<'a>(&'a self) -> &'a RegionKind { &self.0 } } @@ -1215,7 +1176,7 @@ fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool { &ty::ReVar(_) | &ty::ReSkolemized(..) => true, _ => false } - }) -> RegionKind<'tcx> + }) -> RegionKind ); macro_rules! slice_interners { diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs index 21ccf6f987b..6de3c018bda 100644 --- a/src/librustc/ty/fold.rs +++ b/src/librustc/ty/fold.rs @@ -39,7 +39,6 @@ //! These methods return true to indicate that the visitor has found what it is looking for //! and does not need to visit anything else. -use middle::region; use ty::subst::Substs; use ty::adjustment; use ty::{self, Binder, Ty, TyCtxt, TypeFlags}; @@ -326,23 +325,6 @@ pub fn replace_late_bound_regions(self, (result, replacer.map) } - - /// Replace any late-bound regions bound in `value` with free variants attached to scope-id - /// `scope_id`. - pub fn liberate_late_bound_regions(self, - all_outlive_scope: Option>, - value: &Binder) - -> T - where T : TypeFoldable<'tcx> - { - self.replace_late_bound_regions(value, |br| { - self.mk_region(ty::ReFree(ty::FreeRegion { - scope: all_outlive_scope, - bound_region: br - })) - }).0 - } - /// Flattens two binding levels into one. So `for<'a> for<'b> Foo` /// becomes `for<'a,'b> Foo`. pub fn flatten_late_bound_regions(self, bound2_value: &Binder>) @@ -554,7 +536,7 @@ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { // regions. See comment on `shift_regions_through_binders` method in // `subst.rs` for more details. -pub fn shift_region<'tcx>(region: ty::RegionKind<'tcx>, amount: u32) -> ty::RegionKind<'tcx> { +pub fn shift_region(region: ty::RegionKind, amount: u32) -> ty::RegionKind { match region { ty::ReLateBound(debruijn, br) => { ty::ReLateBound(debruijn.shifted(amount), br) diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 480b8967a79..bd38a6c3fd3 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -1268,11 +1268,10 @@ pub fn compute_uncached(ty: Ty<'gcx>, let kind = if def.is_enum() || def.variants[0].fields.len() == 0{ StructKind::AlwaysSizedUnivariant } else { - let param_env = tcx.construct_parameter_environment(DUMMY_SP, - def.did, None); + let param_env = tcx.parameter_environment(def.did); let fields = &def.variants[0].fields; let last_field = &fields[fields.len()-1]; - let always_sized = last_field.ty(tcx, param_env.free_substs) + let always_sized = tcx.type_of(last_field.did) .is_sized(tcx, ¶m_env, DUMMY_SP); if !always_sized { StructKind::MaybeUnsizedUnivariant } else { StructKind::AlwaysSizedUnivariant } diff --git a/src/librustc/ty/maps.rs b/src/librustc/ty/maps.rs index 3b5dc2ae164..85462bd9b12 100644 --- a/src/librustc/ty/maps.rs +++ b/src/librustc/ty/maps.rs @@ -18,10 +18,12 @@ use mir; use mir::transform::{MirSuite, MirPassIndex}; use session::CompileResult; +use traits::specialization_graph; use ty::{self, CrateInherentImpls, Ty, TyCtxt}; use ty::item_path; use ty::steal::Steal; use ty::subst::Substs; +use ty::fast_reject::SimplifiedType; use util::nodemap::{DefIdSet, NodeSet}; use rustc_data_structures::indexed_vec::IndexVec; @@ -98,6 +100,15 @@ fn default_span(&self, tcx: TyCtxt) -> Span { } } +impl Key for (DefId, SimplifiedType) { + fn map_crate(&self) -> CrateNum { + self.0.krate + } + fn default_span(&self, tcx: TyCtxt) -> 
Span { + self.0.default_span(tcx) + } +} + impl<'tcx> Key for (DefId, &'tcx Substs<'tcx>) { fn map_crate(&self) -> CrateNum { self.0.krate @@ -391,6 +402,24 @@ fn describe(tcx: TyCtxt, def_id: DefId) -> String { } } +impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> { + fn describe(tcx: TyCtxt, def_id: DefId) -> String { + format!("trait impls of `{}`", tcx.item_path_str(def_id)) + } +} + +impl<'tcx> QueryDescription for queries::relevant_trait_impls_for<'tcx> { + fn describe(tcx: TyCtxt, (def_id, ty): (DefId, SimplifiedType)) -> String { + format!("relevant impls for: `({}, {:?})`", tcx.item_path_str(def_id), ty) + } +} + +impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> { + fn describe(tcx: TyCtxt, def_id: DefId) -> String { + format!("determine object safety of trait `{}`", tcx.item_path_str(def_id)) + } +} + macro_rules! define_maps { (<$tcx:tt> $($(#[$attr:meta])* @@ -592,7 +621,7 @@ pub struct Maps<$tcx> { output: $output:tt) => { define_map_struct! { tcx: $tcx, - ready: ([pub] $attrs $name), + ready: ([] $attrs $name), input: ($($input)*), output: $output } @@ -801,7 +830,7 @@ fn default() -> Self { /// Per-function `RegionMaps`. The `DefId` should be the owner-def-id for the fn body; /// in the case of closures or "inline" expressions, this will be redirected to the enclosing /// fn item. - [] region_maps: RegionMaps(DefId) -> Rc<RegionMaps<'tcx>>, + [] region_maps: RegionMaps(DefId) -> Rc<RegionMaps>, [] mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx mir::Mir<'tcx>, @@ -820,6 +849,13 @@ fn default() -> Self { [] item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> Rc>, [] const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool, [] is_mir_available: IsMirAvailable(DefId) -> bool, + + [] trait_impls_of: TraitImpls(DefId) -> ty::trait_def::TraitImpls, + // Note that TraitDef::for_each_relevant_impl() will do type simplification for you.
+ [] relevant_trait_impls_for: relevant_trait_impls_for((DefId, SimplifiedType)) + -> ty::trait_def::TraitImpls, + [] specialization_graph_of: SpecializationGraph(DefId) -> Rc, + [] is_object_safe: ObjectSafety(DefId) -> bool, } fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode { @@ -859,3 +895,7 @@ fn mir_keys(_: CrateNum) -> DepNode { fn crate_variances(_: CrateNum) -> DepNode { DepNode::CrateVariances } + +fn relevant_trait_impls_for((def_id, _): (DefId, SimplifiedType)) -> DepNode { + DepNode::TraitImpls(def_id) +} diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index f5d510c11ae..a86d7351ef4 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -23,8 +23,8 @@ use middle::const_val::ConstVal; use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem}; use middle::privacy::AccessLevels; -use middle::region::CodeExtent; use middle::resolve_lifetime::ObjectLifetimeDefault; +use middle::region::CodeExtent; use mir::Mir; use traits; use ty; @@ -80,7 +80,7 @@ pub use self::instance::{Instance, InstanceDef}; -pub use self::trait_def::{TraitDef, TraitFlags}; +pub use self::trait_def::TraitDef; pub use self::maps::queries; @@ -732,11 +732,18 @@ pub struct RegionParameterDef { impl RegionParameterDef { pub fn to_early_bound_region_data(&self) -> ty::EarlyBoundRegion { ty::EarlyBoundRegion { + def_id: self.def_id, index: self.index, name: self.name, } } + pub fn to_bound_region(&self) -> ty::BoundRegion { + self.to_early_bound_region_data().to_bound_region() + } +} + +impl ty::EarlyBoundRegion { pub fn to_bound_region(&self) -> ty::BoundRegion { ty::BoundRegion::BrNamed(self.def_id, self.name) } @@ -816,6 +823,21 @@ fn instantiate_into(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, instantiated.predicates.extend(self.predicates.iter().map(|p| p.subst(tcx, substs))) } + pub fn instantiate_identity(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) + -> InstantiatedPredicates<'tcx> { + let mut instantiated = InstantiatedPredicates::empty(); + self.instantiate_identity_into(tcx, &mut instantiated); + instantiated + } + + fn instantiate_identity_into(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + instantiated: &mut InstantiatedPredicates<'tcx>) { + if let Some(def_id) = self.parent { + tcx.predicates_of(def_id).instantiate_identity_into(tcx, instantiated); + } + instantiated.predicates.extend(&self.predicates) + } + pub fn instantiate_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, poly_trait_ref: &ty::PolyTraitRef<'tcx>) -> InstantiatedPredicates<'tcx> @@ -1241,31 +1263,11 @@ pub fn is_empty(&self) -> bool { /// more distinctions clearer. #[derive(Clone)] pub struct ParameterEnvironment<'tcx> { - /// See `construct_free_substs` for details. - pub free_substs: &'tcx Substs<'tcx>, - - /// Each type parameter has an implicit region bound that - /// indicates it must outlive at least the function body (the user - /// may specify stronger requirements). This field indicates the - /// region of the callee. If it is `None`, then the parameter - /// environment is for an item or something where the "callee" is - /// not clear. - pub implicit_region_bound: Option>, - /// Obligations that the caller must satisfy. This is basically /// the set of bounds on the in-scope type parameters, translated /// into Obligations, and elaborated and normalized. pub caller_bounds: &'tcx [ty::Predicate<'tcx>], - /// Scope that is attached to free regions for this scope. 
This is - /// usually the id of the fn body, but for more abstract scopes - /// like structs we use None or the item extent. - /// - /// FIXME(#3696). It would be nice to refactor so that free - /// regions don't have this implicit scope and instead introduce - /// relationships in the environment. - pub free_id_outlive: Option>, - /// A cache for `moves_by_default`. pub is_copy_cache: RefCell, bool>>, @@ -1282,120 +1284,12 @@ pub fn with_caller_bounds(&self, -> ParameterEnvironment<'tcx> { ParameterEnvironment { - free_substs: self.free_substs, - implicit_region_bound: self.implicit_region_bound, caller_bounds: caller_bounds, - free_id_outlive: self.free_id_outlive, is_copy_cache: RefCell::new(FxHashMap()), is_sized_cache: RefCell::new(FxHashMap()), is_freeze_cache: RefCell::new(FxHashMap()), } } - - /// Construct a parameter environment given an item, impl item, or trait item - pub fn for_item(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: NodeId) - -> ParameterEnvironment<'tcx> { - match tcx.hir.find(id) { - Some(hir_map::NodeImplItem(ref impl_item)) => { - match impl_item.node { - hir::ImplItemKind::Type(_) => { - // associated types don't have their own entry (for some reason), - // so for now just grab environment for the impl - let impl_id = tcx.hir.get_parent(id); - let impl_def_id = tcx.hir.local_def_id(impl_id); - tcx.construct_parameter_environment(impl_item.span, - impl_def_id, - Some(tcx.item_extent(id))) - } - hir::ImplItemKind::Const(_, body) | - hir::ImplItemKind::Method(_, body) => { - tcx.construct_parameter_environment( - impl_item.span, - tcx.hir.local_def_id(id), - Some(tcx.call_site_extent(id, body.node_id))) - } - } - } - Some(hir_map::NodeTraitItem(trait_item)) => { - match trait_item.node { - hir::TraitItemKind::Type(..) | - hir::TraitItemKind::Const(_, None) | - hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_))=> { - tcx.construct_parameter_environment(trait_item.span, - tcx.hir.local_def_id(id), - Some(tcx.item_extent(id))) - } - hir::TraitItemKind::Const(_, Some(body)) | - hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body)) => { - tcx.construct_parameter_environment( - trait_item.span, - tcx.hir.local_def_id(id), - Some(tcx.call_site_extent(id, body.node_id))) - } - } - } - Some(hir_map::NodeItem(item)) => { - match item.node { - hir::ItemConst(_, body) | - hir::ItemStatic(.., body) | - hir::ItemFn(.., body) => { - tcx.construct_parameter_environment( - item.span, - tcx.hir.local_def_id(id), - Some(tcx.call_site_extent(id, body.node_id))) - } - hir::ItemEnum(..) | - hir::ItemStruct(..) | - hir::ItemUnion(..) | - hir::ItemTy(..) | - hir::ItemImpl(..) | - hir::ItemTrait(..) => { - let def_id = tcx.hir.local_def_id(id); - tcx.construct_parameter_environment(item.span, - def_id, - Some(tcx.item_extent(id))) - } - _ => { - span_bug!(item.span, - "ParameterEnvironment::for_item(): - can't create a parameter \ - environment for this kind of item") - } - } - } - Some(hir_map::NodeExpr(expr)) => { - // This is a convenience to allow closures to work. 
- if let hir::ExprClosure(.., body, _) = expr.node { - let def_id = tcx.hir.local_def_id(id); - let base_def_id = tcx.closure_base_def_id(def_id); - tcx.construct_parameter_environment( - expr.span, - base_def_id, - Some(tcx.call_site_extent(id, body.node_id))) - } else { - tcx.empty_parameter_environment() - } - } - Some(hir_map::NodeForeignItem(item)) => { - let def_id = tcx.hir.local_def_id(id); - tcx.construct_parameter_environment(item.span, - def_id, - None) - } - Some(hir_map::NodeStructCtor(..)) | - Some(hir_map::NodeVariant(..)) => { - let def_id = tcx.hir.local_def_id(id); - tcx.construct_parameter_environment(tcx.hir.span(id), - def_id, - None) - } - it => { - bug!("ParameterEnvironment::from_item(): \ - `{}` = {:?} is unsupported", - tcx.hir.node_to_string(id), it) - } - } - } } #[derive(Copy, Clone, Debug)] @@ -2430,37 +2324,7 @@ pub fn has_attr(self, did: DefId, attr: &str) -> bool { } pub fn trait_has_default_impl(self, trait_def_id: DefId) -> bool { - let def = self.trait_def(trait_def_id); - def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL) - } - - /// Populates the type context with all the implementations for the given - /// trait if necessary. - pub fn populate_implementations_for_trait_if_necessary(self, trait_id: DefId) { - if trait_id.is_local() { - return - } - - // The type is not local, hence we are reading this out of - // metadata and don't need to track edges. - let _ignore = self.dep_graph.in_ignore(); - - let def = self.trait_def(trait_id); - if def.flags.get().intersects(TraitFlags::HAS_REMOTE_IMPLS) { - return; - } - - debug!("populate_implementations_for_trait_if_necessary: searching for {:?}", def); - - for impl_def_id in self.sess.cstore.implementations_of_trait(Some(trait_id)) { - let trait_ref = self.impl_trait_ref(impl_def_id).unwrap(); - - // Record the trait->implementation mapping. - let parent = self.impl_parent(impl_def_id).unwrap_or(trait_id); - def.record_remote_impl(self, impl_def_id, trait_ref, parent); - } - - def.flags.set(def.flags.get() | TraitFlags::HAS_REMOTE_IMPLS); + self.trait_def(trait_def_id).has_default_impl } /// Given the def_id of an impl, return the def_id of the trait it implements. @@ -2497,64 +2361,21 @@ pub fn impl_of_method(self, def_id: DefId) -> Option { /// are no free type/lifetime parameters in scope. pub fn empty_parameter_environment(self) -> ParameterEnvironment<'tcx> { ty::ParameterEnvironment { - free_substs: self.intern_substs(&[]), caller_bounds: Slice::empty(), - implicit_region_bound: None, - free_id_outlive: None, is_copy_cache: RefCell::new(FxHashMap()), is_sized_cache: RefCell::new(FxHashMap()), is_freeze_cache: RefCell::new(FxHashMap()), } } - /// Constructs and returns a substitution that can be applied to move from - /// the "outer" view of a type or method to the "inner" view. - /// In general, this means converting from bound parameters to - /// free parameters. Since we currently represent bound/free type - /// parameters in the same way, this only has an effect on regions. 
- pub fn construct_free_substs(self, - def_id: DefId, - free_id_outlive: Option>) - -> &'gcx Substs<'gcx> { - - let substs = Substs::for_item(self.global_tcx(), def_id, |def, _| { - // map bound 'a => free 'a - self.global_tcx().mk_region(ReFree(FreeRegion { - scope: free_id_outlive, - bound_region: def.to_bound_region() - })) - }, |def, _| { - // map T => T - self.global_tcx().mk_param_from_def(def) - }); - - debug!("construct_parameter_environment: {:?}", substs); - substs - } - /// See `ParameterEnvironment` struct def'n for details. - /// If you were using `free_id: NodeId`, you might try `self.region_maps().item_extent(free_id)` - /// for the `free_id_outlive` parameter. (But note that this is not always quite right.) - pub fn construct_parameter_environment(self, - span: Span, - def_id: DefId, - free_id_outlive: Option>) - -> ParameterEnvironment<'gcx> - { - // - // Construct the free substs. - // - - let free_substs = self.construct_free_substs(def_id, free_id_outlive); - + pub fn parameter_environment(self, def_id: DefId) -> ParameterEnvironment<'gcx> { // // Compute the bounds on Self and the type parameters. // let tcx = self.global_tcx(); - let generic_predicates = tcx.predicates_of(def_id); - let bounds = generic_predicates.instantiate(tcx, free_substs); - let bounds = tcx.liberate_late_bound_regions(free_id_outlive, &ty::Binder(bounds)); + let bounds = tcx.predicates_of(def_id).instantiate_identity(tcx); let predicates = bounds.predicates; // Finally, we have to normalize the bounds in the environment, in @@ -2571,23 +2392,21 @@ pub fn construct_parameter_environment(self, // let unnormalized_env = ty::ParameterEnvironment { - free_substs: free_substs, - implicit_region_bound: free_id_outlive.map(|f| tcx.mk_region(ty::ReScope(f))), caller_bounds: tcx.intern_predicates(&predicates), - free_id_outlive: free_id_outlive, is_copy_cache: RefCell::new(FxHashMap()), is_sized_cache: RefCell::new(FxHashMap()), is_freeze_cache: RefCell::new(FxHashMap()), }; - let body_id = free_id_outlive.map(|f| f.node_id()) - .unwrap_or(DUMMY_NODE_ID); - let cause = traits::ObligationCause::misc(span, body_id); + let body_id = self.hir.as_local_node_id(def_id).map_or(DUMMY_NODE_ID, |id| { + self.hir.maybe_body_owned_by(id).map_or(id, |body| body.node_id) + }); + let cause = traits::ObligationCause::misc(tcx.def_span(def_id), body_id); traits::normalize_param_env_or_error(tcx, def_id, unnormalized_env, cause) } pub fn node_scope_region(self, id: NodeId) -> Region<'tcx> { - self.mk_region(ty::ReScope(self.node_extent(id))) + self.mk_region(ty::ReScope(CodeExtent::Misc(id))) } /// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err` @@ -2754,6 +2573,8 @@ pub fn provide(providers: &mut ty::maps::Providers) { adt_dtorck_constraint, def_span, trait_of_item, + trait_impls_of: trait_def::trait_impls_of_provider, + relevant_trait_impls_for: trait_def::relevant_trait_impls_provider, ..*providers }; } @@ -2762,6 +2583,8 @@ pub fn provide_extern(providers: &mut ty::maps::Providers) { *providers = ty::maps::Providers { adt_sized_constraint, adt_dtorck_constraint, + trait_impls_of: trait_def::trait_impls_of_provider, + relevant_trait_impls_for: trait_def::relevant_trait_impls_provider, ..*providers }; } diff --git a/src/librustc/ty/outlives.rs b/src/librustc/ty/outlives.rs index a544b2dd399..ab1b1b3857d 100644 --- a/src/librustc/ty/outlives.rs +++ b/src/librustc/ty/outlives.rs @@ -204,7 +204,7 @@ fn capture_components(&self, ty: Ty<'tcx>) -> Vec> { fn push_region_constraints<'tcx>(out: &mut 
Vec>, regions: Vec>) { for r in regions { - if !r.is_bound() { + if !r.is_late_bound() { out.push(Component::Region(r)); } } diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index 89960b0e4f6..cfbf1244db3 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -43,12 +43,8 @@ pub struct TypeAndMut<'tcx> { RustcEncodable, RustcDecodable, Copy)] /// A "free" region `fr` can be interpreted as "some region /// at least as big as the scope `fr.scope`". -/// -/// If `fr.scope` is None, then this is in some context (e.g., an -/// impl) where lifetimes are more abstract and the notion of the -/// caller/callee stack frames are not applicable. -pub struct FreeRegion<'tcx> { - pub scope: Option>, +pub struct FreeRegion { + pub scope: DefId, pub bound_region: BoundRegion, } @@ -688,7 +684,7 @@ pub struct DebruijnIndex { pub depth: u32, } -pub type Region<'tcx> = &'tcx RegionKind<'tcx>; +pub type Region<'tcx> = &'tcx RegionKind; /// Representation of regions. /// @@ -747,7 +743,7 @@ pub struct DebruijnIndex { /// [1] http://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/ /// [2] http://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/ #[derive(Clone, PartialEq, Eq, Hash, Copy, RustcEncodable, RustcDecodable)] -pub enum RegionKind<'tcx> { +pub enum RegionKind { // Region bound in a type or fn declaration which will be // substituted 'early' -- that is, at the same time when type // parameters are substituted. @@ -760,12 +756,12 @@ pub enum RegionKind<'tcx> { /// When checking a function body, the types of all arguments and so forth /// that refer to bound region parameters are modified to refer to free /// region parameters. - ReFree(FreeRegion<'tcx>), + ReFree(FreeRegion), /// A concrete region naming some statically determined extent /// (e.g. an expression or sequence of statements) within the /// current function. - ReScope(region::CodeExtent<'tcx>), + ReScope(region::CodeExtent), /// Static data that has an "infinite" lifetime. Top in the region lattice. ReStatic, @@ -794,6 +790,7 @@ impl<'tcx> serialize::UseSpecializedDecodable for Region<'tcx> {} #[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)] pub struct EarlyBoundRegion { + pub def_id: DefId, pub index: u32, pub name: Name, } @@ -909,10 +906,9 @@ pub fn shifted(&self, amount: u32) -> DebruijnIndex { } /// Region utilities -impl<'tcx> RegionKind<'tcx> { - pub fn is_bound(&self) -> bool { +impl RegionKind { + pub fn is_late_bound(&self) -> bool { match *self { - ty::ReEarlyBound(..) => true, ty::ReLateBound(..) => true, _ => false, } @@ -933,7 +929,7 @@ pub fn escapes_depth(&self, depth: u32) -> bool { } /// Returns the depth of `self` from the (1-based) binding level `depth` - pub fn from_depth(&self, depth: u32) -> RegionKind<'tcx> { + pub fn from_depth(&self, depth: u32) -> RegionKind { match *self { ty::ReLateBound(debruijn, r) => ty::ReLateBound(DebruijnIndex { depth: debruijn.depth - (depth - 1) diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs index 097b596c5eb..865297c7ecb 100644 --- a/src/librustc/ty/trait_def.rs +++ b/src/librustc/ty/trait_def.rs @@ -8,18 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use dep_graph::DepNode; -use hir::def_id::{DefId, LOCAL_CRATE}; -use traits::{self, specialization_graph}; -use ty; +use hir::def_id::DefId; +use traits::specialization_graph; use ty::fast_reject; -use ty::{Ty, TyCtxt, TraitRef}; -use std::cell::{Cell, RefCell}; +use ty::fold::TypeFoldable; +use ty::{Ty, TyCtxt}; +use std::rc::Rc; use hir; -use util::nodemap::FxHashMap; - -use syntax::ast; -use syntax_pos::DUMMY_SP; /// A trait's definition with type information. pub struct TraitDef { @@ -33,237 +28,93 @@ pub struct TraitDef { /// be usable with the sugar (or without it). pub paren_sugar: bool, - // Impls of a trait. To allow for quicker lookup, the impls are indexed by a - // simplified version of their `Self` type: impls with a simplifiable `Self` - // are stored in `nonblanket_impls` keyed by it, while all other impls are - // stored in `blanket_impls`. - // - // A similar division is used within `specialization_graph`, but the ones - // here are (1) stored as a flat list for the trait and (2) populated prior - // to -- and used while -- determining specialization order. - // - // FIXME: solve the reentrancy issues and remove these lists in favor of the - // ones in `specialization_graph`. - // - // These lists are tracked by `DepNode::TraitImpls`; we don't use - // a DepTrackingMap but instead have the `TraitDef` insert the - // required reads/writes. - - /// Impls of the trait. - nonblanket_impls: RefCell< - FxHashMap<fast_reject::SimplifiedType, Vec<DefId>> - >, - - /// Blanket impls associated with the trait. - blanket_impls: RefCell<Vec<DefId>>, - - /// The specialization order for impls of this trait. - pub specialization_graph: RefCell<traits::specialization_graph::Graph>, - - /// Various flags - pub flags: Cell<TraitFlags>, - - /// The number of impls we've added from the local crate. - /// When this number matches up the list in the HIR map, - /// we're done, and the specialization graph is correct. - local_impl_count: Cell<usize>, + pub has_default_impl: bool, /// The ICH of this trait's DefPath, cached here so it doesn't have to be /// recomputed all the time. pub def_path_hash: u64, } -impl<'a, 'gcx, 'tcx> TraitDef { - pub fn new(def_id: DefId, - unsafety: hir::Unsafety, - paren_sugar: bool, - def_path_hash: u64) - -> TraitDef { - TraitDef { - def_id: def_id, - paren_sugar: paren_sugar, - unsafety: unsafety, - nonblanket_impls: RefCell::new(FxHashMap()), - blanket_impls: RefCell::new(vec![]), - flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS), - local_impl_count: Cell::new(0), - specialization_graph: RefCell::new(traits::specialization_graph::Graph::new()), - def_path_hash: def_path_hash, - } - } +// We don't store the list of impls in a flat list because each cached list of +// `relevant_impls_for` would then duplicate all blanket impls. By keeping +// blanket and non-blanket impls separate, we can share the list of blanket +// impls.
+#[derive(Clone)] +pub struct TraitImpls { + blanket_impls: Rc>, + non_blanket_impls: Rc>, +} - // returns None if not yet calculated - pub fn object_safety(&self) -> Option { - if self.flags.get().intersects(TraitFlags::OBJECT_SAFETY_VALID) { - Some(self.flags.get().intersects(TraitFlags::IS_OBJECT_SAFE)) - } else { - None +impl TraitImpls { + pub fn iter(&self) -> TraitImplsIter { + TraitImplsIter { + blanket_impls: self.blanket_impls.clone(), + non_blanket_impls: self.non_blanket_impls.clone(), + index: 0 } } +} - pub fn set_object_safety(&self, is_safe: bool) { - assert!(self.object_safety().map(|cs| cs == is_safe).unwrap_or(true)); - self.flags.set( - self.flags.get() | if is_safe { - TraitFlags::OBJECT_SAFETY_VALID | TraitFlags::IS_OBJECT_SAFE - } else { - TraitFlags::OBJECT_SAFETY_VALID - } - ); - } - - fn write_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) { - tcx.dep_graph.write(DepNode::TraitImpls(self.def_id)); - } - - fn read_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) { - tcx.dep_graph.read(DepNode::TraitImpls(self.def_id)); - } - - /// Records a basic trait-to-implementation mapping. - /// - /// Returns `true` iff the impl has not previously been recorded. - fn record_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId, - impl_trait_ref: TraitRef<'tcx>) - -> bool { - debug!("TraitDef::record_impl for {:?}, from {:?}", - self, impl_trait_ref); +#[derive(Clone)] +pub struct TraitImplsIter { + blanket_impls: Rc>, + non_blanket_impls: Rc>, + index: usize, +} - // Record the write into the impl set, but only for local - // impls: external impls are handled differently. - if impl_def_id.is_local() { - self.write_trait_impls(tcx); - } +impl Iterator for TraitImplsIter { + type Item = DefId; - // We don't want to borrow_mut after we already populated all impls, - // so check if an impl is present with an immutable borrow first. - if let Some(sty) = fast_reject::simplify_type(tcx, - impl_trait_ref.self_ty(), false) { - if let Some(is) = self.nonblanket_impls.borrow().get(&sty) { - if is.contains(&impl_def_id) { - return false; // duplicate - skip - } - } - - self.nonblanket_impls.borrow_mut().entry(sty).or_insert(vec![]).push(impl_def_id) + fn next(&mut self) -> Option { + if self.index < self.blanket_impls.len() { + let bi_index = self.index; + self.index += 1; + Some(self.blanket_impls[bi_index]) } else { - if self.blanket_impls.borrow().contains(&impl_def_id) { - return false; // duplicate - skip + let nbi_index = self.index - self.blanket_impls.len(); + if nbi_index < self.non_blanket_impls.len() { + self.index += 1; + Some(self.non_blanket_impls[nbi_index]) + } else { + None } - self.blanket_impls.borrow_mut().push(impl_def_id) } - - true - } - - /// Records a trait-to-implementation mapping for a crate-local impl. - pub fn record_local_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId, - impl_trait_ref: TraitRef<'tcx>) { - assert!(impl_def_id.is_local()); - let was_new = self.record_impl(tcx, impl_def_id, impl_trait_ref); - assert!(was_new); - - self.local_impl_count.set(self.local_impl_count.get() + 1); } - /// Records a trait-to-implementation mapping. - pub fn record_has_default_impl(&self) { - self.flags.set(self.flags.get() | TraitFlags::HAS_DEFAULT_IMPL); + fn size_hint(&self) -> (usize, Option) { + let items_left = (self.blanket_impls.len() + self.non_blanket_impls.len()) - self.index; + (items_left, Some(items_left)) } +} - /// Records a trait-to-implementation mapping for a non-local impl. 
- /// - /// The `parent_impl` is the immediately-less-specialized impl, or the - /// trait's def ID if the impl is not a specialization -- information that - /// should be pulled from the metadata. - pub fn record_remote_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId, - impl_trait_ref: TraitRef<'tcx>, - parent_impl: DefId) { - assert!(!impl_def_id.is_local()); +impl ExactSizeIterator for TraitImplsIter {} - // if the impl has not previously been recorded - if self.record_impl(tcx, impl_def_id, impl_trait_ref) { - // if the impl is non-local, it's placed directly into the - // specialization graph using parent information drawn from metadata. - self.specialization_graph.borrow_mut() - .record_impl_from_cstore(tcx, parent_impl, impl_def_id) +impl<'a, 'gcx, 'tcx> TraitDef { + pub fn new(def_id: DefId, + unsafety: hir::Unsafety, + paren_sugar: bool, + has_default_impl: bool, + def_path_hash: u64) + -> TraitDef { + TraitDef { + def_id, + paren_sugar, + unsafety, + has_default_impl, + def_path_hash, } } - /// Adds a local impl into the specialization graph, returning an error with - /// overlap information if the impl overlaps but does not specialize an - /// existing impl. - pub fn add_impl_for_specialization(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId) - -> Result<(), traits::OverlapError> { - assert!(impl_def_id.is_local()); - - self.specialization_graph.borrow_mut() - .insert(tcx, impl_def_id) - } - - pub fn ancestors(&'a self, of_impl: DefId) -> specialization_graph::Ancestors<'a> { - specialization_graph::ancestors(self, of_impl) - } - - /// Whether the impl set and specialization graphs are complete. - pub fn is_complete(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool { - tcx.populate_implementations_for_trait_if_necessary(self.def_id); - ty::queries::coherent_trait::try_get(tcx, DUMMY_SP, (LOCAL_CRATE, self.def_id)).is_ok() - } - - /// If any local impls haven't been added yet, returns - /// Some(list of local impls for this trait). 
- fn missing_local_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) - -> Option<&'gcx [ast::NodeId]> { - if self.flags.get().intersects(TraitFlags::HAS_LOCAL_IMPLS) { - return None; - } - - if self.is_complete(tcx) { - self.flags.set(self.flags.get() | TraitFlags::HAS_LOCAL_IMPLS); - return None; - } - - let impls = tcx.hir.trait_impls(self.def_id); - assert!(self.local_impl_count.get() <= impls.len()); - if self.local_impl_count.get() == impls.len() { - self.flags.set(self.flags.get() | TraitFlags::HAS_LOCAL_IMPLS); - return None; - } - - Some(impls) + pub fn ancestors(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + of_impl: DefId) + -> specialization_graph::Ancestors { + specialization_graph::ancestors(tcx, self.def_id, of_impl) } pub fn for_each_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, mut f: F) { - self.read_trait_impls(tcx); - tcx.populate_implementations_for_trait_if_necessary(self.def_id); - - let local_impls = self.missing_local_impls(tcx); - if let Some(impls) = local_impls { - for &id in impls { - f(tcx.hir.local_def_id(id)); - } - } - let mut f = |def_id: DefId| { - if !(local_impls.is_some() && def_id.is_local()) { - f(def_id); - } - }; - - for &impl_def_id in self.blanket_impls.borrow().iter() { + for impl_def_id in tcx.trait_impls_of(self.def_id).iter() { f(impl_def_id); } - - for v in self.nonblanket_impls.borrow().values() { - for &impl_def_id in v { - f(impl_def_id); - } - } } /// Iterate over every impl that could possibly match the @@ -273,25 +124,6 @@ pub fn for_each_relevant_impl(&self, self_ty: Ty<'tcx>, mut f: F) { - self.read_trait_impls(tcx); - tcx.populate_implementations_for_trait_if_necessary(self.def_id); - - let local_impls = self.missing_local_impls(tcx); - if let Some(impls) = local_impls { - for &id in impls { - f(tcx.hir.local_def_id(id)); - } - } - let mut f = |def_id: DefId| { - if !(local_impls.is_some() && def_id.is_local()) { - f(def_id); - } - }; - - for &impl_def_id in self.blanket_impls.borrow().iter() { - f(impl_def_id); - } - // simplify_type(.., false) basically replaces type parameters and // projections with infer-variables. This is, of course, done on // the impl trait-ref when it is instantiated, but not on the @@ -304,29 +136,86 @@ pub fn for_each_relevant_impl(&self, // replace `S` with anything - this impl of course can't be // selected, and as there are hundreds of similar impls, // considering them would significantly harm performance. - if let Some(simp) = fast_reject::simplify_type(tcx, self_ty, true) { - if let Some(impls) = self.nonblanket_impls.borrow().get(&simp) { - for &impl_def_id in impls { - f(impl_def_id); - } - } + let relevant_impls = if let Some(simplified_self_ty) = + fast_reject::simplify_type(tcx, self_ty, true) { + tcx.relevant_trait_impls_for((self.def_id, simplified_self_ty)) } else { - for v in self.nonblanket_impls.borrow().values() { - for &impl_def_id in v { - f(impl_def_id); - } - } + tcx.trait_impls_of(self.def_id) + }; + + for impl_def_id in relevant_impls.iter() { + f(impl_def_id); } } } -bitflags! { - flags TraitFlags: u32 { - const NO_TRAIT_FLAGS = 0, - const HAS_DEFAULT_IMPL = 1 << 0, - const IS_OBJECT_SAFE = 1 << 1, - const OBJECT_SAFETY_VALID = 1 << 2, - const HAS_REMOTE_IMPLS = 1 << 3, - const HAS_LOCAL_IMPLS = 1 << 4, +// Query provider for `trait_impls_of`. 
+pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + trait_id: DefId) + -> TraitImpls { + let remote_impls = if trait_id.is_local() { + // Traits defined in the current crate can't have impls in upstream + // crates, so we don't bother querying the cstore. + Vec::new() + } else { + tcx.sess.cstore.implementations_of_trait(Some(trait_id)) + }; + + let mut blanket_impls = Vec::new(); + let mut non_blanket_impls = Vec::new(); + + let local_impls = tcx.hir + .trait_impls(trait_id) + .into_iter() + .map(|&node_id| tcx.hir.local_def_id(node_id)); + + for impl_def_id in local_impls.chain(remote_impls.into_iter()) { + let impl_self_ty = tcx.type_of(impl_def_id); + if impl_def_id.is_local() && impl_self_ty.references_error() { + continue + } + + if fast_reject::simplify_type(tcx, impl_self_ty, false).is_some() { + non_blanket_impls.push(impl_def_id); + } else { + blanket_impls.push(impl_def_id); + } + } + + TraitImpls { + blanket_impls: Rc::new(blanket_impls), + non_blanket_impls: Rc::new(non_blanket_impls), + } +} + +// Query provider for `relevant_trait_impls_for`. +pub(super) fn relevant_trait_impls_provider<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + (trait_id, self_ty): (DefId, fast_reject::SimplifiedType)) + -> TraitImpls +{ + let all_trait_impls = tcx.trait_impls_of(trait_id); + + let relevant: Vec = all_trait_impls + .non_blanket_impls + .iter() + .cloned() + .filter(|&impl_def_id| { + let impl_self_ty = tcx.type_of(impl_def_id); + let impl_simple_self_ty = fast_reject::simplify_type(tcx, + impl_self_ty, + false).unwrap(); + impl_simple_self_ty == self_ty + }) + .collect(); + + if all_trait_impls.non_blanket_impls.len() == relevant.len() { + // If we didn't filter anything out, re-use the existing vec. + all_trait_impls + } else { + TraitImpls { + blanket_impls: all_trait_impls.blanket_impls.clone(), + non_blanket_impls: Rc::new(relevant), + } } } diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index 06d09bd350a..c6c6a0e4700 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -688,9 +688,8 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool { self.hash(db.depth); self.hash(i); } - ty::ReEarlyBound(ty::EarlyBoundRegion { index, name }) => { - self.hash(index); - self.hash(name.as_str()); + ty::ReEarlyBound(ty::EarlyBoundRegion { def_id, .. }) => { + self.def_id(def_id); } ty::ReLateBound(..) | ty::ReFree(..) | diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index 7b5e2253109..17564671a1e 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -116,6 +116,7 @@ pub fn record_time(accu: &Cell, f: F) -> T where } // Like std::macros::try!, but for Option<>. +#[cfg(unix)] macro_rules! 
option_try( ($e:expr) => (match $e { Some(e) => e, None => return None }) ); diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index d773bb2da08..8ca699339d3 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -458,7 +458,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { } } -impl<'tcx> fmt::Debug for ty::RegionKind<'tcx> { +impl fmt::Debug for ty::RegionKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ty::ReEarlyBound(ref data) => { @@ -506,17 +506,11 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { impl<'tcx> fmt::Debug for ty::ParameterEnvironment<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "ParameterEnvironment(\ - free_substs={:?}, \ - implicit_region_bound={:?}, \ - caller_bounds={:?})", - self.free_substs, - self.implicit_region_bound, - self.caller_bounds) + write!(f, "ParameterEnvironment({:?})", self.caller_bounds) } } -impl<'tcx> fmt::Display for ty::RegionKind<'tcx> { +impl<'tcx> fmt::Display for ty::RegionKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if verbose() { return write!(f, "{:?}", *self); @@ -544,7 +538,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { } } -impl<'tcx> fmt::Debug for ty::FreeRegion<'tcx> { +impl fmt::Debug for ty::FreeRegion { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ReFree({:?}, {:?})", self.scope, self.bound_region) diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index adabbe11f5e..eeb5a3fb957 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -232,7 +232,7 @@ pub fn each_issued_loan(&self, node: ast::NodeId, mut op: F) -> bool where }) } - pub fn each_in_scope_loan(&self, scope: region::CodeExtent<'tcx>, mut op: F) -> bool where + pub fn each_in_scope_loan(&self, scope: region::CodeExtent, mut op: F) -> bool where F: FnMut(&Loan<'tcx>) -> bool, { //! 
Like `each_issued_loan()`, but only considers loans that are @@ -248,7 +248,7 @@ pub fn each_in_scope_loan(&self, scope: region::CodeExtent<'tcx>, mut op: F) } fn each_in_scope_loan_affecting_path(&self, - scope: region::CodeExtent<'tcx>, + scope: region::CodeExtent, loan_path: &LoanPath<'tcx>, mut op: F) -> bool where @@ -708,7 +708,7 @@ pub fn analyze_restrictions_on_use(&self, let mut ret = UseOk; self.each_in_scope_loan_affecting_path( - self.tcx().node_extent(expr_id), use_path, |loan| { + region::CodeExtent::Misc(expr_id), use_path, |loan| { if !compatible_borrow_kinds(loan.kind, borrow_kind) { ret = UseWhileBorrowed(loan.loan_path.clone(), loan.span); false @@ -822,7 +822,7 @@ fn check_assignment(&self, // Check that we don't invalidate any outstanding loans if let Some(loan_path) = opt_loan_path(&assignee_cmt) { - let scope = self.tcx().node_extent(assignment_id); + let scope = region::CodeExtent::Misc(assignment_id); self.each_in_scope_loan_affecting_path(scope, &loan_path, |loan| { self.report_illegal_mutation(assignment_span, &loan_path, loan); false diff --git a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs index 12854d3c979..5fc5682a60b 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs @@ -24,7 +24,7 @@ type R = Result<(),()>; pub fn guarantee_lifetime<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, - item_scope: region::CodeExtent<'tcx>, + item_scope: region::CodeExtent, span: Span, cause: euv::LoanCause, cmt: mc::cmt<'tcx>, @@ -52,7 +52,7 @@ struct GuaranteeLifetimeContext<'a, 'tcx: 'a> { bccx: &'a BorrowckCtxt<'a, 'tcx>, // the scope of the function body for the enclosing item - item_scope: region::CodeExtent<'tcx>, + item_scope: region::CodeExtent, span: Span, cause: euv::LoanCause, diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index 8c1bcdc1fe2..4cfee36359c 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -45,7 +45,7 @@ pub fn gather_loans_in_fn<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, bccx: bccx, infcx: &infcx, all_loans: Vec::new(), - item_ub: bccx.tcx.node_extent(body.node_id), + item_ub: region::CodeExtent::Misc(body.node_id), move_data: MoveData::new(), move_error_collector: move_error::MoveErrorCollector::new(), }; @@ -66,7 +66,7 @@ struct GatherLoanCtxt<'a, 'tcx: 'a> { all_loans: Vec>, /// `item_ub` is used as an upper-bound on the lifetime whenever we /// ask for the scope of an expression categorized as an upvar. - item_ub: region::CodeExtent<'tcx>, + item_ub: region::CodeExtent, } impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { @@ -353,13 +353,18 @@ fn guarantee_valid(&mut self, let loan_scope = match *loan_region { ty::ReScope(scope) => scope, - ty::ReFree(ref fr) => fr.scope.unwrap_or(self.item_ub), + ty::ReEarlyBound(ref br) => { + self.bccx.region_maps.early_free_extent(self.tcx(), br) + } + + ty::ReFree(ref fr) => { + self.bccx.region_maps.free_extent(self.tcx(), fr) + } ty::ReStatic => self.item_ub, ty::ReEmpty | ty::ReLateBound(..) | - ty::ReEarlyBound(..) | ty::ReVar(..) | ty::ReSkolemized(..) 
| ty::ReErased => { @@ -371,7 +376,7 @@ fn guarantee_valid(&mut self, }; debug!("loan_scope = {:?}", loan_scope); - let borrow_scope = self.tcx().node_extent(borrow_id); + let borrow_scope = region::CodeExtent::Misc(borrow_id); let gen_scope = self.compute_gen_scope(borrow_scope, loan_scope); debug!("gen_scope = {:?}", gen_scope); @@ -450,9 +455,9 @@ pub fn mark_loan_path_as_mutated(&self, loan_path: &LoanPath) { } pub fn compute_gen_scope(&self, - borrow_scope: region::CodeExtent<'tcx>, - loan_scope: region::CodeExtent<'tcx>) - -> region::CodeExtent<'tcx> { + borrow_scope: region::CodeExtent, + loan_scope: region::CodeExtent) + -> region::CodeExtent { //! Determine when to introduce the loan. Typically the loan //! is introduced at the point of the borrow, but in some cases, //! notably method arguments, the loan may be introduced only @@ -465,8 +470,8 @@ pub fn compute_gen_scope(&self, } } - pub fn compute_kill_scope(&self, loan_scope: region::CodeExtent<'tcx>, lp: &LoanPath<'tcx>) - -> region::CodeExtent<'tcx> { + pub fn compute_kill_scope(&self, loan_scope: region::CodeExtent, lp: &LoanPath<'tcx>) + -> region::CodeExtent { //! Determine when the loan restrictions go out of scope. //! This is either when the lifetime expires or when the //! local variable which roots the loan-path goes out of scope, diff --git a/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs b/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs index 4b7d52c2517..520a90d940b 100644 --- a/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs +++ b/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs @@ -44,7 +44,7 @@ fn run_pass<'a, 'tcx>(&self, _ => return } let id = src.item_id(); - let param_env = ty::ParameterEnvironment::for_item(tcx, id); + let param_env = tcx.parameter_environment(tcx.hir.local_def_id(id)); let move_data = MoveData::gather_moves(mir, tcx, ¶m_env); let elaborate_patch = { let mir = &*mir; diff --git a/src/librustc_borrowck/borrowck/mir/mod.rs b/src/librustc_borrowck/borrowck/mir/mod.rs index 47f708bf583..fbaa60f8445 100644 --- a/src/librustc_borrowck/borrowck/mir/mod.rs +++ b/src/librustc_borrowck/borrowck/mir/mod.rs @@ -65,7 +65,7 @@ pub fn borrowck_mir(bcx: &mut BorrowckCtxt, // steals it, but it forces the `borrowck` query. let mir = &tcx.mir_validated(def_id).borrow(); - let param_env = ty::ParameterEnvironment::for_item(tcx, id); + let param_env = tcx.parameter_environment(def_id); let move_data = MoveData::gather_moves(mir, tcx, ¶m_env); let mdpe = MoveDataParamEnv { move_data: move_data, param_env: param_env }; let dead_unwinds = IdxSetBuf::new_empty(mir.basic_blocks().len()); diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 7eb73a87532..99df1431265 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -208,7 +208,7 @@ pub struct BorrowckCtxt<'a, 'tcx: 'a> { // Some in `borrowck_fn` and cleared later tables: &'a ty::TypeckTables<'tcx>, - region_maps: Rc>, + region_maps: Rc, owner_def_id: DefId, } @@ -228,13 +228,13 @@ pub struct Loan<'tcx> { /// cases, notably method arguments, the loan may be introduced /// only later, once it comes into scope. See also /// `GatherLoanCtxt::compute_gen_scope`. - gen_scope: region::CodeExtent<'tcx>, + gen_scope: region::CodeExtent, /// kill_scope indicates when the loan goes out of scope. This is /// either when the lifetime expires or when the local variable /// which roots the loan-path goes out of scope, whichever happens /// faster. 
See also `GatherLoanCtxt::compute_kill_scope`. - kill_scope: region::CodeExtent<'tcx>, + kill_scope: region::CodeExtent, span: Span, cause: euv::LoanCause, } @@ -334,12 +334,12 @@ pub fn closure_to_block(closure_id: ast::NodeId, } impl<'a, 'tcx> LoanPath<'tcx> { - pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::CodeExtent<'tcx> { + pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::CodeExtent { match self.kind { LpVar(local_id) => bccx.region_maps.var_scope(local_id), LpUpvar(upvar_id) => { let block_id = closure_to_block(upvar_id.closure_expr_id, bccx.tcx); - bccx.tcx.node_extent(block_id) + region::CodeExtent::Misc(block_id) } LpDowncast(ref base, _) | LpExtend(ref base, ..) => base.kill_scope(bccx), @@ -513,6 +513,8 @@ pub fn report(&self, err: BckError<'tcx>) { match (&err.code, &err.cause) { (&err_out_of_scope(&ty::ReScope(_), &ty::ReStatic, _), &BorrowViolation(euv::ClosureCapture(span))) | + (&err_out_of_scope(&ty::ReScope(_), &ty::ReEarlyBound(..), _), + &BorrowViolation(euv::ClosureCapture(span))) | (&err_out_of_scope(&ty::ReScope(_), &ty::ReFree(..), _), &BorrowViolation(euv::ClosureCapture(span))) => { return self.report_out_of_scope_escaping_closure_capture(&err, span); diff --git a/src/librustc_const_eval/check_match.rs b/src/librustc_const_eval/check_match.rs index cd31290eb55..a18f91a9ee3 100644 --- a/src/librustc_const_eval/check_match.rs +++ b/src/librustc_const_eval/check_match.rs @@ -46,14 +46,13 @@ fn visit_fn(&mut self, fk: FnKind<'tcx>, fd: &'tcx hir::FnDecl, b: hir::BodyId, s: Span, id: ast::NodeId) { intravisit::walk_fn(self, fk, fd, b, s, id); - let region_context = self.tcx.hir.local_def_id(id); - let region_maps = self.tcx.region_maps(region_context); + let def_id = self.tcx.hir.local_def_id(id); MatchVisitor { tcx: self.tcx, tables: self.tcx.body_tables(b), - region_maps: ®ion_maps, - param_env: &ty::ParameterEnvironment::for_item(self.tcx, id) + region_maps: &self.tcx.region_maps(def_id), + param_env: &self.tcx.parameter_environment(def_id) }.visit_body(self.tcx.hir.body(b)); } } @@ -71,7 +70,7 @@ struct MatchVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, tables: &'a ty::TypeckTables<'tcx>, param_env: &'a ty::ParameterEnvironment<'tcx>, - region_maps: &'a RegionMaps<'tcx>, + region_maps: &'a RegionMaps, } impl<'a, 'tcx> Visitor<'tcx> for MatchVisitor<'a, 'tcx> { diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 5b5113caa8e..2e949f48c17 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -22,7 +22,6 @@ rustc_data_structures = { path = "../librustc_data_structures" } rustc_errors = { path = "../librustc_errors" } rustc_incremental = { path = "../librustc_incremental" } rustc_lint = { path = "../librustc_lint" } -rustc_llvm = { path = "../librustc_llvm" } rustc_metadata = { path = "../librustc_metadata" } rustc_mir = { path = "../librustc_mir" } rustc_passes = { path = "../librustc_passes" } diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 8fddbe110b0..bca82ff9a46 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -22,6 +22,7 @@ use rustc::middle::privacy::AccessLevels; use rustc::mir::transform::{MIR_CONST, MIR_VALIDATED, MIR_OPTIMIZED, Passes}; use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas}; +use rustc::traits; use rustc::util::common::time; use rustc::util::nodemap::NodeSet; use rustc::util::fs::rename_or_copy_remove; @@ -699,6 +700,8 @@ pub fn phase_2_configure_and_expand(sess: 
&Session, let krate = ecx.monotonic_expander().expand_crate(krate); + ecx.check_unused_macros(); + let mut missing_fragment_specifiers: Vec<_> = ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect(); missing_fragment_specifiers.sort(); @@ -892,6 +895,7 @@ macro_rules! try_with_f { trans::provide(&mut local_providers); typeck::provide(&mut local_providers); ty::provide(&mut local_providers); + traits::provide(&mut local_providers); reachable::provide(&mut local_providers); rustc_const_eval::provide(&mut local_providers); middle::region::provide(&mut local_providers); @@ -900,6 +904,7 @@ macro_rules! try_with_f { cstore::provide(&mut extern_providers); trans::provide(&mut extern_providers); ty::provide_extern(&mut extern_providers); + traits::provide_extern(&mut extern_providers); // FIXME(eddyb) get rid of this once we replace const_eval with miri. rustc_const_eval::provide(&mut extern_providers); diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 024fc546a15..34f636d0b9a 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -56,7 +56,6 @@ extern crate rustc_trans; extern crate rustc_typeck; extern crate serialize; -extern crate rustc_llvm as llvm; #[macro_use] extern crate log; extern crate syntax; @@ -70,7 +69,7 @@ use rustc_save_analysis as save; use rustc_save_analysis::DumpHandler; use rustc_trans::back::link; -use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS}; +use rustc_trans::back::write::{RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS}; use rustc::dep_graph::DepGraph; use rustc::session::{self, config, Session, build_session, CompileResult}; use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType}; @@ -182,7 +181,7 @@ macro_rules! do_or_return {($expr: expr, $sess: expr) => { let (sopts, cfg) = config::build_session_options_and_crate_config(&matches); if sopts.debugging_opts.debug_llvm { - unsafe { llvm::LLVMRustSetDebug(1); } + rustc_trans::enable_llvm_debug(); } let descriptions = diagnostics_registry(); @@ -204,13 +203,14 @@ macro_rules! 
do_or_return {($expr: expr, $sess: expr) => { }; let dep_graph = DepGraph::new(sopts.build_dep_graph()); - let cstore = Rc::new(CStore::new(&dep_graph)); + let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader)); let loader = file_loader.unwrap_or(box RealFileLoader); let codemap = Rc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping())); let mut sess = session::build_session_with_codemap( sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest, ); + rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let mut cfg = config::build_configuration(&sess, cfg); @@ -409,12 +409,13 @@ fn no_input(&mut self, return None; } let dep_graph = DepGraph::new(sopts.build_dep_graph()); - let cstore = Rc::new(CStore::new(&dep_graph)); + let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader)); let mut sess = build_session(sopts.clone(), &dep_graph, None, descriptions.clone(), cstore.clone()); + rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let mut cfg = config::build_configuration(&sess, cfg.clone()); target_features::add_configuration(&mut cfg, &sess); @@ -558,7 +559,11 @@ pub fn list_metadata(sess: &Session, matches: &getopts::Matches, input: &Input) &Input::File(ref ifile) => { let path = &(*ifile); let mut v = Vec::new(); - locator::list_file_metadata(&sess.target.target, path, &mut v).unwrap(); + locator::list_file_metadata(&sess.target.target, + path, + sess.cstore.metadata_loader(), + &mut v) + .unwrap(); println!("{}", String::from_utf8(v).unwrap()); } &Input::Str { .. } => { @@ -665,14 +670,6 @@ fn print_crate_info(sess: &Session, println!("{}", cfg); } } - PrintRequest::TargetCPUs => { - let tm = create_target_machine(sess); - unsafe { llvm::LLVMRustPrintTargetCPUs(tm); } - } - PrintRequest::TargetFeatures => { - let tm = create_target_machine(sess); - unsafe { llvm::LLVMRustPrintTargetFeatures(tm); } - } PrintRequest::RelocationModels => { println!("Available relocation models:"); for &(name, _) in RELOC_MODEL_ARGS.iter() { @@ -687,6 +684,9 @@ fn print_crate_info(sess: &Session, } println!(""); } + PrintRequest::TargetCPUs | PrintRequest::TargetFeatures => { + rustc_trans::print(*req, sess); + } } } return Compilation::Stop; @@ -724,10 +724,7 @@ fn unw(x: Option<&str>) -> &str { println!("commit-date: {}", unw(commit_date_str())); println!("host: {}", config::host_triple()); println!("release: {}", unw(release_str())); - unsafe { - println!("LLVM version: {}.{}", - llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor()); - } + rustc_trans::print_version(); } } @@ -1020,9 +1017,7 @@ pub fn handle_options(args: &[String]) -> Option { } if cg_flags.contains(&"passes=list".to_string()) { - unsafe { - ::llvm::LLVMRustPrintPasses(); - } + rustc_trans::print_passes(); return None; } diff --git a/src/librustc_driver/target_features.rs b/src/librustc_driver/target_features.rs index 61bc7c6eb4c..bee61bb3980 100644 --- a/src/librustc_driver/target_features.rs +++ b/src/librustc_driver/target_features.rs @@ -9,24 +9,9 @@ // except according to those terms. use syntax::ast; -use llvm::LLVMRustHasFeature; use rustc::session::Session; -use rustc_trans::back::write::create_target_machine; use syntax::symbol::Symbol; -use libc::c_char; - -// WARNING: the features must be known to LLVM or the feature -// detection code will walk past the end of the feature array, -// leading to crashes. 
- -const ARM_WHITELIST: &'static [&'static str] = &["neon\0", "vfp2\0", "vfp3\0", "vfp4\0"]; - -const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0", - "sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0", - "ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0", - "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"]; - -const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx\0", "hvx-double\0"]; +use rustc_trans; /// Add `target_feature = "..."` cfgs for a variety of platform /// specific features (SSE, NEON etc.). @@ -34,21 +19,10 @@ /// This is performed by checking whether a whitelisted set of /// features is available on the target machine, by querying LLVM. pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) { - let target_machine = create_target_machine(sess); - - let whitelist = match &*sess.target.target.arch { - "arm" => ARM_WHITELIST, - "x86" | "x86_64" => X86_WHITELIST, - "hexagon" => HEXAGON_WHITELIST, - _ => &[], - }; - let tf = Symbol::intern("target_feature"); - for feat in whitelist { - assert_eq!(feat.chars().last(), Some('\0')); - if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } { - cfg.insert((tf, Some(Symbol::intern(&feat[..feat.len() - 1])))); - } + + for feat in rustc_trans::target_features(sess) { + cfg.insert((tf, Some(feat))); } let requested_features = sess.opts.cg.target_feature.split(','); diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 3b4f2560fc5..1d236a96bf6 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -14,10 +14,10 @@ use rustc::dep_graph::DepGraph; use rustc_lint; use rustc_resolve::MakeGlobMap; +use rustc_trans; use rustc::middle::lang_items; use rustc::middle::free_region::FreeRegionMap; use rustc::middle::region::{CodeExtent, RegionMaps}; -use rustc::middle::region::CodeExtentData; use rustc::middle::resolve_lifetime; use rustc::middle::stability; use rustc::ty::subst::{Kind, Subst}; @@ -45,7 +45,7 @@ struct Env<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a infer::InferCtxt<'a, 'gcx, 'tcx>, - region_maps: &'a mut RegionMaps<'tcx>, + region_maps: &'a mut RegionMaps, } struct RH<'a> { @@ -105,13 +105,14 @@ fn test_env(source_string: &str, let dep_graph = DepGraph::new(false); let _ignore = dep_graph.in_ignore(); - let cstore = Rc::new(CStore::new(&dep_graph)); + let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader)); let sess = session::build_session_(options, &dep_graph, None, diagnostic_handler, Rc::new(CodeMap::new(FilePathMapping::empty())), cstore.clone()); + rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let input = config::Input::Str { name: driver::anon_src(), @@ -168,8 +169,8 @@ pub fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.infcx.tcx } - pub fn create_region_hierarchy(&mut self, rh: &RH, parent: CodeExtent<'tcx>) { - let me = self.tcx().intern_code_extent(CodeExtentData::Misc(rh.id)); + pub fn create_region_hierarchy(&mut self, rh: &RH, parent: CodeExtent) { + let me = CodeExtent::Misc(rh.id); self.region_maps.record_code_extent(me, Some(parent)); for child_rh in rh.sub { self.create_region_hierarchy(child_rh, me); @@ -181,7 +182,7 @@ pub fn create_simple_region_hierarchy(&mut self) { // children of 1, etc let node = ast::NodeId::from_u32; - let dscope = self.tcx().intern_code_extent(CodeExtentData::DestructionScope(node(1))); + let dscope = CodeExtent::DestructionScope(node(1)); self.region_maps.record_code_extent(dscope, None); 
self.create_region_hierarchy(&RH { id: node(1), @@ -296,8 +297,9 @@ pub fn t_param(&self, index: u32) -> Ty<'tcx> { pub fn re_early_bound(&self, index: u32, name: &'static str) -> ty::Region<'tcx> { let name = Symbol::intern(name); self.infcx.tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { - index: index, - name: name, + def_id: self.infcx.tcx.hir.local_def_id(ast::CRATE_NODE_ID), + index, + name, })) } @@ -326,19 +328,19 @@ pub fn t_rptr_late_bound_with_debruijn(&self, } pub fn t_rptr_scope(&self, id: u32) -> Ty<'tcx> { - let r = ty::ReScope(self.tcx().node_extent(ast::NodeId::from_u32(id))); + let r = ty::ReScope(CodeExtent::Misc(ast::NodeId::from_u32(id))); self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize) } - pub fn re_free(&self, nid: ast::NodeId, id: u32) -> ty::Region<'tcx> { + pub fn re_free(&self, id: u32) -> ty::Region<'tcx> { self.infcx.tcx.mk_region(ty::ReFree(ty::FreeRegion { - scope: Some(self.tcx().node_extent(nid)), + scope: self.infcx.tcx.hir.local_def_id(ast::CRATE_NODE_ID), bound_region: ty::BrAnon(id), })) } - pub fn t_rptr_free(&self, nid: u32, id: u32) -> Ty<'tcx> { - let r = self.re_free(ast::NodeId::from_u32(nid), id); + pub fn t_rptr_free(&self, id: u32) -> Ty<'tcx> { + let r = self.re_free(id); self.infcx.tcx.mk_imm_ref(r, self.tcx().types.isize) } @@ -464,7 +466,7 @@ fn sub_free_bound_false() { test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| { env.create_simple_region_hierarchy(); - let t_rptr_free1 = env.t_rptr_free(1, 1); + let t_rptr_free1 = env.t_rptr_free(1); let t_rptr_bound1 = env.t_rptr_late_bound(1); env.check_not_sub(env.t_fn(&[t_rptr_free1], env.tcx().types.isize), env.t_fn(&[t_rptr_bound1], env.tcx().types.isize)); @@ -482,7 +484,7 @@ fn sub_bound_free_true() { test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| { env.create_simple_region_hierarchy(); let t_rptr_bound1 = env.t_rptr_late_bound(1); - let t_rptr_free1 = env.t_rptr_free(1, 1); + let t_rptr_free1 = env.t_rptr_free(1); env.check_sub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize), env.t_fn(&[t_rptr_free1], env.tcx().types.isize)); }) @@ -518,7 +520,7 @@ fn lub_free_bound_infer() { env.create_simple_region_hierarchy(); let t_infer1 = env.infcx.next_ty_var(TypeVariableOrigin::MiscVariable(DUMMY_SP)); let t_rptr_bound1 = env.t_rptr_late_bound(1); - let t_rptr_free1 = env.t_rptr_free(1, 1); + let t_rptr_free1 = env.t_rptr_free(1); env.check_lub(env.t_fn(&[t_infer1], env.tcx().types.isize), env.t_fn(&[t_rptr_bound1], env.tcx().types.isize), env.t_fn(&[t_rptr_free1], env.tcx().types.isize)); @@ -541,7 +543,7 @@ fn lub_bound_free() { test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| { env.create_simple_region_hierarchy(); let t_rptr_bound1 = env.t_rptr_late_bound(1); - let t_rptr_free1 = env.t_rptr_free(1, 1); + let t_rptr_free1 = env.t_rptr_free(1); env.check_lub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize), env.t_fn(&[t_rptr_free1], env.tcx().types.isize), env.t_fn(&[t_rptr_free1], env.tcx().types.isize)); @@ -574,8 +576,8 @@ fn lub_bound_bound_inverse_order() { fn lub_free_free() { test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| { env.create_simple_region_hierarchy(); - let t_rptr_free1 = env.t_rptr_free(1, 1); - let t_rptr_free2 = env.t_rptr_free(1, 2); + let t_rptr_free1 = env.t_rptr_free(1); + let t_rptr_free2 = env.t_rptr_free(2); let t_rptr_static = env.t_rptr_static(); env.check_lub(env.t_fn(&[t_rptr_free1], env.tcx().types.isize), env.t_fn(&[t_rptr_free2], env.tcx().types.isize), @@ -600,8 +602,8 @@ fn lub_returning_scope() { fn 
glb_free_free_with_common_scope() { test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| { env.create_simple_region_hierarchy(); - let t_rptr_free1 = env.t_rptr_free(1, 1); - let t_rptr_free2 = env.t_rptr_free(1, 2); + let t_rptr_free1 = env.t_rptr_free(1); + let t_rptr_free2 = env.t_rptr_free(2); let t_rptr_scope = env.t_rptr_scope(1); env.check_glb(env.t_fn(&[t_rptr_free1], env.tcx().types.isize), env.t_fn(&[t_rptr_free2], env.tcx().types.isize), @@ -625,7 +627,7 @@ fn glb_bound_free() { test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| { env.create_simple_region_hierarchy(); let t_rptr_bound1 = env.t_rptr_late_bound(1); - let t_rptr_free1 = env.t_rptr_free(1, 1); + let t_rptr_free1 = env.t_rptr_free(1); env.check_glb(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize), env.t_fn(&[t_rptr_free1], env.tcx().types.isize), env.t_fn(&[t_rptr_bound1], env.tcx().types.isize)); @@ -751,7 +753,7 @@ fn escaping() { assert!(!env.t_nil().has_escaping_regions()); - let t_rptr_free1 = env.t_rptr_free(1, 1); + let t_rptr_free1 = env.t_rptr_free(1); assert!(!t_rptr_free1.has_escaping_regions()); let t_rptr_bound1 = env.t_rptr_late_bound_with_debruijn(1, ty::DebruijnIndex::new(1)); diff --git a/src/librustc_incremental/calculate_svh/mod.rs b/src/librustc_incremental/calculate_svh/mod.rs index 6f5cc1f3f45..8cdabc1d894 100644 --- a/src/librustc_incremental/calculate_svh/mod.rs +++ b/src/librustc_incremental/calculate_svh/mod.rs @@ -36,9 +36,10 @@ use rustc::hir::itemlikevisit::ItemLikeVisitor; use rustc::ich::{Fingerprint, StableHashingContext}; use rustc::ty::TyCtxt; +use rustc::util::common::record_time; use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; use rustc_data_structures::fx::FxHashMap; -use rustc::util::common::record_time; +use rustc_data_structures::accumulate_vec::AccumulateVec; pub type IchHasher = StableHasher; @@ -159,6 +160,11 @@ fn compute_crate_hash(&mut self) { // difference, filter them out. return None } + DepNode::AllLocalTraitImpls => { + // These are already covered by hashing + // the HIR. 
+ return None + } ref other => { bug!("Found unexpected DepNode during \ SVH computation: {:?}", @@ -213,6 +219,49 @@ fn hash_crate_root_module(&mut self, krate: &'tcx hir::Crate) { true, (module, (span, attrs))); } + + fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate) + { + let tcx = self.hcx.tcx(); + + let mut impls: Vec<(u64, Fingerprint)> = krate + .trait_impls + .iter() + .map(|(&trait_id, impls)| { + let trait_id = tcx.def_path_hash(trait_id); + let mut impls: AccumulateVec<[_; 32]> = impls + .iter() + .map(|&node_id| { + let def_id = tcx.hir.local_def_id(node_id); + tcx.def_path_hash(def_id) + }) + .collect(); + + impls.sort_unstable(); + let mut hasher = StableHasher::new(); + impls.hash_stable(&mut self.hcx, &mut hasher); + (trait_id, hasher.finish()) + }) + .collect(); + + impls.sort_unstable(); + + let mut default_impls: AccumulateVec<[_; 32]> = krate + .trait_default_impl + .iter() + .map(|(&trait_def_id, &impl_node_id)| { + let impl_def_id = tcx.hir.local_def_id(impl_node_id); + (tcx.def_path_hash(trait_def_id), tcx.def_path_hash(impl_def_id)) + }) + .collect(); + + default_impls.sort_unstable(); + + let mut hasher = StableHasher::new(); + impls.hash_stable(&mut self.hcx, &mut hasher); + + self.hashes.insert(DepNode::AllLocalTraitImpls, hasher.finish()); + } } impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> { @@ -235,6 +284,8 @@ fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) { } } + + pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> IncrementalHashesMap { let _ignore = tcx.dep_graph.in_ignore(); @@ -272,6 +323,8 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) let fingerprint = hasher.finish(); visitor.hashes.insert(dep_node, fingerprint); } + + visitor.compute_and_store_ich_for_trait_impls(krate); }); tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64); diff --git a/src/librustc_incremental/persist/preds/compress/test_macro.rs b/src/librustc_incremental/persist/preds/compress/test_macro.rs index 31b30d2b285..044b143e306 100644 --- a/src/librustc_incremental/persist/preds/compress/test_macro.rs +++ b/src/librustc_incremental/persist/preds/compress/test_macro.rs @@ -37,14 +37,3 @@ macro_rules! graph { } } } - -macro_rules! 
set { - ($( $value:expr ),*) => { - { - use $crate::rustc_data_structures::fx::FxHashSet; - let mut set = FxHashSet(); - $(set.insert($value);)* - set - } - } -} diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 3b6516af35a..6423d65a4c2 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -990,12 +990,7 @@ fn method_call_refers_to_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, traits::Obligation::new(traits::ObligationCause::misc(span, expr_id), trait_ref.to_poly_trait_predicate()); - // unwrap() is ok here b/c `method` is the method - // defined in this crate whose body we are - // checking, so it's always local - let node_id = tcx.hir.as_local_node_id(method.def_id).unwrap(); - - let param_env = ty::ParameterEnvironment::for_item(tcx, node_id); + let param_env = tcx.parameter_environment(method.def_id); tcx.infer_ctxt(param_env, Reveal::UserFacing).enter(|infcx| { let mut selcx = traits::SelectionContext::new(&infcx); match selcx.select(&obligation) { @@ -1263,7 +1258,7 @@ fn get_lints(&self) -> LintArray { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnionsWithDropFields { fn check_item(&mut self, ctx: &LateContext, item: &hir::Item) { if let hir::ItemUnion(ref vdata, _) = item.node { - let param_env = &ty::ParameterEnvironment::for_item(ctx.tcx, item.id); + let param_env = &ctx.tcx.parameter_environment(ctx.tcx.hir.local_def_id(item.id)); for field in vdata.fields() { let field_ty = ctx.tcx.type_of(ctx.tcx.hir.local_def_id(field.id)); if field_ty.needs_drop(ctx.tcx, param_env) { diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 2d0b5a6a51c..479c7206cb4 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -171,7 +171,8 @@ macro_rules! add_lint_group { UNUSED_MUST_USE, UNUSED_UNSAFE, PATH_STATEMENTS, - UNUSED_ATTRIBUTES); + UNUSED_ATTRIBUTES, + UNUSED_MACROS); // Guidelines for creating a future incompatibility lint: // diff --git a/src/librustc_metadata/Cargo.toml b/src/librustc_metadata/Cargo.toml index e8b90609273..f47788ee036 100644 --- a/src/librustc_metadata/Cargo.toml +++ b/src/librustc_metadata/Cargo.toml @@ -11,13 +11,13 @@ crate-type = ["dylib"] [dependencies] flate = { path = "../libflate" } log = "0.3" +owning_ref = "0.3.3" proc_macro = { path = "../libproc_macro" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } rustc_errors = { path = "../librustc_errors" } -rustc_llvm = { path = "../librustc_llvm" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } syntax_ext = { path = "../libsyntax_ext" } diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index d2874f16289..ec6947b4a48 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -315,11 +315,20 @@ fn register_crate(&mut self, let exported_symbols = crate_root.exported_symbols .map(|x| x.decode(&metadata).collect()); + let trait_impls = crate_root + .impls + .map(|impls| { + impls.decode(&metadata) + .map(|trait_impls| (trait_impls.trait_id, trait_impls.impls)) + .collect() + }); + let mut cmeta = cstore::CrateMetadata { name: name, extern_crate: Cell::new(None), def_path_table: def_path_table, exported_symbols: exported_symbols, + trait_impls: trait_impls, proc_macros: crate_root.macro_derive_registrar.map(|_| { self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span) }), @@ -393,6 +402,7 
@@ fn resolve_crate(&mut self, rejected_via_filename: vec![], should_match_name: true, is_proc_macro: Some(false), + metadata_loader: &*self.cstore.metadata_loader, }; self.load(&mut locate_ctxt).or_else(|| { @@ -554,6 +564,7 @@ fn read_extension_crate(&mut self, span: Span, info: &ExternCrateInfo) -> Extens rejected_via_filename: vec![], should_match_name: true, is_proc_macro: None, + metadata_loader: &*self.cstore.metadata_loader, }; let library = self.load(&mut locate_ctxt).or_else(|| { if !is_cross { diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index c12b4209675..64fccb0314e 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -11,21 +11,20 @@ // The crate store - a central repo for information collected about external // crates and libraries -use locator; use schema::{self, Tracked}; use rustc::dep_graph::{DepGraph, DepNode, GlobalMetaDataKind}; use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefIndex, DefId}; use rustc::hir::map::definitions::DefPathTable; use rustc::hir::svh::Svh; -use rustc::middle::cstore::{DepKind, ExternCrate}; +use rustc::middle::cstore::{DepKind, ExternCrate, MetadataLoader}; use rustc_back::PanicStrategy; use rustc_data_structures::indexed_vec::IndexVec; use rustc::util::nodemap::{FxHashMap, FxHashSet, NodeMap, DefIdMap}; use std::cell::{RefCell, Cell}; use std::rc::Rc; -use flate::Bytes; +use owning_ref::ErasedBoxRef; use syntax::{ast, attr}; use syntax::ext::base::SyntaxExtension; use syntax::symbol::Symbol; @@ -43,11 +42,7 @@ // own crate numbers. pub type CrateNumMap = IndexVec; -pub enum MetadataBlob { - Inflated(Bytes), - Archive(locator::ArchiveMetadata), - Raw(Vec), -} +pub struct MetadataBlob(pub ErasedBoxRef<[u8]>); /// Holds information about a syntax_pos::FileMap imported from another crate. /// See `imported_filemaps()` for more information. @@ -85,6 +80,8 @@ pub struct CrateMetadata { pub exported_symbols: Tracked>, + pub trait_impls: Tracked>>, + pub dep_kind: Cell, pub source: CrateSource, @@ -103,10 +100,11 @@ pub struct CStore { statically_included_foreign_items: RefCell>, pub dllimport_foreign_items: RefCell>, pub visible_parent_map: RefCell>, + pub metadata_loader: Box, } impl CStore { - pub fn new(dep_graph: &DepGraph) -> CStore { + pub fn new(dep_graph: &DepGraph, metadata_loader: Box) -> CStore { CStore { dep_graph: dep_graph.clone(), metas: RefCell::new(FxHashMap()), @@ -116,6 +114,7 @@ pub fn new(dep_graph: &DepGraph) -> CStore { statically_included_foreign_items: RefCell::new(FxHashSet()), dllimport_foreign_items: RefCell::new(FxHashSet()), visible_parent_map: RefCell::new(FxHashMap()), + metadata_loader: metadata_loader, } } diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index dbf3e94832f..4b7083590d2 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -10,12 +10,11 @@ use cstore; use encoder; -use locator; use schema; use rustc::dep_graph::DepTrackingMapConfig; use rustc::middle::cstore::{CrateStore, CrateSource, LibSource, DepKind, - ExternCrate, NativeLibrary, LinkMeta, + ExternCrate, NativeLibrary, MetadataLoader, LinkMeta, LinkagePreference, LoadedMacro, EncodedMetadata}; use rustc::hir::def; use rustc::middle::lang_items; @@ -38,7 +37,6 @@ use syntax::symbol::Symbol; use syntax_pos::{Span, NO_EXPANSION}; use rustc::hir::svh::Svh; -use rustc_back::target::Target; use rustc::hir; macro_rules! 
provide { @@ -135,6 +133,10 @@ fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc { self.get_crate_data(krate) } + fn metadata_loader(&self) -> &MetadataLoader { + &*self.metadata_loader + } + fn visibility(&self, def: DefId) -> ty::Visibility { self.dep_graph.read(DepNode::MetaData(def)); self.get_crate_data(def.krate).get_visibility(def.index) @@ -147,10 +149,8 @@ fn item_generics_cloned(&self, def: DefId) -> ty::Generics { fn implementations_of_trait(&self, filter: Option) -> Vec { - if let Some(def_id) = filter { - self.dep_graph.read(DepNode::MetaData(def_id)); - } let mut result = vec![]; + self.iter_crate_data(|_, cdata| { cdata.get_implementations_for_trait(filter, &self.dep_graph, &mut result) }); @@ -420,17 +420,6 @@ fn used_link_args(&self) -> Vec { self.get_used_link_args().borrow().clone() } - - fn metadata_filename(&self) -> &str - { - locator::METADATA_FILENAME - } - - fn metadata_section_name(&self, target: &Target) -> &str - { - locator::meta_section_name(target) - } - fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)> { self.do_get_used_crates(prefer) @@ -522,4 +511,4 @@ fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap> { drop(visible_parent_map); self.visible_parent_map.borrow() } -} +} \ No newline at end of file diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index ea845f722c3..754f27810c4 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -21,7 +21,6 @@ use rustc::hir::def::{self, Def, CtorKind}; use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc::middle::lang_items; -use rustc::middle::region; use rustc::session::Session; use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::subst::Substs; @@ -78,11 +77,7 @@ fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> { impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob { fn raw_bytes(self) -> &'a [u8] { - match *self { - MetadataBlob::Inflated(ref vec) => vec, - MetadataBlob::Archive(ref ar) => ar.as_slice(), - MetadataBlob::Raw(ref vec) => vec, - } + &self.0 } } @@ -360,12 +355,6 @@ fn specialized_decode(&mut self) -> Result, Self::Error> { } } -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - Ok(self.tcx().intern_code_extent(Decodable::decode(self)?)) - } -} - impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice>> for DecodeContext<'a, 'tcx> { fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice>, Self::Error> { Ok(self.tcx().mk_type_list((0..self.read_usize()?).map(|_| Decodable::decode(self)))?) @@ -512,16 +501,11 @@ pub fn get_trait_def(&self, item_id: DefIndex) -> ty::TraitDef { _ => bug!(), }; - let def = ty::TraitDef::new(self.local_def_id(item_id), - data.unsafety, - data.paren_sugar, - self.def_path_table.def_path_hash(item_id)); - - if data.has_default_impl { - def.record_has_default_impl(); - } - - def + ty::TraitDef::new(self.local_def_id(item_id), + data.unsafety, + data.paren_sugar, + data.has_default_impl, + self.def_path_table.def_path_hash(item_id)) } fn get_variant(&self, item: &Entry, index: DefIndex) -> ty::VariantDef { @@ -968,17 +952,17 @@ pub fn get_implementations_for_trait(&self, None => None, }; - // FIXME(eddyb) Make this O(1) instead of O(n). 
let dep_node = self.metadata_dep_node(GlobalMetaDataKind::Impls); - for trait_impls in self.root.impls.get(dep_graph, dep_node).decode(self) { - if filter.is_some() && filter != Some(trait_impls.trait_id) { - continue; - } - - result.extend(trait_impls.impls.decode(self).map(|index| self.local_def_id(index))); - if filter.is_some() { - break; + if let Some(filter) = filter { + if let Some(impls) = self.trait_impls + .get(dep_graph, dep_node) + .get(&filter) { + result.extend(impls.decode(self).map(|idx| self.local_def_id(idx))); + } + } else { + for impls in self.trait_impls.get(dep_graph, dep_node).values() { + result.extend(impls.decode(self).map(|idx| self.local_def_id(idx))); } } } diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index fa4ebed1618..93fcdc455e5 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -943,7 +943,7 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) -> let trait_ref = tcx.impl_trait_ref(def_id); let parent = if let Some(trait_ref) = trait_ref { let trait_def = tcx.trait_def(trait_ref.def_id); - trait_def.ancestors(def_id).skip(1).next().and_then(|node| { + trait_def.ancestors(tcx, def_id).skip(1).next().and_then(|node| { match node { specialization_graph::Node::Impl(parent) => Some(parent), _ => None, @@ -1295,23 +1295,37 @@ fn encode_lang_items_missing(&mut self, _: ()) -> LazySeq /// Encodes an index, mapping each trait to its (local) implementations. fn encode_impls(&mut self, _: ()) -> LazySeq { + debug!("IsolatedEncoder::encode_impls()"); + let tcx = self.tcx; let mut visitor = ImplVisitor { - tcx: self.tcx, + tcx: tcx, impls: FxHashMap(), }; - self.tcx.hir.krate().visit_all_item_likes(&mut visitor); + tcx.hir.krate().visit_all_item_likes(&mut visitor); + + let mut all_impls: Vec<_> = visitor.impls.into_iter().collect(); - let all_impls: Vec<_> = visitor.impls + // Bring everything into deterministic order for hashing + all_impls.sort_unstable_by_key(|&(trait_def_id, _)| { + tcx.def_path_hash(trait_def_id) + }); + + let all_impls: Vec<_> = all_impls .into_iter() - .map(|(trait_def_id, impls)| { + .map(|(trait_def_id, mut impls)| { + // Bring everything into deterministic order for hashing + impls.sort_unstable_by_key(|&def_index| { + tcx.hir.definitions().def_path_hash(def_index) + }); + TraitImpls { trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index), - impls: self.lazy_seq(impls), + impls: self.lazy_seq_from_slice(&impls[..]), } }) .collect(); - self.lazy_seq(all_impls) + self.lazy_seq_from_slice(&all_impls[..]) } // Encodes all symbols exported from this crate into the metadata. 
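[Editor's note, not part of the patch: the `encode_impls` hunk above sorts each trait's impl list and the trait list itself by their stable def-path hashes before encoding, so the emitted metadata (and the `AllLocalTraitImpls` fingerprint computed earlier in calculate_svh) no longer depends on hash-map iteration order. A minimal standalone sketch of that sort-then-hash determinism trick, using plain `String` keys and `DefaultHasher` instead of rustc's def-path hashes — all names here are hypothetical:]

```
// Illustrative sketch only -- not rustc code. `impls_by_trait` stands in for the
// per-crate trait-impl map; the real patch sorts by def-path hashes, not strings.
use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn stable_digest(impls_by_trait: &HashMap<String, Vec<String>>) -> u64 {
    // HashMap iteration order is nondeterministic, so collect and sort by key first.
    let mut entries: Vec<(&String, Vec<&String>)> = impls_by_trait
        .iter()
        .map(|(trait_name, impls)| {
            let mut impls: Vec<&String> = impls.iter().collect();
            impls.sort_unstable(); // deterministic order within each trait
            (trait_name, impls)
        })
        .collect();
    entries.sort_unstable_by(|a, b| a.0.cmp(b.0)); // deterministic order across traits

    let mut hasher = DefaultHasher::new();
    entries.hash(&mut hasher);
    hasher.finish()
}
```

[Sorting before hashing is what makes the fingerprint reproducible across compiler runs; an unstable sort suffices because the keys are unique.]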
diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index 27555f49e57..56c150fd4c8 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -29,6 +29,7 @@ #![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))] #![cfg_attr(stage0, feature(staged_api))] +#![feature(sort_unstable)] #[macro_use] extern crate log; @@ -37,6 +38,7 @@ extern crate syntax_pos; extern crate flate; extern crate serialize as rustc_serialize; // used by deriving +extern crate owning_ref; extern crate rustc_errors as errors; extern crate syntax_ext; extern crate proc_macro; @@ -46,7 +48,6 @@ extern crate rustc_back; extern crate rustc_const_math; extern crate rustc_data_structures; -extern crate rustc_llvm; mod diagnostics; diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index 84bb82de370..34b07af9f01 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -224,15 +224,12 @@ use schema::{METADATA_HEADER, rustc_version}; use rustc::hir::svh::Svh; +use rustc::middle::cstore::MetadataLoader; use rustc::session::{config, Session}; use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch}; use rustc::session::search_paths::PathKind; -use rustc::util::common; use rustc::util::nodemap::FxHashMap; -use rustc_llvm as llvm; -use rustc_llvm::{False, ObjectFile, mk_section_iter}; -use rustc_llvm::archive_ro::ArchiveRO; use errors::DiagnosticBuilder; use syntax::symbol::Symbol; use syntax_pos::Span; @@ -243,11 +240,10 @@ use std::fs::{self, File}; use std::io::{self, Read}; use std::path::{Path, PathBuf}; -use std::ptr; -use std::slice; use std::time::Instant; use flate; +use owning_ref::{ErasedBoxRef, OwningRef}; pub struct CrateMismatch { path: PathBuf, @@ -272,12 +268,7 @@ pub struct Context<'a> { pub rejected_via_filename: Vec, pub should_match_name: bool, pub is_proc_macro: Option, -} - -pub struct ArchiveMetadata { - _archive: ArchiveRO, - // points into self._archive - data: *const [u8], + pub metadata_loader: &'a MetadataLoader, } pub struct CratePaths { @@ -287,8 +278,6 @@ pub struct CratePaths { pub rmeta: Option, } -pub const METADATA_FILENAME: &'static str = "rust.metadata.bin"; - #[derive(Copy, Clone, PartialEq)] enum CrateFlavor { Rlib, @@ -596,20 +585,21 @@ fn extract_one(&mut self, let mut err: Option = None; for (lib, kind) in m { info!("{} reading metadata from: {}", flavor, lib.display()); - let (hash, metadata) = match get_metadata_section(self.target, flavor, &lib) { - Ok(blob) => { - if let Some(h) = self.crate_matches(&blob, &lib) { - (h, blob) - } else { - info!("metadata mismatch"); + let (hash, metadata) = + match get_metadata_section(self.target, flavor, &lib, self.metadata_loader) { + Ok(blob) => { + if let Some(h) = self.crate_matches(&blob, &lib) { + (h, blob) + } else { + info!("metadata mismatch"); + continue; + } + } + Err(err) => { + info!("no metadata found: {}", err); continue; } - } - Err(err) => { - info!("no metadata found: {}", err); - continue; - } - }; + }; // If we see multiple hashes, emit an error about duplicate candidates. 
if slot.as_ref().map_or(false, |s| s.0 != hash) { let mut e = struct_span_err!(self.sess, @@ -833,50 +823,14 @@ pub fn note_crate_name(err: &mut DiagnosticBuilder, name: &str) { err.note(&format!("crate name: {}", name)); } -impl ArchiveMetadata { - fn new(ar: ArchiveRO) -> Option { - let data = { - let section = ar.iter() - .filter_map(|s| s.ok()) - .find(|sect| sect.name() == Some(METADATA_FILENAME)); - match section { - Some(s) => s.data() as *const [u8], - None => { - debug!("didn't find '{}' in the archive", METADATA_FILENAME); - return None; - } - } - }; - - Some(ArchiveMetadata { - _archive: ar, - data: data, - }) - } - - pub fn as_slice<'a>(&'a self) -> &'a [u8] { - unsafe { &*self.data } - } -} - -fn verify_decompressed_encoding_version(blob: &MetadataBlob, - filename: &Path) - -> Result<(), String> { - if !blob.is_compatible() { - Err((format!("incompatible metadata version found: '{}'", - filename.display()))) - } else { - Ok(()) - } -} - // Just a small wrapper to time how long reading metadata takes. fn get_metadata_section(target: &Target, flavor: CrateFlavor, - filename: &Path) + filename: &Path, + loader: &MetadataLoader) -> Result { let start = Instant::now(); - let ret = get_metadata_section_imp(target, flavor, filename); + let ret = get_metadata_section_imp(target, flavor, filename, loader); info!("reading {:?} => {:?}", filename.file_name().unwrap(), start.elapsed()); @@ -885,118 +839,61 @@ fn get_metadata_section(target: &Target, fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, - filename: &Path) + filename: &Path, + loader: &MetadataLoader) -> Result { if !filename.exists() { return Err(format!("no such file: '{}'", filename.display())); } - if flavor == CrateFlavor::Rlib { - // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap - // internally to read the file. We also avoid even using a memcpy by - // just keeping the archive along while the metadata is in use. 
- let archive = match ArchiveRO::open(filename) { - Some(ar) => ar, - None => { - debug!("llvm didn't like `{}`", filename.display()); - return Err(format!("failed to read rlib metadata: '{}'", filename.display())); + let raw_bytes: ErasedBoxRef<[u8]> = match flavor { + CrateFlavor::Rlib => loader.get_rlib_metadata(target, filename)?, + CrateFlavor::Dylib => { + let buf = loader.get_dylib_metadata(target, filename)?; + // The header is uncompressed + let header_len = METADATA_HEADER.len(); + debug!("checking {} bytes of metadata-version stamp", header_len); + let header = &buf[..cmp::min(header_len, buf.len())]; + if header != METADATA_HEADER { + return Err(format!("incompatible metadata version found: '{}'", + filename.display())); } - }; - return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) { - None => Err(format!("failed to read rlib metadata: '{}'", filename.display())), - Some(blob) => { - verify_decompressed_encoding_version(&blob, filename)?; - Ok(blob) - } - }; - } else if flavor == CrateFlavor::Rmeta { - let mut file = File::open(filename).map_err(|_| - format!("could not open file: '{}'", filename.display()))?; - let mut buf = vec![]; - file.read_to_end(&mut buf).map_err(|_| - format!("failed to read rlib metadata: '{}'", filename.display()))?; - let blob = MetadataBlob::Raw(buf); - verify_decompressed_encoding_version(&blob, filename)?; - return Ok(blob); - } - unsafe { - let buf = common::path2cstr(filename); - let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr()); - if mb as isize == 0 { - return Err(format!("error reading library: '{}'", filename.display())); - } - let of = match ObjectFile::new(mb) { - Some(of) => of, - _ => { - return Err((format!("provided path not an object file: '{}'", filename.display()))) - } - }; - let si = mk_section_iter(of.llof); - while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False { - let mut name_buf = ptr::null(); - let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf); - let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec(); - let name = String::from_utf8(name).unwrap(); - debug!("get_metadata_section: name {}", name); - if read_meta_section_name(target) == name { - let cbuf = llvm::LLVMGetSectionContents(si.llsi); - let csz = llvm::LLVMGetSectionSize(si.llsi) as usize; - let cvbuf: *const u8 = cbuf as *const u8; - let vlen = METADATA_HEADER.len(); - debug!("checking {} bytes of metadata-version stamp", vlen); - let minsz = cmp::min(vlen, csz); - let buf0 = slice::from_raw_parts(cvbuf, minsz); - let version_ok = buf0 == METADATA_HEADER; - if !version_ok { - return Err((format!("incompatible metadata version found: '{}'", - filename.display()))); - } - let cvbuf1 = cvbuf.offset(vlen as isize); - debug!("inflating {} bytes of compressed metadata", csz - vlen); - let bytes = slice::from_raw_parts(cvbuf1, csz - vlen); - match flate::inflate_bytes(bytes) { - Ok(inflated) => { - let blob = MetadataBlob::Inflated(inflated); - verify_decompressed_encoding_version(&blob, filename)?; - return Ok(blob); - } - Err(_) => {} + // Header is okay -> inflate the actual metadata + let compressed_bytes = &buf[header_len..]; + debug!("inflating {} bytes of compressed metadata", compressed_bytes.len()); + match flate::inflate_bytes(compressed_bytes) { + Ok(inflated) => { + let buf = unsafe { OwningRef::new_assert_stable_address(inflated) }; + buf.map_owner_box().erase_owner() + } + Err(_) => { + return Err(format!("failed to decompress metadata: {}", 
filename.display())); } } - llvm::LLVMMoveToNextSection(si.llsi); } - Err(format!("metadata not found: '{}'", filename.display())) - } -} - -pub fn meta_section_name(target: &Target) -> &'static str { - // Historical note: - // - // When using link.exe it was seen that the section name `.note.rustc` - // was getting shortened to `.note.ru`, and according to the PE and COFF - // specification: - // - // > Executable images do not use a string table and do not support - // > section names longer than 8 characters - // - // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx - // - // As a result, we choose a slightly shorter name! As to why - // `.note.rustc` works on MinGW, that's another good question... - - if target.options.is_like_osx { - "__DATA,.rustc" + CrateFlavor::Rmeta => { + let mut file = File::open(filename).map_err(|_| + format!("could not open file: '{}'", filename.display()))?; + let mut buf = vec![]; + file.read_to_end(&mut buf).map_err(|_| + format!("failed to read rmeta metadata: '{}'", filename.display()))?; + OwningRef::new(buf).map_owner_box().erase_owner() + } + }; + let blob = MetadataBlob(raw_bytes); + if blob.is_compatible() { + Ok(blob) } else { - ".rustc" + Err(format!("incompatible metadata version found: '{}'", filename.display())) } } -pub fn read_meta_section_name(_target: &Target) -> &'static str { - ".rustc" -} - // A diagnostic function for dumping crate metadata to an output stream -pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> io::Result<()> { +pub fn list_file_metadata(target: &Target, + path: &Path, + loader: &MetadataLoader, + out: &mut io::Write) + -> io::Result<()> { let filename = path.file_name().unwrap().to_str().unwrap(); let flavor = if filename.ends_with(".rlib") { CrateFlavor::Rlib @@ -1005,7 +902,7 @@ pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> } else { CrateFlavor::Dylib }; - match get_metadata_section(target, flavor, path) { + match get_metadata_section(target, flavor, path, loader) { Ok(metadata) => metadata.list_crate_metadata(out), Err(msg) => write!(out, "{}\n", msg), } diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs index 5abe1adfb6f..91a22d92219 100644 --- a/src/librustc_metadata/schema.rs +++ b/src/librustc_metadata/schema.rs @@ -221,6 +221,20 @@ pub fn map(&self, f: F) -> Tracked } } +impl<'a, 'tcx, T> HashStable> for Tracked + where T: HashStable> +{ + fn hash_stable(&self, + hcx: &mut StableHashingContext<'a, 'tcx>, + hasher: &mut StableHasher) { + let Tracked { + ref state + } = *self; + + state.hash_stable(hcx, hasher); + } +} + #[derive(RustcEncodable, RustcDecodable)] pub struct CrateRoot { diff --git a/src/librustc_mir/build/expr/as_operand.rs b/src/librustc_mir/build/expr/as_operand.rs index f7534737edc..a3680214432 100644 --- a/src/librustc_mir/build/expr/as_operand.rs +++ b/src/librustc_mir/build/expr/as_operand.rs @@ -39,7 +39,7 @@ pub fn as_local_operand(&mut self, block: BasicBlock, expr: M) /// The operand is known to be live until the end of `scope`. 
pub fn as_operand(&mut self, block: BasicBlock, - scope: Option>, + scope: Option, expr: M) -> BlockAnd> where M: Mirror<'tcx, Output = Expr<'tcx>> { @@ -49,7 +49,7 @@ pub fn as_operand(&mut self, fn expr_as_operand(&mut self, mut block: BasicBlock, - scope: Option>, + scope: Option, expr: Expr<'tcx>) -> BlockAnd> { debug!("expr_as_operand(block={:?}, expr={:?})", block, expr); diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index 46e2408c38d..7b29cd970d7 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -38,7 +38,7 @@ pub fn as_local_rvalue(&mut self, block: BasicBlock, expr: M) } /// Compile `expr`, yielding an rvalue. - pub fn as_rvalue(&mut self, block: BasicBlock, scope: Option>, expr: M) + pub fn as_rvalue(&mut self, block: BasicBlock, scope: Option, expr: M) -> BlockAnd> where M: Mirror<'tcx, Output = Expr<'tcx>> { @@ -48,7 +48,7 @@ pub fn as_rvalue(&mut self, block: BasicBlock, scope: Option fn expr_as_rvalue(&mut self, mut block: BasicBlock, - scope: Option>, + scope: Option, expr: Expr<'tcx>) -> BlockAnd> { debug!("expr_as_rvalue(block={:?}, expr={:?})", block, expr); diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs index db4561af734..a334923546f 100644 --- a/src/librustc_mir/build/expr/as_temp.rs +++ b/src/librustc_mir/build/expr/as_temp.rs @@ -21,7 +21,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// up rvalues so as to freeze the value that will be consumed. pub fn as_temp(&mut self, block: BasicBlock, - temp_lifetime: Option>, + temp_lifetime: Option, expr: M) -> BlockAnd> where M: Mirror<'tcx, Output = Expr<'tcx>> @@ -32,7 +32,7 @@ pub fn as_temp(&mut self, fn expr_as_temp(&mut self, mut block: BasicBlock, - temp_lifetime: Option>, + temp_lifetime: Option, expr: Expr<'tcx>) -> BlockAnd> { debug!("expr_as_temp(block={:?}, expr={:?})", block, expr); diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 8c057b02df2..fb173e2487b 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -14,7 +14,7 @@ use hair::Pattern; use rustc::hir; use rustc::hir::def_id::DefId; -use rustc::middle::region::{CodeExtent, CodeExtentData}; +use rustc::middle::region::CodeExtent; use rustc::mir::*; use rustc::mir::transform::MirSource; use rustc::mir::visit::MutVisitor; @@ -172,7 +172,7 @@ fn create_constructor_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, { let span = tcx.hir.span(ctor_id); if let hir::VariantData::Tuple(ref fields, ctor_id) = *v { - let pe = ty::ParameterEnvironment::for_item(tcx, ctor_id); + let pe = tcx.parameter_environment(tcx.hir.local_def_id(ctor_id)); tcx.infer_ctxt(pe, Reveal::UserFacing).enter(|infcx| { let (mut mir, src) = shim::build_adt_ctor(&infcx, ctor_id, fields, span); @@ -206,13 +206,14 @@ fn closure_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, -> Ty<'tcx> { let closure_ty = tcx.body_tables(body_id).node_id_to_type(closure_expr_id); + let closure_def_id = tcx.hir.local_def_id(closure_expr_id); let region = ty::ReFree(ty::FreeRegion { - scope: Some(tcx.item_extent(body_id.node_id)), + scope: closure_def_id, bound_region: ty::BoundRegion::BrEnv, }); let region = tcx.mk_region(region); - match tcx.closure_kind(tcx.hir.local_def_id(closure_expr_id)) { + match tcx.closure_kind(closure_def_id) { ty::ClosureKind::Fn => tcx.mk_ref(region, ty::TypeAndMut { ty: closure_ty, @@ -337,12 +338,8 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, let span = 
tcx.hir.span(fn_id); let mut builder = Builder::new(hir.clone(), span, arguments.len(), return_ty); - let call_site_extent = - tcx.intern_code_extent( - CodeExtentData::CallSiteScope { fn_id: fn_id, body_id: body.value.id }); - let arg_extent = - tcx.intern_code_extent( - CodeExtentData::ParameterScope { fn_id: fn_id, body_id: body.value.id }); + let call_site_extent = CodeExtent::CallSiteScope(body.id()); + let arg_extent = CodeExtent::ParameterScope(body.id()); let mut block = START_BLOCK; unpack!(block = builder.in_scope(call_site_extent, block, |builder| { unpack!(block = builder.in_scope(arg_extent, block, |builder| { @@ -405,22 +402,15 @@ pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>, let span = tcx.hir.span(owner_id); let mut builder = Builder::new(hir.clone(), span, 0, ty); - let extent = hir.region_maps.temporary_scope(tcx, ast_expr.id) - .unwrap_or(tcx.item_extent(owner_id)); let mut block = START_BLOCK; - let _ = builder.in_scope(extent, block, |builder| { - let expr = builder.hir.mirror(ast_expr); - unpack!(block = builder.into(&Lvalue::Local(RETURN_POINTER), block, expr)); + let expr = builder.hir.mirror(ast_expr); + unpack!(block = builder.into_expr(&Lvalue::Local(RETURN_POINTER), block, expr)); - let source_info = builder.source_info(span); - let return_block = builder.return_block(); - builder.cfg.terminate(block, source_info, - TerminatorKind::Goto { target: return_block }); - builder.cfg.terminate(return_block, source_info, - TerminatorKind::Return); + let source_info = builder.source_info(span); + builder.cfg.terminate(block, source_info, TerminatorKind::Return); - return_block.unit() - }); + // Constants can't `return` so a return block should not be created. + assert_eq!(builder.cached_return_block, None); builder.finish(vec![], ty) } @@ -490,7 +480,7 @@ fn finish(self, fn args_and_body(&mut self, mut block: BasicBlock, arguments: &[(Ty<'gcx>, Option<&'gcx hir::Pat>)], - argument_extent: CodeExtent<'tcx>, + argument_extent: CodeExtent, ast_body: &'gcx hir::Expr) -> BlockAnd<()> { diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 6043a696183..ae47f4c4244 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -87,7 +87,7 @@ */ use build::{BlockAnd, BlockAndExtension, Builder, CFG}; -use rustc::middle::region::{CodeExtent, CodeExtentData}; +use rustc::middle::region::CodeExtent; use rustc::middle::lang_items; use rustc::middle::const_val::ConstVal; use rustc::ty::subst::{Kind, Subst}; @@ -102,7 +102,7 @@ pub struct Scope<'tcx> { visibility_scope: VisibilityScope, /// the extent of this scope within source code. - extent: CodeExtent<'tcx>, + extent: CodeExtent, /// Whether there's anything to do for the cleanup path, that is, /// when unwinding through this scope. This includes destructors, @@ -137,7 +137,7 @@ pub struct Scope<'tcx> { free: Option>, /// The cache for drop chain on “normal” exit into a particular BasicBlock. - cached_exits: FxHashMap<(BasicBlock, CodeExtent<'tcx>), BasicBlock>, + cached_exits: FxHashMap<(BasicBlock, CodeExtent), BasicBlock>, } struct DropData<'tcx> { @@ -180,7 +180,7 @@ struct FreeData<'tcx> { #[derive(Clone, Debug)] pub struct BreakableScope<'tcx> { /// Extent of the loop - pub extent: CodeExtent<'tcx>, + pub extent: CodeExtent, /// Where the body of the loop begins. 
`None` if block pub continue_block: Option, /// Block to branch into when the loop or block terminates (either by being `break`-en out @@ -271,7 +271,7 @@ pub fn in_breakable_scope(&mut self, /// Convenience wrapper that pushes a scope and then executes `f` /// to build its contents, popping the scope afterwards. pub fn in_scope(&mut self, - extent: CodeExtent<'tcx>, + extent: CodeExtent, mut block: BasicBlock, f: F) -> BlockAnd @@ -289,7 +289,7 @@ pub fn in_scope(&mut self, /// scope and call `pop_scope` afterwards. Note that these two /// calls must be paired; using `in_scope` as a convenience /// wrapper maybe preferable. - pub fn push_scope(&mut self, extent: CodeExtent<'tcx>) { + pub fn push_scope(&mut self, extent: CodeExtent) { debug!("push_scope({:?})", extent); let vis_scope = self.visibility_scope; self.scopes.push(Scope { @@ -306,7 +306,7 @@ pub fn push_scope(&mut self, extent: CodeExtent<'tcx>) { /// drops onto the end of `block` that are needed. This must /// match 1-to-1 with `push_scope`. pub fn pop_scope(&mut self, - extent: CodeExtent<'tcx>, + extent: CodeExtent, mut block: BasicBlock) -> BlockAnd<()> { debug!("pop_scope({:?}, {:?})", extent, block); @@ -330,7 +330,7 @@ pub fn pop_scope(&mut self, /// module comment for details. pub fn exit_scope(&mut self, span: Span, - extent: CodeExtent<'tcx>, + extent: CodeExtent, mut block: BasicBlock, target: BasicBlock) { debug!("exit_scope(extent={:?}, block={:?}, target={:?})", extent, block, target); @@ -391,7 +391,7 @@ pub fn new_visibility_scope(&mut self, span: Span) -> VisibilityScope { /// resolving `break` and `continue`. pub fn find_breakable_scope(&mut self, span: Span, - label: CodeExtent<'tcx>) + label: CodeExtent) -> &mut BreakableScope<'tcx> { // find the loop-scope with the correct id self.breakable_scopes.iter_mut() @@ -411,12 +411,12 @@ pub fn source_info(&self, span: Span) -> SourceInfo { /// Returns the extent of the scope which should be exited by a /// return. - pub fn extent_of_return_scope(&self) -> CodeExtent<'tcx> { + pub fn extent_of_return_scope(&self) -> CodeExtent { // The outermost scope (`scopes[0]`) will be the `CallSiteScope`. // We want `scopes[1]`, which is the `ParameterScope`. assert!(self.scopes.len() >= 2); - assert!(match *self.scopes[1].extent { - CodeExtentData::ParameterScope { .. } => true, + assert!(match self.scopes[1].extent { + CodeExtent::ParameterScope(_) => true, _ => false, }); self.scopes[1].extent @@ -424,7 +424,7 @@ pub fn extent_of_return_scope(&self) -> CodeExtent<'tcx> { /// Returns the topmost active scope, which is known to be alive until /// the next scope expression. - pub fn topmost_scope(&self) -> CodeExtent<'tcx> { + pub fn topmost_scope(&self) -> CodeExtent { self.scopes.last().expect("topmost_scope: no scopes present").extent } @@ -434,7 +434,7 @@ pub fn topmost_scope(&self) -> CodeExtent<'tcx> { /// `extent`. pub fn schedule_drop(&mut self, span: Span, - extent: CodeExtent<'tcx>, + extent: CodeExtent, lvalue: &Lvalue<'tcx>, lvalue_ty: Ty<'tcx>) { let needs_drop = self.hir.needs_drop(lvalue_ty); @@ -524,7 +524,7 @@ pub fn schedule_drop(&mut self, /// There may only be one “free” scheduled in any given scope. 
pub fn schedule_box_free(&mut self, span: Span, - extent: CodeExtent<'tcx>, + extent: CodeExtent, value: &Lvalue<'tcx>, item_ty: Ty<'tcx>) { for scope in self.scopes.iter_mut().rev() { diff --git a/src/librustc_mir/hair/cx/block.rs b/src/librustc_mir/hair/cx/block.rs index 2ec4a8a07df..920da306116 100644 --- a/src/librustc_mir/hair/cx/block.rs +++ b/src/librustc_mir/hair/cx/block.rs @@ -11,7 +11,7 @@ use hair::*; use hair::cx::Cx; use hair::cx::to_ref::ToRef; -use rustc::middle::region::{BlockRemainder, CodeExtentData}; +use rustc::middle::region::{BlockRemainder, CodeExtent}; use rustc::hir; use syntax::ast; @@ -24,7 +24,7 @@ fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Block<'tcx> { let stmts = mirror_stmts(cx, self.id, &*self.stmts); Block { targeted_by_break: self.targeted_by_break, - extent: cx.tcx.node_extent(self.id), + extent: CodeExtent::Misc(self.id), span: self.span, stmts: stmts, expr: self.expr.to_ref(), @@ -44,7 +44,7 @@ fn mirror_stmts<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, result.push(StmtRef::Mirror(Box::new(Stmt { span: stmt.span, kind: StmtKind::Expr { - scope: cx.tcx.node_extent(id), + scope: CodeExtent::Misc(id), expr: expr.to_ref(), }, }))) @@ -55,19 +55,17 @@ fn mirror_stmts<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // ignore for purposes of the MIR } hir::DeclLocal(ref local) => { - let remainder_extent = CodeExtentData::Remainder(BlockRemainder { + let remainder_extent = CodeExtent::Remainder(BlockRemainder { block: block_id, first_statement_index: index as u32, }); - let remainder_extent = - cx.tcx.intern_code_extent(remainder_extent); let pattern = Pattern::from_hir(cx.tcx, cx.tables(), &local.pat); result.push(StmtRef::Mirror(Box::new(Stmt { span: stmt.span, kind: StmtKind::Let { remainder_scope: remainder_extent, - init_scope: cx.tcx.node_extent(id), + init_scope: CodeExtent::Misc(id), pattern: pattern, initializer: local.init.to_ref(), }, @@ -84,7 +82,7 @@ pub fn to_expr_ref<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, block: &'tcx hir::Block) -> ExprRef<'tcx> { let block_ty = cx.tables().node_id_to_type(block.id); - let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, block.id); + let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(block.id); let expr = Expr { ty: block_ty, temp_lifetime: temp_lifetime, diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs index 6a1817aba09..b180d982e86 100644 --- a/src/librustc_mir/hair/cx/expr.rs +++ b/src/librustc_mir/hair/cx/expr.rs @@ -14,7 +14,6 @@ use hair::cx::Cx; use hair::cx::block; use hair::cx::to_ref::ToRef; -use rustc::hir::map; use rustc::hir::def::{Def, CtorKind}; use rustc::middle::const_val::ConstVal; use rustc::ty::{self, AdtKind, VariantDef, Ty}; @@ -26,8 +25,8 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr { type Output = Expr<'tcx>; fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> { - let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, self.id); - let expr_extent = cx.tcx.node_extent(self.id); + let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(self.id); + let expr_extent = CodeExtent::Misc(self.id); debug!("Expr::make_mirror(): id={}, span={:?}", self.id, self.span); @@ -238,7 +237,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, expr: &'tcx hir::Expr) -> Expr<'tcx> { let expr_ty = cx.tables().expr_ty(expr); - let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, expr.id); + let (temp_lifetime, was_shrunk) = 
cx.region_maps.temporary_scope2(expr.id); let kind = match expr.node { // Here comes the interesting stuff: @@ -610,7 +609,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, match dest.target_id { hir::ScopeTarget::Block(target_id) | hir::ScopeTarget::Loop(hir::LoopIdResult::Ok(target_id)) => ExprKind::Break { - label: cx.tcx.node_extent(target_id), + label: CodeExtent::Misc(target_id), value: value.to_ref(), }, hir::ScopeTarget::Loop(hir::LoopIdResult::Err(err)) => @@ -621,7 +620,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, match dest.target_id { hir::ScopeTarget::Block(_) => bug!("cannot continue to blocks"), hir::ScopeTarget::Loop(hir::LoopIdResult::Ok(loop_id)) => ExprKind::Continue { - label: cx.tcx.node_extent(loop_id), + label: CodeExtent::Misc(loop_id), }, hir::ScopeTarget::Loop(hir::LoopIdResult::Err(err)) => bug!("invalid loop id for continue: {}", err) @@ -686,7 +685,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, hir::ExprBox(ref value) => { ExprKind::Box { value: value.to_ref(), - value_extents: cx.tcx.node_extent(value.id), + value_extents: CodeExtent::Misc(value.id), } } hir::ExprArray(ref fields) => ExprKind::Array { fields: fields.to_ref() }, @@ -707,7 +706,7 @@ fn method_callee<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, method_call: ty::MethodCall) -> Expr<'tcx> { let callee = cx.tables().method_map[&method_call]; - let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, expr.id); + let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id); Expr { temp_lifetime: temp_lifetime, temp_lifetime_was_shrunk: was_shrunk, @@ -791,7 +790,7 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, expr: &'tcx hir::Expr, def: Def) -> ExprKind<'tcx> { - let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, expr.id); + let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id); match def { Def::Local(def_id) => { @@ -807,33 +806,20 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, closure_expr_id); let var_ty = cx.tables().node_id_to_type(id_var); - let body_id = match cx.tcx.hir.find(closure_expr_id) { - Some(map::NodeExpr(expr)) => { - match expr.node { - hir::ExprClosure(.., body, _) => body.node_id, - _ => { - span_bug!(expr.span, "closure expr is not a closure expr"); - } - } - } - _ => { - span_bug!(expr.span, "ast-map has garbage for closure expr"); - } - }; - // FIXME free regions in closures are not right let closure_ty = cx.tables().node_id_to_type(closure_expr_id); // FIXME we're just hard-coding the idea that the // signature will be &self or &mut self and hence will // have a bound region with number 0 + let closure_def_id = cx.tcx.hir.local_def_id(closure_expr_id); let region = ty::ReFree(ty::FreeRegion { - scope: Some(cx.tcx.node_extent(body_id)), + scope: closure_def_id, bound_region: ty::BoundRegion::BrAnon(0), }); let region = cx.tcx.mk_region(region); - let self_expr = match cx.tcx.closure_kind(cx.tcx.hir.local_def_id(closure_expr_id)) { + let self_expr = match cx.tcx.closure_kind(closure_def_id) { ty::ClosureKind::Fn => { let ref_closure_ty = cx.tcx.mk_ref(region, ty::TypeAndMut { @@ -979,7 +965,7 @@ fn overloaded_operator<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, PassArgs::ByRef => { let region = cx.tcx.node_scope_region(expr.id); let (temp_lifetime, was_shrunk) = - cx.region_maps.temporary_scope2(cx.tcx, expr.id); + cx.region_maps.temporary_scope2(expr.id); argrefs.extend(args.iter() 
.map(|arg| { let arg_ty = cx.tables().expr_ty_adjusted(arg); @@ -1031,7 +1017,7 @@ fn overloaded_lvalue<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // construct the complete expression `foo()` for the overloaded call, // which will yield the &T type - let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, expr.id); + let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id); let ref_kind = overloaded_operator(cx, expr, method_call, pass_args, receiver, args); let ref_expr = Expr { temp_lifetime: temp_lifetime, @@ -1056,7 +1042,7 @@ fn capture_freevar<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, closure_expr_id: closure_expr.id, }; let upvar_capture = cx.tables().upvar_capture(upvar_id).unwrap(); - let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, closure_expr.id); + let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(closure_expr.id); let var_ty = cx.tables().node_id_to_type(id_var); let captured_var = Expr { temp_lifetime: temp_lifetime, diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index ee8547e5dd6..9ffce18fe15 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -35,7 +35,7 @@ pub struct Cx<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - pub region_maps: Rc>, + pub region_maps: Rc, constness: hir::Constness, /// True if this constant/function needs overflow checks. diff --git a/src/librustc_mir/hair/mod.rs b/src/librustc_mir/hair/mod.rs index 0e8992e62ea..1af9d722599 100644 --- a/src/librustc_mir/hair/mod.rs +++ b/src/librustc_mir/hair/mod.rs @@ -32,7 +32,7 @@ #[derive(Clone, Debug)] pub struct Block<'tcx> { pub targeted_by_break: bool, - pub extent: CodeExtent<'tcx>, + pub extent: CodeExtent, pub span: Span, pub stmts: Vec>, pub expr: Option>, @@ -53,7 +53,7 @@ pub struct Stmt<'tcx> { pub enum StmtKind<'tcx> { Expr { /// scope for this statement; may be used as lifetime of temporaries - scope: CodeExtent<'tcx>, + scope: CodeExtent, /// expression being evaluated in this statement expr: ExprRef<'tcx>, @@ -62,11 +62,11 @@ pub enum StmtKind<'tcx> { Let { /// scope for variables bound in this let; covers this and /// remaining statements in block - remainder_scope: CodeExtent<'tcx>, + remainder_scope: CodeExtent, /// scope for the initialization itself; might be used as /// lifetime of temporaries - init_scope: CodeExtent<'tcx>, + init_scope: CodeExtent, /// let = ... pattern: Pattern<'tcx>, @@ -97,7 +97,7 @@ pub struct Expr<'tcx> { /// lifetime of this expression if it should be spilled into a /// temporary; should be None only if in a constant context - pub temp_lifetime: Option>, + pub temp_lifetime: Option, /// whether this temp lifetime was shrunk by #36082. 
pub temp_lifetime_was_shrunk: bool, @@ -112,12 +112,12 @@ pub struct Expr<'tcx> { #[derive(Clone, Debug)] pub enum ExprKind<'tcx> { Scope { - extent: CodeExtent<'tcx>, + extent: CodeExtent, value: ExprRef<'tcx>, }, Box { value: ExprRef<'tcx>, - value_extents: CodeExtent<'tcx>, + value_extents: CodeExtent, }, Call { ty: ty::Ty<'tcx>, @@ -210,11 +210,11 @@ pub enum ExprKind<'tcx> { arg: ExprRef<'tcx>, }, Break { - label: CodeExtent<'tcx>, + label: CodeExtent, value: Option>, }, Continue { - label: CodeExtent<'tcx>, + label: CodeExtent, }, Return { value: Option>, diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs index a6f9952b23c..6f4480bf6dd 100644 --- a/src/librustc_mir/shim.rs +++ b/src/librustc_mir/shim.rs @@ -15,7 +15,7 @@ use rustc::mir::*; use rustc::mir::transform::MirSource; use rustc::ty::{self, Ty}; -use rustc::ty::subst::{Kind, Subst}; +use rustc::ty::subst::{Kind, Subst, Substs}; use rustc::ty::maps::Providers; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; @@ -41,8 +41,7 @@ fn make_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, { debug!("make_shim({:?})", instance); let did = instance.def_id(); - let span = tcx.def_span(did); - let param_env = tcx.construct_parameter_environment(span, did, None); + let param_env = tcx.parameter_environment(did); let mut result = match instance { ty::InstanceDef::Item(..) => @@ -66,7 +65,6 @@ fn make_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, build_call_shim( tcx, - ¶m_env, def_id, adjustment, CallKind::Indirect, @@ -78,7 +76,6 @@ fn make_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, // trans::mir knows to turn to an actual virtual call. build_call_shim( tcx, - ¶m_env, def_id, Adjustment::Identity, CallKind::Direct(def_id), @@ -94,7 +91,6 @@ fn make_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, build_call_shim( tcx, - ¶m_env, call_once, Adjustment::RefMut, CallKind::Direct(call_mut), @@ -158,7 +154,7 @@ fn build_drop_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, let substs = if let Some(ty) = ty { tcx.mk_substs(iter::once(Kind::from(ty))) } else { - param_env.free_substs + Substs::identity_for_item(tcx, def_id) }; let fn_ty = tcx.type_of(def_id).subst(tcx, substs); let sig = tcx.erase_late_bound_regions(&fn_ty.fn_sig()); @@ -272,7 +268,6 @@ fn downcast_subpath(&self, _path: Self::Path, _variant: usize) -> Option(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, - param_env: &ty::ParameterEnvironment<'tcx>, def_id: DefId, rcvr_adjustment: Adjustment, call_kind: CallKind, @@ -283,7 +278,7 @@ fn build_call_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, call_kind={:?}, untuple_args={:?})", def_id, rcvr_adjustment, call_kind, untuple_args); - let fn_ty = tcx.type_of(def_id).subst(tcx, param_env.free_substs); + let fn_ty = tcx.type_of(def_id); let sig = tcx.erase_late_bound_regions(&fn_ty.fn_sig()); let span = tcx.def_span(def_id); @@ -325,9 +320,10 @@ fn build_call_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, CallKind::Direct(def_id) => ( Operand::Constant(box Constant { span: span, - ty: tcx.type_of(def_id).subst(tcx, param_env.free_substs), + ty: tcx.type_of(def_id), literal: Literal::Value { - value: ConstVal::Function(def_id, param_env.free_substs), + value: ConstVal::Function(def_id, + Substs::identity_for_item(tcx, def_id)), }, }), vec![rcvr] diff --git a/src/librustc_mir/transform/inline.rs b/src/librustc_mir/transform/inline.rs index f60dcbed6ba..e6d62dc6460 100644 --- a/src/librustc_mir/transform/inline.rs +++ b/src/librustc_mir/transform/inline.rs @@ -219,7 +219,8 @@ fn should_inline(&self, // FIXME: Give a bonus to 
functions with only a single caller - let param_env = ty::ParameterEnvironment::for_item(tcx, self.source.item_id()); + let def_id = tcx.hir.local_def_id(self.source.item_id()); + let param_env = tcx.parameter_environment(def_id); let mut first_block = true; let mut cost = 0; diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index df837a32133..72edf68f403 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -937,8 +937,7 @@ fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, return Qualif::NOT_CONST.bits(); } - let node_id = tcx.hir.as_local_node_id(def_id).unwrap(); - let param_env = ty::ParameterEnvironment::for_item(tcx, node_id); + let param_env = tcx.parameter_environment(def_id); let mut qualifier = Qualifier::new(tcx, param_env, def_id, mir, Mode::Const); qualifier.qualify_const().bits() @@ -966,7 +965,7 @@ fn run_pass<'a, 'tcx>(&self, MirSource::Const(_) | MirSource::Promoted(..) => return }; - let param_env = ty::ParameterEnvironment::for_item(tcx, id); + let param_env = tcx.parameter_environment(def_id); if mode == Mode::Fn || mode == Mode::ConstFn { // This is ugly because Qualifier holds onto mir, diff --git a/src/librustc_mir/transform/type_check.rs b/src/librustc_mir/transform/type_check.rs index be384218a41..82c0d2c1b01 100644 --- a/src/librustc_mir/transform/type_check.rs +++ b/src/librustc_mir/transform/type_check.rs @@ -751,7 +751,7 @@ fn run_pass<'a, 'tcx>(&self, // broken MIR, so try not to report duplicate errors. return; } - let param_env = ty::ParameterEnvironment::for_item(tcx, item_id); + let param_env = tcx.parameter_environment(def_id); tcx.infer_ctxt(param_env, Reveal::UserFacing).enter(|infcx| { let mut checker = TypeChecker::new(&infcx, item_id); { diff --git a/src/librustc_mir/util/pretty.rs b/src/librustc_mir/util/pretty.rs index 14f277d1767..eaba573dcd2 100644 --- a/src/librustc_mir/util/pretty.rs +++ b/src/librustc_mir/util/pretty.rs @@ -324,7 +324,9 @@ fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write) MirSource::Promoted(_, i) => write!(w, "{:?} in", i)? 
} - write!(w, " {}", tcx.node_path_str(src.item_id()))?; + item_path::with_forced_impl_filename_line(|| { // see notes on #41697 elsewhere + write!(w, " {}", tcx.node_path_str(src.item_id())) + })?; if let MirSource::Fn(_) = src { write!(w, "(")?; diff --git a/src/librustc_passes/hir_stats.rs b/src/librustc_passes/hir_stats.rs index 749146fe496..29fac5463e5 100644 --- a/src/librustc_passes/hir_stats.rs +++ b/src/librustc_passes/hir_stats.rs @@ -252,7 +252,7 @@ fn visit_macro_def(&mut self, macro_def: &'v hir::MacroDef) { impl<'v> ast_visit::Visitor<'v> for StatCollector<'v> { - fn visit_mod(&mut self, m: &'v ast::Mod, _s: Span, _n: NodeId) { + fn visit_mod(&mut self, m: &'v ast::Mod, _s: Span, _a: &[ast::Attribute], _n: NodeId) { self.record("Mod", Id::None, m); ast_visit::walk_mod(self, m) } diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs index cdde56f5f63..3027489d65b 100644 --- a/src/librustc_plugin/registry.rs +++ b/src/librustc_plugin/registry.rs @@ -103,7 +103,8 @@ pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxEx } self.syntax_exts.push((name, match extension { NormalTT(ext, _, allow_internal_unstable) => { - NormalTT(ext, Some(self.krate_span), allow_internal_unstable) + let nid = ast::CRATE_NODE_ID; + NormalTT(ext, Some((nid, self.krate_span)), allow_internal_unstable) } IdentTT(ext, _, allow_internal_unstable) => { IdentTT(ext, Some(self.krate_span), allow_internal_unstable) diff --git a/src/librustc_resolve/diagnostics.rs b/src/librustc_resolve/diagnostics.rs index 2c2babf0a66..368fb7a8868 100644 --- a/src/librustc_resolve/diagnostics.rs +++ b/src/librustc_resolve/diagnostics.rs @@ -1222,27 +1222,26 @@ fn bar() -> u32 { "##, E0435: r##" -A non-constant value was used to initialise a constant. +A non-constant value was used in a constant expression. Erroneous code example: ```compile_fail,E0435 -let foo = 42u32; -const FOO : u32 = foo; // error: attempt to use a non-constant value in a - // constant +let foo = 42; +let a: [u8; foo]; // error: attempt to use a non-constant value in a constant ``` To fix this error, please replace the value with a constant. Example: ``` -const FOO : u32 = 42u32; // ok! +let a: [u8; 42]; // ok! ``` Or: ``` -const OTHER_FOO : u32 = 42u32; -const FOO : u32 = OTHER_FOO; // ok! +const FOO: usize = 42; +let a: [u8; FOO]; // ok! 
``` "##, @@ -1560,7 +1559,7 @@ fn print_on_failure(state: &State) { // E0157, unused error code // E0257, // E0258, - E0402, // cannot use an outer type parameter in this context +// E0402, // cannot use an outer type parameter in this context // E0406, merged into 420 // E0410, merged into 408 // E0413, merged into 530 diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index c4512cb38c4..6ea666e21dc 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -127,8 +127,6 @@ fn cmp(&self, other: &BindingError) -> cmp::Ordering { enum ResolutionError<'a> { /// error E0401: can't use type parameters from outer function TypeParametersFromOuterFunction, - /// error E0402: cannot use an outer type parameter in this context - OuterTypeParameterContext, /// error E0403: the name is already used for a type parameter in this type parameter list NameAlreadyUsedInTypeParameterList(Name, &'a Span), /// error E0407: method is not a member of trait @@ -187,12 +185,6 @@ fn resolve_struct_error<'sess, 'a>(resolver: &'sess Resolver, err.span_label(span, "use of type variable from outer function"); err } - ResolutionError::OuterTypeParameterContext => { - struct_span_err!(resolver.session, - span, - E0402, - "cannot use an outer type parameter in this context") - } ResolutionError::NameAlreadyUsedInTypeParameterList(name, first_use_span) => { let mut err = struct_span_err!(resolver.session, span, @@ -1195,6 +1187,10 @@ pub struct Resolver<'a> { pub whitelisted_legacy_custom_derives: Vec, pub found_unresolved_macro: bool, + // List of crate local macros that we need to warn about as being unused. + // Right now this only includes macro_rules! macros. + unused_macros: FxHashSet, + // Maps the `Mark` of an expansion to its containing module or block. invocations: FxHashMap>, @@ -1400,6 +1396,7 @@ pub fn new(session: &'a Session, potentially_unused_imports: Vec::new(), struct_constructors: DefIdMap(), found_unresolved_macro: false, + unused_macros: FxHashSet(), } } @@ -1671,16 +1668,16 @@ fn resolve_item(&mut self, item: &Item) { this.check_proc_macro_attrs(&trait_item.attrs); match trait_item.node { - TraitItemKind::Const(_, ref default) => { + TraitItemKind::Const(ref ty, ref default) => { + this.visit_ty(ty); + // Only impose the restrictions of - // ConstRibKind if there's an actual constant + // ConstRibKind for an actual constant // expression in a provided default. - if default.is_some() { + if let Some(ref expr) = *default{ this.with_constant_rib(|this| { - visit::walk_trait_item(this, trait_item) + this.visit_expr(expr); }); - } else { - visit::walk_trait_item(this, trait_item) } } TraitItemKind::Method(ref sig, _) => { @@ -1709,9 +1706,13 @@ fn resolve_item(&mut self, item: &Item) { }); } - ItemKind::Const(..) | ItemKind::Static(..) 
=> { - self.with_constant_rib(|this| { - visit::walk_item(this, item); + ItemKind::Static(ref ty, _, ref expr) | + ItemKind::Const(ref ty, ref expr) => { + self.with_item_rib(|this| { + this.visit_ty(ty); + this.with_constant_rib(|this| { + this.visit_expr(expr); + }); }); } @@ -1782,13 +1783,21 @@ fn with_label_rib(&mut self, f: F) self.label_ribs.pop(); } + fn with_item_rib(&mut self, f: F) + where F: FnOnce(&mut Resolver) + { + self.ribs[ValueNS].push(Rib::new(ItemRibKind)); + self.ribs[TypeNS].push(Rib::new(ItemRibKind)); + f(self); + self.ribs[TypeNS].pop(); + self.ribs[ValueNS].pop(); + } + fn with_constant_rib(&mut self, f: F) where F: FnOnce(&mut Resolver) { self.ribs[ValueNS].push(Rib::new(ConstantItemRibKind)); - self.ribs[TypeNS].push(Rib::new(ConstantItemRibKind)); f(self); - self.ribs[TypeNS].pop(); self.ribs[ValueNS].pop(); } @@ -2755,7 +2764,8 @@ fn adjust_local_def(&mut self, for rib in ribs { match rib.kind { NormalRibKind | MethodRibKind(_) | ClosureRibKind(..) | - ModuleRibKind(..) | MacroDefinition(..) | ForwardTyParamBanRibKind => { + ModuleRibKind(..) | MacroDefinition(..) | ForwardTyParamBanRibKind | + ConstantItemRibKind => { // Nothing to do. Continue. } ItemRibKind => { @@ -2767,14 +2777,6 @@ fn adjust_local_def(&mut self, } return Def::Err; } - ConstantItemRibKind => { - // see #9186 - if record_used { - resolve_error(self, span, - ResolutionError::OuterTypeParameterContext); - } - return Def::Err; - } } } } diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index c08421cb937..231d30cd2a2 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -16,7 +16,7 @@ use rustc::hir::def_id::{DefId, BUILTIN_MACROS_CRATE, CRATE_DEF_INDEX, DefIndex}; use rustc::hir::def::{Def, Export}; use rustc::hir::map::{self, DefCollector}; -use rustc::ty; +use rustc::{ty, lint}; use syntax::ast::{self, Name, Ident}; use syntax::attr::{self, HasAttrs}; use syntax::errors::DiagnosticBuilder; @@ -291,12 +291,32 @@ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool) }, }; self.macro_defs.insert(invoc.expansion_data.mark, def.def_id()); + self.unused_macros.remove(&def.def_id()); Ok(Some(self.get_macro(def))) } fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool) -> Result, Determinacy> { - self.resolve_macro_to_def(scope, path, kind, force).map(|def| self.get_macro(def)) + self.resolve_macro_to_def(scope, path, kind, force).map(|def| { + self.unused_macros.remove(&def.def_id()); + self.get_macro(def) + }) + } + + fn check_unused_macros(&self) { + for did in self.unused_macros.iter() { + let id_span = match *self.macro_map[did] { + SyntaxExtension::NormalTT(_, isp, _) => isp, + _ => None, + }; + if let Some((id, span)) = id_span { + let lint = lint::builtin::UNUSED_MACROS; + let msg = "unused macro definition".to_string(); + self.session.add_lint(lint, id, span, msg); + } else { + bug!("attempted to create unused macro error, but span not available"); + } + } } } @@ -687,6 +707,8 @@ pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope< if attr::contains_name(&item.attrs, "macro_export") { let def = Def::Macro(def_id, MacroKind::Bang); self.macro_exports.push(Export { name: ident.name, def: def, span: item.span }); + } else { + self.unused_macros.insert(def_id); } } diff --git a/src/librustc_save_analysis/data.rs b/src/librustc_save_analysis/data.rs index d4ded71a333..cac1a2e3c5a 100644 --- a/src/librustc_save_analysis/data.rs +++ 
b/src/librustc_save_analysis/data.rs @@ -267,7 +267,7 @@ pub struct ModData { pub items: Vec, pub visibility: Visibility, pub docs: String, - pub sig: Signature, + pub sig: Option, pub attributes: Vec, } diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index 763414c1a55..a95236e2a50 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -1211,6 +1211,31 @@ fn process_impl_item(&mut self, impl_item: &'l ast::ImplItem, impl_id: DefId) { } impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'l> for DumpVisitor<'l, 'tcx, 'll, D> { + fn visit_mod(&mut self, m: &'l ast::Mod, span: Span, attrs: &[ast::Attribute], id: NodeId) { + // Since we handle explicit modules ourselves in visit_item, this should + // only get called for the root module of a crate. + assert_eq!(id, ast::CRATE_NODE_ID); + + let qualname = format!("::{}", self.tcx.node_path_str(id)); + + let cm = self.tcx.sess.codemap(); + let filename = cm.span_to_filename(span); + self.dumper.mod_data(ModData { + id: id, + name: String::new(), + qualname: qualname, + span: span, + scope: id, + filename: filename, + items: m.items.iter().map(|i| i.id).collect(), + visibility: Visibility::Public, + docs: docs_for_attrs(attrs), + sig: None, + attributes: attrs.to_owned(), + }.lower(self.tcx)); + self.nest_scope(id, |v| visit::walk_mod(v, m)); + } + fn visit_item(&mut self, item: &'l ast::Item) { use syntax::ast::ItemKind::*; self.process_macro_use(item.span, item.id); diff --git a/src/librustc_save_analysis/external_data.rs b/src/librustc_save_analysis/external_data.rs index 6fd2de97767..02441a0587e 100644 --- a/src/librustc_save_analysis/external_data.rs +++ b/src/librustc_save_analysis/external_data.rs @@ -392,7 +392,7 @@ pub struct ModData { pub items: Vec, pub visibility: Visibility, pub docs: String, - pub sig: Signature, + pub sig: Option, pub attributes: Vec, } @@ -410,7 +410,7 @@ fn lower(self, tcx: TyCtxt) -> ModData { items: self.items.into_iter().map(|id| make_def_id(id, &tcx.hir)).collect(), visibility: self.visibility, docs: self.docs, - sig: self.sig.lower(tcx), + sig: self.sig.map(|s| s.lower(tcx)), attributes: self.attributes.lower(tcx), } } diff --git a/src/librustc_save_analysis/json_api_dumper.rs b/src/librustc_save_analysis/json_api_dumper.rs index 41221ad9863..49b14f5eca0 100644 --- a/src/librustc_save_analysis/json_api_dumper.rs +++ b/src/librustc_save_analysis/json_api_dumper.rs @@ -293,7 +293,7 @@ fn into(self) -> Option { parent: None, decl_id: None, docs: self.docs, - sig: Some(self.sig.into()), + sig: self.sig.map(|s| s.into()), attributes: vec![], }), _ => None, diff --git a/src/librustc_save_analysis/json_dumper.rs b/src/librustc_save_analysis/json_dumper.rs index 2d1e12bf0a1..eaa0c0825f0 100644 --- a/src/librustc_save_analysis/json_dumper.rs +++ b/src/librustc_save_analysis/json_dumper.rs @@ -121,7 +121,7 @@ fn mod_data(&mut self, data: ModData) { children: data.items.into_iter().map(|id| id_from_def_id(id)).collect(), decl_id: None, docs: data.docs, - sig: Some(data.sig.into()), + sig: data.sig.map(|s| s.into()), attributes: data.attributes.into_iter().map(|a| a.into()).collect(), }; if def.span.file_name.to_str().unwrap() != def.value { diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index cc98754f610..5a8acf9abe1 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -258,7 +258,7 @@ pub fn get_item_data(&self, item: &ast::Item) -> Option { items: 
m.items.iter().map(|i| i.id).collect(), visibility: From::from(&item.vis), docs: docs_for_attrs(&item.attrs), - sig: self.sig_base(item), + sig: Some(self.sig_base(item)), attributes: item.attrs.clone(), })) } diff --git a/src/librustc_trans/Cargo.toml b/src/librustc_trans/Cargo.toml index af477f5a152..4ccc85257f3 100644 --- a/src/librustc_trans/Cargo.toml +++ b/src/librustc_trans/Cargo.toml @@ -12,6 +12,7 @@ test = false [dependencies] flate = { path = "../libflate" } log = "0.3" +owning_ref = "0.3.3" rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } diff --git a/src/librustc_trans/back/archive.rs b/src/librustc_trans/back/archive.rs index 0f908b7d069..902065c8688 100644 --- a/src/librustc_trans/back/archive.rs +++ b/src/librustc_trans/back/archive.rs @@ -20,6 +20,7 @@ use libc; use llvm::archive_ro::{ArchiveRO, Child}; use llvm::{self, ArchiveKind}; +use metadata::METADATA_FILENAME; use rustc::session::Session; pub struct ArchiveConfig<'a> { @@ -158,11 +159,9 @@ pub fn add_rlib(&mut self, // Ignoring all bytecode files, no matter of // name let bc_ext = ".bytecode.deflate"; - let metadata_filename = - self.config.sess.cstore.metadata_filename().to_owned(); self.add_archive(rlib, move |fname: &str| { - if fname.ends_with(bc_ext) || fname == metadata_filename { + if fname.ends_with(bc_ext) || fname == METADATA_FILENAME { return true } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index d1000a574b5..f85d3f9f54d 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -13,6 +13,7 @@ use super::rpath::RPathConfig; use super::rpath; use super::msvc; +use metadata::METADATA_FILENAME; use session::config; use session::config::NoDebugInfo; use session::config::{OutputFilenames, Input, OutputType}; @@ -521,7 +522,7 @@ fn link_rlib<'a>(sess: &'a Session, // contain the metadata in a separate file. 
We use a temp directory // here so concurrent builds in the same directory don't try to use // the same filename for metadata (stomping over one another) - let metadata = tmpdir.join(sess.cstore.metadata_filename()); + let metadata = tmpdir.join(METADATA_FILENAME); emit_metadata(sess, trans, &metadata); ab.add_file(&metadata); @@ -1145,8 +1146,7 @@ fn link_sanitizer_runtime(cmd: &mut Linker, archive.update_symbols(); for f in archive.src_files() { - if f.ends_with("bytecode.deflate") || - f == sess.cstore.metadata_filename() { + if f.ends_with("bytecode.deflate") || f == METADATA_FILENAME { archive.remove_file(&f); continue } @@ -1221,8 +1221,7 @@ fn add_static_crate(cmd: &mut Linker, let mut any_objects = false; for f in archive.src_files() { - if f.ends_with("bytecode.deflate") || - f == sess.cstore.metadata_filename() { + if f.ends_with("bytecode.deflate") || f == METADATA_FILENAME { archive.remove_file(&f); continue } diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index 8689e176f7a..437ced85b2e 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -34,6 +34,7 @@ use back::symbol_export::{self, ExportedSymbols}; use llvm::{ContextRef, Linkage, ModuleRef, ValueRef, Vector, get_param}; use llvm; +use metadata; use rustc::hir::def_id::LOCAL_CRATE; use middle::lang_items::StartFnLangItem; use middle::cstore::EncodedMetadata; @@ -778,8 +779,7 @@ enum MetadataKind { }; unsafe { llvm::LLVMSetInitializer(llglobal, llconst); - let section_name = - tcx.sess.cstore.metadata_section_name(&tcx.sess.target.target); + let section_name = metadata::metadata_section_name(&tcx.sess.target.target); let name = CString::new(section_name).unwrap(); llvm::LLVMSetSection(llglobal, name.as_ptr()); diff --git a/src/librustc_trans/common.rs b/src/librustc_trans/common.rs index 025062f7dde..efd4f136785 100644 --- a/src/librustc_trans/common.rs +++ b/src/librustc_trans/common.rs @@ -537,6 +537,12 @@ pub fn requests_inline<'a, 'tcx>( if is_inline_instance(tcx, instance) { return true } + if let ty::InstanceDef::DropGlue(..) = instance.def { + // Drop glue wants to be instantiated at every translation + // unit, but without an #[inline] hint. We should make this + // available to normal end-users. + return true + } attr::requests_inline(&instance.def.attrs(tcx)[..]) } diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index 8e633ee59b6..3ac0d88b90d 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -45,6 +45,7 @@ extern crate flate; extern crate libc; +extern crate owning_ref; #[macro_use] extern crate rustc; extern crate rustc_back; extern crate rustc_data_structures; @@ -70,6 +71,9 @@ pub use base::trans_crate; pub use back::symbol_names::provide; +pub use metadata::LlvmMetadataLoader; +pub use llvm_util::{init, target_features, print_version, print_passes, print, enable_llvm_debug}; + pub mod back { pub use rustc::hir::svh; @@ -119,7 +123,9 @@ pub mod back { mod declare; mod glue; mod intrinsic; +mod llvm_util; mod machine; +mod metadata; mod meth; mod mir; mod monomorphize; diff --git a/src/librustc_trans/llvm_util.rs b/src/librustc_trans/llvm_util.rs new file mode 100644 index 00000000000..15f56036b0c --- /dev/null +++ b/src/librustc_trans/llvm_util.rs @@ -0,0 +1,127 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +use syntax_pos::symbol::Symbol; +use back::write::create_target_machine; +use llvm; +use rustc::session::Session; +use rustc::session::config::PrintRequest; +use libc::{c_int, c_char}; +use std::ffi::CString; + +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Once; + +pub fn init(sess: &Session) { + unsafe { + // Before we touch LLVM, make sure that multithreading is enabled. + static POISONED: AtomicBool = AtomicBool::new(false); + static INIT: Once = Once::new(); + INIT.call_once(|| { + if llvm::LLVMStartMultithreaded() != 1 { + // use an extra bool to make sure that all future usage of LLVM + // cannot proceed despite the Once not running more than once. + POISONED.store(true, Ordering::SeqCst); + } + + configure_llvm(sess); + }); + + if POISONED.load(Ordering::SeqCst) { + bug!("couldn't enable multi-threaded LLVM"); + } + } +} + +unsafe fn configure_llvm(sess: &Session) { + let mut llvm_c_strs = Vec::new(); + let mut llvm_args = Vec::new(); + + { + let mut add = |arg: &str| { + let s = CString::new(arg).unwrap(); + llvm_args.push(s.as_ptr()); + llvm_c_strs.push(s); + }; + add("rustc"); // fake program name + if sess.time_llvm_passes() { add("-time-passes"); } + if sess.print_llvm_passes() { add("-debug-pass=Structure"); } + + for arg in &sess.opts.cg.llvm_args { + add(&(*arg)); + } + } + + llvm::LLVMInitializePasses(); + + llvm::initialize_available_targets(); + + llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int, + llvm_args.as_ptr()); +} + +// WARNING: the features must be known to LLVM or the feature +// detection code will walk past the end of the feature array, +// leading to crashes. + +const ARM_WHITELIST: &'static [&'static str] = &["neon\0", "vfp2\0", "vfp3\0", "vfp4\0"]; + +const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0", + "sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0", + "ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0", + "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"]; + +const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx\0", "hvx-double\0"]; + +pub fn target_features(sess: &Session) -> Vec { + let target_machine = create_target_machine(sess); + + let whitelist = match &*sess.target.target.arch { + "arm" => ARM_WHITELIST, + "x86" | "x86_64" => X86_WHITELIST, + "hexagon" => HEXAGON_WHITELIST, + _ => &[], + }; + + let mut features = Vec::new(); + for feat in whitelist { + assert_eq!(feat.chars().last(), Some('\0')); + if unsafe { llvm::LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } { + features.push(Symbol::intern(&feat[..feat.len() - 1])); + } + } + features +} + +pub fn print_version() { + unsafe { + println!("LLVM version: {}.{}", + llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor()); + } +} + +pub fn print_passes() { + unsafe { llvm::LLVMRustPrintPasses(); } +} + +pub fn print(req: PrintRequest, sess: &Session) { + let tm = create_target_machine(sess); + unsafe { + match req { + PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm), + PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm), + _ => bug!("rustc_trans can't handle print request: {:?}", req), + } + } +} + +pub fn enable_llvm_debug() { + unsafe { llvm::LLVMRustSetDebug(1); } +} diff --git a/src/librustc_trans/metadata.rs b/src/librustc_trans/metadata.rs new file mode 100644 index 00000000000..2c0148dfbb3 --- /dev/null +++ b/src/librustc_trans/metadata.rs @@ -0,0 +1,122 @@ +// Copyright 2017 The Rust Project 
Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use rustc::util::common; +use rustc::middle::cstore::MetadataLoader; +use rustc_back::target::Target; +use llvm; +use llvm::{False, ObjectFile, mk_section_iter}; +use llvm::archive_ro::ArchiveRO; + +use owning_ref::{ErasedBoxRef, OwningRef}; +use std::path::Path; +use std::ptr; +use std::slice; + +pub const METADATA_FILENAME: &str = "rust.metadata.bin"; + +pub struct LlvmMetadataLoader; + +impl MetadataLoader for LlvmMetadataLoader { + fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result, String> { + // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap + // internally to read the file. We also avoid even using a memcpy by + // just keeping the archive along while the metadata is in use. + let archive = ArchiveRO::open(filename) + .map(|ar| OwningRef::new(box ar)) + .ok_or_else(|| { + debug!("llvm didn't like `{}`", filename.display()); + format!("failed to read rlib metadata: '{}'", filename.display()) + })?; + let buf: OwningRef<_, [u8]> = archive + .try_map(|ar| { + ar.iter() + .filter_map(|s| s.ok()) + .find(|sect| sect.name() == Some(METADATA_FILENAME)) + .map(|s| s.data()) + .ok_or_else(|| { + debug!("didn't find '{}' in the archive", METADATA_FILENAME); + format!("failed to read rlib metadata: '{}'", + filename.display()) + }) + })?; + Ok(buf.erase_owner()) + } + + fn get_dylib_metadata(&self, + target: &Target, + filename: &Path) + -> Result, String> { + unsafe { + let buf = common::path2cstr(filename); + let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr()); + if mb as isize == 0 { + return Err(format!("error reading library: '{}'", filename.display())); + } + let of = ObjectFile::new(mb) + .map(|of| OwningRef::new(box of)) + .ok_or_else(|| format!("provided path not an object file: '{}'", + filename.display()))?; + let buf = of.try_map(|of| search_meta_section(of, target, filename))?; + Ok(buf.erase_owner()) + } + } +} + +fn search_meta_section<'a>(of: &'a ObjectFile, + target: &Target, + filename: &Path) + -> Result<&'a [u8], String> { + unsafe { + let si = mk_section_iter(of.llof); + while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False { + let mut name_buf = ptr::null(); + let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf); + let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec(); + let name = String::from_utf8(name).unwrap(); + debug!("get_metadata_section: name {}", name); + if read_metadata_section_name(target) == name { + let cbuf = llvm::LLVMGetSectionContents(si.llsi); + let csz = llvm::LLVMGetSectionSize(si.llsi) as usize; + // The buffer is valid while the object file is around + let buf: &'a [u8] = slice::from_raw_parts(cbuf as *const u8, csz); + return Ok(buf); + } + llvm::LLVMMoveToNextSection(si.llsi); + } + } + Err(format!("metadata not found: '{}'", filename.display())) +} + +pub fn metadata_section_name(target: &Target) -> &'static str { + // Historical note: + // + // When using link.exe it was seen that the section name `.note.rustc` + // was getting shortened to `.note.ru`, and according to the PE and COFF + // specification: + // + // > Executable images do not use a string table and do not support + // > section names longer than 8 
characters + // + // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx + // + // As a result, we choose a slightly shorter name! As to why + // `.note.rustc` works on MinGW, that's another good question... + + if target.options.is_like_osx { + "__DATA,.rustc" + } else { + ".rustc" + } +} + +fn read_metadata_section_name(_target: &Target) -> &'static str { + ".rustc" +} diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index adcb3d682ca..9e8352fde80 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -41,12 +41,6 @@ pub trait AstConv<'gcx, 'tcx> { fn get_type_parameter_bounds(&self, span: Span, def_id: DefId) -> ty::GenericPredicates<'tcx>; - /// Return an (optional) substitution to convert bound type parameters that - /// are in scope into free ones. This function should only return Some - /// within a fn body. - /// See ParameterEnvironment::free_substs for more information. - fn get_free_substs(&self) -> Option<&Substs<'tcx>>; - /// What lifetime should we use when a lifetime is omitted (and not elided)? fn re_infer(&self, span: Span, _def: Option<&ty::RegionParameterDef>) -> Option>; @@ -121,6 +115,7 @@ pub fn ast_region_to_region(&self, Some(&rl::Region::EarlyBound(index, id)) => { let name = tcx.hir.name(id); tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { + def_id: tcx.hir.local_def_id(id), index: index, name: name })) @@ -129,7 +124,7 @@ pub fn ast_region_to_region(&self, Some(&rl::Region::Free(scope, id)) => { let name = tcx.hir.name(id); tcx.mk_region(ty::ReFree(ty::FreeRegion { - scope: Some(scope.to_code_extent(tcx)), + scope, bound_region: ty::BrNamed(tcx.hir.local_def_id(id), name) })) @@ -857,12 +852,6 @@ pub fn associated_path_def_to_ty(&self, } }; - let trait_ref = if let Some(free_substs) = self.get_free_substs() { - trait_ref.subst(tcx, free_substs) - } else { - trait_ref - }; - let candidates = traits::supertraits(tcx, ty::Binder(trait_ref)) .filter(|r| self.trait_defines_associated_type_named(r.def_id(), @@ -1020,12 +1009,7 @@ pub fn def_to_ty(&self, assert_eq!(opt_self_ty, None); self.prohibit_type_params(&path.segments); - let ty = tcx.at(span).type_of(def_id); - if let Some(free_substs) = self.get_free_substs() { - ty.subst(tcx, free_substs) - } else { - ty - } + tcx.at(span).type_of(def_id) } Def::SelfTy(Some(_), None) => { // Self in trait. diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index fb3be849319..4c3d5c8aaca 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -15,6 +15,7 @@ use astconv::AstConv; use rustc::infer::type_variable::TypeVariableOrigin; use rustc::ty::{self, ToPolyTraitRef, Ty}; +use rustc::ty::subst::Substs; use std::cmp; use std::iter; use syntax::abi::Abi; @@ -60,12 +61,17 @@ fn check_closure(&self, decl, Abi::RustCall, expected_sig); + // `deduce_expectations_from_expected_type` introduces late-bound + // lifetimes defined elsewhere, which we need to anonymize away. + let sig = self.tcx.anonymize_late_bound_regions(&sig); // Create type variables (for now) to represent the transformed // types of upvars. These will be unified during the upvar // inference phase (`upvar.rs`). 
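// Editorial sketch, not part of the patch: a plausible illustration of the
// "late-bound lifetimes defined elsewhere" that the anonymization added above
// is meant to strip. Here the expected closure signature comes from a
// higher-ranked `Fn` bound, so `'x` is bound on `apply`'s `F` parameter rather
// than on the closure itself.
fn apply<F: for<'x> Fn(&'x u32)>(f: F) {
    f(&0)
}

fn main() {
    apply(|y| { let _ = *y + 1; });
}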
+ let base_substs = Substs::identity_for_item(self.tcx, + self.tcx.closure_base_def_id(expr_def_id)); let closure_type = self.tcx.mk_closure(expr_def_id, - self.parameter_environment.free_substs.extend_to(self.tcx, expr_def_id, + base_substs.extend_to(self.tcx, expr_def_id, |_, _| span_bug!(expr.span, "closure has region param"), |_, _| self.infcx.next_ty_var(TypeVariableOrigin::TransformedUpvar(expr.span)) ) @@ -73,8 +79,7 @@ fn check_closure(&self, debug!("check_closure: expr.id={:?} closure_type={:?}", expr.id, closure_type); - let extent = self.tcx.call_site_extent(expr.id, body.value.id); - let fn_sig = self.tcx.liberate_late_bound_regions(Some(extent), &sig); + let fn_sig = self.liberate_late_bound_regions(expr_def_id, &sig); let fn_sig = self.inh.normalize_associated_types_in(body.value.span, body.value.id, &fn_sig); diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index 0579bb15fd6..d9f77e8f04f 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -167,16 +167,15 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // Create a parameter environment that represents the implementation's // method. - let impl_param_env = ty::ParameterEnvironment::for_item(tcx, impl_m_node_id); + let impl_param_env = tcx.parameter_environment(impl_m.def_id); // Create mapping from impl to skolemized. - let impl_to_skol_substs = &impl_param_env.free_substs; + let impl_to_skol_substs = Substs::identity_for_item(tcx, impl_m.def_id); // Create mapping from trait to skolemized. let trait_to_skol_substs = impl_to_skol_substs.rebase_onto(tcx, impl_m.container.id(), - trait_to_impl_substs.subst(tcx, - impl_to_skol_substs)); + trait_to_impl_substs); debug!("compare_impl_method: trait_to_skol_substs={:?}", trait_to_skol_substs); @@ -191,8 +190,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_m, &trait_m_generics, &impl_m_generics, - trait_to_skol_substs, - impl_to_skol_substs)?; + trait_to_skol_substs)?; // Create obligations for each predicate declared by the impl // definition in the context of the trait's parameter @@ -200,7 +198,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // however, because we want to replace all late-bound regions with // region variables. 
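// Editorial sketch, not part of the patch: the entailment check being set up
// in this function is the kind of check that rejects an impl method whose
// bounds are stricter than the trait's declaration (E0276), e.g.:
trait Tr {
    fn m<T>(&self, t: T);
}

struct S;

impl Tr for S {
    // error: impl has stricter requirements than trait (`T: Clone`)
    fn m<T: Clone>(&self, t: T) {
        let _ = t.clone();
    }
}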
let impl_predicates = tcx.predicates_of(impl_m_predicates.parent.unwrap()); - let mut hybrid_preds = impl_predicates.instantiate(tcx, impl_to_skol_substs); + let mut hybrid_preds = impl_predicates.instantiate_identity(tcx); debug!("compare_impl_method: impl_bounds={:?}", hybrid_preds); @@ -226,7 +224,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, normalize_cause.clone()); tcx.infer_ctxt(trait_param_env, Reveal::UserFacing).enter(|infcx| { - let inh = Inherited::new(infcx); + let inh = Inherited::new(infcx, impl_m.def_id); let infcx = &inh.infcx; debug!("compare_impl_method: caller_bounds={:?}", @@ -273,8 +271,6 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, infcx.replace_late_bound_regions_with_fresh_var(impl_m_span, infer::HigherRankedType, &m_sig(impl_m)); - let impl_sig = - impl_sig.subst(tcx, impl_to_skol_substs); let impl_sig = inh.normalize_associated_types_in(impl_m_span, impl_m_node_id, @@ -282,8 +278,8 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let impl_fty = tcx.mk_fn_ptr(ty::Binder(impl_sig)); debug!("compare_impl_method: impl_fty={:?}", impl_fty); - let trait_sig = tcx.liberate_late_bound_regions( - infcx.parameter_environment.free_id_outlive, + let trait_sig = inh.liberate_late_bound_regions( + impl_m.def_id, &m_sig(trait_m)); let trait_sig = trait_sig.subst(tcx, trait_to_skol_substs); @@ -370,8 +366,7 @@ fn check_region_bounds_on_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_m: &ty::AssociatedItem, trait_generics: &ty::Generics, impl_generics: &ty::Generics, - trait_to_skol_substs: &Substs<'tcx>, - impl_to_skol_substs: &Substs<'tcx>) + trait_to_skol_substs: &Substs<'tcx>) -> Result<(), ErrorReported> { let trait_params = &trait_generics.regions[..]; let impl_params = &impl_generics.regions[..]; @@ -379,12 +374,10 @@ fn check_region_bounds_on_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, debug!("check_region_bounds_on_impl_method: \ trait_generics={:?} \ impl_generics={:?} \ - trait_to_skol_substs={:?} \ - impl_to_skol_substs={:?}", + trait_to_skol_substs={:?}", trait_generics, impl_generics, - trait_to_skol_substs, - impl_to_skol_substs); + trait_to_skol_substs); // Must have same number of early-bound lifetime parameters. // Unfortunately, if the user screws up the bounds, then this @@ -726,7 +719,7 @@ pub fn compare_const_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, debug!("compare_const_impl(impl_trait_ref={:?})", impl_trait_ref); tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| { - let inh = Inherited::new(infcx); + let inh = Inherited::new(infcx, impl_c.def_id); let infcx = &inh.infcx; // The below is for the most part highly similar to the procedure @@ -739,22 +732,10 @@ pub fn compare_const_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // Create a parameter environment that represents the implementation's // method. let impl_c_node_id = tcx.hir.as_local_node_id(impl_c.def_id).unwrap(); - let impl_param_env = ty::ParameterEnvironment::for_item(tcx, impl_c_node_id); - - // Create mapping from impl to skolemized. - let impl_to_skol_substs = &impl_param_env.free_substs; - - // Create mapping from trait to skolemized. - let trait_to_skol_substs = impl_to_skol_substs.rebase_onto(tcx, - impl_c.container.id(), - trait_to_impl_substs.subst(tcx, - impl_to_skol_substs)); - debug!("compare_const_impl: trait_to_skol_substs={:?}", - trait_to_skol_substs); // Compute skolemized form of impl and trait const tys. 
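// Editorial sketch, not part of the patch: compare_const_impl now compares
// the impl's associated-const type directly against the trait's type
// substituted with `trait_to_impl_substs`; the mismatch it rejects looks
// roughly like this:
trait Tr {
    const N: usize;
}

struct S;

impl Tr for S {
    // error: implemented const `N` has an incompatible type for trait
    const N: u32 = 0;
}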
- let impl_ty = tcx.type_of(impl_c.def_id).subst(tcx, impl_to_skol_substs); - let trait_ty = tcx.type_of(trait_c.def_id).subst(tcx, trait_to_skol_substs); + let impl_ty = tcx.type_of(impl_c.def_id); + let trait_ty = tcx.type_of(trait_c.def_id).subst(tcx, trait_to_impl_substs); let mut cause = ObligationCause::misc(impl_c_span, impl_c_node_id); // There is no "body" here, so just pass dummy id. diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs index c20777a403a..e0293325596 100644 --- a/src/librustc_typeck/check/dropck.rs +++ b/src/librustc_typeck/check/dropck.rs @@ -76,17 +76,15 @@ fn ensure_drop_params_and_item_params_correspond<'a, 'tcx>( -> Result<(), ErrorReported> { let drop_impl_node_id = tcx.hir.as_local_node_id(drop_impl_did).unwrap(); - let self_type_node_id = tcx.hir.as_local_node_id(self_type_did).unwrap(); // check that the impl type can be made to match the trait type. - let impl_param_env = ty::ParameterEnvironment::for_item(tcx, self_type_node_id); + let impl_param_env = tcx.parameter_environment(self_type_did); tcx.infer_ctxt(impl_param_env, Reveal::UserFacing).enter(|ref infcx| { let tcx = infcx.tcx; let mut fulfillment_cx = traits::FulfillmentContext::new(); let named_type = tcx.type_of(self_type_did); - let named_type = named_type.subst(tcx, &infcx.parameter_environment.free_substs); let drop_impl_span = tcx.def_span(drop_impl_did); let fresh_impl_substs = @@ -99,7 +97,7 @@ fn ensure_drop_params_and_item_params_correspond<'a, 'tcx>( fulfillment_cx.register_predicate_obligations(infcx, obligations); } Err(_) => { - let item_span = tcx.hir.span(self_type_node_id); + let item_span = tcx.def_span(self_type_did); struct_span_err!(tcx.sess, drop_impl_span, E0366, "Implementations of Drop cannot be specialized") .span_note(item_span, @@ -272,7 +270,7 @@ pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>( rcx: &mut RegionCtxt<'a, 'gcx, 'tcx>, ty: ty::Ty<'tcx>, span: Span, - scope: region::CodeExtent<'tcx>) + scope: region::CodeExtent) -> Result<(), ErrorReported> { debug!("check_safety_of_destructor_if_necessary typ: {:?} scope: {:?}", diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index c7ec379b0de..7e70bb92cd6 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -251,9 +251,9 @@ macro_rules! 
report_function { let bound_list = unsatisfied_predicates.iter() .map(|p| format!("`{} : {}`", p.self_ty(), p)) .collect::>() - .join(", "); + .join("\n"); err.note(&format!("the method `{}` exists but the following trait bounds \ - were not satisfied: {}", + were not satisfied:\n{}", item_name, bound_list)); } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 127ffc60cf4..d304d79bc52 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -88,10 +88,10 @@ use rustc_back::slice::ref_slice; use rustc::infer::{self, InferCtxt, InferOk, RegionVariableOrigin}; use rustc::infer::type_variable::{TypeVariableOrigin}; +use rustc::middle::region::CodeExtent; use rustc::ty::subst::{Kind, Subst, Substs}; use rustc::traits::{self, FulfillmentContext, ObligationCause, ObligationCauseCode, Reveal}; -use rustc::ty::{ParamTy, ParameterEnvironment}; -use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue}; +use rustc::ty::{ParamTy, LvaluePreference, NoPreference, PreferMutLvalue}; use rustc::ty::{self, Ty, TyCtxt, Visibility}; use rustc::ty::{MethodCall, MethodCallee}; use rustc::ty::adjustment::{Adjust, Adjustment, AutoBorrow}; @@ -177,6 +177,14 @@ pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { // variables to get the concrete type, which can be used to // deanonymize TyAnon, after typeck is done with all functions. anon_types: RefCell>>, + + /// Each type parameter has an implicit region bound that + /// indicates it must outlive at least the function body (the user + /// may specify stronger requirements). This field indicates the + /// region of the callee. If it is `None`, then the parameter + /// environment is for an item or something where the "callee" is + /// not clear. + implicit_region_bound: Option>, } impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> { @@ -523,16 +531,18 @@ fn deref(&self) -> &Self::Target { /// Necessary because we can't write the following bound: /// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>). 
pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { - infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx> + infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>, + def_id: DefId, } impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> { - pub fn build(tcx: TyCtxt<'a, 'gcx, 'gcx>, id: ast::NodeId) + pub fn build(tcx: TyCtxt<'a, 'gcx, 'gcx>, def_id: DefId) -> InheritedBuilder<'a, 'gcx, 'tcx> { let tables = ty::TypeckTables::empty(); - let param_env = ParameterEnvironment::for_item(tcx, id); + let param_env = tcx.parameter_environment(def_id); InheritedBuilder { - infcx: tcx.infer_ctxt((tables, param_env), Reveal::UserFacing) + infcx: tcx.infer_ctxt((tables, param_env), Reveal::UserFacing), + def_id, } } } @@ -541,12 +551,20 @@ impl<'a, 'gcx, 'tcx> InheritedBuilder<'a, 'gcx, 'tcx> { fn enter(&'tcx mut self, f: F) -> R where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R { - self.infcx.enter(|infcx| f(Inherited::new(infcx))) + let def_id = self.def_id; + self.infcx.enter(|infcx| f(Inherited::new(infcx, def_id))) } } impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> { - fn new(infcx: InferCtxt<'a, 'gcx, 'tcx>) -> Self { + fn new(infcx: InferCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> Self { + let tcx = infcx.tcx; + let item_id = tcx.hir.as_local_node_id(def_id); + let body_id = item_id.and_then(|id| tcx.hir.maybe_body_owned_by(id)); + let implicit_region_bound = body_id.map(|body| { + tcx.mk_region(ty::ReScope(CodeExtent::CallSiteScope(body))) + }); + Inherited { infcx: infcx, fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()), @@ -554,6 +572,7 @@ fn new(infcx: InferCtxt<'a, 'gcx, 'tcx>) -> Self { deferred_call_resolutions: RefCell::new(DefIdMap()), deferred_cast_checks: RefCell::new(Vec::new()), anon_types: RefCell::new(NodeMap()), + implicit_region_bound, } } @@ -606,6 +625,22 @@ fn normalize_associated_types_in_as_infer_ok(&self, obligations); InferOk { value, obligations } } + + /// Replace any late-bound regions bound in `value` with + /// free variants attached to `all_outlive_scope`. + fn liberate_late_bound_regions(&self, + all_outlive_scope: DefId, + value: &ty::Binder) + -> T + where T: TypeFoldable<'tcx> + { + self.tcx.replace_late_bound_regions(value, |br| { + self.tcx.mk_region(ty::ReFree(ty::FreeRegion { + scope: all_outlive_scope, + bound_region: br + })) + }).0 + } } struct CheckItemTypesVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } @@ -772,18 +807,15 @@ fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, }); let body = tcx.hir.body(body_id); - Inherited::build(tcx, id).enter(|inh| { + Inherited::build(tcx, def_id).enter(|inh| { let fcx = if let Some(decl) = fn_decl { let fn_sig = tcx.type_of(def_id).fn_sig(); check_abi(tcx, span, fn_sig.abi()); // Compute the fty from point of view of inside fn. - let fn_scope = inh.tcx.call_site_extent(id, body_id.node_id); - let fn_sig = - fn_sig.subst(inh.tcx, &inh.parameter_environment.free_substs); let fn_sig = - inh.tcx.liberate_late_bound_regions(Some(fn_scope), &fn_sig); + inh.liberate_late_bound_regions(def_id, &fn_sig); let fn_sig = inh.normalize_associated_types_in(body.value.span, body_id.node_id, &fn_sig); @@ -1168,7 +1200,7 @@ fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_id: DefId, impl_item: &hir::ImplItem) { - let ancestors = trait_def.ancestors(impl_id); + let ancestors = trait_def.ancestors(tcx, impl_id); let kind = match impl_item.node { hir::ImplItemKind::Const(..) 
=> ty::AssociatedKind::Const, @@ -1298,7 +1330,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let mut invalidated_items = Vec::new(); let associated_type_overridden = overridden_associated_type.is_some(); for trait_item in tcx.associated_items(impl_trait_ref.def_id) { - let is_implemented = trait_def.ancestors(impl_id) + let is_implemented = trait_def.ancestors(tcx, impl_id) .defs(tcx, trait_item.name, trait_item.kind) .next() .map(|node_item| !node_item.node.is_from_trait()) @@ -1518,10 +1550,6 @@ pub fn check_enum<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> { fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } - fn get_free_substs(&self) -> Option<&Substs<'tcx>> { - Some(&self.parameter_environment.free_substs) - } - fn get_type_parameter_bounds(&self, _: Span, def_id: DefId) -> ty::GenericPredicates<'tcx> { diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index e4936dfc47b..754bd288bfa 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -87,7 +87,7 @@ use middle::free_region::FreeRegionMap; use middle::mem_categorization as mc; use middle::mem_categorization::Categorization; -use middle::region::{self, CodeExtent, RegionMaps}; +use middle::region::{CodeExtent, RegionMaps}; use rustc::hir::def_id::DefId; use rustc::ty::subst::Substs; use rustc::traits; @@ -178,7 +178,7 @@ pub struct RegionCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { region_bound_pairs: Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>, - pub region_maps: Rc>, + pub region_maps: Rc, free_region_map: FreeRegionMap<'tcx>, @@ -186,7 +186,7 @@ pub struct RegionCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { body_id: ast::NodeId, // call_site scope of innermost fn - call_site_scope: Option>, + call_site_scope: Option, // id of innermost fn or loop repeating_scope: ast::NodeId, @@ -224,8 +224,8 @@ pub fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, } } - fn set_call_site_scope(&mut self, call_site_scope: Option>) - -> Option> { + fn set_call_site_scope(&mut self, call_site_scope: Option) + -> Option { mem::replace(&mut self.call_site_scope, call_site_scope) } @@ -286,8 +286,7 @@ fn visit_fn_body(&mut self, let body_id = body.id(); - let call_site = self.tcx.intern_code_extent( - region::CodeExtentData::CallSiteScope { fn_id: id, body_id: body_id.node_id }); + let call_site = CodeExtent::CallSiteScope(body_id); let old_call_site_scope = self.set_call_site_scope(Some(call_site)); let fn_sig = { @@ -312,7 +311,7 @@ fn visit_fn_body(&mut self, let old_body_id = self.set_body_id(body_id.node_id); self.relate_free_regions(&fn_sig_tys[..], body_id.node_id, span); - self.link_fn_args(self.tcx.node_extent(body_id.node_id), &body.arguments); + self.link_fn_args(CodeExtent::Misc(body_id.node_id), &body.arguments); self.visit_body(body); self.visit_region_obligations(body_id.node_id); @@ -398,9 +397,11 @@ fn relate_free_regions(&mut self, for implication in implied_bounds { debug!("implication: {:?}", implication); match implication { - ImpliedBound::RegionSubRegion(&ty::ReFree(free_a), + ImpliedBound::RegionSubRegion(r_a @ &ty::ReEarlyBound(_), + &ty::ReVar(vid_b)) | + ImpliedBound::RegionSubRegion(r_a @ &ty::ReFree(_), &ty::ReVar(vid_b)) => { - self.add_given(free_a, vid_b); + self.add_given(r_a, vid_b); } ImpliedBound::RegionSubParam(r_a, param_b) => { self.region_bound_pairs.push((r_a, GenericKind::Param(param_b))); @@ -876,7 +877,7 @@ fn constrain_call<'b, I: Iterator>(&mut self, // 
call occurs. // // FIXME(#6268) to support nested method calls, should be callee_id - let callee_scope = self.tcx.node_extent(call_expr.id); + let callee_scope = CodeExtent::Misc(call_expr.id); let callee_region = self.tcx.mk_region(ty::ReScope(callee_scope)); debug!("callee_region={:?}", callee_region); @@ -1029,7 +1030,7 @@ fn constrain_index(&mut self, debug!("constrain_index(index_expr=?, indexed_ty={}", self.ty_to_string(indexed_ty)); - let r_index_expr = ty::ReScope(self.tcx.node_extent(index_expr.id)); + let r_index_expr = ty::ReScope(CodeExtent::Misc(index_expr.id)); if let ty::TyRef(r_ptr, mt) = indexed_ty.sty { match mt.ty.sty { ty::TySlice(_) | ty::TyStr => { @@ -1109,7 +1110,7 @@ fn link_match(&self, discr: &hir::Expr, arms: &[hir::Arm]) { /// Computes the guarantors for any ref bindings in a match and /// then ensures that the lifetime of the resulting pointer is /// linked to the lifetime of its guarantor (if any). - fn link_fn_args(&self, body_scope: CodeExtent<'tcx>, args: &[hir::Arg]) { + fn link_fn_args(&self, body_scope: CodeExtent, args: &[hir::Arg]) { debug!("regionck::link_fn_args(body_scope={:?})", body_scope); let mc = &mc::MemCategorizationContext::new(self, &self.region_maps); for arg in args { @@ -1175,7 +1176,7 @@ fn link_autoref(&self, /// must outlive `callee_scope`. fn link_by_ref(&self, expr: &hir::Expr, - callee_scope: CodeExtent<'tcx>) { + callee_scope: CodeExtent) { debug!("link_by_ref(expr={:?}, callee_scope={:?})", expr, callee_scope); let mc = mc::MemCategorizationContext::new(self, &self.region_maps); @@ -1613,8 +1614,6 @@ fn type_bound(&self, span: Span, ty: Ty<'tcx>) -> VerifyBound<'tcx> { } fn param_bound(&self, param_ty: ty::ParamTy) -> VerifyBound<'tcx> { - let param_env = &self.parameter_environment; - debug!("param_bound(param_ty={:?})", param_ty); @@ -1622,7 +1621,7 @@ fn param_bound(&self, param_ty: ty::ParamTy) -> VerifyBound<'tcx> { // Add in the default bound of fn body that applies to all in // scope type parameters: - param_bounds.extend(param_env.implicit_region_bound); + param_bounds.extend(self.implicit_region_bound); VerifyBound::AnyRegion(param_bounds) } @@ -1667,7 +1666,7 @@ fn recursive_type_bound(&self, span: Span, ty: Ty<'tcx>) -> VerifyBound<'tcx> { } let mut regions = ty.regions(); - regions.retain(|r| !r.is_bound()); // ignore late-bound regions + regions.retain(|r| !r.is_late_bound()); // ignore late-bound regions bounds.push(VerifyBound::AllRegions(regions)); // remove bounds that must hold, since they are not interesting diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs index 93529aecac0..6895d738625 100644 --- a/src/librustc_typeck/check/wfcheck.rs +++ b/src/librustc_typeck/check/wfcheck.rs @@ -13,7 +13,6 @@ use constrained_type_params::{identify_constrained_type_params, Parameter}; use hir::def_id::DefId; -use middle::region::{CodeExtent}; use rustc::traits::{self, ObligationCauseCode}; use rustc::ty::{self, Ty, TyCtxt}; use rustc::util::nodemap::{FxHashSet, FxHashMap}; @@ -117,8 +116,8 @@ fn check_item_well_formed(&mut self, item: &hir::Item) { error_192(tcx, item.span); } } - hir::ItemFn(.., body_id) => { - self.check_item_fn(item, body_id); + hir::ItemFn(..) => { + self.check_item_fn(item); } hir::ItemStatic(..) 
=> { self.check_item_type(item); @@ -160,9 +159,6 @@ fn check_associated_item(&mut self, sig_if_method: Option<&hir::MethodSig>) { let code = self.code.clone(); self.for_id(item_id, span).with_fcx(|fcx, this| { - let free_substs = &fcx.parameter_environment.free_substs; - let free_id_outlive = fcx.parameter_environment.free_id_outlive; - let item = fcx.tcx.associated_item(fcx.tcx.hir.local_def_id(item_id)); let (mut implied_bounds, self_ty) = match item.container { @@ -174,25 +170,26 @@ fn check_associated_item(&mut self, match item.kind { ty::AssociatedKind::Const => { let ty = fcx.tcx.type_of(item.def_id); - let ty = fcx.instantiate_type_scheme(span, free_substs, &ty); + let ty = fcx.normalize_associated_types_in(span, &ty); fcx.register_wf_obligation(ty, span, code.clone()); } ty::AssociatedKind::Method => { reject_shadowing_type_parameters(fcx.tcx, item.def_id); let method_ty = fcx.tcx.type_of(item.def_id); - let method_ty = fcx.instantiate_type_scheme(span, free_substs, &method_ty); - let predicates = fcx.instantiate_bounds(span, item.def_id, free_substs); + let method_ty = fcx.normalize_associated_types_in(span, &method_ty); + let predicates = fcx.tcx.predicates_of(item.def_id) + .instantiate_identity(fcx.tcx); + let predicates = fcx.normalize_associated_types_in(span, &predicates); let sig = method_ty.fn_sig(); this.check_fn_or_method(fcx, span, sig, &predicates, - free_id_outlive, &mut implied_bounds); + item.def_id, &mut implied_bounds); let sig_if_method = sig_if_method.expect("bad signature for method"); - this.check_method_receiver(fcx, sig_if_method, &item, - free_id_outlive, self_ty); + this.check_method_receiver(fcx, sig_if_method, &item, self_ty); } ty::AssociatedKind::Type => { if item.defaultness.has_value() { let ty = fcx.tcx.type_of(item.def_id); - let ty = fcx.instantiate_type_scheme(span, free_substs, &ty); + let ty = fcx.normalize_associated_types_in(span, &ty); fcx.register_wf_obligation(ty, span, code.clone()); } } @@ -210,7 +207,7 @@ fn for_item<'tcx>(&self, item: &hir::Item) fn for_id<'tcx>(&self, id: ast::NodeId, span: Span) -> CheckWfFcxBuilder<'a, 'gcx, 'tcx> { CheckWfFcxBuilder { - inherited: Inherited::build(self.tcx, id), + inherited: Inherited::build(self.tcx, self.tcx.hir.local_def_id(id)), code: self.code.clone(), id: id, span: span @@ -242,9 +239,9 @@ fn check_type_defn(&mut self, item: &hir::Item, all_sized: bool, mut lookup_f } } - let free_substs = &fcx.parameter_environment.free_substs; let def_id = fcx.tcx.hir.local_def_id(item.id); - let predicates = fcx.instantiate_bounds(item.span, def_id, free_substs); + let predicates = fcx.tcx.predicates_of(def_id).instantiate_identity(fcx.tcx); + let predicates = fcx.normalize_associated_types_in(item.span, &predicates); this.check_where_clauses(fcx, item.span, &predicates); vec![] // no implied bounds in a struct def'n @@ -320,30 +317,26 @@ fn check_trait(&mut self, item: &hir::Item) { } self.for_item(item).with_fcx(|fcx, this| { - let free_substs = &fcx.parameter_environment.free_substs; - let predicates = fcx.instantiate_bounds(item.span, trait_def_id, free_substs); + let predicates = fcx.tcx.predicates_of(trait_def_id).instantiate_identity(fcx.tcx); + let predicates = fcx.normalize_associated_types_in(item.span, &predicates); this.check_where_clauses(fcx, item.span, &predicates); vec![] }); } - fn check_item_fn(&mut self, - item: &hir::Item, - body_id: hir::BodyId) - { + fn check_item_fn(&mut self, item: &hir::Item) { self.for_item(item).with_fcx(|fcx, this| { - let free_substs = 
&fcx.parameter_environment.free_substs; let def_id = fcx.tcx.hir.local_def_id(item.id); let ty = fcx.tcx.type_of(def_id); - let item_ty = fcx.instantiate_type_scheme(item.span, free_substs, &ty); + let item_ty = fcx.normalize_associated_types_in(item.span, &ty); let sig = item_ty.fn_sig(); - let predicates = fcx.instantiate_bounds(item.span, def_id, free_substs); + let predicates = fcx.tcx.predicates_of(def_id).instantiate_identity(fcx.tcx); + let predicates = fcx.normalize_associated_types_in(item.span, &predicates); let mut implied_bounds = vec![]; - let free_id_outlive = fcx.tcx.call_site_extent(item.id, body_id.node_id); this.check_fn_or_method(fcx, item.span, sig, &predicates, - Some(free_id_outlive), &mut implied_bounds); + def_id, &mut implied_bounds); implied_bounds }) } @@ -355,10 +348,7 @@ fn check_item_type(&mut self, self.for_item(item).with_fcx(|fcx, this| { let ty = fcx.tcx.type_of(fcx.tcx.hir.local_def_id(item.id)); - let item_ty = fcx.instantiate_type_scheme(item.span, - &fcx.parameter_environment - .free_substs, - &ty); + let item_ty = fcx.normalize_associated_types_in(item.span, &ty); fcx.register_wf_obligation(item_ty, item.span, this.code.clone()); @@ -374,15 +364,14 @@ fn check_impl(&mut self, debug!("check_impl: {:?}", item); self.for_item(item).with_fcx(|fcx, this| { - let free_substs = &fcx.parameter_environment.free_substs; let item_def_id = fcx.tcx.hir.local_def_id(item.id); match *ast_trait_ref { Some(ref ast_trait_ref) => { let trait_ref = fcx.tcx.impl_trait_ref(item_def_id).unwrap(); let trait_ref = - fcx.instantiate_type_scheme( - ast_trait_ref.path.span, free_substs, &trait_ref); + fcx.normalize_associated_types_in( + ast_trait_ref.path.span, &trait_ref); let obligations = ty::wf::trait_obligations(fcx, fcx.body_id, @@ -394,12 +383,13 @@ fn check_impl(&mut self, } None => { let self_ty = fcx.tcx.type_of(item_def_id); - let self_ty = fcx.instantiate_type_scheme(item.span, free_substs, &self_ty); + let self_ty = fcx.normalize_associated_types_in(item.span, &self_ty); fcx.register_wf_obligation(self_ty, ast_self_ty.span, this.code.clone()); } } - let predicates = fcx.instantiate_bounds(item.span, item_def_id, free_substs); + let predicates = fcx.tcx.predicates_of(item_def_id).instantiate_identity(fcx.tcx); + let predicates = fcx.normalize_associated_types_in(item.span, &predicates); this.check_where_clauses(fcx, item.span, &predicates); fcx.impl_implied_bounds(item_def_id, item.span) @@ -429,12 +419,11 @@ fn check_fn_or_method<'fcx, 'tcx>(&mut self, span: Span, sig: ty::PolyFnSig<'tcx>, predicates: &ty::InstantiatedPredicates<'tcx>, - free_id_outlive: Option>, + def_id: DefId, implied_bounds: &mut Vec>) { - let free_substs = &fcx.parameter_environment.free_substs; - let sig = fcx.instantiate_type_scheme(span, free_substs, &sig); - let sig = fcx.tcx.liberate_late_bound_regions(free_id_outlive, &sig); + let sig = fcx.normalize_associated_types_in(span, &sig); + let sig = fcx.liberate_late_bound_regions(def_id, &sig); for input_ty in sig.inputs() { fcx.register_wf_obligation(&input_ty, span, self.code.clone()); @@ -453,7 +442,6 @@ fn check_method_receiver<'fcx, 'tcx>(&mut self, fcx: &FnCtxt<'fcx, 'gcx, 'tcx>, method_sig: &hir::MethodSig, method: &ty::AssociatedItem, - free_id_outlive: Option>, self_ty: ty::Ty<'tcx>) { // check that the type of the method's receiver matches the @@ -467,10 +455,9 @@ fn check_method_receiver<'fcx, 'tcx>(&mut self, let span = method_sig.decl.inputs[0].span; - let free_substs = &fcx.parameter_environment.free_substs; let method_ty = 
fcx.tcx.type_of(method.def_id); - let fty = fcx.instantiate_type_scheme(span, free_substs, &method_ty); - let sig = fcx.tcx.liberate_late_bound_regions(free_id_outlive, &fty.fn_sig()); + let fty = fcx.normalize_associated_types_in(span, &method_ty); + let sig = fcx.liberate_late_bound_regions(method.def_id, &fty.fn_sig()); debug!("check_method_receiver: sig={:?}", sig); @@ -485,9 +472,9 @@ fn check_method_receiver<'fcx, 'tcx>(&mut self, } ExplicitSelf::ByBox => fcx.tcx.mk_box(self_ty) }; - let rcvr_ty = fcx.instantiate_type_scheme(span, free_substs, &rcvr_ty); - let rcvr_ty = fcx.tcx.liberate_late_bound_regions(free_id_outlive, - &ty::Binder(rcvr_ty)); + let rcvr_ty = fcx.normalize_associated_types_in(span, &rcvr_ty); + let rcvr_ty = fcx.liberate_late_bound_regions(method.def_id, + &ty::Binder(rcvr_ty)); debug!("check_method_receiver: receiver ty = {:?}", rcvr_ty); @@ -632,10 +619,8 @@ fn struct_variant(&self, struct_def: &hir::VariantData) -> AdtVariant<'tcx> { struct_def.fields().iter() .map(|field| { let field_ty = self.tcx.type_of(self.tcx.hir.local_def_id(field.id)); - let field_ty = self.instantiate_type_scheme(field.span, - &self.parameter_environment - .free_substs, - &field_ty); + let field_ty = self.normalize_associated_types_in(field.span, + &field_ty); AdtField { ty: field_ty, span: field.span } }) .collect(); @@ -649,19 +634,18 @@ fn enum_variants(&self, enum_def: &hir::EnumDef) -> Vec> { } fn impl_implied_bounds(&self, impl_def_id: DefId, span: Span) -> Vec> { - let free_substs = &self.parameter_environment.free_substs; match self.tcx.impl_trait_ref(impl_def_id) { Some(ref trait_ref) => { // Trait impl: take implied bounds from all types that // appear in the trait reference. - let trait_ref = self.instantiate_type_scheme(span, free_substs, trait_ref); + let trait_ref = self.normalize_associated_types_in(span, trait_ref); trait_ref.substs.types().collect() } None => { // Inherent impl: take implied bounds from the self type. let self_ty = self.tcx.type_of(impl_def_id); - let self_ty = self.instantiate_type_scheme(span, free_substs, &self_ty); + let self_ty = self.normalize_associated_types_in(span, &self_ty); vec![self_ty] } } diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 49440037af5..b43e2423757 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -19,7 +19,7 @@ use rustc::ty::{self, Ty, TyCtxt, MethodCall, MethodCallee}; use rustc::ty::adjustment; use rustc::ty::fold::{TypeFolder,TypeFoldable}; -use rustc::util::nodemap::{DefIdMap, DefIdSet}; +use rustc::util::nodemap::DefIdSet; use syntax::ast; use syntax_pos::Span; use std::mem; @@ -71,55 +71,17 @@ struct WritebackCx<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { tables: ty::TypeckTables<'gcx>, - // Mapping from free regions of the function to the - // early-bound versions of them, visible from the - // outside of the function. This is needed by, and - // only populated if there are any `impl Trait`. - free_to_bound_regions: DefIdMap>, - body: &'gcx hir::Body, } impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> { fn new(fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>, body: &'gcx hir::Body) -> WritebackCx<'cx, 'gcx, 'tcx> { - let mut wbcx = WritebackCx { + WritebackCx { fcx: fcx, tables: ty::TypeckTables::empty(), - free_to_bound_regions: DefIdMap(), body: body - }; - - // Only build the reverse mapping if `impl Trait` is used. 
- if fcx.anon_types.borrow().is_empty() { - return wbcx; } - - let gcx = fcx.tcx.global_tcx(); - let free_substs = fcx.parameter_environment.free_substs; - for (i, k) in free_substs.iter().enumerate() { - let r = if let Some(r) = k.as_region() { - r - } else { - continue; - }; - match *r { - ty::ReFree(ty::FreeRegion { - bound_region: ty::BoundRegion::BrNamed(def_id, name), .. - }) => { - let bound_region = gcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { - index: i as u32, - name: name, - })); - wbcx.free_to_bound_regions.insert(def_id, bound_region); - } - _ => { - bug!("{:?} is not a free region for an early-bound lifetime", r); - } - } - } - - wbcx } fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> { @@ -285,22 +247,16 @@ fn visit_anon_types(&mut self) { let inside_ty = self.resolve(&concrete_ty, &node_id); // Convert the type from the function into a type valid outside - // the function, by replacing free regions with early-bound ones. + // the function, by replacing invalid regions with 'static, + // after producing an error for each of them. let outside_ty = gcx.fold_regions(&inside_ty, &mut false, |r, _| { match *r { - // 'static is valid everywhere. - ty::ReStatic => gcx.types.re_static, - ty::ReEmpty => gcx.types.re_empty, - - // Free regions that come from early-bound regions are valid. - ty::ReFree(ty::FreeRegion { - bound_region: ty::BoundRegion::BrNamed(def_id, ..), .. - }) if self.free_to_bound_regions.contains_key(&def_id) => { - self.free_to_bound_regions[&def_id] - } + // 'static and early-bound regions are valid. + ty::ReStatic | + ty::ReEarlyBound(_) | + ty::ReEmpty => r, ty::ReFree(_) | - ty::ReEarlyBound(_) | ty::ReLateBound(..) | ty::ReScope(_) | ty::ReSkolemized(..) => { diff --git a/src/librustc_typeck/coherence/builtin.rs b/src/librustc_typeck/coherence/builtin.rs index 743bfbb44ab..556bd618c78 100644 --- a/src/librustc_typeck/coherence/builtin.rs +++ b/src/librustc_typeck/coherence/builtin.rs @@ -17,10 +17,8 @@ use rustc::traits::{self, ObligationCause, Reveal}; use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::ParameterEnvironment; use rustc::ty::TypeFoldable; use rustc::ty::adjustment::CoerceUnsizedInfo; -use rustc::ty::subst::Subst; use rustc::ty::util::CopyImplementationError; use rustc::infer; @@ -107,8 +105,7 @@ fn visit_implementation_of_copy<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, self_type); let span = tcx.hir.span(impl_node_id); - let param_env = ParameterEnvironment::for_item(tcx, impl_node_id); - let self_type = self_type.subst(tcx, ¶m_env.free_substs); + let param_env = tcx.parameter_environment(impl_did); assert!(!self_type.has_escaping_regions()); debug!("visit_implementation_of_copy: self_type={:?} (free)", @@ -202,9 +199,7 @@ pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, target); let span = tcx.hir.span(impl_node_id); - let param_env = ParameterEnvironment::for_item(tcx, impl_node_id); - let source = source.subst(tcx, ¶m_env.free_substs); - let target = target.subst(tcx, ¶m_env.free_substs); + let param_env = tcx.parameter_environment(impl_did); assert!(!source.has_escaping_regions()); let err_info = CoerceUnsizedInfo { custom_kind: None }; @@ -254,6 +249,45 @@ pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, return err_info; } + // Here we are considering a case of converting + // `S` to S`. 
As an example, let's imagine a struct `Foo<T, U>`, + // which acts like a pointer to `U`, but carries along some extra data of type `T`: + // + // struct Foo<T, U> { + // extra: T, + // ptr: *mut U, + // } + // + // We might have an impl that allows (e.g.) `Foo<T, [i32; 3]>` to be unsized + // to `Foo<T, [i32]>`. That impl would look like: + // + // impl<T, U: Unsize<V>, V> CoerceUnsized<Foo<T, V>> for Foo<T, U> {} + // + // Here `U = [i32; 3]` and `V = [i32]`. At runtime, + // when this coercion occurs, we would be changing the + // field `ptr` from a thin pointer of type `*mut [i32; + // 3]` to a fat pointer of type `*mut [i32]` (with + // extra data `3`). **The purpose of this check is to + // make sure that we know how to do this conversion.** + // + // To check if this impl is legal, we would walk down + // the fields of `Foo` and consider their types with + // both substitutes. We are looking to find that + // exactly one (non-phantom) field has changed its + // type, which we will expect to be the pointer that + // is becoming fat (we could probably generalize this + // to multiple thin pointers of the same type becoming + // fat, but we don't). In this case: + // + // - `extra` has type `T` before and type `T` after + // - `ptr` has type `*mut U` before and type `*mut V` after + // + // Since just one field changed, we would then check + // that `*mut U: CoerceUnsized<*mut V>` is implemented + // (in other words, that we know how to do this + // conversion). This will work out because `U: + // Unsize<V>`, and we have a builtin rule that `*mut + // U` can be coerced to `*mut V` if `U: Unsize<V>`. let fields = &def_a.struct_variant().fields; let diff_fields = fields.iter() .enumerate() @@ -265,8 +299,16 @@ pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, return None; } - // Ignore fields that aren't significantly changed - if let Ok(ok) = infcx.sub_types(false, &cause, b, a) { + // Ignore fields that aren't changed; it may + // be that we could get away with subtyping or + // something more accepting, but we use + // equality because we want to be able to + // perform this check without computing + // variance where possible. (This is because + // we may have to evaluate constraint + // expressions in the course of execution.) + // See e.g. #41936.
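For readers following the comment above, such an impl can be sketched as a free-standing program against the unstable `coerce_unsized` and `unsize` features (nightly only); the `Foo` layout mirrors the comment, while the concrete values and variable names are invented for illustration:

```rust
#![feature(coerce_unsized, unsize)]

use std::marker::Unsize;
use std::ops::CoerceUnsized;

// `Foo<T, U>` acts like a pointer to `U` but carries extra data of type `T`.
struct Foo<T, U: ?Sized> {
    extra: T,
    ptr: *mut U,
}

// Exactly one non-phantom field (`ptr`) changes type between `Foo<T, U>` and
// `Foo<T, V>`, and `*mut U` coerces to `*mut V` whenever `U: Unsize<V>`, so
// the check described in the comment accepts this impl.
impl<T, U: ?Sized + Unsize<V>, V: ?Sized> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}

fn main() {
    let mut array = [1i32, 2, 3];
    let thin = Foo { extra: 0u8, ptr: &mut array as *mut [i32; 3] };
    // `ptr` goes from the thin `*mut [i32; 3]` to the fat `*mut [i32]`.
    let _fat: Foo<u8, [i32]> = thin;
}
```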
+ if let Ok(ok) = infcx.eq_types(false, &cause, b, a) { if ok.obligations.is_empty() { return None; } diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 8b9dc20315d..165be49f760 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -46,8 +46,6 @@ fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) { } enforce_trait_manually_implementable(tcx, impl_def_id, trait_ref.def_id); - let trait_def = tcx.trait_def(trait_ref.def_id); - trait_def.record_local_impl(tcx, impl_def_id, trait_ref); } } @@ -117,8 +115,6 @@ pub fn provide(providers: &mut Providers) { fn coherent_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (_, def_id): (CrateNum, DefId)) { - tcx.populate_implementations_for_trait_if_necessary(def_id); - let impls = tcx.hir.trait_impls(def_id); for &impl_id in impls { check_impl(tcx, impl_id); diff --git a/src/librustc_typeck/coherence/overlap.rs b/src/librustc_typeck/coherence/overlap.rs index f479dc2e6ab..ba1d7b18e8c 100644 --- a/src/librustc_typeck/coherence/overlap.rs +++ b/src/librustc_typeck/coherence/overlap.rs @@ -41,39 +41,10 @@ pub fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) { let _task = tcx.dep_graph.in_task(DepNode::CoherenceOverlapCheck(trait_def_id)); - let def = tcx.trait_def(trait_def_id); - - // attempt to insert into the specialization graph - let insert_result = def.add_impl_for_specialization(tcx, impl_def_id); - - // insertion failed due to overlap - if let Err(overlap) = insert_result { - let mut err = struct_span_err!(tcx.sess, - tcx.span_of_impl(impl_def_id).unwrap(), - E0119, - "conflicting implementations of trait `{}`{}:", - overlap.trait_desc, - overlap.self_desc.clone().map_or(String::new(), - |ty| { - format!(" for type `{}`", ty) - })); - - match tcx.span_of_impl(overlap.with_impl) { - Ok(span) => { - err.span_label(span, "first implementation here"); - err.span_label(tcx.span_of_impl(impl_def_id).unwrap(), - format!("conflicting implementation{}", - overlap.self_desc - .map_or(String::new(), - |ty| format!(" for `{}`", ty)))); - } - Err(cname) => { - err.note(&format!("conflicting implementation in crate `{}`", cname)); - } - } + // Trigger building the specialization graph for the trait of this impl. + // This will detect any overlap errors. + tcx.specialization_graph_of(trait_def_id); - err.emit(); - } // check for overlap with the automatic `impl Trait for Trait` if let ty::TyDynamic(ref data, ..) 
= trait_ref.self_ty().sty { diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index ec200241ee6..cb1bd3e099d 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -205,10 +205,6 @@ fn get_type_parameter_bounds(&self, self.tcx.at(span).type_param_predicates((self.item_def_id, def_id)) } - fn get_free_substs(&self) -> Option<&Substs<'tcx>> { - None - } - fn re_infer(&self, _span: Span, _def: Option<&ty::RegionParameterDef>) -> Option> { None @@ -753,12 +749,12 @@ fn trait_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } let def_path_hash = tcx.def_path_hash(def_id); - let def = ty::TraitDef::new(def_id, unsafety, paren_sugar, def_path_hash); - - if tcx.hir.trait_is_auto(def_id) { - def.record_has_default_impl(); - } - + let has_default_impl = tcx.hir.trait_is_auto(def_id); + let def = ty::TraitDef::new(def_id, + unsafety, + paren_sugar, + has_default_impl, + def_path_hash); tcx.alloc_trait_def(def) } @@ -1299,6 +1295,7 @@ fn predicates_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let mut index = parent_count + has_own_self as u32; for param in early_bound_lifetimes_from_generics(tcx, ast_generics) { let region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { + def_id: tcx.hir.local_def_id(param.lifetime.id), index: index, name: param.lifetime.name })); diff --git a/src/librustc_typeck/diagnostics.rs b/src/librustc_typeck/diagnostics.rs index 0f42ee15ecf..f9ebe3fff5b 100644 --- a/src/librustc_typeck/diagnostics.rs +++ b/src/librustc_typeck/diagnostics.rs @@ -1524,67 +1524,6 @@ fn get_state(&self) -> String { ``` "##, -E0119: r##" -There are conflicting trait implementations for the same type. -Example of erroneous code: - -```compile_fail,E0119 -trait MyTrait { - fn get(&self) -> usize; -} - -impl MyTrait for T { - fn get(&self) -> usize { 0 } -} - -struct Foo { - value: usize -} - -impl MyTrait for Foo { // error: conflicting implementations of trait - // `MyTrait` for type `Foo` - fn get(&self) -> usize { self.value } -} -``` - -When looking for the implementation for the trait, the compiler finds -both the `impl MyTrait for T` where T is all types and the `impl -MyTrait for Foo`. Since a trait cannot be implemented multiple times, -this is an error. So, when you write: - -``` -trait MyTrait { - fn get(&self) -> usize; -} - -impl MyTrait for T { - fn get(&self) -> usize { 0 } -} -``` - -This makes the trait implemented on all types in the scope. So if you -try to implement it on another one after that, the implementations will -conflict. Example: - -``` -trait MyTrait { - fn get(&self) -> usize; -} - -impl MyTrait for T { - fn get(&self) -> usize { 0 } -} - -struct Foo; - -fn main() { - let f = Foo; - - f.get(); // the trait is implemented so we can use it -} -``` -"##, - E0120: r##" An attempt was made to implement Drop on a trait, which is not allowed: only structs and enums can implement Drop. 
An example causing this error: diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 2dde6d9d4ee..61f941e57b2 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -820,7 +820,7 @@ fn clean(&self, _: &DocContext) -> Lifetime { } } -impl<'tcx> Clean> for ty::RegionKind<'tcx> { +impl Clean> for ty::RegionKind { fn clean(&self, cx: &DocContext) -> Option { match *self { ty::ReStatic => Some(Lifetime::statik()), diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 9e2d8516333..9a689ed079e 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -19,6 +19,7 @@ use rustc::hir::map as hir_map; use rustc::lint; use rustc::util::nodemap::FxHashMap; +use rustc_trans; use rustc_trans::back::link; use rustc_resolve as resolve; use rustc_metadata::cstore::CStore; @@ -138,10 +139,11 @@ pub fn run_core(search_paths: SearchPaths, let dep_graph = DepGraph::new(false); let _ignore = dep_graph.in_ignore(); - let cstore = Rc::new(CStore::new(&dep_graph)); + let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader)); let mut sess = session::build_session_( sessopts, &dep_graph, cpath, diagnostic_handler, codemap, cstore.clone() ); + rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs)); diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs index d78f00497ca..111ae4ede27 100644 --- a/src/librustdoc/externalfiles.rs +++ b/src/librustdoc/externalfiles.rs @@ -13,6 +13,7 @@ use std::io; use std::path::Path; use std::str; +use html::markdown::{Markdown, RenderType}; #[derive(Clone)] pub struct ExternalHtml{ @@ -28,17 +29,26 @@ pub struct ExternalHtml{ } impl ExternalHtml { - pub fn load(in_header: &[String], before_content: &[String], after_content: &[String]) + pub fn load(in_header: &[String], before_content: &[String], after_content: &[String], + md_before_content: &[String], md_after_content: &[String], render: RenderType) -> Option { load_external_files(in_header) .and_then(|ih| load_external_files(before_content) .map(|bc| (ih, bc)) ) + .and_then(|(ih, bc)| + load_external_files(md_before_content) + .map(|m_bc| (ih, format!("{}{}", bc, Markdown(&m_bc, render)))) + ) .and_then(|(ih, bc)| load_external_files(after_content) .map(|ac| (ih, bc, ac)) ) + .and_then(|(ih, bc, ac)| + load_external_files(md_after_content) + .map(|m_ac| (ih, bc, format!("{}{}", ac, Markdown(&m_ac, render)))) + ) .map(|(ih, bc, ac)| ExternalHtml { in_header: ih, diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 5db82e23bbf..612793e2567 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -1177,7 +1177,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let quot = if f.alternate() { "\"" } else { """ }; match self.0 { Abi::Rust => Ok(()), - Abi::C => write!(f, "extern "), abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()), } } diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index fbc7615588e..c115a6ccba6 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -943,9 +943,9 @@ if (hasClass(main, 'content')) { removeClass(main, 'hidden'); } - var search = document.getElementById('search'); - if (hasClass(main, 'content')) { - addClass(main, 'hidden'); + var search_c = document.getElementById('search'); + if (hasClass(search_c, 'content')) { + 
addClass(search_c, 'hidden'); } } // Revert to the previous title manually since the History @@ -959,7 +959,11 @@ // perform the search. This will empty the bar if there's // nothing there, which lets you really go back to a // previous state with nothing in the bar. - document.getElementsByClassName('search-input')[0].value = params.search; + if (params.search) { + document.getElementsByClassName('search-input')[0].value = params.search; + } else { + document.getElementsByClassName('search-input')[0].value = ''; + } // Some browsers fire 'onpopstate' for every page load // (Chrome), while others fire the event only when actually // popping a state (Firefox), which is why search() is diff --git a/src/librustdoc/html/static/rustdoc.css b/src/librustdoc/html/static/rustdoc.css index 60ea6ed1d8d..570a1980782 100644 --- a/src/librustdoc/html/static/rustdoc.css +++ b/src/librustdoc/html/static/rustdoc.css @@ -617,6 +617,11 @@ a.test-arrow:hover{ top: 0; } +h3 > .collapse-toggle, h4 > .collapse-toggle { + font-size: 0.8em; + top: 5px; +} + .toggle-wrapper > .collapse-toggle { left: -24px; margin-top: 0px; diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index d89fa547a89..bbaa7bc2fb6 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -160,6 +160,14 @@ pub fn opts() -> Vec { "files to include inline between the content and of a rendered \ Markdown file or generated documentation", "FILES")), + unstable(optmulti("", "markdown-before-content", + "files to include inline between and the content of a rendered \ + Markdown file or generated documentation", + "FILES")), + unstable(optmulti("", "markdown-after-content", + "files to include inline between the content and of a rendered \ + Markdown file or generated documentation", + "FILES")), stable(optopt("", "markdown-playground-url", "URL to send code snippets to", "URL")), stable(optflag("", "markdown-no-toc", "don't include table of contents")), @@ -275,7 +283,10 @@ pub fn main_args(args: &[String]) -> isize { let external_html = match ExternalHtml::load( &matches.opt_strs("html-in-header"), &matches.opt_strs("html-before-content"), - &matches.opt_strs("html-after-content")) { + &matches.opt_strs("html-after-content"), + &matches.opt_strs("markdown-before-content"), + &matches.opt_strs("markdown-after-content"), + render_type) { Some(eh) => eh, None => return 3, }; diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index d5237d629cf..cfe2fad0fa4 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -34,6 +34,7 @@ use rustc_driver::driver::phase_2_configure_and_expand; use rustc_metadata::cstore::CStore; use rustc_resolve::MakeGlobMap; +use rustc_trans; use rustc_trans::back::link; use syntax::ast; use syntax::codemap::CodeMap; @@ -81,10 +82,11 @@ pub fn run(input: &str, let dep_graph = DepGraph::new(false); let _ignore = dep_graph.in_ignore(); - let cstore = Rc::new(CStore::new(&dep_graph)); + let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader)); let mut sess = session::build_session_( sessopts, &dep_graph, Some(input_path.clone()), handler, codemap.clone(), cstore.clone(), ); + rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); sess.parse_sess.config = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone())); @@ -229,10 +231,11 @@ fn drop(&mut self) { let diagnostic_handler = errors::Handler::with_emitter(true, false, box emitter); let dep_graph = DepGraph::new(false); - let cstore = 
Rc::new(CStore::new(&dep_graph)); + let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader)); let mut sess = session::build_session_( sessopts, &dep_graph, None, diagnostic_handler, codemap, cstore.clone(), ); + rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let outdir = Mutex::new(TempDir::new("rustdoctest").ok().expect("rustdoc needs a tempdir")); diff --git a/src/libstd/sys/windows/ext/ffi.rs b/src/libstd/sys/windows/ext/ffi.rs index 253787546c1..3f6c2827a3f 100644 --- a/src/libstd/sys/windows/ext/ffi.rs +++ b/src/libstd/sys/windows/ext/ffi.rs @@ -26,8 +26,22 @@ pub trait OsStringExt { /// Creates an `OsString` from a potentially ill-formed UTF-16 slice of /// 16-bit code units. /// - /// This is lossless: calling `.encode_wide()` on the resulting string + /// This is lossless: calling [`encode_wide`] on the resulting string /// will always return the original code units. + /// + /// # Examples + /// + /// ``` + /// use std::ffi::OsString; + /// use std::os::windows::prelude::*; + /// + /// // UTF-16 encoding for "Unicode". + /// let source = [0x0055, 0x006E, 0x0069, 0x0063, 0x006F, 0x0064, 0x0065]; + /// + /// let string = OsString::from_wide(&source[..]); + /// ``` + /// + /// [`encode_wide`]: ./trait.OsStrExt.html#tymethod.encode_wide #[stable(feature = "rust1", since = "1.0.0")] fn from_wide(wide: &[u16]) -> Self; } @@ -42,11 +56,29 @@ fn from_wide(wide: &[u16]) -> OsString { /// Windows-specific extensions to `OsStr`. #[stable(feature = "rust1", since = "1.0.0")] pub trait OsStrExt { - /// Re-encodes an `OsStr` as a wide character sequence, - /// i.e. potentially ill-formed UTF-16. + /// Re-encodes an `OsStr` as a wide character sequence, i.e. potentially + /// ill-formed UTF-16. + /// + /// This is lossless: calling [`OsString::from_wide`] and then + /// `encode_wide` on the result will yield the original code units. + /// Note that the encoding does not add a final null terminator. + /// + /// # Examples + /// + /// ``` + /// use std::ffi::OsString; + /// use std::os::windows::prelude::*; + /// + /// // UTF-16 encoding for "Unicode". + /// let source = [0x0055, 0x006E, 0x0069, 0x0063, 0x006F, 0x0064, 0x0065]; + /// + /// let string = OsString::from_wide(&source[..]); + /// + /// let result: Vec = string.encode_wide().collect(); + /// assert_eq!(&source[..], &result[..]); + /// ``` /// - /// This is lossless. Note that the encoding does not include a final - /// null. + /// [`OsString::from_wide`]: ./trait.OsStringExt.html#tymethod.from_wide #[stable(feature = "rust1", since = "1.0.0")] fn encode_wide(&self) -> EncodeWide; } diff --git a/src/libstd/sys/windows/ext/fs.rs b/src/libstd/sys/windows/ext/fs.rs index d6e2fed56be..2d00cb38ec4 100644 --- a/src/libstd/sys/windows/ext/fs.rs +++ b/src/libstd/sys/windows/ext/fs.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Windows-specific extensions for the primitives in `std::fs` +//! Windows-specific extensions for the primitives in the `std::fs` module. #![stable(feature = "rust1", since = "1.0.0")] @@ -18,7 +18,9 @@ use sys; use sys_common::{AsInnerMut, AsInner}; -/// Windows-specific extensions to `File` +/// Windows-specific extensions to [`File`]. +/// +/// [`File`]: ../../../fs/struct.File.html #[stable(feature = "file_offset", since = "1.15.0")] pub trait FileExt { /// Seeks to a given position and reads a number of bytes. 
@@ -35,6 +37,24 @@ pub trait FileExt { /// Note that similar to `File::read`, it is not an error to return with a /// short read. When returning from such a short read, the file pointer is /// still updated. + /// + /// # Examples + /// + /// ```no_run + /// use std::io; + /// use std::fs::File; + /// use std::os::windows::prelude::*; + /// + /// # fn foo() -> io::Result<()> { + /// let mut file = File::open("foo.txt")?; + /// let mut buffer = [0; 10]; + /// + /// // Read 10 bytes, starting 72 bytes from the + /// // start of the file. + /// file.seek_read(&mut buffer[..], 72)?; + /// # Ok(()) + /// # } + /// ``` #[stable(feature = "file_offset", since = "1.15.0")] fn seek_read(&self, buf: &mut [u8], offset: u64) -> io::Result; @@ -52,6 +72,22 @@ pub trait FileExt { /// Note that similar to `File::write`, it is not an error to return a /// short write. When returning from such a short write, the file pointer /// is still updated. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs::File; + /// use std::os::windows::prelude::*; + /// + /// # fn foo() -> std::io::Result<()> { + /// let mut buffer = File::create("foo.txt")?; + /// + /// // Write a byte string starting 72 bytes from + /// // the start of the file. + /// buffer.seek_write(b"some bytes", 72)?; + /// # Ok(()) + /// # } + /// ``` #[stable(feature = "file_offset", since = "1.15.0")] fn seek_write(&self, buf: &[u8], offset: u64) -> io::Result; } @@ -67,81 +103,94 @@ fn seek_write(&self, buf: &[u8], offset: u64) -> io::Result { } } -/// Windows-specific extensions to `OpenOptions` +/// Windows-specific extensions to [`OpenOptions`]. +/// +/// [`OpenOptions`]: ../../../fs/struct.OpenOptions.html #[stable(feature = "open_options_ext", since = "1.10.0")] pub trait OpenOptionsExt { - /// Overrides the `dwDesiredAccess` argument to the call to `CreateFile` + /// Overrides the `dwDesiredAccess` argument to the call to [`CreateFile`] /// with the specified value. /// /// This will override the `read`, `write`, and `append` flags on the /// `OpenOptions` structure. This method provides fine-grained control over /// the permissions to read, write and append data, attributes (like hidden - /// and system) and extended attributes. + /// and system), and extended attributes. /// /// # Examples /// /// ```no_run /// use std::fs::OpenOptions; - /// use std::os::windows::fs::OpenOptionsExt; + /// use std::os::windows::prelude::*; /// /// // Open without read and write permission, for example if you only need - /// // to call `stat()` on the file + /// // to call `stat` on the file /// let file = OpenOptions::new().access_mode(0).open("foo.txt"); /// ``` + /// + /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx #[stable(feature = "open_options_ext", since = "1.10.0")] fn access_mode(&mut self, access: u32) -> &mut Self; - /// Overrides the `dwShareMode` argument to the call to `CreateFile` with + /// Overrides the `dwShareMode` argument to the call to [`CreateFile`] with /// the specified value. /// /// By default `share_mode` is set to - /// `FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE`. Specifying - /// less permissions denies others to read from, write to and/or delete the - /// file while it is open. + /// `FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE`. This allows + /// other processes to to read, write, and delete/rename the same file + /// while it is open. 
Removing any of the flags will prevent other + /// processes from performing the corresponding operation until the file + /// handle is closed. /// /// # Examples /// /// ```no_run /// use std::fs::OpenOptions; - /// use std::os::windows::fs::OpenOptionsExt; + /// use std::os::windows::prelude::*; /// /// // Do not allow others to read or modify this file while we have it open - /// // for writing - /// let file = OpenOptions::new().write(true) - /// .share_mode(0) - /// .open("foo.txt"); + /// // for writing. + /// let file = OpenOptions::new() + /// .write(true) + /// .share_mode(0) + /// .open("foo.txt"); /// ``` + /// + /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx #[stable(feature = "open_options_ext", since = "1.10.0")] fn share_mode(&mut self, val: u32) -> &mut Self; /// Sets extra flags for the `dwFileFlags` argument to the call to - /// `CreateFile2` (or combines it with `attributes` and `security_qos_flags` - /// to set the `dwFlagsAndAttributes` for `CreateFile`). + /// [`CreateFile2`] to the specified value (or combines it with + /// `attributes` and `security_qos_flags` to set the `dwFlagsAndAttributes` + /// for [`CreateFile`]). /// - /// Custom flags can only set flags, not remove flags set by Rusts options. - /// This options overwrites any previously set custom flags. + /// Custom flags can only set flags, not remove flags set by Rust's options. + /// This option overwrites any previously set custom flags. /// /// # Examples /// - /// ```rust,ignore + /// ```ignore /// extern crate winapi; + /// /// use std::fs::OpenOptions; - /// use std::os::windows::fs::OpenOptionsExt; - /// - /// let mut options = OpenOptions::new(); - /// options.create(true).write(true); - /// if cfg!(windows) { - /// options.custom_flags(winapi::FILE_FLAG_DELETE_ON_CLOSE); - /// } - /// let file = options.open("foo.txt"); + /// use std::os::windows::prelude::*; + /// + /// let file = OpenOptions::new() + /// .create(true) + /// .write(true) + /// .custom_flags(winapi::FILE_FLAG_DELETE_ON_CLOSE) + /// .open("foo.txt"); /// ``` + /// + /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx + /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx #[stable(feature = "open_options_ext", since = "1.10.0")] fn custom_flags(&mut self, flags: u32) -> &mut Self; - /// Sets the `dwFileAttributes` argument to the call to `CreateFile2` to + /// Sets the `dwFileAttributes` argument to the call to [`CreateFile2`] to /// the specified value (or combines it with `custom_flags` and /// `security_qos_flags` to set the `dwFlagsAndAttributes` for - /// `CreateFile`). + /// [`CreateFile`]). 
/// /// If a _new_ file is created because it does not yet exist and /// `.create(true)` or `.create_new(true)` are specified, the new file is @@ -155,21 +204,52 @@ pub trait OpenOptionsExt { /// /// # Examples /// - /// ```rust,ignore + /// ```ignore /// extern crate winapi; + /// /// use std::fs::OpenOptions; - /// use std::os::windows::fs::OpenOptionsExt; + /// use std::os::windows::prelude::*; /// - /// let file = OpenOptions::new().write(true).create(true) - /// .attributes(winapi::FILE_ATTRIBUTE_HIDDEN) - /// .open("foo.txt"); + /// let file = OpenOptions::new() + /// .write(true) + /// .create(true) + /// .attributes(winapi::FILE_ATTRIBUTE_HIDDEN) + /// .open("foo.txt"); /// ``` + /// + /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx + /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx #[stable(feature = "open_options_ext", since = "1.10.0")] fn attributes(&mut self, val: u32) -> &mut Self; - /// Sets the `dwSecurityQosFlags` argument to the call to `CreateFile2` to + /// Sets the `dwSecurityQosFlags` argument to the call to [`CreateFile2`] to /// the specified value (or combines it with `custom_flags` and `attributes` - /// to set the `dwFlagsAndAttributes` for `CreateFile`). + /// to set the `dwFlagsAndAttributes` for [`CreateFile`]). + /// + /// By default, `security_qos_flags` is set to `SECURITY_ANONYMOUS`. For + /// information about possible values, see [Impersonation Levels] on the + /// Windows Dev Center site. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs::OpenOptions; + /// use std::os::windows::prelude::*; + /// + /// let file = OpenOptions::new() + /// .write(true) + /// .create(true) + /// + /// // Sets the flag value to `SecurityIdentification`. + /// .security_qos_flags(1) + /// + /// .open("foo.txt"); + /// ``` + /// + /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx + /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx + /// [Impersonation Levels]: + /// https://msdn.microsoft.com/en-us/library/windows/desktop/aa379572.aspx #[stable(feature = "open_options_ext", since = "1.10.0")] fn security_qos_flags(&mut self, flags: u32) -> &mut OpenOptions; } @@ -197,35 +277,136 @@ fn security_qos_flags(&mut self, flags: u32) -> &mut OpenOptions { } } -/// Extension methods for `fs::Metadata` to access the raw fields contained +/// Extension methods for [`fs::Metadata`] to access the raw fields contained /// within. +/// +/// The data members that this trait exposes correspond to the members +/// of the [`BY_HANDLE_FILE_INFORMATION`] structure. +/// +/// [`fs::Metadata`]: ../../../fs/struct.Metadata.html +/// [`BY_HANDLE_FILE_INFORMATION`]: +/// https://msdn.microsoft.com/en-us/library/windows/desktop/aa363788.aspx #[stable(feature = "metadata_ext", since = "1.1.0")] pub trait MetadataExt { /// Returns the value of the `dwFileAttributes` field of this metadata. /// /// This field contains the file system attribute information for a file - /// or directory. + /// or directory. For possible values and their descriptions, see + /// [File Attribute Constants] in the Windows Dev Center. 
+ /// + /// # Examples + /// + /// ```no_run + /// use std::io; + /// use std::fs; + /// use std::os::windows::prelude::*; + /// + /// # fn foo() -> io::Result<()> { + /// let metadata = fs::metadata("foo.txt")?; + /// let attributes = metadata.file_attributes(); + /// # Ok(()) + /// # } + /// ``` + /// + /// [File Attribute Constants]: + /// https://msdn.microsoft.com/en-us/library/windows/desktop/gg258117.aspx #[stable(feature = "metadata_ext", since = "1.1.0")] fn file_attributes(&self) -> u32; /// Returns the value of the `ftCreationTime` field of this metadata. /// - /// The returned 64-bit value represents the number of 100-nanosecond - /// intervals since January 1, 1601 (UTC). + /// The returned 64-bit value is equivalent to a [`FILETIME`] struct, + /// which represents the number of 100-nanosecond intervals since + /// January 1, 1601 (UTC). The struct is automatically + /// converted to a `u64` value, as that is the recommended way + /// to use it. + /// + /// If the underlying filesystem does not support creation time, the + /// returned value is 0. + /// + /// # Examples + /// + /// ```no_run + /// use std::io; + /// use std::fs; + /// use std::os::windows::prelude::*; + /// + /// # fn foo() -> io::Result<()> { + /// let metadata = fs::metadata("foo.txt")?; + /// let creation_time = metadata.creation_time(); + /// # Ok(()) + /// # } + /// ``` + /// + /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx #[stable(feature = "metadata_ext", since = "1.1.0")] fn creation_time(&self) -> u64; /// Returns the value of the `ftLastAccessTime` field of this metadata. /// - /// The returned 64-bit value represents the number of 100-nanosecond - /// intervals since January 1, 1601 (UTC). + /// The returned 64-bit value is equivalent to a [`FILETIME`] struct, + /// which represents the number of 100-nanosecond intervals since + /// January 1, 1601 (UTC). The struct is automatically + /// converted to a `u64` value, as that is the recommended way + /// to use it. + /// + /// For a file, the value specifies the last time that a file was read + /// from or written to. For a directory, the value specifies when + /// the directory was created. For both files and directories, the + /// specified date is correct, but the time of day is always set to + /// midnight. + /// + /// If the underlying filesystem does not support last access time, the + /// returned value is 0. + /// + /// # Examples + /// + /// ```no_run + /// use std::io; + /// use std::fs; + /// use std::os::windows::prelude::*; + /// + /// # fn foo() -> io::Result<()> { + /// let metadata = fs::metadata("foo.txt")?; + /// let last_access_time = metadata.last_access_time(); + /// # Ok(()) + /// # } + /// ``` + /// + /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx #[stable(feature = "metadata_ext", since = "1.1.0")] fn last_access_time(&self) -> u64; /// Returns the value of the `ftLastWriteTime` field of this metadata. /// - /// The returned 64-bit value represents the number of 100-nanosecond - /// intervals since January 1, 1601 (UTC). + /// The returned 64-bit value is equivalent to a [`FILETIME`] struct, + /// which represents the number of 100-nanosecond intervals since + /// January 1, 1601 (UTC). The struct is automatically + /// converted to a `u64` value, as that is the recommended way + /// to use it. + /// + /// For a file, the value specifies the last time that a file was written + /// to. 
For a directory, the structure specifies when the directory was + /// created. + /// + /// If the underlying filesystem does not support the last write time + /// time, the returned value is 0. + /// + /// # Examples + /// + /// ```no_run + /// use std::io; + /// use std::fs; + /// use std::os::windows::prelude::*; + /// + /// # fn foo() -> io::Result<()> { + /// let metadata = fs::metadata("foo.txt")?; + /// let last_write_time = metadata.last_write_time(); + /// # Ok(()) + /// # } + /// ``` + /// + /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx #[stable(feature = "metadata_ext", since = "1.1.0")] fn last_write_time(&self) -> u64; @@ -233,6 +414,20 @@ pub trait MetadataExt { /// metadata. /// /// The returned value does not have meaning for directories. + /// + /// # Examples + /// + /// ```no_run + /// use std::io; + /// use std::fs; + /// use std::os::windows::prelude::*; + /// + /// # fn foo() -> io::Result<()> { + /// let metadata = fs::metadata("foo.txt")?; + /// let file_size = metadata.file_size(); + /// # Ok(()) + /// # } + /// ``` #[stable(feature = "metadata_ext", since = "1.1.0")] fn file_size(&self) -> u64; } @@ -253,7 +448,7 @@ fn file_size(&self) -> u64 { self.as_inner().size() } /// /// # Examples /// -/// ```ignore +/// ```no_run /// use std::os::windows::fs; /// /// # fn foo() -> std::io::Result<()> { @@ -274,7 +469,7 @@ pub fn symlink_file, Q: AsRef>(src: P, dst: Q) /// /// # Examples /// -/// ```ignore +/// ```no_run /// use std::os::windows::fs; /// /// # fn foo() -> std::io::Result<()> { diff --git a/src/libstd/sys/windows/ext/mod.rs b/src/libstd/sys/windows/ext/mod.rs index f12e50cc923..11b1337a8ae 100644 --- a/src/libstd/sys/windows/ext/mod.rs +++ b/src/libstd/sys/windows/ext/mod.rs @@ -8,11 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Experimental extensions to `std` for Windows. +//! Platform-specific extensions to `std` for Windows. //! -//! For now, this module is limited to extracting handles, file -//! descriptors, and sockets, but its functionality will grow over -//! time. +//! Provides access to platform-level information for Windows, and exposes +//! Windows-specific idioms that would otherwise be inappropriate as part +//! the core `std` library. These extensions allow developers to use +//! `std` types and idioms with Windows in a way that the normal +//! platform-agnostic idioms would not normally support. #![stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libstd/sys_common/wtf8.rs b/src/libstd/sys_common/wtf8.rs index 79aaf34ce2e..df5e4ef1d88 100644 --- a/src/libstd/sys_common/wtf8.rs +++ b/src/libstd/sys_common/wtf8.rs @@ -750,6 +750,7 @@ fn size_hint(&self) -> (usize, Option) { } } +/// Generates a wide character sequence for potentially ill-formed UTF-16. #[stable(feature = "rust1", since = "1.0.0")] #[derive(Clone)] pub struct EncodeWide<'a> { diff --git a/src/libstd/thread/local.rs b/src/libstd/thread/local.rs index e2b22b1d89f..c2c6e6cf87d 100644 --- a/src/libstd/thread/local.rs +++ b/src/libstd/thread/local.rs @@ -19,16 +19,16 @@ /// A thread local storage key which owns its contents. /// /// This key uses the fastest possible implementation available to it for the -/// target platform. It is instantiated with the `thread_local!` macro and the -/// primary method is the `with` method. +/// target platform. It is instantiated with the [`thread_local!`] macro and the +/// primary method is the [`with`] method. 
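As a rough illustration of the `with`-based access pattern described here, lazy per-thread initialization with every read and write going through `with`, consider the following sketch; the `COUNTER` key is made up for the example and is not part of this patch:

```rust
use std::cell::Cell;

thread_local! {
    // Initialized lazily, once per thread, on the first call to `with`.
    static COUNTER: Cell<u32> = Cell::new(0);
}

fn main() {
    COUNTER.with(|c| c.set(c.get() + 1));
    // The reference handed to the closure cannot escape it or cross threads.
    COUNTER.with(|c| assert_eq!(c.get(), 1));
}
```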
/// -/// The `with` method yields a reference to the contained value which cannot be +/// The [`with`] method yields a reference to the contained value which cannot be /// sent across threads or escape the given closure. /// /// # Initialization and Destruction /// -/// Initialization is dynamically performed on the first call to `with()` -/// within a thread, and values that implement `Drop` get destructed when a +/// Initialization is dynamically performed on the first call to [`with`] +/// within a thread, and values that implement [`Drop`] get destructed when a /// thread exits. Some caveats apply, which are explained below. /// /// # Examples @@ -77,6 +77,10 @@ /// 3. On macOS, initializing TLS during destruction of other TLS slots can /// sometimes cancel *all* destructors for the current thread, whether or not /// the slots have already had their destructors run or not. +/// +/// [`with`]: ../../std/thread/struct.LocalKey.html#method.with +/// [`thread_local!`]: ../../std/macro.thread_local.html +/// [`Drop`]: ../../std/ops/trait.Drop.html #[stable(feature = "rust1", since = "1.0.0")] pub struct LocalKey { // This outer `LocalKey` type is what's going to be stored in statics, @@ -106,7 +110,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { } } -/// Declare a new thread local storage key of type `std::thread::LocalKey`. +/// Declare a new thread local storage key of type [`std::thread::LocalKey`]. /// /// # Syntax /// @@ -124,8 +128,10 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { /// # fn main() {} /// ``` /// -/// See [LocalKey documentation](thread/struct.LocalKey.html) for more +/// See [LocalKey documentation][`std::thread::LocalKey`] for more /// information. +/// +/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html #[macro_export] #[stable(feature = "rust1", since = "1.0.0")] #[allow_internal_unstable] @@ -195,11 +201,13 @@ fn __getit() -> $crate::option::Option< #[derive(Debug, Eq, PartialEq, Copy, Clone)] pub enum LocalKeyState { /// All keys are in this state whenever a thread starts. Keys will - /// transition to the `Valid` state once the first call to `with` happens + /// transition to the `Valid` state once the first call to [`with`] happens /// and the initialization expression succeeds. /// /// Keys in the `Uninitialized` state will yield a reference to the closure - /// passed to `with` so long as the initialization routine does not panic. + /// passed to [`with`] so long as the initialization routine does not panic. + /// + /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with Uninitialized, /// Once a key has been accessed successfully, it will enter the `Valid` @@ -208,7 +216,9 @@ pub enum LocalKeyState { /// `Destroyed` state. /// /// Keys in the `Valid` state will be guaranteed to yield a reference to the - /// closure passed to `with`. + /// closure passed to [`with`]. + /// + /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with Valid, /// When a thread exits, the destructors for keys will be run (if @@ -216,7 +226,9 @@ pub enum LocalKeyState { /// destructor has run, a key is in the `Destroyed` state. /// /// Keys in the `Destroyed` states will trigger a panic when accessed via - /// `with`. + /// [`with`]. + /// + /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with Destroyed, } @@ -283,23 +295,26 @@ unsafe fn init(&self, slot: &UnsafeCell>) -> &T { /// Query the current state of this key. /// /// A key is initially in the `Uninitialized` state whenever a thread - /// starts. 
It will remain in this state up until the first call to `with` + /// starts. It will remain in this state up until the first call to [`with`] /// within a thread has run the initialization expression successfully. /// /// Once the initialization expression succeeds, the key transitions to the - /// `Valid` state which will guarantee that future calls to `with` will + /// `Valid` state which will guarantee that future calls to [`with`] will /// succeed within the thread. /// /// When a thread exits, each key will be destroyed in turn, and as keys are /// destroyed they will enter the `Destroyed` state just before the /// destructor starts to run. Keys may remain in the `Destroyed` state after /// destruction has completed. Keys without destructors (e.g. with types - /// that are `Copy`), may never enter the `Destroyed` state. + /// that are [`Copy`]), may never enter the `Destroyed` state. /// /// Keys in the `Uninitialized` state can be accessed so long as the /// initialization does not panic. Keys in the `Valid` state are guaranteed /// to be able to be accessed. Keys in the `Destroyed` state will panic on - /// any call to `with`. + /// any call to [`with`]. + /// + /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with + /// [`Copy`]: ../../std/marker/trait.Copy.html #[unstable(feature = "thread_local_state", reason = "state querying was recently added", issue = "27716")] diff --git a/src/libstd/thread/mod.rs b/src/libstd/thread/mod.rs index 200368be275..154406a1d8b 100644 --- a/src/libstd/thread/mod.rs +++ b/src/libstd/thread/mod.rs @@ -180,8 +180,33 @@ // Builder //////////////////////////////////////////////////////////////////////////////// -/// Thread configuration. Provides detailed control over the properties -/// and behavior of new threads. +/// Thread factory, which can be used in order to configure the properties of +/// a new thread. +/// +/// Methods can be chained on it in order to configure it. +/// +/// The two configurations available are: +/// +/// - [`name`]: allows to give a name to the thread which is currently +/// only used in `panic` messages. +/// - [`stack_size`]: specifies the desired stack size. Note that this can +/// be overriden by the OS. +/// +/// If the [`stack_size`] field is not specified, the stack size +/// will be the `RUST_MIN_STACK` environment variable. If it is +/// not specified either, a sensible default will be set. +/// +/// If the [`name`] field is not specified, the thread will not be named. +/// +/// The [`spawn`] method will take ownership of the builder and create an +/// [`io::Result`] to the thread handle with the given configuration. +/// +/// The [`thread::spawn`] free function uses a `Builder` with default +/// configuration and [`unwrap`]s its return value. +/// +/// You may want to use [`spawn`] instead of [`thread::spawn`], when you want +/// to recover from a failure to launch a thread, indeed the free function will +/// panick where the `Builder` method will return a [`io::Result`]. 
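A short sketch of the difference the new `Builder` docs draw between `Builder::spawn` and the free `thread::spawn` function; the thread name, stack size, and closure are arbitrary choices for the example:

```rust
use std::thread;

fn main() {
    let builder = thread::Builder::new()
        .name("worker".to_string())   // currently only shows up in panic messages
        .stack_size(64 * 1024);       // the OS may round this up

    // `Builder::spawn` reports a failure to launch the thread as an
    // `io::Result`, where the free `thread::spawn` function would panic.
    match builder.spawn(|| println!("hello from the worker")) {
        Ok(handle) => handle.join().unwrap(),
        Err(e) => println!("could not spawn thread: {}", e),
    }
}
```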
/// /// # Examples /// @@ -196,6 +221,13 @@ /// /// handler.join().unwrap(); /// ``` +/// +/// [`thread::spawn`]: ../../std/thread/fn.spawn.html +/// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size +/// [`name`]: ../../std/thread/struct.Builder.html#method.name +/// [`spawn`]: ../../std/thread/struct.Builder.html#method.spawn +/// [`io::Result`]: ../../std/io/type.Result.html +/// [`unwrap`]: ../../std/result/enum.Result.html#method.unwrap #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] pub struct Builder { @@ -209,11 +241,6 @@ impl Builder { /// Generates the base configuration for spawning a thread, from which /// configuration methods can be chained. /// - /// If the [`stack_size`] field is not specified, the stack size - /// will be the `RUST_MIN_STACK` environment variable. If it is - /// not specified either, a sensible default will be set (2MB as - /// of the writting of this doc). - /// /// # Examples /// /// ``` @@ -229,8 +256,6 @@ impl Builder { /// /// handler.join().unwrap(); /// ``` - /// - /// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> Builder { Builder { @@ -280,9 +305,10 @@ pub fn stack_size(mut self, size: usize) -> Builder { self } - /// Spawns a new thread, and returns a join handle for it. + /// Spawns a new thread by taking ownership of the `Builder`, and returns an + /// [`io::Result`] to its [`JoinHandle`]. /// - /// The child thread may outlive the parent (unless the parent thread + /// The spawned thread may outlive the caller (unless the caller thread /// is the main thread; the whole process is terminated when the main /// thread finishes). The join handle can be used to block on /// termination of the child thread, including recovering its panics. @@ -297,6 +323,7 @@ pub fn stack_size(mut self, size: usize) -> Builder { /// /// [`spawn`]: ../../std/thread/fn.spawn.html /// [`io::Result`]: ../../std/io/type.Result.html + /// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html /// /// # Examples /// @@ -468,6 +495,23 @@ pub fn current() -> Thread { /// Cooperatively gives up a timeslice to the OS scheduler. /// +/// This is used when the programmer knows that the thread will have nothing +/// to do for some time, and thus avoid wasting computing time. +/// +/// For example when polling on a resource, it is common to check that it is +/// available, and if not to yield in order to avoid busy waiting. +/// +/// Thus the pattern of `yield`ing after a failed poll is rather common when +/// implementing low-level shared resources or synchronization primitives. +/// +/// However programmers will usualy prefer to use, [`channel`]s, [`Condvar`]s, +/// [`Mutex`]es or [`join`] for their synchronisation routines, as they avoid +/// thinking about thread schedulling. +/// +/// Note that [`channel`]s for example are implemented using this primitive. +/// Indeed when you call `send` or `recv`, which are blocking, they will yield +/// if the channel is not available. 
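The check-then-yield polling loop that the new `yield_now` documentation alludes to might look like the sketch below; the atomic flag and the surrounding setup are illustrative only, not part of the patch:

```rust
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;

fn main() {
    let ready = Arc::new(AtomicBool::new(false));
    let flag = ready.clone();

    let worker = thread::spawn(move || {
        // ... some work the main thread is waiting for ...
        flag.store(true, Ordering::Release);
    });

    // Poll the flag, giving up the timeslice after every failed check so the
    // scheduler can run the worker instead of letting this loop burn CPU.
    while !ready.load(Ordering::Acquire) {
        thread::yield_now();
    }

    worker.join().unwrap();
}
```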
+/// /// # Examples /// /// ``` @@ -475,6 +519,12 @@ pub fn current() -> Thread { /// /// thread::yield_now(); /// ``` +/// +/// [`channel`]: ../../std/sync/mpsc/index.html +/// [`spawn`]: ../../std/thread/fn.spawn.html +/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join +/// [`Mutex`]: ../../std/sync/struct.Mutex.html +/// [`Condvar`]: ../../std/sync/struct.Condvar.html #[stable(feature = "rust1", since = "1.0.0")] pub fn yield_now() { imp::Thread::yield_now() diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index e5bb02fe082..24ce99208ed 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -715,7 +715,7 @@ pub fn add_trailing_semicolon(mut self) -> Self { StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| { (mac, MacStmtStyle::Semicolon, attrs) })), - node @ _ => node, + node => node, }; self } @@ -1076,16 +1076,16 @@ pub fn is_str(&self) -> bool { pub fn is_unsuffixed(&self) -> bool { match *self { // unsuffixed variants - LitKind::Str(..) => true, - LitKind::ByteStr(..) => true, - LitKind::Byte(..) => true, - LitKind::Char(..) => true, - LitKind::Int(_, LitIntType::Unsuffixed) => true, - LitKind::FloatUnsuffixed(..) => true, + LitKind::Str(..) | + LitKind::ByteStr(..) | + LitKind::Byte(..) | + LitKind::Char(..) | + LitKind::Int(_, LitIntType::Unsuffixed) | + LitKind::FloatUnsuffixed(..) | LitKind::Bool(..) => true, // suffixed variants - LitKind::Int(_, LitIntType::Signed(..)) => false, - LitKind::Int(_, LitIntType::Unsigned(..)) => false, + LitKind::Int(_, LitIntType::Signed(..)) | + LitKind::Int(_, LitIntType::Unsigned(..)) | LitKind::Float(..) => false, } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 0980b73e80c..45f891d8dc5 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -112,7 +112,7 @@ impl NestedMetaItem { /// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem. pub fn meta_item(&self) -> Option<&MetaItem> { match self.node { - NestedMetaItemKind::MetaItem(ref item) => Some(&item), + NestedMetaItemKind::MetaItem(ref item) => Some(item), _ => None } } @@ -120,7 +120,7 @@ pub fn meta_item(&self) -> Option<&MetaItem> { /// Returns the Lit if self is a NestedMetaItemKind::Literal. 
pub fn literal(&self) -> Option<&Lit> { match self.node { - NestedMetaItemKind::Literal(ref lit) => Some(&lit), + NestedMetaItemKind::Literal(ref lit) => Some(lit), _ => None } } @@ -259,7 +259,7 @@ pub fn value_str(&self) -> Option { match self.node { MetaItemKind::NameValue(ref v) => { match v.node { - LitKind::Str(ref s, _) => Some((*s).clone()), + LitKind::Str(ref s, _) => Some(*s), _ => None, } }, @@ -1217,9 +1217,10 @@ fn token(&self) -> Token { Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string()))) } LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None), - LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value { - true => "true", - false => "false", + LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value { + "true" + } else { + "false" }))), } } @@ -1261,7 +1262,7 @@ fn map_attrs) -> Vec>(self, f: F) impl HasAttrs for Vec { fn attrs(&self) -> &[Attribute] { - &self + self } fn map_attrs) -> Vec>(self, f: F) -> Self { f(self) @@ -1270,7 +1271,7 @@ fn map_attrs) -> Vec>(self, f: F) -> Self { impl HasAttrs for ThinVec { fn attrs(&self) -> &[Attribute] { - &self + self } fn map_attrs) -> Vec>(self, f: F) -> Self { f(self.into()).into() diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 0c8be1d4f24..d32c3ec5f46 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -485,7 +485,7 @@ pub fn span_until_char(&self, sp: Span, c: char) -> Span { match self.span_to_snippet(sp) { Ok(snippet) => { let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right(); - if snippet.len() > 0 && !snippet.contains('\n') { + if !snippet.is_empty() && !snippet.contains('\n') { Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp } } else { sp @@ -502,7 +502,7 @@ pub fn def_span(&self, sp: Span) -> Span { pub fn get_filemap(&self, filename: &str) -> Option> { for fm in self.files.borrow().iter() { if filename == fm.name { - (self.dep_tracking_callback.borrow())(&fm); + (self.dep_tracking_callback.borrow())(fm); return Some(fm.clone()); } } diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index ede8a33df65..2e98c7d9626 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -123,7 +123,7 @@ pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool { return false; } - let mis = if !is_cfg(&attr) { + let mis = if !is_cfg(attr) { return true; } else if let Some(mis) = attr.meta_item_list() { mis @@ -150,7 +150,7 @@ fn visit_expr_attrs(&mut self, attrs: &[ast::Attribute]) { // flag the offending attributes for attr in attrs.iter() { if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) { - let mut err = feature_err(&self.sess, + let mut err = feature_err(self.sess, "stmt_expr_attributes", attr.span, GateIssue::Language, @@ -258,7 +258,7 @@ pub fn configure_stmt(&mut self, stmt: ast::Stmt) -> Option { pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option { if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) { if !field.attrs.is_empty() { - let mut err = feature_err(&self.sess, + let mut err = feature_err(self.sess, "struct_field_attributes", field.span, GateIssue::Language, @@ -290,7 +290,7 @@ fn visit_struct_field_attrs(&mut self, attrs: &[ast::Attribute]) { for attr in attrs.iter() { if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) { let mut err = feature_err( - &self.sess, + self.sess, "struct_field_attributes", 
attr.span, GateIssue::Language, diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index fe5cb87ad59..73aeb40df84 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -120,7 +120,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, // URLs can be unavoidably longer than the line limit, so we allow them. // Allowed format is: `[name]: https://www.rust-lang.org/` - let is_url = |l: &str| l.starts_with('[') && l.contains("]:") && l.contains("http"); + let is_url = |l: &str| l.starts_with("[") && l.contains("]:") && l.contains("http"); if msg.lines().any(|line| line.len() > MAX_DESCRIPTION_WIDTH && !is_url(line)) { ecx.span_err(span, &format!( @@ -177,7 +177,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, if let Err(e) = output_metadata(ecx, &target_triple, &crate_name.name.as_str(), - &diagnostics) { + diagnostics) { ecx.span_bug(span, &format!( "error writing metadata for triple `{}` and crate `{}`, error: {}, \ cause: {:?}", @@ -227,7 +227,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, MacEager::items(SmallVector::many(vec![ P(ast::Item { - ident: name.clone(), + ident: *name, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Const( diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index f731c5abdd6..00483b1ea5f 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -535,7 +535,7 @@ pub enum SyntaxExtension { /// /// The `bool` dictates whether the contents of the macro can /// directly use `#[unstable]` things (true == yes). - NormalTT(Box, Option, bool), + NormalTT(Box, Option<(ast::NodeId, Span)>, bool), /// A function-like syntax extension that has an extra ident before /// the block. @@ -589,6 +589,7 @@ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool) -> Result>, Determinacy>; fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool) -> Result, Determinacy>; + fn check_unused_macros(&self); } #[derive(Copy, Clone, Debug)] @@ -618,6 +619,7 @@ fn resolve_macro(&mut self, _scope: Mark, _path: &ast::Path, _kind: MacroKind, _force: bool) -> Result, Determinacy> { Err(Determinacy::Determined) } + fn check_unused_macros(&self) {} } #[derive(Clone)] @@ -635,8 +637,8 @@ pub struct ExpansionData { } /// One of these is made during expansion and incrementally updated as we go; -/// when a macro expansion occurs, the resulting nodes have the backtrace() -/// -> expn_info of their expansion context stored into their span. +/// when a macro expansion occurs, the resulting nodes have the `backtrace() +/// -> expn_info` of their expansion context stored into their span. pub struct ExtCtxt<'a> { pub parse_sess: &'a parse::ParseSess, pub ecfg: expand::ExpansionConfig<'a>, @@ -709,7 +711,7 @@ pub fn expansion_cause(&self) -> Span { } ctxt = info.call_site.ctxt; last_macro = Some(info.call_site); - return Some(()); + Some(()) }).is_none() { break } @@ -770,9 +772,9 @@ pub fn span_bug(&self, sp: Span, msg: &str) -> ! 
{ } pub fn trace_macros_diag(&self) { for (sp, notes) in self.expansions.iter() { - let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, &"trace_macro"); + let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro"); for note in notes { - db.note(¬e); + db.note(note); } db.emit(); } @@ -795,11 +797,15 @@ pub fn std_path(&self, components: &[&str]) -> Vec { v.push(self.ident_of(s)); } v.extend(components.iter().map(|s| self.ident_of(s))); - return v + v } pub fn name_of(&self, st: &str) -> ast::Name { Symbol::intern(st) } + + pub fn check_unused_macros(&self) { + self.resolver.check_unused_macros(); + } } /// Extract a string literal from the macro expanded version of `expr`, diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index a8aa103f80a..25e0aed220a 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -415,19 +415,19 @@ fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc) -> match *ext { MultiModifier(ref mac) => { - let meta = panictry!(attr.parse_meta(&self.cx.parse_sess)); + let meta = panictry!(attr.parse_meta(self.cx.parse_sess)); let item = mac.expand(self.cx, attr.span, &meta, item); kind.expect_from_annotatables(item) } MultiDecorator(ref mac) => { let mut items = Vec::new(); - let meta = panictry!(attr.parse_meta(&self.cx.parse_sess)); + let meta = panictry!(attr.parse_meta(self.cx.parse_sess)); mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item)); items.push(item); kind.expect_from_annotatables(items) } SyntaxExtension::AttrProcMacro(ref mac) => { - let item_toks = stream_for_item(&item, &self.cx.parse_sess); + let item_toks = stream_for_item(&item, self.cx.parse_sess); let span = Span { ctxt: self.cx.backtrace(), ..attr.span }; let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks); @@ -439,7 +439,7 @@ fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc) -> } _ => { let msg = &format!("macro `{}` may not be used in attributes", attr.path); - self.cx.span_err(attr.span, &msg); + self.cx.span_err(attr.span, msg); kind.dummy(attr.span) } } @@ -454,7 +454,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc) -> }; let path = &mac.node.path; - let ident = ident.unwrap_or(keywords::Invalid.ident()); + let ident = ident.unwrap_or_else(|| keywords::Invalid.ident()); let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark)); let opt_expanded = match *ext { NormalTT(ref expandfun, exp_span, allow_internal_unstable) => { @@ -469,7 +469,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc) -> call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), - span: exp_span, + span: exp_span.map(|(_, s)| s), allow_internal_unstable: allow_internal_unstable, }, }); @@ -591,7 +591,7 @@ fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Rc) - } _ => { let msg = &format!("macro `{}` may not be used for derive attributes", attr.path); - self.cx.span_err(span, &msg); + self.cx.span_err(span, msg); kind.dummy(span) } } @@ -749,19 +749,15 @@ fn configure(&mut self, node: T) -> Option { fn check_attributes(&mut self, attrs: &[ast::Attribute]) { let features = self.cx.ecfg.features.unwrap(); for attr in attrs.iter() { - feature_gate::check_attribute(&attr, &self.cx.parse_sess, features); + feature_gate::check_attribute(attr, self.cx.parse_sess, features); } } } pub fn find_attr_invoc(attrs: &mut Vec) -> Option { - for i in 0 .. 
attrs.len() { - if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) { - return Some(attrs.remove(i)); - } - } - - None + attrs.iter() + .position(|a| !attr::is_known(a) && !is_builtin_attr(a)) + .map(|i| attrs.remove(i)) } // These are pretty nasty. Ideally, we would keep the tokens around, linked from @@ -923,7 +919,7 @@ fn fold_item(&mut self, item: P) -> SmallVector> { let result = noop_fold_item(item, self); self.cx.current_expansion.module = orig_module; self.cx.current_expansion.directory_ownership = orig_directory_ownership; - return result; + result } // Ensure that test functions are accessible from the test harness. ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index d7a85baa3ff..f8fac847a05 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -23,7 +23,7 @@ /// /// This is registered as a set of expression syntax extension called quote! /// that lifts its argument token-tree to an AST representing the -/// construction of the same token tree, with token::SubstNt interpreted +/// construction of the same token tree, with `token::SubstNt` interpreted /// as antiquotes (splices). pub mod rt { @@ -389,7 +389,7 @@ pub fn unflatten(tts: Vec) -> Vec { result = results.pop().unwrap(); result.push(tree); } - tree @ _ => result.push(tree), + tree => result.push(tree), } } result @@ -612,8 +612,11 @@ fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P { #[allow(non_upper_case_globals)] fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { macro_rules! mk_lit { - ($name: expr, $suffix: expr, $($args: expr),*) => {{ - let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]); + ($name: expr, $suffix: expr, $content: expr $(, $count: expr)*) => {{ + let name = mk_name(cx, sp, ast::Ident::with_empty_ctxt($content)); + let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![ + name $(, cx.expr_usize(sp, $count))* + ]); let suffix = match $suffix { Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))), None => cx.expr_none(sp) @@ -621,7 +624,8 @@ macro_rules! mk_lit { cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix]) }} } - match *tok { + + let name = match *tok { token::BinOp(binop) => { return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec![mk_binop(cx, sp, binop)]); } @@ -639,34 +643,14 @@ macro_rules! 
mk_lit { vec![mk_delim(cx, sp, delim)]); } - token::Literal(token::Byte(i), suf) => { - let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i)); - return mk_lit!("Byte", suf, e_byte); - } - - token::Literal(token::Char(i), suf) => { - let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i)); - return mk_lit!("Char", suf, e_char); - } - - token::Literal(token::Integer(i), suf) => { - let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i)); - return mk_lit!("Integer", suf, e_int); - } - - token::Literal(token::Float(fident), suf) => { - let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident)); - return mk_lit!("Float", suf, e_fident); - } - - token::Literal(token::Str_(ident), suf) => { - return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))) - } - - token::Literal(token::StrRaw(ident, n), suf) => { - return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)), - cx.expr_usize(sp, n)) - } + token::Literal(token::Byte(i), suf) => return mk_lit!("Byte", suf, i), + token::Literal(token::Char(i), suf) => return mk_lit!("Char", suf, i), + token::Literal(token::Integer(i), suf) => return mk_lit!("Integer", suf, i), + token::Literal(token::Float(i), suf) => return mk_lit!("Float", suf, i), + token::Literal(token::Str_(i), suf) => return mk_lit!("Str_", suf, i), + token::Literal(token::StrRaw(i, n), suf) => return mk_lit!("StrRaw", suf, i, n), + token::Literal(token::ByteStr(i), suf) => return mk_lit!("ByteStr", suf, i), + token::Literal(token::ByteStrRaw(i, n), suf) => return mk_lit!("ByteStrRaw", suf, i, n), token::Ident(ident) => { return cx.expr_call(sp, @@ -688,10 +672,6 @@ macro_rules! mk_lit { token::Interpolated(_) => panic!("quote! with interpolated token"), - _ => () - } - - let name = match *tok { token::Eq => "Eq", token::Lt => "Lt", token::Le => "Le", @@ -706,6 +686,7 @@ macro_rules! mk_lit { token::At => "At", token::Dot => "Dot", token::DotDot => "DotDot", + token::DotDotDot => "DotDotDot", token::Comma => "Comma", token::Semi => "Semi", token::Colon => "Colon", @@ -718,7 +699,10 @@ macro_rules! mk_lit { token::Question => "Question", token::Underscore => "Underscore", token::Eof => "Eof", - _ => panic!("unhandled token in quote!"), + + token::Whitespace | token::SubstNt(_) | token::Comment | token::Shebang(_) => { + panic!("unhandled token in quote!"); + } }; mk_token_path(cx, sp, name) } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 22a5776315a..4183583d66f 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -150,7 +150,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); - return DummyResult::expr(sp); + DummyResult::expr(sp) } } } @@ -167,7 +167,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); - return DummyResult::expr(sp); + DummyResult::expr(sp) } Ok(..) => { // Add this input file to the code map to make it available as diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index eb0b7c29f8d..bf66aa0f00b 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -36,43 +36,47 @@ //! repetitions indicated by Kleene stars. It only advances or calls out to the //! real Rust parser when no `cur_eis` items remain //! -//! 
Example: Start parsing `a a a a b` against [· a $( a )* a b]. +//! Example: //! -//! Remaining input: `a a a a b` +//! ```text, ignore +//! Start parsing a a a a b against [· a $( a )* a b]. +//! +//! Remaining input: a a a a b //! next_eis: [· a $( a )* a b] //! -//! - - - Advance over an `a`. - - - +//! - - - Advance over an a. - - - //! -//! Remaining input: `a a a b` +//! Remaining input: a a a b //! cur: [a · $( a )* a b] //! Descend/Skip (first item). //! next: [a $( · a )* a b] [a $( a )* · a b]. //! -//! - - - Advance over an `a`. - - - +//! - - - Advance over an a. - - - //! -//! Remaining input: `a a b` +//! Remaining input: a a b //! cur: [a $( a · )* a b] next: [a $( a )* a · b] //! Finish/Repeat (first item) //! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b] //! -//! - - - Advance over an `a`. - - - (this looks exactly like the last step) +//! - - - Advance over an a. - - - (this looks exactly like the last step) //! -//! Remaining input: `a b` +//! Remaining input: a b //! cur: [a $( a · )* a b] next: [a $( a )* a · b] //! Finish/Repeat (first item) //! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b] //! -//! - - - Advance over an `a`. - - - (this looks exactly like the last step) +//! - - - Advance over an a. - - - (this looks exactly like the last step) //! -//! Remaining input: `b` +//! Remaining input: b //! cur: [a $( a · )* a b] next: [a $( a )* a · b] //! Finish/Repeat (first item) //! next: [a $( a )* · a b] [a $( · a )* a b] //! -//! - - - Advance over a `b`. - - - +//! - - - Advance over a b. - - - //! -//! Remaining input: `` +//! Remaining input: '' //! eof: [a $( a )* a b ·] +//! ``` pub use self::NamedMatch::*; pub use self::ParseResult::*; @@ -178,20 +182,20 @@ fn initial_matcher_pos(ms: Vec, lo: BytePos) -> Box { }) } -/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL: +/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`: /// so it is associated with a single ident in a parse, and all -/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type -/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a -/// single token::MATCH_NONTERMINAL in the TokenTree that produced it. +/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type +/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a +/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it. /// -/// The in-memory structure of a particular NamedMatch represents the match +/// The in-memory structure of a particular `NamedMatch` represents the match /// that occurred when a particular subset of a matcher was applied to a /// particular token tree. /// -/// The width of each MatchedSeq in the NamedMatch, and the identity of the -/// `MatchedNonterminal`s, will depend on the token tree it was applied to: -/// each MatchedSeq corresponds to a single TTSeq in the originating -/// token tree. The depth of the NamedMatch structure will therefore depend +/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of +/// the `MatchedNonterminal`s, will depend on the token tree it was applied +/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating +/// token tree. The depth of the `NamedMatch` structure will therefore depend /// only on the nesting depth of `ast::TTSeq`s in the originating /// token tree it was derived from. 
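The reworked trace above describes the matcher-position bookkeeping only in prose. As a rough, standalone illustration of the same cur_eis/next_eis idea (hypothetical toy `State` names and a plain `HashSet`, not the real `MatcherPos`/`parse` machinery from this file), the `a a a a b` run against `a $( a )* a b` can be reproduced like this:

use std::collections::HashSet;

// Toy positions inside the matcher `a $( a )* a b`, mirroring the dots in the
// trace above.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum State {
    BeforeFirstA, // [· a $( a )* a b]
    BeforeSeq,    // [a · $( a )* a b]
    InSeq,        // [a $( · a )* a b]
    AfterSeqItem, // [a $( a · )* a b]
    BeforeLastA,  // [a $( a )* · a b]
    BeforeB,      // [a $( a )* a · b]
    Done,         // [a $( a )* a b ·]
}

// Descend/Skip and Finish/Repeat consume no input: standing at the sequence
// boundary can either enter the repetition or move past it.
fn closure(mut out: HashSet<State>) -> HashSet<State> {
    loop {
        let mut added = Vec::new();
        for &s in &out {
            if matches!(s, State::BeforeSeq | State::AfterSeqItem) {
                added.push(State::InSeq);
                added.push(State::BeforeLastA);
            }
        }
        let before = out.len();
        out.extend(added);
        if out.len() == before {
            return out;
        }
    }
}

// Advance every current item over one input token, like the next_eis step.
fn step(cur: &HashSet<State>, tok: char) -> HashSet<State> {
    let mut next = HashSet::new();
    for &s in cur {
        match (s, tok) {
            (State::BeforeFirstA, 'a') => { next.insert(State::BeforeSeq); }
            (State::InSeq, 'a')        => { next.insert(State::AfterSeqItem); }
            (State::BeforeLastA, 'a')  => { next.insert(State::BeforeB); }
            (State::BeforeB, 'b')      => { next.insert(State::Done); }
            _ => {}
        }
    }
    closure(next)
}

fn main() {
    let mut cur = HashSet::new();
    cur.insert(State::BeforeFirstA);
    cur = closure(cur);
    for tok in "aaaab".chars() {
        cur = step(&cur, tok);
        println!("after `{}`: {:?}", tok, cur);
    }
    // The eof item [a $( a )* a b ·] is reached, so the input matched.
    assert!(cur.contains(&State::Done));
}

This only captures the shape of the loop; the real code additionally records `NamedMatch` bindings, handles separators, and falls back to the Rust parser for nonterminals, as described in the surrounding comments.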
@@ -267,11 +271,12 @@ pub fn parse_failure_msg(tok: Token) -> String { /// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison) fn token_name_eq(t1 : &Token, t2 : &Token) -> bool { - match (t1,t2) { - (&token::Ident(id1),&token::Ident(id2)) - | (&token::Lifetime(id1),&token::Lifetime(id2)) => - id1.name == id2.name, - _ => *t1 == *t2 + if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) { + id1.name == id2.name + } else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) { + id1.name == id2.name + } else { + *t1 == *t2 } } @@ -334,7 +339,7 @@ fn inner_parse_loop(sess: &ParseSess, // Check if we need a separator if idx == len && ei.sep.is_some() { // We have a separator, and it is the current token. - if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) { + if ei.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) { ei.idx += 1; next_eis.push(ei); } @@ -401,7 +406,7 @@ fn inner_parse_loop(sess: &ParseSess, cur_eis.push(ei); } TokenTree::Token(_, ref t) => { - if token_name_eq(t, &token) { + if token_name_eq(t, token) { ei.idx += 1; next_eis.push(ei); } @@ -485,11 +490,8 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op } fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { - match name { - "tt" => { - return token::NtTT(p.parse_token_tree()); - } - _ => {} + if name == "tt" { + return token::NtTT(p.parse_token_tree()); } // check at the beginning and the parser checks after each bump p.process_potential_macro_variable(); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index f959ccc989e..a208f530602 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -94,7 +94,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, -> Box { if cx.trace_macros() { let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp); - let mut values: &mut Vec = cx.expansions.entry(sp).or_insert(vec![]); + let mut values: &mut Vec = cx.expansions.entry(sp).or_insert_with(Vec::new); values.push(format!("expands to `{}! {{ {} }}`", name, arg)); } @@ -206,7 +206,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell, def: &ast::Item) let mut valid = true; // Extract the arguments: - let lhses = match **argument_map.get(&lhs_nm).unwrap() { + let lhses = match *argument_map[&lhs_nm] { MatchedSeq(ref s, _) => { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = **m { @@ -222,7 +222,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell, def: &ast::Item) _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") }; - let rhses = match **argument_map.get(&rhs_nm).unwrap() { + let rhses = match *argument_map[&rhs_nm] { MatchedSeq(ref s, _) => { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = **m { @@ -252,7 +252,9 @@ pub fn compile(sess: &ParseSess, features: &RefCell, def: &ast::Item) valid: valid, }); - NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable")) + NormalTT(exp, + Some((def.id, def.span)), + attr::contains_name(&def.attrs, "allow_internal_unstable")) } fn check_lhs_nt_follows(sess: &ParseSess, @@ -260,13 +262,12 @@ fn check_lhs_nt_follows(sess: &ParseSess, lhs: "ed::TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. 
- match lhs { - "ed::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts), - _ => { - let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; - sess.span_diagnostic.span_err(lhs.span(), msg); - false - } + if let quoted::TokenTree::Delimited(_, ref tts) = *lhs { + check_matcher(sess, features, &tts.tts) + } else { + let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; + sess.span_diagnostic.span_err(lhs.span(), msg); + false } // we don't abort on errors on rejection, the driver will do that for us // after parsing/expansion. we can report every error in every macro this way. @@ -283,17 +284,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { return false; }, TokenTree::Sequence(span, ref seq) => { - if seq.separator.is_none() { - if seq.tts.iter().all(|seq_tt| { - match *seq_tt { - TokenTree::Sequence(_, ref sub_seq) => - sub_seq.op == quoted::KleeneOp::ZeroOrMore, - _ => false, - } - }) { - sess.span_diagnostic.span_err(span, "repetition matches empty token tree"); - return false; + if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| { + match *seq_tt { + TokenTree::Sequence(_, ref sub_seq) => + sub_seq.op == quoted::KleeneOp::ZeroOrMore, + _ => false, } + }) { + sess.span_diagnostic.span_err(span, "repetition matches empty token tree"); + return false; } if !check_lhs_no_empty_seq(sess, &seq.tts) { return false; @@ -407,7 +406,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet { } } - return first; + first } } @@ -469,7 +468,7 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet { // we only exit the loop if `tts` was empty or if every // element of `tts` matches the empty sequence. assert!(first.maybe_empty); - return first; + first } } @@ -579,7 +578,7 @@ fn check_matcher_core(sess: &ParseSess, let build_suffix_first = || { let mut s = first_sets.first(suffix); if s.maybe_empty { s.add_all(follow); } - return s; + s }; // (we build `suffix_first` on demand below; you can tell @@ -861,6 +860,7 @@ fn quoted_tt_to_string(tt: "ed::TokenTree) -> String { match *tt { quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), - _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"), + _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ + in follow set checker"), } } diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index d216effbd45..fa65e9501c2 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -96,6 +96,17 @@ pub fn len(&self) -> usize { } } + pub fn is_empty(&self) -> bool { + match *self { + TokenTree::Delimited(_, ref delimed) => match delimed.delim { + token::NoDelim => delimed.tts.is_empty(), + _ => false, + }, + TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(), + _ => true, + } + } + pub fn get_tt(&self, index: usize) -> TokenTree { match (self, index) { (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { @@ -144,9 +155,9 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars } _ => end_sp, }, - tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), }, - tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), + tree => 
tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), }; sess.missing_fragment_specifiers.borrow_mut().insert(span); result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident())); @@ -228,10 +239,10 @@ fn kleene_op(token: &token::Token) -> Option { Some(op) => return (Some(tok), op), None => span, }, - tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), } }, - tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), }; sess.span_diagnostic.span_err(span, "expected `*` or `+`"); diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 947089b0b9a..2a435bdea10 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -121,20 +121,20 @@ pub fn transcribe(sp_diag: &Handler, &repeats) { LockstepIterSize::Unconstrained => { panic!(sp_diag.span_fatal( - sp.clone(), /* blame macro writer */ + sp, /* blame macro writer */ "attempted to repeat an expression \ containing no syntax \ variables matched as repeating at this depth")); } LockstepIterSize::Contradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - panic!(sp_diag.span_fatal(sp.clone(), &msg[..])); + panic!(sp_diag.span_fatal(sp, &msg[..])); } LockstepIterSize::Constraint(len, _) => { if len == 0 { if seq.op == quoted::KleeneOp::OneOrMore { // FIXME #2887 blame invoker - panic!(sp_diag.span_fatal(sp.clone(), + panic!(sp_diag.span_fatal(sp, "this must repeat at least once")); } } else { diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index b6a2c983fd4..09090ab8731 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -472,7 +472,7 @@ pub enum Stability { impl ::std::fmt::Debug for AttributeGate { fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { match *self { - Gated(ref stab, ref name, ref expl, _) => + Gated(ref stab, name, expl, _) => write!(fmt, "Gated({:?}, {}, {})", stab, name, expl), Ungated => write!(fmt, "Ungated") } @@ -816,7 +816,7 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool { ]; // cfg(...)'s that are feature gated -const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] = &[ +const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[ // (name in cfg, feature, function to check if the feature is enabled) ("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)), ("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)), @@ -881,7 +881,7 @@ fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { let name = unwrap_or!(attr.name(), return).as_str(); for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES { if name == n { - if let &Gated(_, ref name, ref desc, ref has_feature) = gateage { + if let Gated(_, name, desc, ref has_feature) = *gateage { gate_feature_fn!(self, has_feature, attr.span, name, desc); } debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage); @@ -1098,7 +1098,7 @@ fn contains_novel_literal(item: &ast::MetaItem) -> bool { NameValue(ref lit) => !lit.node.is_str(), List(ref list) => list.iter().any(|li| { match li.node { - MetaItem(ref mi) => contains_novel_literal(&mi), + MetaItem(ref mi) => contains_novel_literal(mi), Literal(_) => true, } }), @@ -1120,7 +1120,7 @@ fn visit_attribute(&mut self, attr: &ast::Attribute) { return } - let meta = 
panictry!(attr.parse_meta(&self.context.parse_sess)); + let meta = panictry!(attr.parse_meta(self.context.parse_sess)); if contains_novel_literal(&meta) { gate_feature_post!(&self, attr_literals, attr.span, "non-string literals in attributes, or string \ @@ -1216,14 +1216,11 @@ fn visit_item(&mut self, i: &'a ast::Item) { } ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => { - match polarity { - ast::ImplPolarity::Negative => { - gate_feature_post!(&self, optin_builtin_traits, - i.span, - "negative trait bounds are not yet fully implemented; \ - use marker types for now"); - }, - _ => {} + if polarity == ast::ImplPolarity::Negative { + gate_feature_post!(&self, optin_builtin_traits, + i.span, + "negative trait bounds are not yet fully implemented; \ + use marker types for now"); } if let ast::Defaultness::Default = defaultness { @@ -1272,11 +1269,9 @@ fn visit_ty(&mut self, ty: &'a ast::Ty) { fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) { if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty { - match output_ty.node { - ast::TyKind::Never => return, - _ => (), - }; - self.visit_ty(output_ty) + if output_ty.node != ast::TyKind::Never { + self.visit_ty(output_ty) + } } } @@ -1373,17 +1368,14 @@ fn visit_fn(&mut self, span: Span, _node_id: NodeId) { // check for const fn declarations - match fn_kind { - FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) => { - gate_feature_post!(&self, const_fn, span, "const fn is unstable"); - } - _ => { - // stability of const fn methods are covered in - // visit_trait_item and visit_impl_item below; this is - // because default methods don't pass through this - // point. - } + if let FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) = + fn_kind { + gate_feature_post!(&self, const_fn, span, "const fn is unstable"); } + // stability of const fn methods are covered in + // visit_trait_item and visit_impl_item below; this is + // because default methods don't pass through this + // point. match fn_kind { FnKind::ItemFn(_, _, _, _, abi, _, _) | diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index 06335584c96..f37dcfdde89 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -337,7 +337,7 @@ fn from_span(span: Span, je: &JsonEmitter) -> Vec { }) .collect() }) - .unwrap_or(vec![]) + .unwrap_or_else(|_| vec![]) } } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 92cec462ffb..082930777e5 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -62,7 +62,7 @@ pub fn parse_outer_attributes(&mut self) -> PResult<'a, Vec> { _ => break, } } - return Ok(attrs); + Ok(attrs) } /// Matches `attribute = # ! [ meta_item ]` @@ -182,7 +182,7 @@ pub fn parse_inner_attributes(&mut self) -> PResult<'a, Vec> { } let attr = self.parse_attribute(true)?; - assert!(attr.style == ast::AttrStyle::Inner); + assert_eq!(attr.style, ast::AttrStyle::Inner); attrs.push(attr); } token::DocComment(s) => { diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs index 4fe4ec7e4c0..0c6f09ba766 100644 --- a/src/libsyntax/parse/classify.rs +++ b/src/libsyntax/parse/classify.rs @@ -43,14 +43,14 @@ pub fn expr_is_simple_block(e: &ast::Expr) -> bool { } /// this statement requires a semicolon after it. -/// note that in one case (stmt_semi), we've already +/// note that in one case (`stmt_semi`), we've already /// seen the semicolon, and thus don't need another. 
pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool { match *stmt { ast::StmtKind::Local(_) => true, - ast::StmtKind::Item(_) => false, ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e), - ast::StmtKind::Semi(..) => false, + ast::StmtKind::Item(_) | + ast::StmtKind::Semi(..) | ast::StmtKind::Mac(..) => false, } } diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index b57708f9193..fe931f7cf6a 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -12,7 +12,7 @@ use parse::token; -/// SeqSep : a sequence separator (token) +/// `SeqSep` : a sequence separator (token) /// and whether a trailing separator is allowed. pub struct SeqSep { pub sep: Option, diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 7ac322b144c..8b545d3b909 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -77,7 +77,7 @@ fn vertical_trim(lines: Vec) -> Vec { while j > i && lines[j - 1].trim().is_empty() { j -= 1; } - lines[i..j].iter().cloned().collect() + lines[i..j].to_vec() } /// remove a "[ \t]*\*" block from each line, if possible diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index ded1f0b599a..0bcd4578518 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -144,7 +144,7 @@ pub fn peek(&self) -> TokenAndSpan { impl<'a> StringReader<'a> { /// For comments.rs, which hackily pokes into next_pos and ch - pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc) -> Self { + pub fn new_raw(sess: &'a ParseSess, filemap: Rc) -> Self { let mut sr = StringReader::new_raw_internal(sess, filemap); sr.bump(); sr @@ -180,7 +180,7 @@ fn new_raw_internal(sess: &'a ParseSess, filemap: Rc) -> Se pub fn new(sess: &'a ParseSess, filemap: Rc) -> Self { let mut sr = StringReader::new_raw(sess, filemap); - if let Err(_) = sr.advance_token() { + if sr.advance_token().is_err() { sr.emit_fatal_errors(); panic!(FatalError); } @@ -205,7 +205,7 @@ pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self { sr.bump(); - if let Err(_) = sr.advance_token() { + if sr.advance_token().is_err() { sr.emit_fatal_errors(); panic!(FatalError); } @@ -525,7 +525,7 @@ fn scan_comment(&mut self) -> Option { self.bump(); } - return if doc_comment { + if doc_comment { self.with_str_from(start_bpos, |string| { // comments with only more "/"s are not doc comments let tok = if is_doc_comment(string) { @@ -544,7 +544,7 @@ fn scan_comment(&mut self) -> Option { tok: token::Comment, sp: mk_sp(start_bpos, self.pos), }) - }; + } } Some('*') => { self.bump(); @@ -754,9 +754,7 @@ fn scan_number(&mut self, c: char) -> token::Lit { // integer literal followed by field/method access or a range pattern // (`0..2` and `12.foo()`) if self.ch_is('.') && !self.nextch_is('.') && - !self.nextch() - .unwrap_or('\0') - .is_xid_start() { + !ident_start(self.nextch()) { // might have stuff after the ., and if it does, it needs to start // with a number self.bump(); @@ -766,7 +764,7 @@ fn scan_number(&mut self, c: char) -> token::Lit { } let pos = self.pos; self.check_float_base(start_bpos, pos, base); - return token::Float(self.name_from(start_bpos)); + token::Float(self.name_from(start_bpos)) } else { // it might be a float if it has an exponent if self.ch_is('e') || self.ch_is('E') { @@ -776,7 +774,7 @@ fn scan_number(&mut self, c: char) -> token::Lit { return token::Float(self.name_from(start_bpos)); } // but we certainly have an integer! 
- return token::Integer(self.name_from(start_bpos)); + token::Integer(self.name_from(start_bpos)) } } @@ -1053,9 +1051,9 @@ fn binop(&mut self, op: token::BinOpToken) -> token::Token { self.bump(); if self.ch_is('=') { self.bump(); - return token::BinOpEq(op); + token::BinOpEq(op) } else { - return token::BinOp(op); + token::BinOp(op) } } @@ -1102,15 +1100,15 @@ fn next_token_inner(&mut self) -> Result { // One-byte tokens. ';' => { self.bump(); - return Ok(token::Semi); + Ok(token::Semi) } ',' => { self.bump(); - return Ok(token::Comma); + Ok(token::Comma) } '.' => { self.bump(); - return if self.ch_is('.') { + if self.ch_is('.') { self.bump(); if self.ch_is('.') { self.bump(); @@ -1120,61 +1118,61 @@ fn next_token_inner(&mut self) -> Result { } } else { Ok(token::Dot) - }; + } } '(' => { self.bump(); - return Ok(token::OpenDelim(token::Paren)); + Ok(token::OpenDelim(token::Paren)) } ')' => { self.bump(); - return Ok(token::CloseDelim(token::Paren)); + Ok(token::CloseDelim(token::Paren)) } '{' => { self.bump(); - return Ok(token::OpenDelim(token::Brace)); + Ok(token::OpenDelim(token::Brace)) } '}' => { self.bump(); - return Ok(token::CloseDelim(token::Brace)); + Ok(token::CloseDelim(token::Brace)) } '[' => { self.bump(); - return Ok(token::OpenDelim(token::Bracket)); + Ok(token::OpenDelim(token::Bracket)) } ']' => { self.bump(); - return Ok(token::CloseDelim(token::Bracket)); + Ok(token::CloseDelim(token::Bracket)) } '@' => { self.bump(); - return Ok(token::At); + Ok(token::At) } '#' => { self.bump(); - return Ok(token::Pound); + Ok(token::Pound) } '~' => { self.bump(); - return Ok(token::Tilde); + Ok(token::Tilde) } '?' => { self.bump(); - return Ok(token::Question); + Ok(token::Question) } ':' => { self.bump(); if self.ch_is(':') { self.bump(); - return Ok(token::ModSep); + Ok(token::ModSep) } else { - return Ok(token::Colon); + Ok(token::Colon) } } '$' => { self.bump(); - return Ok(token::Dollar); + Ok(token::Dollar) } // Multi-byte tokens. @@ -1182,21 +1180,21 @@ fn next_token_inner(&mut self) -> Result { self.bump(); if self.ch_is('=') { self.bump(); - return Ok(token::EqEq); + Ok(token::EqEq) } else if self.ch_is('>') { self.bump(); - return Ok(token::FatArrow); + Ok(token::FatArrow) } else { - return Ok(token::Eq); + Ok(token::Eq) } } '!' => { self.bump(); if self.ch_is('=') { self.bump(); - return Ok(token::Ne); + Ok(token::Ne) } else { - return Ok(token::Not); + Ok(token::Not) } } '<' => { @@ -1204,21 +1202,21 @@ fn next_token_inner(&mut self) -> Result { match self.ch.unwrap_or('\x00') { '=' => { self.bump(); - return Ok(token::Le); + Ok(token::Le) } '<' => { - return Ok(self.binop(token::Shl)); + Ok(self.binop(token::Shl)) } '-' => { self.bump(); match self.ch.unwrap_or('\x00') { _ => { - return Ok(token::LArrow); + Ok(token::LArrow) } } } _ => { - return Ok(token::Lt); + Ok(token::Lt) } } } @@ -1227,13 +1225,13 @@ fn next_token_inner(&mut self) -> Result { match self.ch.unwrap_or('\x00') { '=' => { self.bump(); - return Ok(token::Ge); + Ok(token::Ge) } '>' => { - return Ok(self.binop(token::Shr)); + Ok(self.binop(token::Shr)) } _ => { - return Ok(token::Gt); + Ok(token::Gt) } } } @@ -1303,7 +1301,7 @@ fn next_token_inner(&mut self) -> Result { }; self.bump(); // advance ch past token let suffix = self.scan_optional_raw_name(); - return Ok(token::Literal(token::Char(id), suffix)); + Ok(token::Literal(token::Char(id), suffix)) } 'b' => { self.bump(); @@ -1314,7 +1312,7 @@ fn next_token_inner(&mut self) -> Result { _ => unreachable!(), // Should have been a token::Ident above. 
}; let suffix = self.scan_optional_raw_name(); - return Ok(token::Literal(lit, suffix)); + Ok(token::Literal(lit, suffix)) } '"' => { let start_bpos = self.pos; @@ -1345,7 +1343,7 @@ fn next_token_inner(&mut self) -> Result { }; self.bump(); let suffix = self.scan_optional_raw_name(); - return Ok(token::Literal(token::Str_(id), suffix)); + Ok(token::Literal(token::Str_(id), suffix)) } 'r' => { let start_bpos = self.pos; @@ -1416,24 +1414,24 @@ fn next_token_inner(&mut self) -> Result { Symbol::intern("??") }; let suffix = self.scan_optional_raw_name(); - return Ok(token::Literal(token::StrRaw(id, hash_count), suffix)); + Ok(token::Literal(token::StrRaw(id, hash_count), suffix)) } '-' => { if self.nextch_is('>') { self.bump(); self.bump(); - return Ok(token::RArrow); + Ok(token::RArrow) } else { - return Ok(self.binop(token::Minus)); + Ok(self.binop(token::Minus)) } } '&' => { if self.nextch_is('&') { self.bump(); self.bump(); - return Ok(token::AndAnd); + Ok(token::AndAnd) } else { - return Ok(self.binop(token::And)); + Ok(self.binop(token::And)) } } '|' => { @@ -1441,27 +1439,27 @@ fn next_token_inner(&mut self) -> Result { Some('|') => { self.bump(); self.bump(); - return Ok(token::OrOr); + Ok(token::OrOr) } _ => { - return Ok(self.binop(token::Or)); + Ok(self.binop(token::Or)) } } } '+' => { - return Ok(self.binop(token::Plus)); + Ok(self.binop(token::Plus)) } '*' => { - return Ok(self.binop(token::Star)); + Ok(self.binop(token::Star)) } '/' => { - return Ok(self.binop(token::Slash)); + Ok(self.binop(token::Slash)) } '^' => { - return Ok(self.binop(token::Caret)); + Ok(self.binop(token::Caret)) } '%' => { - return Ok(self.binop(token::Percent)); + Ok(self.binop(token::Percent)) } c => { let last_bpos = self.pos; @@ -1470,7 +1468,7 @@ fn next_token_inner(&mut self) -> Result { bpos, "unknown start of token", c); - unicode_chars::check_for_substitution(&self, c, &mut err); + unicode_chars::check_for_substitution(self, c, &mut err); self.fatal_errs.push(err); Err(()) } @@ -1492,14 +1490,14 @@ fn read_to_eol(&mut self) -> String { if self.ch_is('\n') { self.bump(); } - return val; + val } fn read_one_line_comment(&mut self) -> String { let val = self.read_to_eol(); assert!((val.as_bytes()[0] == b'/' && val.as_bytes()[1] == b'/') || (val.as_bytes()[0] == b'#' && val.as_bytes()[1] == b'!')); - return val; + val } fn consume_non_eol_whitespace(&mut self) { @@ -1543,7 +1541,7 @@ fn scan_byte(&mut self) -> token::Lit { Symbol::intern("?") }; self.bump(); // advance ch past token - return token::Byte(id); + token::Byte(id) } fn scan_byte_escape(&mut self, delim: char, below_0x7f_only: bool) -> bool { @@ -1576,7 +1574,7 @@ fn scan_byte_string(&mut self) -> token::Lit { Symbol::intern("??") }; self.bump(); - return token::ByteStr(id); + token::ByteStr(id) } fn scan_raw_byte_string(&mut self) -> token::Lit { @@ -1629,8 +1627,8 @@ fn scan_raw_byte_string(&mut self) -> token::Lit { self.bump(); } self.bump(); - return token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos), - hash_count); + token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos), + hash_count) } } @@ -1648,7 +1646,7 @@ fn in_range(c: Option, lo: char, hi: char) -> bool { } fn is_dec_digit(c: Option) -> bool { - return in_range(c, '0', '9'); + in_range(c, '0', '9') } pub fn is_doc_comment(s: &str) -> bool { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index fe3ca1cf230..1eff819d755 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -107,18 +107,18 @@ 
pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess) parser.parse_inner_attributes() } -pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, ast::Crate> { +pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess) + -> PResult { new_parser_from_source_str(sess, name, source).parse_crate_mod() } -pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, Vec> { +pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess) + -> PResult> { new_parser_from_source_str(sess, name, source).parse_inner_attributes() } -pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, P> { +pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess) + -> PResult> { new_parser_from_source_str(sess, name, source).parse_expr() } @@ -126,29 +126,29 @@ pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a Pa /// /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and`Err` /// when a syntax error occurred. -pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, Option>> { +pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess) + -> PResult>> { new_parser_from_source_str(sess, name, source).parse_item() } -pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, ast::MetaItem> { +pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess) + -> PResult { new_parser_from_source_str(sess, name, source).parse_meta_item() } -pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, Option> { +pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess) + -> PResult> { new_parser_from_source_str(sess, name, source).parse_stmt() } -pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) +pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess) -> TokenStream { filemap_to_stream(sess, sess.codemap().new_filemap(name, source)) } // Create a new parser from a source string -pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String) - -> Parser<'a> { +pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String) + -> Parser { filemap_to_parser(sess, sess.codemap().new_filemap(name, source)) } @@ -173,7 +173,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, } /// Given a filemap and config, return a parser -pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, ) -> Parser<'a> { +pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc, ) -> Parser { let end_pos = filemap.end_pos; let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap)); @@ -186,7 +186,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, ) -> Par // must preserve old name for now, because quote! 
from the *existing* // compiler expands into it -pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec) -> Parser<'a> { +pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec) -> Parser { stream_to_parser(sess, tts.into_iter().collect()) } @@ -216,8 +216,8 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc) -> TokenStream panictry!(srdr.parse_all_token_trees()) } -/// Given stream and the ParseSess, produce a parser -pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> { +/// Given stream and the `ParseSess`, produce a parser +pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser { Parser::new(sess, stream, None, false) } @@ -251,7 +251,7 @@ pub fn char_lit(lit: &str) -> (char, isize) { (c, 4) } 'u' => { - assert!(lit.as_bytes()[2] == b'{'); + assert_eq!(lit.as_bytes()[2], b'{'); let idx = lit.find('}').unwrap(); let v = u32::from_str_radix(&lit[3..idx], 16).unwrap(); let c = char::from_u32(v).unwrap(); @@ -287,51 +287,46 @@ fn eat<'a>(it: &mut iter::Peekable>) { } let mut chars = lit.char_indices().peekable(); - loop { - match chars.next() { - Some((i, c)) => { - match c { - '\\' => { - let ch = chars.peek().unwrap_or_else(|| { - panic!("{}", error(i)) - }).1; - - if ch == '\n' { - eat(&mut chars); - } else if ch == '\r' { - chars.next(); - let ch = chars.peek().unwrap_or_else(|| { - panic!("{}", error(i)) - }).1; - - if ch != '\n' { - panic!("lexer accepted bare CR"); - } - eat(&mut chars); - } else { - // otherwise, a normal escape - let (c, n) = char_lit(&lit[i..]); - for _ in 0..n - 1 { // we don't need to move past the first \ - chars.next(); - } - res.push(c); - } - }, - '\r' => { - let ch = chars.peek().unwrap_or_else(|| { - panic!("{}", error(i)) - }).1; + while let Some((i, c)) = chars.next() { + match c { + '\\' => { + let ch = chars.peek().unwrap_or_else(|| { + panic!("{}", error(i)) + }).1; + + if ch == '\n' { + eat(&mut chars); + } else if ch == '\r' { + chars.next(); + let ch = chars.peek().unwrap_or_else(|| { + panic!("{}", error(i)) + }).1; - if ch != '\n' { - panic!("lexer accepted bare CR"); - } + if ch != '\n' { + panic!("lexer accepted bare CR"); + } + eat(&mut chars); + } else { + // otherwise, a normal escape + let (c, n) = char_lit(&lit[i..]); + for _ in 0..n - 1 { // we don't need to move past the first \ chars.next(); - res.push('\n'); } - c => res.push(c), + res.push(c); } }, - None => break + '\r' => { + let ch = chars.peek().unwrap_or_else(|| { + panic!("{}", error(i)) + }).1; + + if ch != '\n' { + panic!("lexer accepted bare CR"); + } + chars.next(); + res.push('\n'); + } + c => res.push(c), } } @@ -346,22 +341,16 @@ pub fn raw_str_lit(lit: &str) -> String { debug!("raw_str_lit: given {}", escape_default(lit)); let mut res = String::with_capacity(lit.len()); - // FIXME #8372: This could be a for-loop if it didn't borrow the iterator let mut chars = lit.chars().peekable(); - loop { - match chars.next() { - Some(c) => { - if c == '\r' { - if *chars.peek().unwrap() != '\n' { - panic!("lexer accepted bare CR"); - } - chars.next(); - res.push('\n'); - } else { - res.push(c); - } - }, - None => break + while let Some(c) = chars.next() { + if c == '\r' { + if *chars.peek().unwrap() != '\n' { + panic!("lexer accepted bare CR"); + } + chars.next(); + res.push('\n'); + } else { + res.push(c); } } @@ -459,7 +448,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) { if lit.len() == 1 { (lit.as_bytes()[0], 1) } else { - assert!(lit.as_bytes()[0] == b'\\', err(0)); + assert_eq!(lit.as_bytes()[0], b'\\', "{}", 
err(0)); let b = match lit.as_bytes()[1] { b'"' => b'"', b'n' => b'\n', @@ -480,7 +469,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) { } } }; - return (b, 2); + (b, 2) } } @@ -491,7 +480,7 @@ pub fn byte_str_lit(lit: &str) -> Rc> { let error = |i| format!("lexer should have rejected {} at {}", lit, i); /// Eat everything up to a non-whitespace - fn eat<'a, I: Iterator>(it: &mut iter::Peekable) { + fn eat>(it: &mut iter::Peekable) { loop { match it.peek().map(|x| x.1) { Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => { @@ -578,7 +567,7 @@ pub fn integer_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler if let Some(err) = err { err!(diag, |span, diag| diag.span_err(span, err)); } - return filtered_float_lit(Symbol::intern(&s), Some(suf), diag) + return filtered_float_lit(Symbol::intern(s), Some(suf), diag) } } diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index d5baec675e4..078e86aa294 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -59,7 +59,7 @@ fn report(&mut self, if !self.obsolete_set.contains(&kind) && (error || self.sess.span_diagnostic.can_emit_warnings) { - err.note(&format!("{}", desc)); + err.note(desc); self.obsolete_set.insert(kind); } err.emit(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index ca1351e3b41..4741f896d3c 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -248,7 +248,7 @@ fn next(&mut self) -> TokenAndSpan { fn next_desugared(&mut self) -> TokenAndSpan { let (sp, name) = match self.next() { TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name), - tok @ _ => return tok, + tok => return tok, }; let stripped = strip_doc_comment_decoration(&name.as_str()); @@ -354,7 +354,7 @@ pub enum Error { } impl Error { - pub fn span_err<'a>(self, sp: Span, handler: &'a errors::Handler) -> DiagnosticBuilder<'a> { + pub fn span_err(self, sp: Span, handler: &errors::Handler) -> DiagnosticBuilder { match self { Error::FileNotFoundForModule { ref mod_name, ref default_path, @@ -478,9 +478,10 @@ pub fn new(sess: &'a ParseSess, } fn next_tok(&mut self) -> TokenAndSpan { - let mut next = match self.desugar_doc_comments { - true => self.token_cursor.next_desugared(), - false => self.token_cursor.next(), + let mut next = if self.desugar_doc_comments { + self.token_cursor.next_desugared() + } else { + self.token_cursor.next() }; if next.sp == syntax_pos::DUMMY_SP { next.sp = self.prev_span; @@ -551,7 +552,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String { // This might be a sign we need a connect method on Iterator. let b = i.next() .map_or("".to_string(), |t| t.to_string()); - i.enumerate().fold(b, |mut b, (i, ref a)| { + i.enumerate().fold(b, |mut b, (i, a)| { if tokens.len() > 2 && i == tokens.len() - 2 { b.push_str(", or "); } else if tokens.len() == 2 && i == tokens.len() - 2 { @@ -985,18 +986,15 @@ fn parse_seq_to_before_tokens(&mut self, token::CloseDelim(..) 
| token::Eof => break, _ => {} }; - match sep.sep { - Some(ref t) => { - if first { - first = false; - } else { - if let Err(e) = self.expect(t) { - fe(e); - break; - } + if let Some(ref t) = sep.sep { + if first { + first = false; + } else { + if let Err(e) = self.expect(t) { + fe(e); + break; } } - _ => () } if sep.trailing_sep_allowed && kets.iter().any(|k| self.check(k)) { break; @@ -1493,7 +1491,7 @@ fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PRe let sum_span = ty.span.to(self.prev_span); let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178, - "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty)); + "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty)); match ty.node { TyKind::Rptr(ref lifetime, ref mut_ty) => { @@ -1547,7 +1545,7 @@ pub fn parse_ptr(&mut self) -> PResult<'a, MutTy> { pub fn is_named_argument(&mut self) -> bool { let offset = match self.token { - token::BinOp(token::And) => 1, + token::BinOp(token::And) | token::AndAnd => 1, _ if self.token.is_keyword(keywords::Mut) => 1, _ => 0 @@ -3154,10 +3152,11 @@ pub fn parse_arm(&mut self) -> PResult<'a, Arm> { let attrs = self.parse_outer_attributes()?; let pats = self.parse_pats()?; - let mut guard = None; - if self.eat_keyword(keywords::If) { - guard = Some(self.parse_expr()?); - } + let guard = if self.eat_keyword(keywords::If) { + Some(self.parse_expr()?) + } else { + None + }; self.expect(&token::FatArrow)?; let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR, None)?; @@ -3600,10 +3599,11 @@ fn parse_local(&mut self, attrs: ThinVec) -> PResult<'a, P> { let lo = self.span; let pat = self.parse_pat()?; - let mut ty = None; - if self.eat(&token::Colon) { - ty = Some(self.parse_ty()?); - } + let ty = if self.eat(&token::Colon) { + Some(self.parse_ty()?) + } else { + None + }; let init = self.parse_initializer()?; Ok(P(ast::Local { ty: ty, @@ -3929,7 +3929,7 @@ fn parse_stmt_without_recovery(&mut self, }, None => { let unused_attrs = |attrs: &[_], s: &mut Self| { - if attrs.len() > 0 { + if !attrs.is_empty() { if s.prev_token_kind == PrevTokenKind::DocComment { s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit(); } else { @@ -4815,7 +4815,7 @@ fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool) self.expect(&token::Not)?; } - self.complain_if_pub_macro(&vis, prev_span); + self.complain_if_pub_macro(vis, prev_span); // eat a matched-delimiter token tree: *at_end = true; @@ -4917,13 +4917,10 @@ fn parse_item_impl(&mut self, } } } else { - match polarity { - ast::ImplPolarity::Negative => { - // This is a negated type implementation - // `impl !MyType {}`, which is not allowed. - self.span_err(neg_span, "inherent implementation can't be negated"); - }, - _ => {} + if polarity == ast::ImplPolarity::Negative { + // This is a negated type implementation + // `impl !MyType {}`, which is not allowed. 
+ self.span_err(neg_span, "inherent implementation can't be negated"); } None }; @@ -5185,7 +5182,7 @@ pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibili let path_span = self.prev_span; let help_msg = format!("make this visible only to module `{}` with `in`:", path); self.expect(&token::CloseDelim(token::Paren))?; // `)` - let mut err = self.span_fatal_help(path_span, &msg, &suggestion); + let mut err = self.span_fatal_help(path_span, msg, suggestion); err.span_suggestion(path_span, &help_msg, format!("in {}", path)); err.emit(); // emit diagnostic, but continue with public visibility } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 25cabef70c1..77db604c56e 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -53,6 +53,10 @@ impl DelimToken { pub fn len(self) -> usize { if self == NoDelim { 0 } else { 1 } } + + pub fn is_empty(self) -> bool { + self == NoDelim + } } #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] @@ -198,17 +202,17 @@ pub fn is_like_gt(&self) -> bool { pub fn can_begin_expr(&self) -> bool { match *self { Ident(ident) => ident_can_begin_expr(ident), // value name or keyword - OpenDelim(..) => true, // tuple, array or block - Literal(..) => true, // literal - Not => true, // operator not - BinOp(Minus) => true, // unary minus - BinOp(Star) => true, // dereference - BinOp(Or) | OrOr => true, // closure - BinOp(And) => true, // reference - AndAnd => true, // double reference - DotDot | DotDotDot => true, // range notation - Lt | BinOp(Shl) => true, // associated path - ModSep => true, // global path + OpenDelim(..) | // tuple, array or block + Literal(..) | // literal + Not | // operator not + BinOp(Minus) | // unary minus + BinOp(Star) | // dereference + BinOp(Or) | OrOr | // closure + BinOp(And) | // reference + AndAnd | // double reference + DotDot | DotDotDot | // range notation + Lt | BinOp(Shl) | // associated path + ModSep | // global path Pound => true, // expression attributes Interpolated(ref nt) => match **nt { NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true, @@ -222,16 +226,16 @@ pub fn can_begin_expr(&self) -> bool { pub fn can_begin_type(&self) -> bool { match *self { Ident(ident) => ident_can_begin_type(ident), // type name or keyword - OpenDelim(Paren) => true, // tuple - OpenDelim(Bracket) => true, // array - Underscore => true, // placeholder - Not => true, // never - BinOp(Star) => true, // raw pointer - BinOp(And) => true, // reference - AndAnd => true, // double reference - Question => true, // maybe bound in trait object - Lifetime(..) => true, // lifetime bound in trait object - Lt | BinOp(Shl) => true, // associated path + OpenDelim(Paren) | // tuple + OpenDelim(Bracket) | // array + Underscore | // placeholder + Not | // never + BinOp(Star) | // raw pointer + BinOp(And) | // reference + AndAnd | // double reference + Question | // maybe bound in trait object + Lifetime(..) | // lifetime bound in trait object + Lt | BinOp(Shl) | // associated path ModSep => true, // global path Interpolated(ref nt) => match **nt { NtIdent(..) | NtTy(..) | NtPath(..) => true, diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 1d67c2a2c2b..e893c859247 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -113,22 +113,22 @@ //! between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer //! and point-in-infinite-stream senses freely. //! -//! 
There is a parallel ring buffer, 'size', that holds the calculated size of +//! There is a parallel ring buffer, `size`, that holds the calculated size of //! each token. Why calculated? Because for Begin/End pairs, the "size" //! includes everything between the pair. That is, the "size" of Begin is //! actually the sum of the sizes of everything between Begin and the paired -//! End that follows. Since that is arbitrarily far in the future, 'size' is +//! End that follows. Since that is arbitrarily far in the future, `size` is //! being rewritten regularly while the printer runs; in fact most of the -//! machinery is here to work out 'size' entries on the fly (and give up when +//! machinery is here to work out `size` entries on the fly (and give up when //! they're so obviously over-long that "infinity" is a good enough //! approximation for purposes of line breaking). //! //! The "input side" of the printer is managed as an abstract process called -//! SCAN, which uses 'scan_stack', to manage calculating 'size'. SCAN is, in +//! SCAN, which uses `scan_stack`, to manage calculating `size`. SCAN is, in //! other words, the process of calculating 'size' entries. //! //! The "output side" of the printer is managed by an abstract process called -//! PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to +//! PRINT, which uses `print_stack`, `margin` and `space` to figure out what to //! do with each token/size pair it consumes as it goes. It's trying to consume //! the entire buffered window, but can't output anything until the size is >= //! 0 (sizes are set to negative while they're pending calculation). @@ -409,7 +409,7 @@ pub fn scan_pop_bottom(&mut self) -> usize { pub fn advance_right(&mut self) { self.right += 1; self.right %= self.buf_len; - assert!(self.right != self.left); + assert_ne!(self.right, self.left); } pub fn advance_left(&mut self) -> io::Result<()> { debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right, diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 0c7e8fda837..83c289ff80b 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -233,7 +233,7 @@ pub fn token_to_string(tok: &Token) -> String { token::CloseDelim(token::Bracket) => "]".to_string(), token::OpenDelim(token::Brace) => "{".to_string(), token::CloseDelim(token::Brace) => "}".to_string(), - token::OpenDelim(token::NoDelim) => " ".to_string(), + token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => " ".to_string(), token::Pound => "#".to_string(), token::Dollar => "$".to_string(), @@ -244,7 +244,7 @@ pub fn token_to_string(tok: &Token) -> String { let mut out = match lit { token::Byte(b) => format!("b'{}'", b), token::Char(c) => format!("'{}'", c), - token::Float(c) => c.to_string(), + token::Float(c) | token::Integer(c) => c.to_string(), token::Str_(s) => format!("\"{}\"", s), token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}", @@ -277,23 +277,23 @@ pub fn token_to_string(tok: &Token) -> String { token::Shebang(s) => format!("/* shebang: {}*/", s), token::Interpolated(ref nt) => match **nt { - token::NtExpr(ref e) => expr_to_string(&e), - token::NtMeta(ref e) => meta_item_to_string(&e), - token::NtTy(ref e) => ty_to_string(&e), - token::NtPath(ref e) => path_to_string(&e), - token::NtItem(ref e) => item_to_string(&e), - token::NtBlock(ref e) => block_to_string(&e), - token::NtStmt(ref e) => stmt_to_string(&e), - token::NtPat(ref e) => pat_to_string(&e), + token::NtExpr(ref e) => 
expr_to_string(e), + token::NtMeta(ref e) => meta_item_to_string(e), + token::NtTy(ref e) => ty_to_string(e), + token::NtPath(ref e) => path_to_string(e), + token::NtItem(ref e) => item_to_string(e), + token::NtBlock(ref e) => block_to_string(e), + token::NtStmt(ref e) => stmt_to_string(e), + token::NtPat(ref e) => pat_to_string(e), token::NtIdent(ref e) => ident_to_string(e.node), token::NtTT(ref tree) => tt_to_string(tree.clone()), - token::NtArm(ref e) => arm_to_string(&e), - token::NtImplItem(ref e) => impl_item_to_string(&e), - token::NtTraitItem(ref e) => trait_item_to_string(&e), - token::NtGenerics(ref e) => generics_to_string(&e), - token::NtWhereClause(ref e) => where_clause_to_string(&e), - token::NtArg(ref e) => arg_to_string(&e), - token::NtVis(ref e) => vis_to_string(&e), + token::NtArm(ref e) => arm_to_string(e), + token::NtImplItem(ref e) => impl_item_to_string(e), + token::NtTraitItem(ref e) => trait_item_to_string(e), + token::NtGenerics(ref e) => generics_to_string(e), + token::NtWhereClause(ref e) => where_clause_to_string(e), + token::NtArg(ref e) => arg_to_string(e), + token::NtVis(ref e) => vis_to_string(e), } } } @@ -520,8 +520,7 @@ fn next_lit(&mut self, pos: BytePos) -> Option { let mut result = None; - if let &Some(ref lits) = self.literals() - { + if let Some(ref lits) = *self.literals() { while cur_lit < lits.len() { let ltrl = (*lits)[cur_lit].clone(); if ltrl.pos > pos { break; } @@ -618,11 +617,8 @@ fn next_comment(&mut self) -> Option { fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> { self.maybe_print_comment(lit.span.lo)?; - match self.next_lit(lit.span.lo) { - Some(ref ltrl) => { - return word(self.writer(), &(*ltrl).lit); - } - _ => () + if let Some(ref ltrl) = self.next_lit(lit.span.lo) { + return word(self.writer(), &(*ltrl).lit); } match lit.node { ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style), @@ -799,7 +795,7 @@ fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> { self.popen()?; self.commasep(Consistent, &items[..], - |s, i| s.print_meta_list_item(&i))?; + |s, i| s.print_meta_list_item(i))?; self.pclose()?; } } @@ -982,14 +978,14 @@ pub fn commasep_cmnt(&mut self, pub fn commasep_exprs(&mut self, b: Breaks, exprs: &[P]) -> io::Result<()> { - self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span) + self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span) } pub fn print_mod(&mut self, _mod: &ast::Mod, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_inner_attributes(attrs)?; for item in &_mod.items { - self.print_item(&item)?; + self.print_item(item)?; } Ok(()) } @@ -1018,7 +1014,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> { match ty.node { ast::TyKind::Slice(ref ty) => { word(&mut self.s, "[")?; - self.print_type(&ty)?; + self.print_type(ty)?; word(&mut self.s, "]")?; } ast::TyKind::Ptr(ref mt) => { @@ -1040,7 +1036,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> { ast::TyKind::Tup(ref elts) => { self.popen()?; self.commasep(Inconsistent, &elts[..], - |s, ty| s.print_type(&ty))?; + |s, ty| s.print_type(ty))?; if elts.len() == 1 { word(&mut self.s, ",")?; } @@ -1048,7 +1044,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> { } ast::TyKind::Paren(ref typ) => { self.popen()?; - self.print_type(&typ)?; + self.print_type(typ)?; self.pclose()?; } ast::TyKind::BareFn(ref f) => { @@ -1081,14 +1077,14 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> { } ast::TyKind::Array(ref ty, ref v) => { word(&mut 
self.s, "[")?; - self.print_type(&ty)?; + self.print_type(ty)?; word(&mut self.s, "; ")?; - self.print_expr(&v)?; + self.print_expr(v)?; word(&mut self.s, "]")?; } ast::TyKind::Typeof(ref e) => { word(&mut self.s, "typeof(")?; - self.print_expr(&e)?; + self.print_expr(e)?; word(&mut self.s, ")")?; } ast::TyKind::Infer => { @@ -1130,7 +1126,7 @@ pub fn print_foreign_item(&mut self, } self.print_ident(item.ident)?; self.word_space(":")?; - self.print_type(&t)?; + self.print_type(t)?; word(&mut self.s, ";")?; self.end()?; // end the head-ibox self.end() // end the outer cbox @@ -1187,7 +1183,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { self.head(&visibility_qualified(&item.vis, "extern crate"))?; if let Some(p) = *optional_path { let val = p.as_str(); - if val.contains("-") { + if val.contains('-') { self.print_string(&val, ast::StrStyle::Cooked)?; } else { self.print_name(p)?; @@ -1203,7 +1199,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { } ast::ItemKind::Use(ref vp) => { self.head(&visibility_qualified(&item.vis, "use"))?; - self.print_view_path(&vp)?; + self.print_view_path(vp)?; word(&mut self.s, ";")?; self.end()?; // end inner head-block self.end()?; // end outer head-block @@ -1215,12 +1211,12 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { } self.print_ident(item.ident)?; self.word_space(":")?; - self.print_type(&ty)?; + self.print_type(ty)?; space(&mut self.s)?; self.end()?; // end the head-ibox self.word_space("=")?; - self.print_expr(&expr)?; + self.print_expr(expr)?; word(&mut self.s, ";")?; self.end()?; // end the outer cbox } @@ -1228,12 +1224,12 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { self.head(&visibility_qualified(&item.vis, "const"))?; self.print_ident(item.ident)?; self.word_space(":")?; - self.print_type(&ty)?; + self.print_type(ty)?; space(&mut self.s)?; self.end()?; // end the head-ibox self.word_space("=")?; - self.print_expr(&expr)?; + self.print_expr(expr)?; word(&mut self.s, ";")?; self.end()?; // end the outer cbox } @@ -1249,7 +1245,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { &item.vis )?; word(&mut self.s, " ")?; - self.print_block_with_attrs(&body, &item.attrs)?; + self.print_block_with_attrs(body, &item.attrs)?; } ast::ItemKind::Mod(ref _mod) => { self.head(&visibility_qualified(&item.vis, "mod"))?; @@ -1282,7 +1278,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { self.print_where_clause(¶ms.where_clause)?; space(&mut self.s)?; self.word_space("=")?; - self.print_type(&ty)?; + self.print_type(ty)?; word(&mut self.s, ";")?; self.end()?; // end the outer ibox } @@ -1297,11 +1293,11 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { } ast::ItemKind::Struct(ref struct_def, ref generics) => { self.head(&visibility_qualified(&item.vis, "struct"))?; - self.print_struct(&struct_def, generics, item.ident, item.span, true)?; + self.print_struct(struct_def, generics, item.ident, item.span, true)?; } ast::ItemKind::Union(ref struct_def, ref generics) => { self.head(&visibility_qualified(&item.vis, "union"))?; - self.print_struct(&struct_def, generics, item.ident, item.span, true)?; + self.print_struct(struct_def, generics, item.ident, item.span, true)?; } ast::ItemKind::DefaultImpl(unsafety, ref trait_ref) => { self.head("")?; @@ -1333,11 +1329,8 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { space(&mut self.s)?; } - match polarity { - ast::ImplPolarity::Negative => { - word(&mut 
self.s, "!")?; - }, - _ => {} + if polarity == ast::ImplPolarity::Negative { + word(&mut self.s, "!")?; } if let Some(ref t) = *opt_trait { @@ -1346,7 +1339,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { self.word_space("for")?; } - self.print_type(&ty)?; + self.print_type(ty)?; self.print_where_clause(&generics.where_clause)?; space(&mut self.s)?; @@ -1543,7 +1536,7 @@ pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> { Some(ref d) => { space(&mut self.s)?; self.word_space("=")?; - self.print_expr(&d) + self.print_expr(d) } _ => Ok(()) } @@ -1571,7 +1564,7 @@ pub fn print_trait_item(&mut self, ti: &ast::TraitItem) self.print_outer_attributes(&ti.attrs)?; match ti.node { ast::TraitItemKind::Const(ref ty, ref default) => { - self.print_associated_const(ti.ident, &ty, + self.print_associated_const(ti.ident, ty, default.as_ref().map(|expr| &**expr), &ast::Visibility::Inherited)?; } @@ -1614,7 +1607,7 @@ pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> { self.print_defaultness(ii.defaultness)?; match ii.node { ast::ImplItemKind::Const(ref ty, ref expr) => { - self.print_associated_const(ii.ident, &ty, Some(&expr), &ii.vis)?; + self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?; } ast::ImplItemKind::Method(ref sig, ref body) => { self.head("")?; @@ -1650,38 +1643,38 @@ pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> { self.word_nbsp("let")?; self.ibox(INDENT_UNIT)?; - self.print_local_decl(&loc)?; + self.print_local_decl(loc)?; self.end()?; if let Some(ref init) = loc.init { self.nbsp()?; self.word_space("=")?; - self.print_expr(&init)?; + self.print_expr(init)?; } word(&mut self.s, ";")?; self.end()?; } - ast::StmtKind::Item(ref item) => self.print_item(&item)?, + ast::StmtKind::Item(ref item) => self.print_item(item)?, ast::StmtKind::Expr(ref expr) => { self.space_if_not_bol()?; - self.print_expr_outer_attr_style(&expr, false)?; + self.print_expr_outer_attr_style(expr, false)?; if parse::classify::expr_requires_semi_to_be_stmt(expr) { word(&mut self.s, ";")?; } } ast::StmtKind::Semi(ref expr) => { self.space_if_not_bol()?; - self.print_expr_outer_attr_style(&expr, false)?; + self.print_expr_outer_attr_style(expr, false)?; word(&mut self.s, ";")?; } ast::StmtKind::Mac(ref mac) => { let (ref mac, style, ref attrs) = **mac; self.space_if_not_bol()?; - self.print_outer_attributes(&attrs)?; + self.print_outer_attributes(attrs)?; let delim = match style { ast::MacStmtStyle::Braces => token::Brace, _ => token::Paren }; - self.print_mac(&mac, delim)?; + self.print_mac(mac, delim)?; if style == ast::MacStmtStyle::Semicolon { word(&mut self.s, ";")?; } @@ -1735,7 +1728,7 @@ pub fn print_block_maybe_unclosed(&mut self, ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => { self.maybe_print_comment(st.span.lo)?; self.space_if_not_bol()?; - self.print_expr_outer_attr_style(&expr, false)?; + self.print_expr_outer_attr_style(expr, false)?; self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?; } _ => self.print_stmt(st)?, @@ -1755,9 +1748,9 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> { self.cbox(INDENT_UNIT - 1)?; self.ibox(0)?; word(&mut self.s, " else if ")?; - self.print_expr(&i)?; + self.print_expr(i)?; space(&mut self.s)?; - self.print_block(&then)?; + self.print_block(then)?; self.print_else(e.as_ref().map(|e| &**e)) } // "another else-if-let" @@ -1765,12 +1758,12 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> { self.cbox(INDENT_UNIT - 1)?; 
self.ibox(0)?; word(&mut self.s, " else if let ")?; - self.print_pat(&pat)?; + self.print_pat(pat)?; space(&mut self.s)?; self.word_space("=")?; - self.print_expr(&expr)?; + self.print_expr(expr)?; space(&mut self.s)?; - self.print_block(&then)?; + self.print_block(then)?; self.print_else(e.as_ref().map(|e| &**e)) } // "final else" @@ -1778,7 +1771,7 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> { self.cbox(INDENT_UNIT - 1)?; self.ibox(0)?; word(&mut self.s, " else ")?; - self.print_block(&b) + self.print_block(b) } // BLEAH, constraints would be great here _ => { @@ -1844,12 +1837,8 @@ pub fn check_expr_bin_needs_paren(&mut self, sub_expr: &ast::Expr, binop: ast::BinOp) -> bool { match sub_expr.node { ast::ExprKind::Binary(ref sub_op, _, _) => { - if AssocOp::from_ast_binop(sub_op.node).precedence() < - AssocOp::from_ast_binop(binop.node).precedence() { - true - } else { - false - } + AssocOp::from_ast_binop(sub_op.node).precedence() < + AssocOp::from_ast_binop(binop.node).precedence() } _ => true } @@ -1929,7 +1918,7 @@ fn print_expr_struct(&mut self, space(&mut self.s)?; } word(&mut self.s, "..")?; - self.print_expr(&expr)?; + self.print_expr(expr)?; self.end()?; } _ => if !fields.is_empty() { @@ -1969,7 +1958,7 @@ fn print_expr_method_call(&mut self, if !tys.is_empty() { word(&mut self.s, "::<")?; self.commasep(Inconsistent, tys, - |s, ty| s.print_type(&ty))?; + |s, ty| s.print_type(ty))?; word(&mut self.s, ">")?; } self.print_call_post(base_args) @@ -2038,7 +2027,7 @@ fn print_expr_outer_attr_style(&mut self, self.print_expr_vec(&exprs[..], attrs)?; } ast::ExprKind::Repeat(ref element, ref count) => { - self.print_expr_repeat(&element, &count, attrs)?; + self.print_expr_repeat(element, count, attrs)?; } ast::ExprKind::Struct(ref path, ref fields, ref wth) => { self.print_expr_struct(path, &fields[..], wth, attrs)?; @@ -2047,43 +2036,43 @@ fn print_expr_outer_attr_style(&mut self, self.print_expr_tup(&exprs[..], attrs)?; } ast::ExprKind::Call(ref func, ref args) => { - self.print_expr_call(&func, &args[..])?; + self.print_expr_call(func, &args[..])?; } ast::ExprKind::MethodCall(ident, ref tys, ref args) => { self.print_expr_method_call(ident, &tys[..], &args[..])?; } ast::ExprKind::Binary(op, ref lhs, ref rhs) => { - self.print_expr_binary(op, &lhs, &rhs)?; + self.print_expr_binary(op, lhs, rhs)?; } ast::ExprKind::Unary(op, ref expr) => { - self.print_expr_unary(op, &expr)?; + self.print_expr_unary(op, expr)?; } ast::ExprKind::AddrOf(m, ref expr) => { - self.print_expr_addr_of(m, &expr)?; + self.print_expr_addr_of(m, expr)?; } ast::ExprKind::Lit(ref lit) => { - self.print_literal(&lit)?; + self.print_literal(lit)?; } ast::ExprKind::Cast(ref expr, ref ty) => { if let ast::ExprKind::Cast(..) 
= expr.node { - self.print_expr(&expr)?; + self.print_expr(expr)?; } else { - self.print_expr_maybe_paren(&expr)?; + self.print_expr_maybe_paren(expr)?; } space(&mut self.s)?; self.word_space("as")?; - self.print_type(&ty)?; + self.print_type(ty)?; } ast::ExprKind::Type(ref expr, ref ty) => { - self.print_expr(&expr)?; + self.print_expr(expr)?; self.word_space(":")?; - self.print_type(&ty)?; + self.print_type(ty)?; } ast::ExprKind::If(ref test, ref blk, ref elseopt) => { - self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?; + self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?; } ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => { - self.print_if_let(&pat, &expr, &blk, elseopt.as_ref().map(|e| &**e))?; + self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?; } ast::ExprKind::While(ref test, ref blk, opt_ident) => { if let Some(ident) = opt_ident { @@ -2091,9 +2080,9 @@ fn print_expr_outer_attr_style(&mut self, self.word_space(":")?; } self.head("while")?; - self.print_expr(&test)?; + self.print_expr(test)?; space(&mut self.s)?; - self.print_block_with_attrs(&blk, attrs)?; + self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => { if let Some(ident) = opt_ident { @@ -2101,12 +2090,12 @@ fn print_expr_outer_attr_style(&mut self, self.word_space(":")?; } self.head("while let")?; - self.print_pat(&pat)?; + self.print_pat(pat)?; space(&mut self.s)?; self.word_space("=")?; - self.print_expr(&expr)?; + self.print_expr(expr)?; space(&mut self.s)?; - self.print_block_with_attrs(&blk, attrs)?; + self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => { if let Some(ident) = opt_ident { @@ -2114,12 +2103,12 @@ fn print_expr_outer_attr_style(&mut self, self.word_space(":")?; } self.head("for")?; - self.print_pat(&pat)?; + self.print_pat(pat)?; space(&mut self.s)?; self.word_space("in")?; - self.print_expr(&iter)?; + self.print_expr(iter)?; space(&mut self.s)?; - self.print_block_with_attrs(&blk, attrs)?; + self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::Loop(ref blk, opt_ident) => { if let Some(ident) = opt_ident { @@ -2128,13 +2117,13 @@ fn print_expr_outer_attr_style(&mut self, } self.head("loop")?; space(&mut self.s)?; - self.print_block_with_attrs(&blk, attrs)?; + self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::Match(ref expr, ref arms) => { self.cbox(INDENT_UNIT)?; self.ibox(4)?; self.word_nbsp("match")?; - self.print_expr(&expr)?; + self.print_expr(expr)?; space(&mut self.s)?; self.bopen()?; self.print_inner_attributes_no_trailing_hardbreak(attrs)?; @@ -2146,7 +2135,7 @@ fn print_expr_outer_attr_style(&mut self, ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => { self.print_capture_clause(capture_clause)?; - self.print_fn_block_args(&decl)?; + self.print_fn_block_args(decl)?; space(&mut self.s)?; self.print_expr(body)?; self.end()?; // need to close a box @@ -2161,48 +2150,48 @@ fn print_expr_outer_attr_style(&mut self, self.cbox(INDENT_UNIT)?; // head-box, will be closed by print-block after { self.ibox(0)?; - self.print_block_with_attrs(&blk, attrs)?; + self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::Assign(ref lhs, ref rhs) => { - self.print_expr(&lhs)?; + self.print_expr(lhs)?; space(&mut self.s)?; self.word_space("=")?; - self.print_expr(&rhs)?; + self.print_expr(rhs)?; } ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => { - self.print_expr(&lhs)?; + self.print_expr(lhs)?; space(&mut self.s)?; 
word(&mut self.s, op.node.to_string())?; self.word_space("=")?; - self.print_expr(&rhs)?; + self.print_expr(rhs)?; } ast::ExprKind::Field(ref expr, id) => { - self.print_expr(&expr)?; + self.print_expr(expr)?; word(&mut self.s, ".")?; self.print_ident(id.node)?; } ast::ExprKind::TupField(ref expr, id) => { - self.print_expr(&expr)?; + self.print_expr(expr)?; word(&mut self.s, ".")?; self.print_usize(id.node)?; } ast::ExprKind::Index(ref expr, ref index) => { - self.print_expr(&expr)?; + self.print_expr(expr)?; word(&mut self.s, "[")?; - self.print_expr(&index)?; + self.print_expr(index)?; word(&mut self.s, "]")?; } ast::ExprKind::Range(ref start, ref end, limits) => { - if let &Some(ref e) = start { - self.print_expr(&e)?; + if let Some(ref e) = *start { + self.print_expr(e)?; } if limits == ast::RangeLimits::HalfOpen { word(&mut self.s, "..")?; } else { word(&mut self.s, "...")?; } - if let &Some(ref e) = end { - self.print_expr(&e)?; + if let Some(ref e) = *end { + self.print_expr(e)?; } } ast::ExprKind::Path(None, ref path) => { @@ -2233,12 +2222,9 @@ fn print_expr_outer_attr_style(&mut self, } ast::ExprKind::Ret(ref result) => { word(&mut self.s, "return")?; - match *result { - Some(ref expr) => { - word(&mut self.s, " ")?; - self.print_expr(&expr)?; - } - _ => () + if let Some(ref expr) = *result { + word(&mut self.s, " ")?; + self.print_expr(expr)?; } } ast::ExprKind::InlineAsm(ref a) => { @@ -2268,7 +2254,7 @@ fn print_expr_outer_attr_style(&mut self, self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| { s.print_string(&co.as_str(), ast::StrStyle::Cooked)?; s.popen()?; - s.print_expr(&o)?; + s.print_expr(o)?; s.pclose()?; Ok(()) })?; @@ -2308,7 +2294,7 @@ fn print_expr_outer_attr_style(&mut self, ast::ExprKind::Paren(ref e) => { self.popen()?; self.print_inner_attributes_inline(attrs)?; - self.print_expr(&e)?; + self.print_expr(e)?; self.pclose()?; }, ast::ExprKind::Try(ref e) => { @@ -2318,7 +2304,7 @@ fn print_expr_outer_attr_style(&mut self, ast::ExprKind::Catch(ref blk) => { self.head("do catch")?; space(&mut self.s)?; - self.print_block_with_attrs(&blk, attrs)? + self.print_block_with_attrs(blk, attrs)? 
} } self.ann.post(self, NodeExpr(expr))?; @@ -2329,7 +2315,7 @@ pub fn print_local_decl(&mut self, loc: &ast::Local) -> io::Result<()> { self.print_pat(&loc.pat)?; if let Some(ref ty) = loc.ty { self.word_space(":")?; - self.print_type(&ty)?; + self.print_type(ty)?; } Ok(()) } @@ -2397,7 +2383,7 @@ fn print_qpath(&mut self, space(&mut self.s)?; self.word_space("as")?; let depth = path.segments.len() - qself.position; - self.print_path(&path, false, depth, false)?; + self.print_path(path, false, depth, false)?; } word(&mut self.s, ">")?; word(&mut self.s, "::")?; @@ -2438,7 +2424,7 @@ fn print_path_parameters(&mut self, self.commasep( Inconsistent, &data.types, - |s, ty| s.print_type(&ty))?; + |s, ty| s.print_type(ty))?; comma = true; } @@ -2461,13 +2447,13 @@ fn print_path_parameters(&mut self, self.commasep( Inconsistent, &data.inputs, - |s, ty| s.print_type(&ty))?; + |s, ty| s.print_type(ty))?; word(&mut self.s, ")")?; if let Some(ref ty) = data.output { self.space_if_not_bol()?; self.word_space("->")?; - self.print_type(&ty)?; + self.print_type(ty)?; } } } @@ -2496,24 +2482,24 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> { self.print_ident(path1.node)?; if let Some(ref p) = *sub { word(&mut self.s, "@")?; - self.print_pat(&p)?; + self.print_pat(p)?; } } PatKind::TupleStruct(ref path, ref elts, ddpos) => { self.print_path(path, true, 0, false)?; self.popen()?; if let Some(ddpos) = ddpos { - self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?; + self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?; if ddpos != 0 { self.word_space(",")?; } word(&mut self.s, "..")?; if ddpos != elts.len() { word(&mut self.s, ",")?; - self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?; + self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?; } } else { - self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?; + self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?; } self.pclose()?; } @@ -2549,17 +2535,17 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> { PatKind::Tuple(ref elts, ddpos) => { self.popen()?; if let Some(ddpos) = ddpos { - self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?; + self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?; if ddpos != 0 { self.word_space(",")?; } word(&mut self.s, "..")?; if ddpos != elts.len() { word(&mut self.s, ",")?; - self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?; + self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?; } } else { - self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?; + self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?; if elts.len() == 1 { word(&mut self.s, ",")?; } @@ -2568,41 +2554,41 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> { } PatKind::Box(ref inner) => { word(&mut self.s, "box ")?; - self.print_pat(&inner)?; + self.print_pat(inner)?; } PatKind::Ref(ref inner, mutbl) => { word(&mut self.s, "&")?; if mutbl == ast::Mutability::Mutable { word(&mut self.s, "mut ")?; } - self.print_pat(&inner)?; + self.print_pat(inner)?; } PatKind::Lit(ref e) => self.print_expr(&**e)?, PatKind::Range(ref begin, ref end, ref end_kind) => { - self.print_expr(&begin)?; + self.print_expr(begin)?; space(&mut self.s)?; match *end_kind { RangeEnd::Included => word(&mut self.s, "...")?, RangeEnd::Excluded => word(&mut self.s, "..")?, } - self.print_expr(&end)?; + self.print_expr(end)?; } PatKind::Slice(ref before, ref slice, ref after) => { 
word(&mut self.s, "[")?; self.commasep(Inconsistent, &before[..], - |s, p| s.print_pat(&p))?; + |s, p| s.print_pat(p))?; if let Some(ref p) = *slice { if !before.is_empty() { self.word_space(",")?; } if p.node != PatKind::Wild { - self.print_pat(&p)?; + self.print_pat(p)?; } word(&mut self.s, "..")?; if !after.is_empty() { self.word_space(",")?; } } self.commasep(Inconsistent, &after[..], - |s, p| s.print_pat(&p))?; + |s, p| s.print_pat(p))?; word(&mut self.s, "]")?; } PatKind::Mac(ref m) => self.print_mac(m, token::Paren)?, @@ -2628,12 +2614,12 @@ fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> { space(&mut self.s)?; self.word_space("|")?; } - self.print_pat(&p)?; + self.print_pat(p)?; } space(&mut self.s)?; if let Some(ref e) = arm.guard { self.word_space("if")?; - self.print_expr(&e)?; + self.print_expr(e)?; space(&mut self.s)?; } self.word_space("=>")?; @@ -2641,7 +2627,7 @@ fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> { match arm.body.node { ast::ExprKind::Block(ref blk) => { // the block will close the pattern's ibox - self.print_block_unclosed_indent(&blk, INDENT_UNIT)?; + self.print_block_unclosed_indent(blk, INDENT_UNIT)?; // If it is a user-provided unsafe block, print a comma after it if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules { @@ -2673,7 +2659,7 @@ fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) -> io::Resu self.print_mutability(m)?; word(&mut self.s, "self")?; self.word_space(":")?; - self.print_type(&typ) + self.print_type(typ) } } } @@ -2725,7 +2711,7 @@ pub fn print_fn_block_args( self.word_space("->")?; match decl.output { ast::FunctionRetTy::Ty(ref ty) => { - self.print_type(&ty)?; + self.print_type(ty)?; self.maybe_print_comment(ty.span.lo) } ast::FunctionRetTy::Default(..) => unreachable!(), @@ -2839,7 +2825,7 @@ pub fn print_ty_param(&mut self, param: &ast::TyParam) -> io::Result<()> { Some(ref default) => { space(&mut self.s)?; self.word_space("=")?; - self.print_type(&default) + self.print_type(default) } _ => Ok(()) } @@ -2865,7 +2851,7 @@ pub fn print_where_clause(&mut self, where_clause: &ast::WhereClause) ref bounds, ..}) => { self.print_formal_lifetime_list(bound_lifetimes)?; - self.print_type(&bounded_ty)?; + self.print_type(bounded_ty)?; self.print_bounds(":", bounds)?; } ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime, @@ -2977,7 +2963,7 @@ pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> io::Result<()> { match decl.output { ast::FunctionRetTy::Default(..) => unreachable!(), ast::FunctionRetTy::Ty(ref ty) => - self.print_type(&ty)? + self.print_type(ty)? } self.end()?; @@ -3044,14 +3030,9 @@ pub fn print_remaining_comments(&mut self) -> io::Result<()> { if self.next_comment().is_none() { hardbreak(&mut self.s)?; } - loop { - match self.next_comment() { - Some(ref cmnt) => { - self.print_comment(cmnt)?; - self.cur_cmnt_and_lit.cur_cmnt += 1; - } - _ => break - } + while let Some(ref cmnt) = self.next_comment() { + self.print_comment(cmnt)?; + self.cur_cmnt_and_lit.cur_cmnt += 1; } Ok(()) } diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index c7820a15fb3..8e257102e1c 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -18,7 +18,7 @@ use tokenstream::TokenStream; /// Craft a span that will be ignored by the stability lint's -/// call to codemap's is_internal check. +/// call to codemap's `is_internal` check. /// The expanded code uses the unstable `#[prelude_import]` attribute. 
fn ignored_span(sp: Span) -> Span { let mark = Mark::fresh(); @@ -49,7 +49,7 @@ pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option return krate, }; - let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string())); + let crate_name = Symbol::intern(&alt_std_name.unwrap_or_else(|| name.to_string())); krate.module.items.insert(0, P(ast::Item { attrs: vec![attr::mk_attr_outer(DUMMY_SP, diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 91746a2edd9..bb1a6ff65a5 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -106,9 +106,8 @@ fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate { // Add a special __test module to the crate that will contain code // generated for the test harness let (mod_, reexport) = mk_test_module(&mut self.cx); - match reexport { - Some(re) => folded.module.items.push(re), - None => {} + if let Some(re) = reexport { + folded.module.items.push(re) } folded.module.items.push(mod_); folded @@ -257,7 +256,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt, let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent); let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item { - ident: sym.clone(), + ident: sym, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Mod(reexport_mod), @@ -308,7 +307,7 @@ fn generate_test_harness(sess: &ParseSess, } /// Craft a span that will be ignored by the stability lint's -/// call to codemap's is_internal check. +/// call to codemap's `is_internal` check. /// The expanded code calls some unstable functions in the test crate. fn ignored_span(cx: &TestCtxt, sp: Span) -> Span { Span { ctxt: cx.ctxt, ..sp } @@ -354,7 +353,7 @@ fn has_test_signature(i: &ast::Item) -> HasTestSignature { } } - return has_test_attr && has_test_signature(i) == Yes; + has_test_attr && has_test_signature(i) == Yes } fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool { @@ -385,7 +384,7 @@ fn has_test_signature(i: &ast::Item) -> bool { `fn(&mut Bencher) -> ()`"); } - return has_bench_attr && has_test_signature(i); + has_bench_attr && has_test_signature(i) } fn is_ignored(i: &ast::Item) -> bool { @@ -504,16 +503,14 @@ fn mk_main(cx: &mut TestCtxt) -> P { ast::Unsafety::Normal, dummy_spanned(ast::Constness::NotConst), ::abi::Abi::Rust, ast::Generics::default(), main_body); - let main = P(ast::Item { + P(ast::Item { ident: Ident::from_str("main"), attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, node: main, vis: ast::Visibility::Public, span: sp - }); - - return main; + }) } fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 86bfdebe42b..9c1371a31fe 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -10,16 +10,16 @@ //! # Token Streams //! -//! TokenStreams represent syntactic objects before they are converted into ASTs. +//! `TokenStream`s represent syntactic objects before they are converted into ASTs. //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s, //! which are themselves a single `Token` or a `Delimited` subsequence of tokens. //! //! ## Ownership -//! TokenStreams are persistent data structures constructed as ropes with reference -//! counted-children. In general, this means that calling an operation on a TokenStream -//! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to -//! the original. 
This essentially coerces TokenStreams into 'views' of their subparts, -//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking +//! `TokenStreams` are persistent data structures constructed as ropes with reference +//! counted-children. In general, this means that calling an operation on a `TokenStream` +//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to +//! the original. This essentially coerces `TokenStream`s into 'views' of their subparts, +//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking //! ownership of the original. use syntax_pos::{BytePos, Span, DUMMY_SP}; @@ -88,7 +88,7 @@ pub fn stream(&self) -> TokenStream { /// If the syntax extension is an MBE macro, it will attempt to match its /// LHS token tree against the provided token tree, and if it finds a /// match, will transcribe the RHS token tree, splicing in any captured -/// macro_parser::matched_nonterminals into the `SubstNt`s it finds. +/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds. /// /// The RHS of an MBE macro is the only place `SubstNt`s are substituted. /// Nothing special happens to misnamed or misplaced `SubstNt`s. diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs index a6fff2d7074..9307f3c58d4 100644 --- a/src/libsyntax/util/lev_distance.rs +++ b/src/libsyntax/util/lev_distance.rs @@ -53,9 +53,10 @@ pub fn find_best_match_for_name<'a, T>(iter_names: T, iter_names .filter_map(|&name| { let dist = lev_distance(lookup, &name.as_str()); - match dist <= max_dist { // filter the unwanted cases - true => Some((name, dist)), - false => None, + if dist <= max_dist { // filter the unwanted cases + Some((name, dist)) + } else { + None } }) .min_by_key(|&(_, val)| val) // extract the tuple containing the minimum edit distance diff --git a/src/libsyntax/util/move_map.rs b/src/libsyntax/util/move_map.rs index fe05e2958b3..8cc37afa354 100644 --- a/src/libsyntax/util/move_map.rs +++ b/src/libsyntax/util/move_map.rs @@ -37,10 +37,10 @@ fn move_flat_map(mut self, mut f: F) -> Self // move the read_i'th item out of the vector and map it // to an iterator let e = ptr::read(self.get_unchecked(read_i)); - let mut iter = f(e).into_iter(); + let iter = f(e).into_iter(); read_i += 1; - while let Some(e) = iter.next() { + for e in iter { if write_i < read_i { ptr::write(self.get_unchecked_mut(write_i), e); write_i += 1; @@ -93,10 +93,10 @@ fn move_flat_map(mut self, mut f: F) -> Self // move the read_i'th item out of the vector and map it // to an iterator let e = ptr::read(self.get_unchecked(read_i)); - let mut iter = f(e).into_iter(); + let iter = f(e).into_iter(); read_i += 1; - while let Some(e) = iter.next() { + for e in iter { if write_i < read_i { ptr::write(self.get_unchecked_mut(write_i), e); write_i += 1; diff --git a/src/libsyntax/util/node_count.rs b/src/libsyntax/util/node_count.rs index 9d9957a0f45..0a5d0c2e7fe 100644 --- a/src/libsyntax/util/node_count.rs +++ b/src/libsyntax/util/node_count.rs @@ -31,7 +31,7 @@ fn visit_ident(&mut self, span: Span, ident: Ident) { self.count += 1; walk_ident(self, span, ident); } - fn visit_mod(&mut self, m: &Mod, _s: Span, _n: NodeId) { + fn visit_mod(&mut self, m: &Mod, _s: Span, _a: &[Attribute], _n: NodeId) { self.count += 1; walk_mod(self, m) } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 2e42c6986e6..0fa0753b22c 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -27,6 +27,7 
@@ use ast::*; use syntax_pos::Span; use codemap::Spanned; +use tokenstream::ThinTokenStream; #[derive(Copy, Clone, PartialEq, Eq)] pub enum FnKind<'a> { @@ -56,7 +57,9 @@ fn visit_name(&mut self, _span: Span, _name: Name) { fn visit_ident(&mut self, span: Span, ident: Ident) { walk_ident(self, span, ident); } - fn visit_mod(&mut self, m: &'ast Mod, _s: Span, _n: NodeId) { walk_mod(self, m) } + fn visit_mod(&mut self, m: &'ast Mod, _s: Span, _attrs: &[Attribute], _n: NodeId) { + walk_mod(self, m); + } fn visit_foreign_item(&mut self, i: &'ast ForeignItem) { walk_foreign_item(self, i) } fn visit_global_asm(&mut self, ga: &'ast GlobalAsm) { walk_global_asm(self, ga) } fn visit_item(&mut self, i: &'ast Item) { walk_item(self, i) } @@ -110,6 +113,9 @@ fn visit_mac(&mut self, _mac: &'ast Mac) { // definition in your trait impl: // visit::walk_mac(self, _mac) } + fn visit_mac_def(&mut self, _mac: &'ast ThinTokenStream, _id: NodeId) { + // Nothing to do + } fn visit_path(&mut self, path: &'ast Path, _id: NodeId) { walk_path(self, path) } @@ -172,7 +178,7 @@ pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, ident: Ident) } pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) { - visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID); + visitor.visit_mod(&krate.module, krate.span, &krate.attrs, CRATE_NODE_ID); walk_list!(visitor, visit_attribute, &krate.attrs); } @@ -249,7 +255,7 @@ pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) { item.id) } ItemKind::Mod(ref module) => { - visitor.visit_mod(module, item.span, item.id) + visitor.visit_mod(module, item.span, &item.attrs, item.id) } ItemKind::ForeignMod(ref foreign_module) => { walk_list!(visitor, visit_foreign_item, &foreign_module.items); @@ -288,7 +294,7 @@ pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) { walk_list!(visitor, visit_trait_item, methods); } ItemKind::Mac(ref mac) => visitor.visit_mac(mac), - ItemKind::MacroDef(..) 
=> {}, + ItemKind::MacroDef(ref ts) => visitor.visit_mac_def(ts, item.id), } walk_list!(visitor, visit_attribute, &item.attrs); } @@ -343,9 +349,7 @@ pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) { visitor.visit_ty(ty); visitor.visit_expr(expression) } - TyKind::TraitObject(ref bounds) => { - walk_list!(visitor, visit_ty_param_bound, bounds); - } + TyKind::TraitObject(ref bounds) | TyKind::ImplTrait(ref bounds) => { walk_list!(visitor, visit_ty_param_bound, bounds); } @@ -540,7 +544,7 @@ pub fn walk_fn<'a, V>(visitor: &mut V, kind: FnKind<'a>, declaration: &'a FnDecl walk_fn_decl(visitor, declaration); visitor.visit_block(body); } - FnKind::Method(_, ref sig, _, body) => { + FnKind::Method(_, sig, _, body) => { visitor.visit_generics(&sig.generics); walk_fn_decl(visitor, declaration); visitor.visit_block(body); @@ -776,7 +780,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) { } ExprKind::InlineAsm(ref ia) => { for &(_, ref input) in &ia.inputs { - visitor.visit_expr(&input) + visitor.visit_expr(input) } for output in &ia.outputs { visitor.visit_expr(&output.expr) diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index bb89caab709..6318abec69f 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -329,7 +329,7 @@ fn visit_item(&mut self, item: &'a ast::Item) { visit::walk_item(self, item); } - fn visit_mod(&mut self, m: &'a ast::Mod, _s: Span, id: NodeId) { + fn visit_mod(&mut self, m: &'a ast::Mod, _s: Span, _a: &[ast::Attribute], id: NodeId) { let mut prev_in_root = self.in_root; if id != ast::CRATE_NODE_ID { prev_in_root = mem::replace(&mut self.in_root, false); diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index 0d615db3deb..ef048ac8ca3 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -542,6 +542,7 @@ struct ConsoleTestState { passed: usize, failed: usize, ignored: usize, + filtered_out: usize, measured: usize, metrics: MetricMap, failures: Vec<(TestDesc, Vec)>, @@ -570,6 +571,7 @@ pub fn new(opts: &TestOpts, _: Option) -> io::Result io::Result { } else { self.write_pretty("FAILED", term::color::RED)?; } - let s = format!(". {} passed; {} failed; {} ignored; {} measured\n\n", + let s = format!(". 
{} passed; {} failed; {} ignored; {} measured; {} filtered out\n\n", self.passed, self.failed, self.ignored, - self.measured); + self.measured, + self.filtered_out); self.write_plain(&s)?; return Ok(success); } @@ -875,6 +878,7 @@ pub fn run_tests_console(opts: &TestOpts, tests: Vec) -> io::Resu fn callback(event: &TestEvent, st: &mut ConsoleTestState) -> io::Result<()> { match (*event).clone() { TeFiltered(ref filtered_tests) => st.write_run_start(filtered_tests.len()), + TeFilteredOut(filtered_out) => Ok(st.filtered_out = filtered_out), TeWait(ref test, padding) => st.write_test_start(test, padding), TeTimeout(ref test) => st.write_timeout(test), TeResult(test, result, stdout) => { @@ -957,6 +961,7 @@ fn should_sort_failures_before_printing_them() { passed: 0, failed: 0, ignored: 0, + filtered_out: 0, measured: 0, max_name_len: 10, metrics: MetricMap::new(), @@ -1017,6 +1022,7 @@ pub enum TestEvent { TeWait(TestDesc, NamePadding), TeResult(TestDesc, TestResult, Vec), TeTimeout(TestDesc), + TeFilteredOut(usize), } pub type MonitorMsg = (TestDesc, TestResult, Vec); @@ -1028,11 +1034,16 @@ pub fn run_tests(opts: &TestOpts, tests: Vec, mut callback: F) use std::collections::HashMap; use std::sync::mpsc::RecvTimeoutError; + let tests_len = tests.len(); + let mut filtered_tests = filter_tests(opts, tests); if !opts.bench_benchmarks { filtered_tests = convert_benchmarks_to_tests(filtered_tests); } + let filtered_out = tests_len - filtered_tests.len(); + callback(TeFilteredOut(filtered_out))?; + let filtered_descs = filtered_tests.iter() .map(|t| t.desc.clone()) .collect(); diff --git a/src/llvm b/src/llvm index cf85b5a8da7..1ef3b9128e1 160000 --- a/src/llvm +++ b/src/llvm @@ -1 +1 @@ -Subproject commit cf85b5a8da7853c4de5cc57766da8b7988c06461 +Subproject commit 1ef3b9128e1baaed61b42d5b0de79dee100acf17 diff --git a/src/rust-installer b/src/rust-installer deleted file mode 160000 index 2e6417f6af5..00000000000 --- a/src/rust-installer +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2e6417f6af5218a29a8ee72ed17af085560b9b9c diff --git a/src/rustllvm/llvm-rebuild-trigger b/src/rustllvm/llvm-rebuild-trigger index d73d1c25e5b..70663f30e8f 100644 --- a/src/rustllvm/llvm-rebuild-trigger +++ b/src/rustllvm/llvm-rebuild-trigger @@ -1,4 +1,4 @@ # If this file is modified, then llvm will be (optionally) cleaned and then rebuilt. # The actual contents of this file do not matter, but to trigger a change on the # build bots then the contents should be changed so git updates the mtime. 
-2017-05-06 +2017-05-13 diff --git a/src/test/compile-fail-fulldeps/proc-macro/resolve-error.rs b/src/test/compile-fail-fulldeps/proc-macro/resolve-error.rs index e0066dd43be..ddd8631f02e 100644 --- a/src/test/compile-fail-fulldeps/proc-macro/resolve-error.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/resolve-error.rs @@ -14,6 +14,7 @@ // aux-build:bang_proc_macro.rs #![feature(proc_macro)] +#![allow(unused_macros)] #[macro_use] extern crate derive_foo; diff --git a/src/test/compile-fail/E0435.rs b/src/test/compile-fail/E0435.rs index f687633d34d..b15bf44fbd0 100644 --- a/src/test/compile-fail/E0435.rs +++ b/src/test/compile-fail/E0435.rs @@ -10,6 +10,6 @@ fn main () { let foo = 42u32; - const FOO : u32 = foo; //~ ERROR E0435 + let _: [u8; foo]; //~ ERROR E0435 //~| NOTE non-constant used with constant } diff --git a/src/test/compile-fail/associated-const-type-parameter-arrays-2.rs b/src/test/compile-fail/associated-const-type-parameter-arrays-2.rs index 7fd9605ef2c..e284a61eb2d 100644 --- a/src/test/compile-fail/associated-const-type-parameter-arrays-2.rs +++ b/src/test/compile-fail/associated-const-type-parameter-arrays-2.rs @@ -26,7 +26,7 @@ impl Foo for Def { pub fn test() { let _array = [4; ::Y]; - //~^ ERROR cannot use an outer type parameter in this context [E0402] + //~^ ERROR the trait bound `A: Foo` is not satisfied [E0277] } fn main() { diff --git a/src/test/compile-fail/associated-const-type-parameter-arrays.rs b/src/test/compile-fail/associated-const-type-parameter-arrays.rs index 71c7a3965ec..848ea65a9cf 100644 --- a/src/test/compile-fail/associated-const-type-parameter-arrays.rs +++ b/src/test/compile-fail/associated-const-type-parameter-arrays.rs @@ -26,7 +26,7 @@ impl Foo for Def { pub fn test() { let _array: [u32; ::Y]; - //~^ ERROR cannot use an outer type parameter in this context [E0402] + //~^ ERROR the trait bound `A: Foo` is not satisfied [E0277] } fn main() { diff --git a/src/test/compile-fail/coherence-inherited-assoc-ty-cycle-err.rs b/src/test/compile-fail/coherence-inherited-assoc-ty-cycle-err.rs new file mode 100644 index 00000000000..5d7f3396740 --- /dev/null +++ b/src/test/compile-fail/coherence-inherited-assoc-ty-cycle-err.rs @@ -0,0 +1,34 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Formerly this ICEd with the following message: +// Tried to project an inherited associated type during coherence checking, +// which is currently not supported. +// +// Now we expect to run into a more user-friendly cycle error instead.
+ +#![feature(specialization)] + +trait Trait { type Assoc; } +//~^ unsupported cyclic reference between types/traits detected [E0391] + +impl Trait for Vec { + type Assoc = (); +} + +impl Trait for Vec {} + +impl Trait for String { + type Assoc = (); +} + +impl Trait< as Trait>::Assoc> for String {} + +fn main() {} diff --git a/src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs b/src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs index 1aabe6b87df..9af501b1419 100644 --- a/src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs +++ b/src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs @@ -10,6 +10,8 @@ // gate-test-allow_internal_unstable +#![allow(unused_macros)] + macro_rules! bar { () => { // more layers don't help: diff --git a/src/test/compile-fail/feature-gate-allow-internal-unstable.rs b/src/test/compile-fail/feature-gate-allow-internal-unstable.rs index 8a2d8dddac0..61a362cb37f 100644 --- a/src/test/compile-fail/feature-gate-allow-internal-unstable.rs +++ b/src/test/compile-fail/feature-gate-allow-internal-unstable.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + #[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps macro_rules! foo { () => {} diff --git a/src/test/compile-fail/inner-static-type-parameter.rs b/src/test/compile-fail/inner-static-type-parameter.rs index a6a33198458..6fb497092d2 100644 --- a/src/test/compile-fail/inner-static-type-parameter.rs +++ b/src/test/compile-fail/inner-static-type-parameter.rs @@ -14,7 +14,7 @@ enum Bar { What } //~ ERROR parameter `T` is never used fn foo() { static a: Bar = Bar::What; - //~^ ERROR cannot use an outer type parameter in this context +//~^ ERROR can't use type parameters from outer function; try using a local type parameter instead } fn main() { diff --git a/src/test/compile-fail/invalid-macro-matcher.rs b/src/test/compile-fail/invalid-macro-matcher.rs index a0ac5d4c720..d710f5647dd 100644 --- a/src/test/compile-fail/invalid-macro-matcher.rs +++ b/src/test/compile-fail/invalid-macro-matcher.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + macro_rules! invalid { _ => (); //~ ERROR invalid macro matcher } diff --git a/src/test/compile-fail/issue-21356.rs b/src/test/compile-fail/issue-21356.rs index fefd432e229..f66c09291cc 100644 --- a/src/test/compile-fail/issue-21356.rs +++ b/src/test/compile-fail/issue-21356.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + macro_rules! test { ($wrong:t_ty ..) 
=> () } //~^ ERROR: invalid fragment specifier `t_ty` diff --git a/src/test/compile-fail/issue-27433.rs b/src/test/compile-fail/issue-27433.rs index 78d96398b95..782b2057438 100644 --- a/src/test/compile-fail/issue-27433.rs +++ b/src/test/compile-fail/issue-27433.rs @@ -11,5 +11,5 @@ fn main() { let foo = 42u32; const FOO : u32 = foo; - //~^ ERROR attempt to use a non-constant value in a constant + //~^ ERROR can't capture dynamic environment } diff --git a/src/test/compile-fail/issue-27942.rs b/src/test/compile-fail/issue-27942.rs index 595e4bfb0d7..22e7de3838d 100644 --- a/src/test/compile-fail/issue-27942.rs +++ b/src/test/compile-fail/issue-27942.rs @@ -11,17 +11,18 @@ pub trait Resources<'a> {} pub trait Buffer<'a, R: Resources<'a>> { + //~^ NOTE the lifetime 'a as defined on the trait at 13:0... + //~| NOTE ...does not necessarily outlive the lifetime 'a as defined on the trait + fn select(&self) -> BufferViewHandle; //~^ ERROR mismatched types //~| lifetime mismatch //~| NOTE expected type `Resources<'_>` - //~| NOTE the lifetime 'a as defined on the method body at 14:4... //~| NOTE ...does not necessarily outlive the anonymous lifetime #1 defined on the method body //~| ERROR mismatched types //~| lifetime mismatch //~| NOTE expected type `Resources<'_>` - //~| NOTE the anonymous lifetime #1 defined on the method body at 14:4... - //~| NOTE ...does not necessarily outlive the lifetime 'a as defined on the method body + //~| NOTE the anonymous lifetime #1 defined on the method body at 17:4... } pub struct BufferViewHandle<'a, R: 'a+Resources<'a>>(&'a R); diff --git a/src/test/compile-fail/issue-3521-2.rs b/src/test/compile-fail/issue-3521-2.rs index 6cd2c02c417..1742cb4fb72 100644 --- a/src/test/compile-fail/issue-3521-2.rs +++ b/src/test/compile-fail/issue-3521-2.rs @@ -12,7 +12,7 @@ fn main() { let foo = 100; static y: isize = foo + 1; - //~^ ERROR attempt to use a non-constant value in a constant + //~^ ERROR can't capture dynamic environment println!("{}", y); } diff --git a/src/test/compile-fail/issue-3668-2.rs b/src/test/compile-fail/issue-3668-2.rs index 16fb2f68133..fe46877e8d3 100644 --- a/src/test/compile-fail/issue-3668-2.rs +++ b/src/test/compile-fail/issue-3668-2.rs @@ -10,7 +10,7 @@ fn f(x:isize) { static child: isize = x + 1; - //~^ ERROR attempt to use a non-constant value in a constant + //~^ ERROR can't capture dynamic environment } fn main() {} diff --git a/src/test/compile-fail/issue-3668.rs b/src/test/compile-fail/issue-3668.rs index 9c31dc1e38e..00f64414a9e 100644 --- a/src/test/compile-fail/issue-3668.rs +++ b/src/test/compile-fail/issue-3668.rs @@ -16,7 +16,7 @@ trait PTrait { impl PTrait for P { fn getChildOption(&self) -> Option> { static childVal: Box
<P>
= self.child.get(); - //~^ ERROR attempt to use a non-constant value in a constant + //~^ ERROR can't capture dynamic environment panic!(); } } diff --git a/src/test/compile-fail/issue-37884.rs b/src/test/compile-fail/issue-37884.rs index 6e1b9b2fbed..6313293bf2b 100644 --- a/src/test/compile-fail/issue-37884.rs +++ b/src/test/compile-fail/issue-37884.rs @@ -11,14 +11,15 @@ struct RepeatMut<'a, T>(T, &'a ()); impl<'a, T: 'a> Iterator for RepeatMut<'a, T> { + //~^ NOTE ...does not necessarily outlive the lifetime 'a as defined on the impl + type Item = &'a mut T; fn next(&'a mut self) -> Option //~^ ERROR method not compatible with trait //~| lifetime mismatch //~| NOTE expected type `fn(&mut RepeatMut<'a, T>) -> std::option::Option<&mut T>` + //~| NOTE the anonymous lifetime #1 defined on the method body { - //~^ NOTE the anonymous lifetime #1 defined on the body - //~| NOTE ...does not necessarily outlive the lifetime 'a as defined on the body Some(&mut self.0) } } diff --git a/src/test/compile-fail/issue-39388.rs b/src/test/compile-fail/issue-39388.rs index 6994d2199d2..15eef429eab 100644 --- a/src/test/compile-fail/issue-39388.rs +++ b/src/test/compile-fail/issue-39388.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + macro_rules! assign { (($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected `*` or `+` $($a)* = $($b)* diff --git a/src/test/compile-fail/issue-39404.rs b/src/test/compile-fail/issue-39404.rs index 0168ae7d910..8b49772494a 100644 --- a/src/test/compile-fail/issue-39404.rs +++ b/src/test/compile-fail/issue-39404.rs @@ -9,6 +9,7 @@ // except according to those terms. #![deny(missing_fragment_specifier)] //~ NOTE lint level defined here +#![allow(unused_macros)] macro_rules! m { ($i) => {} } //~^ ERROR missing fragment specifier diff --git a/src/test/compile-fail/issue-39559-2.rs b/src/test/compile-fail/issue-39559-2.rs new file mode 100644 index 00000000000..aa075023064 --- /dev/null +++ b/src/test/compile-fail/issue-39559-2.rs @@ -0,0 +1,28 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +trait Dim { + fn dim() -> usize; +} + +enum Dim3 {} + +impl Dim for Dim3 { + fn dim() -> usize { + 3 + } +} + +fn main() { + let array: [usize; Dim3::dim()] + //~^ ERROR calls in constants are limited to constant functions + = [0; Dim3::dim()]; + //~^ ERROR calls in constants are limited to constant functions +} diff --git a/src/test/compile-fail/issue-39559.rs b/src/test/compile-fail/issue-39559.rs index b7f767f109c..871ecf269ce 100644 --- a/src/test/compile-fail/issue-39559.rs +++ b/src/test/compile-fail/issue-39559.rs @@ -22,12 +22,7 @@ fn dim() -> usize { pub struct Vector { entries: [T; D::dim()] - //~^ ERROR cannot use an outer type parameter in this context + //~^ ERROR no associated item named `dim` found for type `D` in the current scope } -fn main() { - let array: [usize; Dim3::dim()] - //~^ ERROR calls in constants are limited to constant functions - = [0; Dim3::dim()]; - //~^ ERROR calls in constants are limited to constant functions -} +fn main() {} diff --git a/src/test/compile-fail/issue-5067.rs b/src/test/compile-fail/issue-5067.rs index 1c543a5fdac..267362f902d 100644 --- a/src/test/compile-fail/issue-5067.rs +++ b/src/test/compile-fail/issue-5067.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + macro_rules! foo { ( $()* ) => {}; //~^ ERROR repetition matches empty token tree diff --git a/src/test/compile-fail/macro-expansion-tests.rs b/src/test/compile-fail/macro-expansion-tests.rs index ada06b0b3f4..06f2d86e5d9 100644 --- a/src/test/compile-fail/macro-expansion-tests.rs +++ b/src/test/compile-fail/macro-expansion-tests.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + mod macros_cant_escape_fns { fn f() { macro_rules! m { () => { 3 + 4 } } diff --git a/src/test/compile-fail/macro-follow.rs b/src/test/compile-fail/macro-follow.rs index 001bc42274b..6e80e9b574b 100644 --- a/src/test/compile-fail/macro-follow.rs +++ b/src/test/compile-fail/macro-follow.rs @@ -10,6 +10,8 @@ // // Check the macro follow sets (see corresponding rpass test). +#![allow(unused_macros)] + // FOLLOW(pat) = {FatArrow, Comma, Eq, Or, Ident(if), Ident(in)} macro_rules! follow_pat { ($p:pat ()) => {}; //~ERROR `$p:pat` is followed by `(` diff --git a/src/test/compile-fail/macro-followed-by-seq-bad.rs b/src/test/compile-fail/macro-followed-by-seq-bad.rs index 0ee2221bbc1..21cc946ded6 100644 --- a/src/test/compile-fail/macro-followed-by-seq-bad.rs +++ b/src/test/compile-fail/macro-followed-by-seq-bad.rs @@ -11,6 +11,8 @@ // Regression test for issue #25436: check that things which can be // followed by any token also permit X* to come afterwards. +#![allow(unused_macros)] + macro_rules! foo { ( $a:expr $($b:tt)* ) => { }; //~ ERROR not allowed for `expr` fragments ( $a:ty $($b:tt)* ) => { }; //~ ERROR not allowed for `ty` fragments diff --git a/src/test/compile-fail/macro-input-future-proofing.rs b/src/test/compile-fail/macro-input-future-proofing.rs index fe758a4a631..e5fdba63b0f 100644 --- a/src/test/compile-fail/macro-input-future-proofing.rs +++ b/src/test/compile-fail/macro-input-future-proofing.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + macro_rules! 
errors_everywhere { ($ty:ty <) => (); //~ ERROR `$ty:ty` is followed by `<`, which is not allowed for `ty` ($ty:ty < foo ,) => (); //~ ERROR `$ty:ty` is followed by `<`, which is not allowed for `ty` diff --git a/src/test/compile-fail/macro-shadowing.rs b/src/test/compile-fail/macro-shadowing.rs index 8381dc34a6a..f5e7305e4ea 100644 --- a/src/test/compile-fail/macro-shadowing.rs +++ b/src/test/compile-fail/macro-shadowing.rs @@ -10,6 +10,8 @@ // aux-build:two_macros.rs +#![allow(unused_macros)] + macro_rules! foo { () => {} } macro_rules! macro_one { () => {} } #[macro_use(macro_two)] extern crate two_macros; diff --git a/src/test/compile-fail/method-help-unsatisfied-bound.rs b/src/test/compile-fail/method-help-unsatisfied-bound.rs deleted file mode 100644 index 6416d5467c8..00000000000 --- a/src/test/compile-fail/method-help-unsatisfied-bound.rs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -struct Foo; - -fn main() { - let a: Result<(), Foo> = Ok(()); - a.unwrap(); - //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>` - //~| NOTE the following trait bounds were not satisfied: `Foo : std::fmt::Debug` -} diff --git a/src/test/compile-fail/unused-macro-with-bad-frag-spec.rs b/src/test/compile-fail/unused-macro-with-bad-frag-spec.rs index b868b79365d..28a69e6f9e2 100644 --- a/src/test/compile-fail/unused-macro-with-bad-frag-spec.rs +++ b/src/test/compile-fail/unused-macro-with-bad-frag-spec.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + // Issue #21370 macro_rules! test { diff --git a/src/test/compile-fail/unused-macro-with-follow-violation.rs b/src/test/compile-fail/unused-macro-with-follow-violation.rs index e9d09bb6ad9..dda0d3fc955 100644 --- a/src/test/compile-fail/unused-macro-with-follow-violation.rs +++ b/src/test/compile-fail/unused-macro-with-follow-violation.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + macro_rules! test { ($e:expr +) => () //~ ERROR not allowed for `expr` fragments } diff --git a/src/test/compile-fail/unused-macro.rs b/src/test/compile-fail/unused-macro.rs new file mode 100644 index 00000000000..5e401c09bda --- /dev/null +++ b/src/test/compile-fail/unused-macro.rs @@ -0,0 +1,39 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![deny(unused_macros)] + +// Most simple case +macro_rules! unused { //~ ERROR: unused macro definition + () => {}; +} + +// Test macros created by macros +macro_rules! create_macro { + () => { + macro_rules! m { //~ ERROR: unused macro definition + () => {}; + } + }; +} +create_macro!(); + +#[allow(unused_macros)] +mod bar { + // Test that putting the #[deny] close to the macro's definition + // works. + + #[deny(unused_macros)] + macro_rules! 
unused { //~ ERROR: unused macro definition + () => {}; + } +} + +fn main() {} diff --git a/src/test/compile-fail/user-defined-macro-rules.rs b/src/test/compile-fail/user-defined-macro-rules.rs index d55cef434f8..02e1a585fa8 100644 --- a/src/test/compile-fail/user-defined-macro-rules.rs +++ b/src/test/compile-fail/user-defined-macro-rules.rs @@ -8,4 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(unused_macros)] + macro_rules! macro_rules { () => {} } //~ ERROR user-defined macros may not be named `macro_rules` diff --git a/src/test/incremental/add_private_fn_at_krate_root_cc/auxiliary/point.rs b/src/test/incremental/add_private_fn_at_krate_root_cc/auxiliary/point.rs index 1064c97b744..adc2b23441e 100644 --- a/src/test/incremental/add_private_fn_at_krate_root_cc/auxiliary/point.rs +++ b/src/test/incremental/add_private_fn_at_krate_root_cc/auxiliary/point.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z incremental-cc - pub struct Point { pub x: f32, pub y: f32, diff --git a/src/test/incremental/callee_caller_cross_crate/auxiliary/a.rs b/src/test/incremental/callee_caller_cross_crate/auxiliary/a.rs index a02b71a753c..d802c9a8352 100644 --- a/src/test/incremental/callee_caller_cross_crate/auxiliary/a.rs +++ b/src/test/incremental/callee_caller_cross_crate/auxiliary/a.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z incremental-cc - #![crate_type="rlib"] #[cfg(rpass1)] diff --git a/src/test/incremental/change_private_fn_cc/auxiliary/point.rs b/src/test/incremental/change_private_fn_cc/auxiliary/point.rs index 08eef2a73f6..dcc1ced635f 100644 --- a/src/test/incremental/change_private_fn_cc/auxiliary/point.rs +++ b/src/test/incremental/change_private_fn_cc/auxiliary/point.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z incremental-cc - pub struct Point { pub x: f32, pub y: f32, diff --git a/src/test/incremental/change_private_impl_method_cc/auxiliary/point.rs b/src/test/incremental/change_private_impl_method_cc/auxiliary/point.rs index e69dc51119e..8df1cf54da2 100644 --- a/src/test/incremental/change_private_impl_method_cc/auxiliary/point.rs +++ b/src/test/incremental/change_private_impl_method_cc/auxiliary/point.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// compile-flags: -Z incremental-cc - pub struct Point { pub x: f32, pub y: f32, diff --git a/src/test/incremental/remapped_paths_cc/auxiliary/extern_crate.rs b/src/test/incremental/remapped_paths_cc/auxiliary/extern_crate.rs index 09db90d618b..1483bf92c97 100644 --- a/src/test/incremental/remapped_paths_cc/auxiliary/extern_crate.rs +++ b/src/test/incremental/remapped_paths_cc/auxiliary/extern_crate.rs @@ -10,7 +10,6 @@ // ignore-tidy-linelength -// aux-build:extern_crate.rs //[rpass1] compile-flags: -g //[rpass2] compile-flags: -g //[rpass3] compile-flags: -g -Zremap-path-prefix-from={{src-base}} -Zremap-path-prefix-to=/the/src diff --git a/src/test/incremental/remapped_paths_cc/main.rs b/src/test/incremental/remapped_paths_cc/main.rs index 8a8c658accc..be4764c7d99 100644 --- a/src/test/incremental/remapped_paths_cc/main.rs +++ b/src/test/incremental/remapped_paths_cc/main.rs @@ -9,7 +9,7 @@ // except according to those terms. // revisions:rpass1 rpass2 rpass3 -// compile-flags: -Z query-dep-graph -g +// compile-flags: -Z query-dep-graph -g -Zincremental-cc // aux-build:extern_crate.rs diff --git a/src/test/incremental/remove-private-item-cross-crate/auxiliary/a.rs b/src/test/incremental/remove-private-item-cross-crate/auxiliary/a.rs index 39547fb7359..4d84e844ded 100644 --- a/src/test/incremental/remove-private-item-cross-crate/auxiliary/a.rs +++ b/src/test/incremental/remove-private-item-cross-crate/auxiliary/a.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z incremental-cc - #![allow(warnings)] #![crate_name = "a"] #![crate_type = "rlib"] diff --git a/src/test/incremental/rlib_cross_crate/auxiliary/a.rs b/src/test/incremental/rlib_cross_crate/auxiliary/a.rs index 3ecd9aff3f8..ff5fd634714 100644 --- a/src/test/incremental/rlib_cross_crate/auxiliary/a.rs +++ b/src/test/incremental/rlib_cross_crate/auxiliary/a.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z incremental-cc // no-prefer-dynamic #![crate_type="rlib"] diff --git a/src/test/incremental/struct_change_field_type_cross_crate/auxiliary/a.rs b/src/test/incremental/struct_change_field_type_cross_crate/auxiliary/a.rs index d14ebf78d82..2ddcaf15721 100644 --- a/src/test/incremental/struct_change_field_type_cross_crate/auxiliary/a.rs +++ b/src/test/incremental/struct_change_field_type_cross_crate/auxiliary/a.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z incremental-cc - #![crate_type="rlib"] #[cfg(rpass1)] diff --git a/src/test/incremental/type_alias_cross_crate/auxiliary/a.rs b/src/test/incremental/type_alias_cross_crate/auxiliary/a.rs index 0393bcda991..e1dba131770 100644 --- a/src/test/incremental/type_alias_cross_crate/auxiliary/a.rs +++ b/src/test/incremental/type_alias_cross_crate/auxiliary/a.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z incremental-cc - #![crate_type="rlib"] #[cfg(rpass1)] diff --git a/src/test/mir-opt/issue-41697.rs b/src/test/mir-opt/issue-41697.rs new file mode 100644 index 00000000000..47eeffe35a8 --- /dev/null +++ b/src/test/mir-opt/issue-41697.rs @@ -0,0 +1,48 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for #41697. Using dump-mir was triggering +// artificial cycles: during type-checking, we had to get the MIR for +// the constant expressions in `[u8; 2]`, which in turn would trigger +// an attempt to get the item-path, which in turn would request the +// types of the impl, which would trigger a cycle. We supressed this +// cycle now by forcing mir-dump to avoid asking for types of an impl. + +#![feature(rustc_attrs)] + +use std::sync::Arc; + +trait Foo { + fn get(&self) -> [u8; 2]; +} + +impl Foo for [u8; 2] { + fn get(&self) -> [u8; 2] { + *self + } +} + +struct Bar(T); + +fn unsize_fat_ptr<'a>(x: &'a Bar) -> &'a Bar { + x +} + +fn unsize_nested_fat_ptr(x: Arc) -> Arc { + x +} + +fn main() { + let x: Box> = Box::new(Bar([1,2])); + assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]); + + let x: Arc = Arc::new([3, 4]); + assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]); +} diff --git a/src/test/parse-fail/underscore-suffix-for-float.rs b/src/test/parse-fail/underscore-suffix-for-float.rs new file mode 100644 index 00000000000..df7d9aa374d --- /dev/null +++ b/src/test/parse-fail/underscore-suffix-for-float.rs @@ -0,0 +1,13 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let a = 42._; //~ ERROR unexpected token: `_` +} diff --git a/src/test/run-make/issue-19371/foo.rs b/src/test/run-make/issue-19371/foo.rs index 0336fe277c5..e96588c6e5a 100644 --- a/src/test/run-make/issue-19371/foo.rs +++ b/src/test/run-make/issue-19371/foo.rs @@ -15,6 +15,7 @@ extern crate rustc_lint; extern crate rustc_metadata; extern crate rustc_errors; +extern crate rustc_trans; extern crate syntax; use rustc::dep_graph::DepGraph; @@ -58,8 +59,9 @@ fn basic_sess(sysroot: PathBuf) -> (Session, Rc) { let descriptions = Registry::new(&rustc::DIAGNOSTICS); let dep_graph = DepGraph::new(opts.build_dep_graph()); - let cstore = Rc::new(CStore::new(&dep_graph)); + let cstore = Rc::new(CStore::new(&dep_graph, Box::new(rustc_trans::LlvmMetadataLoader))); let sess = build_session(opts, &dep_graph, None, descriptions, cstore.clone()); + rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); (sess, cstore) } diff --git a/src/test/run-make/llvm-pass/plugin.rs b/src/test/run-make/llvm-pass/plugin.rs index f77b2fca857..37aab2bbd05 100644 --- a/src/test/run-make/llvm-pass/plugin.rs +++ b/src/test/run-make/llvm-pass/plugin.rs @@ -14,6 +14,7 @@ extern crate rustc; extern crate rustc_plugin; +extern crate rustc_trans; #[link(name = "llvm-function-pass", kind = "static")] #[link(name = "llvm-module-pass", kind = "static")] diff --git a/src/test/run-pass-fulldeps/issue-35829.rs b/src/test/run-pass-fulldeps/issue-35829.rs new file mode 100644 index 00000000000..0a4c15a9236 --- /dev/null +++ b/src/test/run-pass-fulldeps/issue-35829.rs @@ -0,0 +1,55 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-stage1 +// ignore-cross-compile +#![feature(quote, rustc_private)] + +extern crate syntax; + +use syntax::ext::base::{ExtCtxt, DummyResolver}; +use syntax::ext::expand::ExpansionConfig; +use syntax::parse::ParseSess; +use syntax::codemap::{FilePathMapping, dummy_spanned}; +use syntax::print::pprust::expr_to_string; +use syntax::ast::{Expr, ExprKind, LitKind, StrStyle, RangeLimits}; +use syntax::symbol::Symbol; +use syntax::ptr::P; + +use std::rc::Rc; + +fn main() { + let parse_sess = ParseSess::new(FilePathMapping::empty()); + let exp_cfg = ExpansionConfig::default("issue_35829".to_owned()); + let mut resolver = DummyResolver; + let cx = ExtCtxt::new(&parse_sess, exp_cfg, &mut resolver); + + // check byte string + let byte_string = quote_expr!(&cx, b"one"); + let byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"one".to_vec())); + assert_eq!(byte_string.node, ExprKind::Lit(P(dummy_spanned(byte_string_lit_kind)))); + + // check raw byte string + let raw_byte_string = quote_expr!(&cx, br###"#"two"#"###); + let raw_byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"#\"two\"#".to_vec())); + assert_eq!(raw_byte_string.node, ExprKind::Lit(P(dummy_spanned(raw_byte_string_lit_kind)))); + + // check dotdotdot + let closed_range = quote_expr!(&cx, 0 ... 1); + assert_eq!(closed_range.node, ExprKind::Range( + Some(quote_expr!(&cx, 0)), + Some(quote_expr!(&cx, 1)), + RangeLimits::Closed + )); + + // test case from 35829 + let expr_35829 = quote_expr!(&cx, std::io::stdout().write(b"one")); + assert_eq!(expr_to_string(&expr_35829), r#"std::io::stdout().write(b"one")"#); +} diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs index 01b0ed80235..e7d0a83017b 100644 --- a/src/test/run-pass-fulldeps/macro-quote-1.rs +++ b/src/test/run-pass-fulldeps/macro-quote-1.rs @@ -17,11 +17,24 @@ extern crate syntax; extern crate syntax_pos; -use syntax::ast::Ident; -use syntax::parse::token; +use syntax::ast::{Ident, Name}; +use syntax::parse::token::{self, Token, Lit}; use syntax::tokenstream::TokenTree; fn main() { let true_tok = token::Ident(Ident::from_str("true")); assert!(quote!(true).eq_unspanned(&true_tok.into())); + + // issue #35829, extended check to proc_macro. 
+ let triple_dot_tok = Token::DotDotDot; + assert!(quote!(...).eq_unspanned(&triple_dot_tok.into())); + + let byte_str_tok = Token::Literal(Lit::ByteStr(Name::intern("one")), None); + assert!(quote!(b"one").eq_unspanned(&byte_str_tok.into())); + + let byte_str_raw_tok = Token::Literal(Lit::ByteStrRaw(Name::intern("#\"two\"#"), 3), None); + assert!(quote!(br###"#"two"#"###).eq_unspanned(&byte_str_raw_tok.into())); + + let str_raw_tok = Token::Literal(Lit::StrRaw(Name::intern("#\"three\"#"), 2), None); + assert!(quote!(r##"#"three"#"##).eq_unspanned(&str_raw_tok.into())); } diff --git a/src/test/run-pass/associated-const-type-parameters.rs b/src/test/run-pass/associated-const-type-parameters.rs index b276589f0c4..df208353064 100644 --- a/src/test/run-pass/associated-const-type-parameters.rs +++ b/src/test/run-pass/associated-const-type-parameters.rs @@ -37,6 +37,10 @@ fn sub() -> i32 { A::X - B::X } +trait Bar: Foo { + const Y: i32 = Self::X; +} + fn main() { assert_eq!(11, Abc::X); assert_eq!(97, Def::X); diff --git a/src/test/run-pass/issue-41696.rs b/src/test/run-pass/issue-41696.rs new file mode 100644 index 00000000000..ae57e0cf255 --- /dev/null +++ b/src/test/run-pass/issue-41696.rs @@ -0,0 +1,61 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// this used to cause exponential code-size blowup during LLVM passes. +// min-llvm-version 3.9 + +#![feature(test)] + +extern crate test; + +struct MayUnwind; + +impl Drop for MayUnwind { + fn drop(&mut self) { + if test::black_box(false) { + panic!() + } + } +} + +struct DS { + may_unwind: MayUnwind, + name: String, + next: U, +} + +fn add(ds: DS, name: String) -> DS> { + DS { + may_unwind: MayUnwind, + name: "?".to_owned(), + next: ds, + } +} + +fn main() { + let deserializers = DS { may_unwind: MayUnwind, name: "?".to_owned(), next: () }; + let deserializers = add(deserializers, "?".to_owned()); + let deserializers = add(deserializers, "?".to_owned()); + let deserializers = add(deserializers, "?".to_owned()); + let deserializers = add(deserializers, "?".to_owned()); + let deserializers = add(deserializers, "?".to_owned()); + let deserializers = add(deserializers, "?".to_owned()); + let deserializers = add(deserializers, "?".to_owned()); // 0.7s + let deserializers = add(deserializers, "?".to_owned()); // 1.3s + let deserializers = add(deserializers, "?".to_owned()); // 2.4s + let deserializers = add(deserializers, "?".to_owned()); // 6.7s + let deserializers = add(deserializers, "?".to_owned()); // 26.0s + let deserializers = add(deserializers, "?".to_owned()); // 114.0s + let deserializers = add(deserializers, "?".to_owned()); // 228.0s + let deserializers = add(deserializers, "?".to_owned()); // 400.0s + let deserializers = add(deserializers, "?".to_owned()); // 800.0s + let deserializers = add(deserializers, "?".to_owned()); // 1600.0s + let deserializers = add(deserializers, "?".to_owned()); // 3200.0s +} diff --git a/src/test/run-pass/issue-41697.rs b/src/test/run-pass/issue-41697.rs deleted file mode 100644 index d59b6a16562..00000000000 --- a/src/test/run-pass/issue-41697.rs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// compile-flags:-Zdump-mir=NEVER_MATCHED - -// Regression test for #41697. Using dump-mir was triggering -// artificial cycles: during type-checking, we had to get the MIR for -// the constant expressions in `[u8; 2]`, which in turn would trigger -// an attempt to get the item-path, which in turn would request the -// types of the impl, which would trigger a cycle. We supressed this -// cycle now by forcing mir-dump to avoid asking for types of an impl. - -#![feature(rustc_attrs)] - -use std::sync::Arc; - -trait Foo { - fn get(&self) -> [u8; 2]; -} - -impl Foo for [u8; 2] { - fn get(&self) -> [u8; 2] { - *self - } -} - -struct Bar(T); - -fn unsize_fat_ptr<'a>(x: &'a Bar) -> &'a Bar { - x -} - -fn unsize_nested_fat_ptr(x: Arc) -> Arc { - x -} - -fn main() { - let x: Box> = Box::new(Bar([1,2])); - assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]); - - let x: Arc = Arc::new([3, 4]); - assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]); -} diff --git a/src/test/run-pass/issue-41803.rs b/src/test/run-pass/issue-41803.rs new file mode 100644 index 00000000000..e18b4204584 --- /dev/null +++ b/src/test/run-pass/issue-41803.rs @@ -0,0 +1,30 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +/// A compile-time map from identifiers to arbitrary (heterogeneous) expressions +macro_rules! ident_map { + ( $name:ident = { $($key:ident => $e:expr,)* } ) => { + macro_rules! $name { + $( + ( $key ) => { $e }; + )* + // Empty invocation expands to nothing. Needed when the map is empty. + () => {}; + } + }; +} + +ident_map!(my_map = { + main => 0, +}); + +fn main() { + my_map!(main); +} diff --git a/src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs b/src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs new file mode 100644 index 00000000000..bfbead87891 --- /dev/null +++ b/src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs @@ -0,0 +1,38 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for #41936. The coerce-unsized trait check in +// coherence was using subtyping, which triggered variance +// computation, which failed because it required type info for fields +// that had not (yet) been computed. + +#![feature(unsize)] +#![feature(coerce_unsized)] + +use std::{marker,ops}; + +// Change the array to a non-array, and error disappears +// Adding a new field to the end keeps the error +struct LogDataBuf([u8;8]); + +struct Aref +{ + // Inner structure triggers the error, removing the inner removes the message. 
+ ptr: Box>, +} +impl, U: ?Sized> ops::CoerceUnsized> for Aref {} + +struct ArefInner +{ + // Even with this field commented out, the error is raised. + data: T, +} + +fn main(){} diff --git a/src/test/run-pass/specialization/assoc-ty-graph-cycle.rs b/src/test/run-pass/specialization/assoc-ty-graph-cycle.rs new file mode 100644 index 00000000000..a65dcf33d85 --- /dev/null +++ b/src/test/run-pass/specialization/assoc-ty-graph-cycle.rs @@ -0,0 +1,33 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Make sure we don't crash with a cycle error during coherence. + +#![feature(specialization)] + +trait Trait { + type Assoc; +} + +impl Trait for Vec { + default type Assoc = (); +} + +impl Trait for Vec { + type Assoc = u8; +} + +impl Trait for String { + type Assoc = (); +} + +impl Trait< as Trait>::Assoc> for String {} + +fn main() {} diff --git a/src/test/run-pass/underscore-method-after-integer.rs b/src/test/run-pass/underscore-method-after-integer.rs new file mode 100644 index 00000000000..af912564211 --- /dev/null +++ b/src/test/run-pass/underscore-method-after-integer.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait Tr : Sized { + fn _method_on_numbers(self) {} +} + +impl Tr for i32 {} + +fn main() { + 42._method_on_numbers(); +} diff --git a/src/test/rustdoc/extern-impl.rs b/src/test/rustdoc/extern-impl.rs index 0e78746704f..5c64b4118c3 100644 --- a/src/test/rustdoc/extern-impl.rs +++ b/src/test/rustdoc/extern-impl.rs @@ -18,9 +18,9 @@ impl Foo { pub fn rust0() {} // @has - '//code' 'fn rust1()' pub extern "Rust" fn rust1() {} - // @has - '//code' 'extern fn c0()' + // @has - '//code' 'extern "C" fn c0()' pub extern fn c0() {} - // @has - '//code' 'extern fn c1()' + // @has - '//code' 'extern "C" fn c1()' pub extern "C" fn c1() {} // @has - '//code' 'extern "system" fn system0()' pub extern "system" fn system0() {} @@ -31,7 +31,7 @@ pub trait Bar {} // @has - '//code' 'impl Bar for fn()' impl Bar for fn() {} -// @has - '//code' 'impl Bar for extern fn()' +// @has - '//code' 'impl Bar for extern "C" fn()' impl Bar for extern fn() {} // @has - '//code' 'impl Bar for extern "system" fn()' impl Bar for extern "system" fn() {} diff --git a/src/test/rustdoc/ffi.rs b/src/test/rustdoc/ffi.rs index 3997dcd81e1..8511d461703 100644 --- a/src/test/rustdoc/ffi.rs +++ b/src/test/rustdoc/ffi.rs @@ -13,10 +13,10 @@ extern crate rustdoc_ffi as lib; -// @has ffi/fn.foreigner.html //pre 'pub unsafe extern fn foreigner(cold_as_ice: u32)' +// @has ffi/fn.foreigner.html //pre 'pub unsafe extern "C" fn foreigner(cold_as_ice: u32)' pub use lib::foreigner; extern "C" { - // @has ffi/fn.another.html //pre 'pub unsafe extern fn another(cold_as_ice: u32)' + // @has ffi/fn.another.html //pre 'pub unsafe extern "C" fn another(cold_as_ice: u32)' pub fn another(cold_as_ice: u32); } diff --git a/src/test/rustdoc/issue-22038.rs b/src/test/rustdoc/issue-22038.rs index 
6f84428b079..75df5358945 100644 --- a/src/test/rustdoc/issue-22038.rs +++ b/src/test/rustdoc/issue-22038.rs @@ -10,7 +10,7 @@ extern { // @has issue_22038/fn.foo1.html \ - // '//*[@class="rust fn"]' 'pub unsafe extern fn foo1()' + // '//*[@class="rust fn"]' 'pub unsafe extern "C" fn foo1()' pub fn foo1(); } @@ -21,7 +21,7 @@ } // @has issue_22038/fn.bar.html \ -// '//*[@class="rust fn"]' 'pub extern fn bar()' +// '//*[@class="rust fn"]' 'pub extern "C" fn bar()' pub extern fn bar() {} // @has issue_22038/fn.baz.html \ diff --git a/src/test/rustdoc/variadic.rs b/src/test/rustdoc/variadic.rs index 1b60c2a334f..6ba776ba467 100644 --- a/src/test/rustdoc/variadic.rs +++ b/src/test/rustdoc/variadic.rs @@ -9,6 +9,6 @@ // except according to those terms. extern "C" { - // @has variadic/fn.foo.html //pre 'pub unsafe extern fn foo(x: i32, ...)' + // @has variadic/fn.foo.html //pre 'pub unsafe extern "C" fn foo(x: i32, ...)' pub fn foo(x: i32, ...); } diff --git a/src/test/ui/lifetime-errors/ex1-return-one-existing-name-if-else.stderr b/src/test/ui/lifetime-errors/ex1-return-one-existing-name-if-else.stderr index cf272b63128..55723ee8cd9 100644 --- a/src/test/ui/lifetime-errors/ex1-return-one-existing-name-if-else.stderr +++ b/src/test/ui/lifetime-errors/ex1-return-one-existing-name-if-else.stderr @@ -4,19 +4,17 @@ error[E0312]: lifetime of reference outlives lifetime of borrowed content... 12 | if x > y { x } else { y } | ^ | -note: ...the reference is valid for the lifetime 'a as defined on the body at 11:43... - --> $DIR/ex1-return-one-existing-name-if-else.rs:11:44 +note: ...the reference is valid for the lifetime 'a as defined on the function body at 11:0... + --> $DIR/ex1-return-one-existing-name-if-else.rs:11:1 | -11 | fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 { - | ____________________________________________^ +11 | / fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 { 12 | | if x > y { x } else { y } 13 | | } | |_^ -note: ...but the borrowed content is only valid for the anonymous lifetime #1 defined on the body at 11:43 - --> $DIR/ex1-return-one-existing-name-if-else.rs:11:44 +note: ...but the borrowed content is only valid for the anonymous lifetime #1 defined on the function body at 11:0 + --> $DIR/ex1-return-one-existing-name-if-else.rs:11:1 | -11 | fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 { - | ____________________________________________^ +11 | / fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 { 12 | | if x > y { x } else { y } 13 | | } | |_^ diff --git a/src/test/ui/lifetime-errors/ex2a-push-one-existing-name.stderr b/src/test/ui/lifetime-errors/ex2a-push-one-existing-name.stderr index 6e03e66dd25..b7d985feca9 100644 --- a/src/test/ui/lifetime-errors/ex2a-push-one-existing-name.stderr +++ b/src/test/ui/lifetime-errors/ex2a-push-one-existing-name.stderr @@ -6,19 +6,17 @@ error[E0308]: mismatched types | = note: expected type `Ref<'a, _>` found type `Ref<'_, _>` -note: the anonymous lifetime #2 defined on the body at 15:51... - --> $DIR/ex2a-push-one-existing-name.rs:15:52 +note: the anonymous lifetime #2 defined on the function body at 15:0... 
+ --> $DIR/ex2a-push-one-existing-name.rs:15:1 | -15 | fn foo<'a>(x: &mut Vec>, y: Ref) { - | ____________________________________________________^ +15 | / fn foo<'a>(x: &mut Vec>, y: Ref) { 16 | | x.push(y); 17 | | } | |_^ -note: ...does not necessarily outlive the lifetime 'a as defined on the body at 15:51 - --> $DIR/ex2a-push-one-existing-name.rs:15:52 +note: ...does not necessarily outlive the lifetime 'a as defined on the function body at 15:0 + --> $DIR/ex2a-push-one-existing-name.rs:15:1 | -15 | fn foo<'a>(x: &mut Vec>, y: Ref) { - | ____________________________________________________^ +15 | / fn foo<'a>(x: &mut Vec>, y: Ref) { 16 | | x.push(y); 17 | | } | |_^ diff --git a/src/test/ui/lifetime-errors/ex2b-push-no-existing-names.stderr b/src/test/ui/lifetime-errors/ex2b-push-no-existing-names.stderr index 028f54ce978..3a6e94f2b1c 100644 --- a/src/test/ui/lifetime-errors/ex2b-push-no-existing-names.stderr +++ b/src/test/ui/lifetime-errors/ex2b-push-no-existing-names.stderr @@ -6,19 +6,17 @@ error[E0308]: mismatched types | = note: expected type `Ref<'_, _>` found type `Ref<'_, _>` -note: the anonymous lifetime #3 defined on the body at 15:43... - --> $DIR/ex2b-push-no-existing-names.rs:15:44 +note: the anonymous lifetime #3 defined on the function body at 15:0... + --> $DIR/ex2b-push-no-existing-names.rs:15:1 | -15 | fn foo(x: &mut Vec>, y: Ref) { - | ____________________________________________^ +15 | / fn foo(x: &mut Vec>, y: Ref) { 16 | | x.push(y); 17 | | } | |_^ -note: ...does not necessarily outlive the anonymous lifetime #2 defined on the body at 15:43 - --> $DIR/ex2b-push-no-existing-names.rs:15:44 +note: ...does not necessarily outlive the anonymous lifetime #2 defined on the function body at 15:0 + --> $DIR/ex2b-push-no-existing-names.rs:15:1 | -15 | fn foo(x: &mut Vec>, y: Ref) { - | ____________________________________________^ +15 | / fn foo(x: &mut Vec>, y: Ref) { 16 | | x.push(y); 17 | | } | |_^ diff --git a/src/test/ui/lifetime-errors/ex2c-push-inference-variable.stderr b/src/test/ui/lifetime-errors/ex2c-push-inference-variable.stderr index 4621214419e..3d7064a4f71 100644 --- a/src/test/ui/lifetime-errors/ex2c-push-inference-variable.stderr +++ b/src/test/ui/lifetime-errors/ex2c-push-inference-variable.stderr @@ -4,11 +4,10 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d 16 | let z = Ref { data: y.data }; | ^^^ | -note: first, the lifetime cannot outlive the lifetime 'c as defined on the body at 15:66... - --> $DIR/ex2c-push-inference-variable.rs:15:67 +note: first, the lifetime cannot outlive the lifetime 'c as defined on the function body at 15:0... + --> $DIR/ex2c-push-inference-variable.rs:15:1 | -15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { - | ___________________________________________________________________^ +15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { 16 | | let z = Ref { data: y.data }; 17 | | x.push(z); 18 | | } @@ -18,11 +17,10 @@ note: ...so that reference does not outlive borrowed content | 16 | let z = Ref { data: y.data }; | ^^^^^^ -note: but, the lifetime must be valid for the lifetime 'b as defined on the body at 15:66... - --> $DIR/ex2c-push-inference-variable.rs:15:67 +note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 15:0... 
+ --> $DIR/ex2c-push-inference-variable.rs:15:1 | -15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { - | ___________________________________________________________________^ +15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { 16 | | let z = Ref { data: y.data }; 17 | | x.push(z); 18 | | } diff --git a/src/test/ui/lifetime-errors/ex2d-push-inference-variable-2.stderr b/src/test/ui/lifetime-errors/ex2d-push-inference-variable-2.stderr index a69694fdc2e..aced855bf66 100644 --- a/src/test/ui/lifetime-errors/ex2d-push-inference-variable-2.stderr +++ b/src/test/ui/lifetime-errors/ex2d-push-inference-variable-2.stderr @@ -4,11 +4,10 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d 17 | let b = Ref { data: y.data }; | ^^^ | -note: first, the lifetime cannot outlive the lifetime 'c as defined on the body at 15:66... - --> $DIR/ex2d-push-inference-variable-2.rs:15:67 +note: first, the lifetime cannot outlive the lifetime 'c as defined on the function body at 15:0... + --> $DIR/ex2d-push-inference-variable-2.rs:15:1 | -15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { - | ___________________________________________________________________^ +15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { 16 | | let a: &mut Vec> = x; 17 | | let b = Ref { data: y.data }; 18 | | a.push(b); @@ -19,11 +18,10 @@ note: ...so that reference does not outlive borrowed content | 17 | let b = Ref { data: y.data }; | ^^^^^^ -note: but, the lifetime must be valid for the lifetime 'b as defined on the body at 15:66... - --> $DIR/ex2d-push-inference-variable-2.rs:15:67 +note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 15:0... + --> $DIR/ex2d-push-inference-variable-2.rs:15:1 | -15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { - | ___________________________________________________________________^ +15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { 16 | | let a: &mut Vec> = x; 17 | | let b = Ref { data: y.data }; 18 | | a.push(b); diff --git a/src/test/ui/lifetime-errors/ex2e-push-inference-variable-3.stderr b/src/test/ui/lifetime-errors/ex2e-push-inference-variable-3.stderr index eff15bb794b..07e2316b63d 100644 --- a/src/test/ui/lifetime-errors/ex2e-push-inference-variable-3.stderr +++ b/src/test/ui/lifetime-errors/ex2e-push-inference-variable-3.stderr @@ -4,11 +4,10 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d 17 | let b = Ref { data: y.data }; | ^^^ | -note: first, the lifetime cannot outlive the lifetime 'c as defined on the body at 15:66... - --> $DIR/ex2e-push-inference-variable-3.rs:15:67 +note: first, the lifetime cannot outlive the lifetime 'c as defined on the function body at 15:0... + --> $DIR/ex2e-push-inference-variable-3.rs:15:1 | -15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { - | ___________________________________________________________________^ +15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { 16 | | let a: &mut Vec> = x; 17 | | let b = Ref { data: y.data }; 18 | | Vec::push(a, b); @@ -19,11 +18,10 @@ note: ...so that reference does not outlive borrowed content | 17 | let b = Ref { data: y.data }; | ^^^^^^ -note: but, the lifetime must be valid for the lifetime 'b as defined on the body at 15:66... - --> $DIR/ex2e-push-inference-variable-3.rs:15:67 +note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 15:0... 
+ --> $DIR/ex2e-push-inference-variable-3.rs:15:1 | -15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { - | ___________________________________________________________________^ +15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec>, y: Ref<'c, i32>) { 16 | | let a: &mut Vec> = x; 17 | | let b = Ref { data: y.data }; 18 | | Vec::push(a, b); diff --git a/src/test/ui/mismatched_types/issue-36053-2.stderr b/src/test/ui/mismatched_types/issue-36053-2.stderr index adc229aaacc..78e0f7e619b 100644 --- a/src/test/ui/mismatched_types/issue-36053-2.stderr +++ b/src/test/ui/mismatched_types/issue-36053-2.stderr @@ -4,7 +4,9 @@ error: no method named `count` found for type `std::iter::Filter("str").fuse().filter(|a: &str| true).count(); | ^^^^^ | - = note: the method `count` exists but the following trait bounds were not satisfied: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`, `std::iter::Filter>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator` + = note: the method `count` exists but the following trait bounds were not satisfied: + `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>` + `std::iter::Filter>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator` error[E0281]: type mismatch: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53]` implements the trait `for<'r> std::ops::FnMut<(&'r str,)>`, but the trait `for<'r> std::ops::FnMut<(&'r &str,)>` is required --> $DIR/issue-36053-2.rs:17:32 diff --git a/src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs b/src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs new file mode 100644 index 00000000000..a4eb4455551 --- /dev/null +++ b/src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs @@ -0,0 +1,18 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Foo; + +fn main() { + let a: Result<(), Foo> = Ok(()); + a.unwrap(); + //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>` + //~| NOTE the method `unwrap` exists but the following trait bounds were not satisfied +} diff --git a/src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr b/src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr new file mode 100644 index 00000000000..2bd786c20fe --- /dev/null +++ b/src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr @@ -0,0 +1,11 @@ +error: no method named `unwrap` found for type `std::result::Result<(), Foo>` in the current scope + --> $DIR/method-help-unsatisfied-bound.rs:15:7 + | +15 | a.unwrap(); + | ^^^^^^ + | + = note: the method `unwrap` exists but the following trait bounds were not satisfied: + `Foo : std::fmt::Debug` + +error: aborting due to previous error + diff --git a/src/tools/cargo b/src/tools/cargo index cf17c9f7118..397359840ec 160000 --- a/src/tools/cargo +++ b/src/tools/cargo @@ -1 +1 @@ -Subproject commit cf17c9f7118f544ec304ed6f50d92b3759487123 +Subproject commit 397359840ecad02d5fe69b2a0cf328e98235ffea diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index 3d9a4fba6cd..1b55dc792c2 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -41,7 +41,7 @@ macro_rules! 
t { } fn main() { - let docs = env::args().nth(1).unwrap(); + let docs = env::args_os().nth(1).unwrap(); let docs = env::current_dir().unwrap().join(docs); let mut errors = false; walk(&mut HashMap::new(), &docs, &docs, &mut errors); @@ -65,7 +65,6 @@ enum Redirect { struct FileEntry { source: String, ids: HashSet, - names: HashSet, } type Cache = HashMap; @@ -73,7 +72,7 @@ struct FileEntry { impl FileEntry { fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) { if self.ids.is_empty() { - with_attrs_in_source(contents, " id", |fragment, i| { + with_attrs_in_source(contents, " id", |fragment, i, _| { let frag = fragment.trim_left_matches("#").to_owned(); if !self.ids.insert(frag) { *errors = true; @@ -82,15 +81,6 @@ fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) { }); } } - - fn parse_names(&mut self, contents: &str) { - if self.names.is_empty() { - with_attrs_in_source(contents, " name", |fragment, _| { - let frag = fragment.trim_left_matches("#").to_owned(); - self.names.insert(frag); - }); - } - } } fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) { @@ -116,15 +106,8 @@ fn check(cache: &mut Cache, file: &Path, errors: &mut bool) -> Option { - // ignore js files as they are not prone to errors as the rest of the - // documentation is and they otherwise bring up false positives. - if file.extension().and_then(|s| s.to_str()) == Some("js") { - return None; - } - - // ignore handlebars files as they use {{}} to build links, we only - // want to test the generated files - if file.extension().and_then(|s| s.to_str()) == Some("hbs") { + // Ignore none HTML files. + if file.extension().and_then(|s| s.to_str()) != Some("html") { return None; } @@ -147,13 +130,7 @@ fn check(cache: &mut Cache, return None; } - // mdbook uses the HTML tag to handle links for subdirectories, which - // linkchecker doesn't support - if file.to_str().unwrap().contains("unstable-book") { - return None; - } - - let res = load_file(cache, root, PathBuf::from(file), SkipRedirect); + let res = load_file(cache, root, file, SkipRedirect); let (pretty_file, contents) = match res { Ok(res) => res, Err(_) => return None, @@ -162,13 +139,10 @@ fn check(cache: &mut Cache, cache.get_mut(&pretty_file) .unwrap() .parse_ids(&pretty_file, &contents, errors); - cache.get_mut(&pretty_file) - .unwrap() - .parse_names(&contents); } // Search for anything that's the regex 'href[ ]*=[ ]*".*?"' - with_attrs_in_source(&contents, " href", |url, i| { + with_attrs_in_source(&contents, " href", |url, i, base| { // Ignore external URLs if url.starts_with("http:") || url.starts_with("https:") || url.starts_with("javascript:") || url.starts_with("ftp:") || @@ -184,9 +158,9 @@ fn check(cache: &mut Cache, // Once we've plucked out the URL, parse it using our base url and // then try to extract a file path. let mut path = file.to_path_buf(); - if !url.is_empty() { + if !base.is_empty() || !url.is_empty() { path.pop(); - for part in Path::new(url).components() { + for part in Path::new(base).join(url).components() { match part { Component::Prefix(_) | Component::RootDir => panic!(), @@ -197,13 +171,6 @@ fn check(cache: &mut Cache, } } - if let Some(extension) = path.extension() { - // don't check these files - if extension == "png" { - return; - } - } - // Alright, if we've found a file name then this file had better // exist! If it doesn't then we register and print an error. 
if path.exists() { @@ -218,11 +185,17 @@ fn check(cache: &mut Cache, pretty_path.display()); return; } - let res = load_file(cache, root, path.clone(), FromRedirect(false)); + if let Some(extension) = path.extension() { + // Ignore none HTML files. + if extension != "html" { + return; + } + } + let res = load_file(cache, root, &path, FromRedirect(false)); let (pretty_path, contents) = match res { Ok(res) => res, Err(LoadError::IOError(err)) => { - panic!(format!("error loading {}: {}", path.display(), err)); + panic!("error loading {}: {}", path.display(), err); } Err(LoadError::BrokenRedirect(target, _)) => { *errors = true; @@ -245,11 +218,10 @@ fn check(cache: &mut Cache, let entry = &mut cache.get_mut(&pretty_path).unwrap(); entry.parse_ids(&pretty_path, &contents, errors); - entry.parse_names(&contents); - if !(entry.ids.contains(*fragment) || entry.names.contains(*fragment)) { + if !entry.ids.contains(*fragment) { *errors = true; - print!("{}:{}: broken link fragment ", + print!("{}:{}: broken link fragment ", pretty_file.display(), i + 1); println!("`#{}` pointing to `{}`", fragment, pretty_path.display()); @@ -267,7 +239,7 @@ fn check(cache: &mut Cache, fn load_file(cache: &mut Cache, root: &Path, - mut file: PathBuf, + file: &Path, redirect: Redirect) -> Result<(PathBuf, String), LoadError> { let mut contents = String::new(); @@ -279,9 +251,9 @@ fn load_file(cache: &mut Cache, None } Entry::Vacant(entry) => { - let mut fp = File::open(file.clone()).map_err(|err| { + let mut fp = File::open(file).map_err(|err| { if let FromRedirect(true) = redirect { - LoadError::BrokenRedirect(file.clone(), err) + LoadError::BrokenRedirect(file.to_path_buf(), err) } else { LoadError::IOError(err) } @@ -297,17 +269,14 @@ fn load_file(cache: &mut Cache, entry.insert(FileEntry { source: contents.clone(), ids: HashSet::new(), - names: HashSet::new(), }); } maybe } }; - file.pop(); - match maybe_redirect.map(|url| file.join(url)) { + match maybe_redirect.map(|url| file.parent().unwrap().join(url)) { Some(redirect_file) => { - let path = PathBuf::from(redirect_file); - load_file(cache, root, path, FromRedirect(true)) + load_file(cache, root, &redirect_file, FromRedirect(true)) } None => Ok((pretty_file, contents)), } @@ -329,10 +298,14 @@ fn maybe_redirect(source: &str) -> Option { }) } -fn with_attrs_in_source(contents: &str, attr: &str, mut f: F) { +fn with_attrs_in_source(contents: &str, attr: &str, mut f: F) { + let mut base = ""; for (i, mut line) in contents.lines().enumerate() { while let Some(j) = line.find(attr) { let rest = &line[j + attr.len()..]; + // The base tag should always be the first link in the document so + // we can get away with using one pass. + let is_base = line[..j].ends_with(" bool { "src/libbacktrace", "src/compiler-rt", "src/rustllvm", - "src/rust-installer", "src/liblibc", "src/vendor", "src/rt/hoedown", "src/tools/cargo", "src/tools/rls", + "src/tools/rust-installer", ]; skip.iter().any(|p| path.ends_with(p)) }
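
For reference, a minimal sketch of the link-resolution behaviour the linkchecker change above relies on: when a page declares a `<base>` href, relative links are joined against it before being resolved against the directory of the file being checked, mirroring the `Path::new(base).join(url).components()` walk in the patch. The helper name `resolve_link` and the sample paths below are illustrative only, not part of the patch.

use std::path::{Component, Path, PathBuf};

// Resolve a relative `url` found in `file`, honouring an optional <base> href.
// This mirrors the component walk in the patched check(): CurDir is ignored,
// ParentDir pops a segment, and absolute components are not expected here.
fn resolve_link(file: &Path, base: &str, url: &str) -> PathBuf {
    let mut path = file.to_path_buf();
    if !base.is_empty() || !url.is_empty() {
        path.pop(); // start from the directory containing `file`
        for part in Path::new(base).join(url).components() {
            match part {
                Component::Prefix(_) | Component::RootDir => panic!("unexpected absolute link"),
                Component::CurDir => {}
                Component::ParentDir => { path.pop(); }
                Component::Normal(s) => { path.push(s); }
            }
        }
    }
    path
}

fn main() {
    let file = Path::new("book/ch01/index.html");
    // With `<base href="../">`, `intro.html` resolves relative to `book/`.
    assert_eq!(resolve_link(file, "../", "intro.html"), PathBuf::from("book/intro.html"));
}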