path = src/jemalloc
url = https://github.com/rust-lang/jemalloc.git
[submodule "src/rust-installer"]
- path = src/rust-installer
+ path = src/tools/rust-installer
url = https://github.com/rust-lang/rust-installer.git
[submodule "src/liblibc"]
path = src/liblibc
url = https://github.com/rust-lang/libc.git
[submodule "src/tools/cargo"]
path = src/tools/cargo
- url = https://github.com/rust-lang/cargo
+ url = https://github.com/rust-lang/cargo.git
[submodule "reference"]
path = src/doc/reference
url = https://github.com/rust-lang-nursery/reference.git
[submodule "src/doc/book"]
path = src/doc/book
url = https://github.com/rust-lang/book.git
[submodule "src/tools/rls"]
path = src/tools/rls
- url = https://github.com/rust-lang-nursery/rls
+ url = https://github.com/rust-lang-nursery/rls.git
os: osx
osx_image: xcode8.2
install: &osx_install_sccache >
- travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-apple-darwin &&
+ travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-apple-darwin &&
chmod +x /usr/local/bin/sccache &&
travis_retry curl -o /usr/local/bin/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
chmod +x /usr/local/bin/stamp
python x.py build src/libcore --stage 0
```
-You can explore the build system throught the various `--help` pages for each
+You can explore the build system through the various `--help` pages for each
subcommand. For example to learn more about a command you can run:
```
- set PATH=C:\Python27;%PATH%
# Download and install sccache
- - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-pc-windows-msvc
- - mv 2017-04-29-sccache-x86_64-pc-windows-msvc sccache.exe
+ - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-pc-windows-msvc
+ - mv 2017-05-12-sccache-x86_64-pc-windows-msvc sccache.exe
- set PATH=%PATH%;%CD%
# Download and install ninja
- set PATH="C:\Program Files (x86)\Inno Setup 5";%PATH%
# Help debug some handle issues on AppVeyor
- - ps: Invoke-WebRequest -Uri https://download.sysinternals.com/files/Handle.zip -OutFile handle.zip
+ - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-15-Handle.zip
- mkdir handle
- - ps: Expand-Archive handle.zip -dest handle
+ - 7z x -ohandle 2017-05-15-Handle.zip
- set PATH=%PATH%;%CD%\handle
- handle.exe -accepteula -help
valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples"
valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH"
valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH"
+valopt_nosave bindir "${CFG_PREFIX}/bin" "install binaries"
# On Windows this determines root of the subtree for target libraries.
# Host runtime libs always go to 'bin'.
CFG_PREFIX=${CFG_PREFIX%/}
CFG_MANDIR=${CFG_MANDIR%/}
CFG_DOCDIR=${CFG_DOCDIR%/}
+CFG_BINDIR=${CFG_BINDIR%/}
CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
putvar CFG_NACL_CROSS_PATH
putvar CFG_MANDIR
putvar CFG_DOCDIR
+putvar CFG_BINDIR
putvar CFG_USING_LIBCPP
msg
"libc 0.0.0",
]
+[[package]]
+name = "advapi32-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "aho-corasick"
version = "0.6.3"
dependencies = [
"build_helper 0.1.0",
"core 0.0.0",
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.0.0",
]
version = "0.0.0"
[[package]]
-name = "atty"
-version = "0.2.2"
+name = "backtrace"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
+ "backtrace-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "backtrace-sys"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "bitflags"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "bitflags"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "bitflags"
version = "0.8.2"
version = "0.0.0"
dependencies = [
"build_helper 0.1.0",
- "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "bufstream"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "build-manifest"
version = "0.1.0"
dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "cargo"
+version = "0.20.0"
+source = "git+https://github.com/rust-lang/cargo#2b32084293d8da63b48de56363a0f2e986ec3367"
+replace = "cargo 0.20.0"
+
+[[package]]
+name = "cargo"
+version = "0.20.0"
+dependencies = [
+ "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargotest 0.1.0",
+ "chrono 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crates-io 0.9.0",
+ "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "cargotest"
+version = "0.1.0"
+dependencies = [
+ "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargo 0.20.0",
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "cargotest2"
version = "0.1.0"
+[[package]]
+name = "cfg-if"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "chrono"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "clap"
-version = "2.22.1"
+version = "2.19.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strsim 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-segmentation 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cmake"
-version = "0.1.22"
+version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
dependencies = [
"build_helper 0.1.0",
"core 0.0.0",
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "core"
version = "0.0.0"
+[[package]]
+name = "crates-io"
+version = "0.9.0"
+dependencies = [
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "curl"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "curl-sys"
+version = "0.3.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "dbghelp-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "derive-new"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "diff"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "docopt"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "dtoa"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "either"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "enum_primitive"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "env_logger"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "env_logger"
version = "0.4.2"
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "error-chain"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "backtrace 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "error_index_generator"
version = "0.0.0"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.0.0"
dependencies = [
"build_helper 0.1.0",
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "flate2"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "fmt_macros"
version = "0.0.0"
+[[package]]
+name = "foreign-types"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "fs2"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "gcc"
-version = "0.3.45"
+version = "0.3.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "gdi32-sys"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "getopts"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "git2"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "git2-curl"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "glob"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "graphviz"
version = "0.0.0"
+[[package]]
+name = "hamcrest"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "handlebars"
-version = "0.25.2"
+version = "0.25.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "idna"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "installer"
+version = "0.0.0"
+dependencies = [
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "itertools"
+version = "0.5.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "languageserver-types"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "lazy_static"
-version = "0.2.5"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
[[package]]
name = "libc"
-version = "0.2.21"
+version = "0.2.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "libgit2-sys"
+version = "0.6.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "libssh2-sys"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "libz-sys"
+version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "linkchecker"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "lzma-sys"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "matches"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "mdbook"
version = "0.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "handlebars 0.25.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "handlebars 0.25.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "memchr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "miniz-sys"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "miow"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "multimap"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "net2"
+version = "0.2.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-complex 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-rational 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-complex"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-iter"
+version = "0.1.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-rational"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "openssl"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "owning_ref"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "panic_abort"
version = "0.0.0"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "pkg-config"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "proc_macro"
version = "0.0.0"
"syntax_pos 0.0.0",
]
+[[package]]
+name = "psapi-sys"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "pulldown-cmark"
version = "0.0.8"
[[package]]
name = "quick-error"
-version = "1.1.0"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "quote"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "quote"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "racer"
+version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "typed-arena 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "rand"
"core 0.0.0",
]
+[[package]]
+name = "rand"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "regex"
+version = "0.1.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "regex"
version = "0.2.1"
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "regex-syntax"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "regex-syntax"
version = "0.4.0"
name = "remote-test-server"
version = "0.1.0"
+[[package]]
+name = "rls"
+version = "0.1.0"
+dependencies = [
+ "cargo 0.20.0 (git+https://github.com/rust-lang/cargo)",
+ "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-analysis 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-vfs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rls-analysis"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "rls-data"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rls-vfs"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustbook"
version = "0.1.0"
dependencies = [
- "clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
"mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
"fmt_macros 0.0.0",
"graphviz 0.0.0",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
- "rustc_llvm 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
+[[package]]
+name = "rustc-demangle"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "rustc-main"
version = "0.0.0"
[[package]]
name = "rustc-serialize"
-version = "0.3.23"
+version = "0.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
dependencies = [
"alloc_system 0.0.0",
"build_helper 0.1.0",
- "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
"rustc_errors 0.0.0",
"rustc_incremental 0.0.0",
"rustc_lint 0.0.0",
- "rustc_llvm 0.0.0",
"rustc_metadata 0.0.0",
"rustc_mir 0.0.0",
"rustc_passes 0.0.0",
version = "0.0.0"
dependencies = [
"build_helper 0.1.0",
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_bitflags 0.0.0",
]
dependencies = [
"alloc_system 0.0.0",
"build_helper 0.1.0",
- "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
dependencies = [
"flate 0.0.0",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"proc_macro 0.0.0",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
- "rustc_llvm 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
"syntax_ext 0.0.0",
dependencies = [
"alloc_system 0.0.0",
"build_helper 0.1.0",
- "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
"rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_typeck 0.0.0",
"syntax 0.0.0",
"syntax_pos 0.0.0",
dependencies = [
"flate 0.0.0",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
dependencies = [
"alloc_system 0.0.0",
"build_helper 0.1.0",
- "cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
"arena 0.0.0",
"build_helper 0.1.0",
"env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"syntax_pos 0.0.0",
]
+[[package]]
+name = "rustfmt"
+version = "0.8.4"
+source = "git+https://github.com/rust-lang-nursery/rustfmt#bf9b3fa1d7cab2f7bd541539d397a92b4954ec96"
+dependencies = [
+ "diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "multimap 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strings 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "same-file"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "semver"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "semver-parser"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "serde"
+version = "0.9.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "serde"
-version = "0.9.11"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "serde_derive"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive_internals 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_derive_internals"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_ignored"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_json"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "serde_json"
-version = "0.9.9"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "serialize"
version = "0.0.0"
+[[package]]
+name = "shell-escape"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "std"
version = "0.0.0"
"collections 0.0.0",
"compiler_builtins 0.0.0",
"core 0.0.0",
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.0.0",
"panic_abort 0.0.0",
"panic_unwind 0.0.0",
"core 0.0.0",
]
+[[package]]
+name = "strings"
+version = "0.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "strsim"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "strsim"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "syn"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syn"
+version = "0.11.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "synom"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "syntax"
version = "0.0.0"
"serialize 0.0.0",
]
+[[package]]
+name = "syntex_errors"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_errors"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_pos"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_pos"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_syntax"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_syntax"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tar"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tempdir"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "term"
version = "0.0.0"
+[[package]]
+name = "term"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "term_size"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "thread-id"
-version = "3.0.0"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread-id"
+version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread_local"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
name = "tidy"
version = "0.1.0"
+[[package]]
+name = "time"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "toml"
version = "0.1.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "toml"
-version = "0.3.1"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "toml"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "toml"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "typed-arena"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-bidi"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "unicode-normalization"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "unicode-segmentation"
-version = "1.1.0"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "unicode-xid"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-xid"
+version = "0.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "unreachable"
version = "0.1.1"
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "url"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "idna 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "url_serde"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "user32-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "utf8-ranges"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "utf8-ranges"
version = "1.0.0"
[[package]]
name = "vec_map"
-version = "0.7.0"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "walkdir"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "winapi"
version = "0.2.8"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "ws2_32-sys"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "xattr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "xz2"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "yaml-rust"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[metadata]
+"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a"
+"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
"checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699"
"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
-"checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159"
+"checksum backtrace 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f551bc2ddd53aea015d453ef0b635af89444afa5ed2405dd0b2062ad5d600d80"
+"checksum backtrace-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d192fd129132fbc97497c1f2ec2c2c5174e376b95f535199ef4fe0a293d33842"
"checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23"
+"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
"checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4"
-"checksum clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e17a4a72ffea176f77d6e2db609c6c919ef221f23862c9915e687fb54d833485"
-"checksum cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "d18d68987ed4c516dcc3e7913659bfa4076f5182eea4a7e0038bb060953e76ac"
+"checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32"
+"checksum cargo 0.20.0 (git+https://github.com/rust-lang/cargo)" = "<none>"
+"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
+"checksum chrono 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d9123be86fd2a8f627836c235ecdf331fdd067ecf7ac05aa1a68fbcf2429f056"
+"checksum clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)" = "95b78f3fe0fc94c13c731714363260e04b557a637166f33a4570d3189d642374"
+"checksum cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)" = "92278eb79412c8f75cfc89e707a1bb3a6490b68f7f2e78d15c774f30fe701122"
+"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
+"checksum curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c90e1240ef340dd4027ade439e5c7c2064dd9dc652682117bd50d1486a3add7b"
+"checksum curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "23e7e544dc5e1ba42c4a4a678bd47985e84b9c3f4d3404c29700622a029db9c3"
+"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850"
+"checksum derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "41be6ca3b99e0c0483fb2389685448f650459c3ecbe4e18d7705d8010ec4ab8e"
"checksum diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0a515461b6c8c08419850ced27bc29e86166dcdcde8fbe76f8b1f0589bb49472"
+"checksum docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab32ea6e284d87987066f21a9e809a73c14720571ef34516f0890b3d355ccfd8"
"checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
+"checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a"
+"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
+"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83"
+"checksum error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9435d864e017c3c6afeac1654189b06cdb491cf2ff73dbf0d73b0f292f42ff8"
"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
-"checksum gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)" = "40899336fb50db0c78710f53e87afc54d8c7266fb76262fecc78ca1a7f09deae"
+"checksum flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)" = "36df0166e856739905cd3d7e0b210fe818592211a008862599845e012d8d304c"
+"checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d"
+"checksum fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34edaee07555859dc13ca387e6ae05686bb4d0364c95d649b6dab959511f4baf"
+"checksum gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)" = "181e3cebba1d663bd92eb90e2da787e10597e027eb00de8d742b260a7850948f"
+"checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
-"checksum handlebars 0.25.2 (registry+https://github.com/rust-lang/crates.io-index)" = "663e1728d8037fb0d4e13bcd1b1909fb5d913690a9929eb385922df157c2ff8f"
+"checksum git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "9de9df4358c17e448a778d90cd0272e1dab5eae30244502333fa2001c4e24357"
+"checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e"
+"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
+"checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4"
+"checksum handlebars 0.25.3 (registry+https://github.com/rust-lang/crates.io-index)" = "15bdf598fc3c2de40c6b340213028301c0d225eea55a2294e6cc148074e557a1"
+"checksum idna 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6ac85ec3f80c8e4e99d9325521337e14ec7555c458a14e377d189659a427f375"
+"checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
-"checksum lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4732c563b9a21a406565c4747daa7b46742f082911ae4753f390dc9ec7ee1a97"
-"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
+"checksum languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97c2985bfcbbcb0189cfa25e1c10c1ac7111df2b6214b652c690127aefdf4e5b"
+"checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf"
+"checksum libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)" = "babb8281da88cba992fa1f4ddec7d63ed96280a1a53ec9b919fd37b53d71e502"
+"checksum libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "dd89dd7196d5fa35b659c3eaf3c1b14b9bd961bfd1a07dfca49adeb8a6aa3763"
+"checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75"
+"checksum libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e5ee912a45d686d393d5ac87fac15ba0ba18daae14e8e7543c63ebf7fb7e970c"
"checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
+"checksum lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "fedff6a5cbb24494ec6ee4784e9ac5c187161fede04c7767d49bf87544013afa"
+"checksum matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efd7622e3022e1a6eaa602c4cea8912254e5582c9c692e9167714182244801b1"
"checksum mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "f1e2e9d848514dcfad4195788d0d42ae5153a477c191d75d5b84fab10f222fbd"
+"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
+"checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726"
+"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
+"checksum multimap 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9223f4774d08e06185e44e555b9a7561243d387bac49c78a6205c42d6975fbf2"
+"checksum net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)" = "bc01404e7568680f1259aa5729539f221cb1e6d047a0d9053cab4be8a73b5d67"
+"checksum num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "98b15ba84e910ea7a1973bccd3df7b31ae282bf9d8bd2897779950c9b8303d40"
+"checksum num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ba6d838b16e56da1b6c383d065ff1ec3c7d7797f65a3e8f6ba7092fd87820bac"
+"checksum num-complex 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "148eb324ca772230853418731ffdf13531738b50f89b30692a01fcdcb0a64677"
+"checksum num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "ef1a4bf6f9174aa5783a9b4cc892cacd11aebad6c69ad027a0b65c6ca5f8aa37"
+"checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e"
+"checksum num-rational 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "c2dc5ea04020a8f18318ae485c751f8cfa1c0e69dcf465c29ddaaa64a313cc44"
"checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99"
"checksum num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca313f1862c7ec3e0dfe8ace9fa91b1d9cb5c84ace3d00f5ec4216238e93c167"
"checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842"
+"checksum openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)" = "bb5d1663b73d10c6a3eda53e2e9d0346f822394e7b858d7257718f65f61dfbe2"
+"checksum openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d98df0270d404ccd3c050a41d579c52d1db15375168bb3471e04ec0f5f378daf"
+"checksum openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)" = "3a5886d87d3e2a0d890bf62dc8944f5e3769a405f7e1e9ef6e517e47fd7a0897"
+"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
"checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
+"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
+"checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478"
"checksum pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9ab1e588ef8efd702c7ed9d2bd774db5e6f4d878bb5a1a9f371828fbdff6973"
"checksum pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1058d7bb927ca067656537eec4e02c2b4b70eaaa129664c5b90c111e20326f41"
-"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
+"checksum quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c36987d4978eb1be2e422b1e0423a557923a5c3e7e6f31d5699e9aafaefa469"
+"checksum quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5cf478fe1006dbcc72567121d23dbdae5f1632386068c5c86ff4f645628504"
+"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
+"checksum racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b0d72b3afd67882adfca61d609fafb8d7aa5f9e814f12c32fcc6e171995920e8"
+"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d"
+"checksum redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "29dbdfd4b9df8ab31dec47c6087b7b13cbf4a776f335e4de8efba8288dda075b"
+"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
"checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01"
+"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
"checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457"
+"checksum rls-analysis 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a62d88c341375c6f3f8b2e18b9b364896e7d3e7aa916907de717d0267e116506"
"checksum rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fc4277ce3c57f456b11fe3145b181a844a25201bab5cbaa1978457e6e2f27d47"
"checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a"
-"checksum rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "684ce48436d6465300c9ea783b6b14c4361d6b8dcbb1375b486a69cc19e2dfb0"
-"checksum serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)" = "a702319c807c016e51f672e5c77d6f0b46afddd744b5e437d6b8436b888b458f"
-"checksum serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)" = "dbc45439552eb8fb86907a2c41c1fd0ef97458efb87ff7f878db466eb581824e"
+"checksum rls-vfs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "986eada111517bcb5a7a75205b3f2b70c82e7766653cca61a23f5afce79bdb94"
+"checksum rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3058a43ada2c2d0b92b3ae38007a2d0fa5e9db971be260e0171408a4ff471c95"
+"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
+"checksum rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)" = "<none>"
+"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
+"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
+"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+"checksum serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34b623917345a631dc9608d5194cc206b3fe6c3554cd1c75b937e55e285254af"
+"checksum serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "38a3db3a5757f68069aba764b793823ea9fb9717c42c016f8903f8add50f508a"
+"checksum serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e46ef71ee001a4279a4513e79a6ebbb59da3a4987bf77a6df2e5534cd6f21d82"
+"checksum serde_derive_internals 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "021c338d22c7e30f957a6ab7e388cb6098499dda9fd4ba1661ee074ca7a180d1"
+"checksum serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c10e798e4405d7dcec3658989e35ee6706f730a9ed7c1184d5ebd84317e82f46"
+"checksum serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ad8bcf487be7d2e15d3d543f04312de991d631cfe1b43ea0ade69e6a8a5b16a1"
+"checksum serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "48b04779552e92037212c3615370f6bd57a40ebba7f20e554ff9f55e41a69a7b"
+"checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8"
+"checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
+"checksum strings 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "54f86446ab480b4f60782188f4f78886465c5793aee248cbb48b7fdc0d022420"
+"checksum strsim 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "67f84c44fbb2f91db7fef94554e6b2ac05909c9c0b0bc23bb98d3a1aebfe7f7c"
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
+"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
+"checksum syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6ae6fb0dcc9bd85f89a1a4adc0df2fd90c90c98849d61433983dd7a9df6363f7"
+"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
+"checksum syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9e52bffe6202cfb67587784cf23e0ec5bf26d331eef4922a16d5c42e12aa1e9b"
+"checksum syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "867cc5c2d7140ae7eaad2ae9e8bf39cb18a67ca651b7834f88d46ca98faadb9c"
+"checksum syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "955ef4b16af4c468e4680d1497f873ff288f557d338180649e18f915af5e15ac"
+"checksum syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047"
+"checksum syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "76a302e717e348aa372ff577791c3832395650073b8d8432f8b3cb170b34afde"
+"checksum syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791"
+"checksum tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ab0ef9ead2fe0aa9e18475a96a207bfd5143f4124779ef7429503a8665416ce8"
+"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
+"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
-"checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"
+"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
+"checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"
+"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
"checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
+"checksum time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ffd7ccbf969a892bf83f1e441126968a07a3941c24ff522a26af9f9f4585d1a3"
"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
-"checksum toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3474f3c6eaf32eedb4f4a66a26214f020f828a6d96c37e38a35e3a379bbcfd11"
-"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
+"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
+"checksum toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bd86ad9ebee246fdedd610e0f6d0587b754a3d81438db930a244d0480ed7878f"
+"checksum toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc5dbfb20a481e64b99eb7ae280859ec76730c7191570ba5edaa962394edb0a"
+"checksum typed-arena 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8e2f9dc90da4f9d66ffc9ad3ead2c7d57582a26f4a3292d2ce7011bd29965100"
+"checksum unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3a078ebdd62c0e71a709c3d53d2af693fe09fe93fbff8344aebe289b78f9032"
+"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"
+"checksum unicode-segmentation 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c3bc443ded17b11305ffffe6b37e2076f328a5a8cb6aa877b1b98f77699e98b5"
+"checksum unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a8083c594e02b8ae1654ae26f0ade5158b119bd88ad0e8227a5d8fcd72407946"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
+"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
+"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
+"checksum url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5ba8a749fb4479b043733416c244fa9d1d3af3d7c23804944651c8a448cb87e"
+"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
+"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47"
+"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
-"checksum vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8cdc8b93bd0198ed872357fb2e667f7125646b1762f16d60b2c96350d361897"
+"checksum vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cac5efe5cb0fa14ec2f84f83c701c562ee63f6dcc680861b21d65c682adfb05f"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
+"checksum walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "bb08f9e670fab86099470b97cd2b252d6527f0b3cc1401acdb595ffc9dd288ff"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
+"checksum xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "5f04de8a1346489a2f9e9bd8526b73d135ec554227b17568456e86aa35b6f3fc"
+"checksum xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e9510bdf100731599107c61f77daf46713a69a568f75458999c1f9dbf6ba25b0"
+"checksum yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e66366e18dc58b46801afbf2ca7661a9f59cc8c5962c29892b6039b4f86fa992"
"tools/build-manifest",
"tools/remote-test-client",
"tools/remote-test-server",
-]
-
-# These projects have their own Cargo.lock
-exclude = [
+ "tools/rust-installer",
"tools/cargo",
"tools/rls",
]
[profile.test]
debug = false
debug-assertions = false
+
+[replace]
+"https://github.com/rust-lang/cargo#0.20.0" = { path = "tools/cargo" }
[dependencies]
build_helper = { path = "../build_helper" }
-cmake = "0.1.17"
+cmake = "0.1.23"
filetime = "0.1"
num_cpus = "1.0"
toml = "0.1"
getopts = "0.2"
rustc-serialize = "0.3"
-gcc = "0.3.38"
+gcc = "0.3.46"
libc = "0.2"
use std::process::{Command, ExitStatus};
fn main() {
- let args = env::args_os().skip(1).collect::<Vec<_>>();
+ let mut args = env::args_os().skip(1).collect::<Vec<_>>();
+
+ // Append metadata suffix for internal crates. See the corresponding entry
+ // in bootstrap/lib.rs for details.
+ if let Ok(s) = env::var("RUSTC_METADATA_SUFFIX") {
+ for i in 1..args.len() {
+ // Dirty code for borrowing issues
+ let mut new = None;
+ if let Some(current_as_str) = args[i].to_str() {
+ if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) ||
+ current_as_str.starts_with("-Cmetadata") {
+ new = Some(format!("{}-{}", current_as_str, s));
+ }
+ }
+ if let Some(new) = new { args[i] = new.into(); }
+ }
+ }
+
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = args.windows(2)
// do that we pass a weird flag to the compiler to get it to do
// so. Note that this is definitely a hack, and we should likely
// flesh out rpath support more fully in the future.
+ //
+ // FIXME: remove condition after next stage0
if stage != "0" {
cmd.arg("-Z").arg("osx-rpath-install-name");
}
cmd.arg("-Z").arg("unstable-options");
cmd.arg("-C").arg("target-feature=+crt-static");
}
+
+ // Force all crates compiled by this compiler to (a) be unstable and (b)
+ // allow the `rustc_private` feature to link to other unstable crates
+ // also in the sysroot.
+ //
+ // FIXME: remove condition after next stage0
+ if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() {
+ if stage != "0" {
+ cmd.arg("-Z").arg("force-unstable-if-unmarked");
+ }
+ }
}
if verbose > 1 {
import datetime
import hashlib
import os
+import re
import shutil
import subprocess
import sys
shutil.move(tp, fp)
shutil.rmtree(os.path.join(dst, fname))
-def run(args, verbose=False, exception=False):
+def run(args, verbose=False, exception=False, cwd=None):
if verbose:
print("running: " + ' '.join(args))
sys.stdout.flush()
# Use Popen here instead of call() as it apparently allows powershell on
# Windows to not lock up waiting for input presumably.
- ret = subprocess.Popen(args)
+ ret = subprocess.Popen(args, cwd=cwd)
code = ret.wait()
if code != 0:
err = "failed to run: " + ' '.join(args)
def get_toml(self, key):
for line in self.config_toml.splitlines():
- if line.startswith(key + ' ='):
- return self.get_string(line)
+ match = re.match(r'^{}\s*=(.*)$'.format(key), line)
+ if match is not None:
+ value = match.group(1)
+ return self.get_string(value) or value.strip()
return None
def get_mk(self, key):
def get_string(self, line):
start = line.find('"')
+ if start == -1:
+ return None
end = start + 1 + line[start + 1:].find('"')
return line[start + 1:end]
args.append("--frozen")
self.run(args, env)
- def run(self, args, env):
- proc = subprocess.Popen(args, env=env)
+ def run(self, args, env=None, cwd=None):
+ proc = subprocess.Popen(args, env=env, cwd=cwd)
ret = proc.wait()
if ret != 0:
sys.exit(ret)
+ def output(self, args, env=None, cwd=None):
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env, cwd=cwd)
+ (out, err) = proc.communicate()
+ ret = proc.wait()
+ if ret != 0:
+ print(out)
+ sys.exit(ret)
+ return out
+
def build_triple(self):
default_encoding = sys.getdefaultencoding()
config = self.get_toml('build')
return "{}-{}".format(cputype, ostype)
+ def update_submodules(self):
+ if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \
+ self.get_toml('submodules') == "false" or \
+ self.get_mk('CFG_DISABLE_MANAGE_SUBMODULES') == "1":
+ return
+
+ print('Updating submodules')
+ output = self.output(["git", "submodule", "status"], cwd=self.rust_root)
+ submodules = []
+ for line in output.splitlines():
+ # NOTE `git submodule status` output looks like this:
+ #
+ # -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
+ # +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
+ # e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
+ #
+            # The first character can be '-', '+' or ' ' and denotes the
+            # `State` of the submodule. Right next to this character is the
+            # SHA-1 of the submodule's HEAD, and after that comes the path
+            # to the submodule.
+ path = line[1:].split(' ')[1]
+ submodules.append([path, line[0]])
+
+ self.run(["git", "submodule", "sync"], cwd=self.rust_root)
+
+ for submod in submodules:
+ path, status = submod
+ if path.endswith(b"llvm") and \
+ (self.get_toml('llvm-config') or self.get_mk('CFG_LLVM_ROOT')):
+ continue
+ if path.endswith(b"jemalloc") and \
+ (self.get_toml('jemalloc') or self.get_mk('CFG_JEMALLOC_ROOT')):
+ continue
+ submod_path = os.path.join(self.rust_root, path)
+
+ if status == ' ':
+ self.run(["git", "reset", "--hard"], cwd=submod_path)
+ self.run(["git", "clean", "-fdx"], cwd=submod_path)
+ elif status == '+':
+ self.run(["git", "submodule", "update", path], cwd=self.rust_root)
+ self.run(["git", "reset", "--hard"], cwd=submod_path)
+ self.run(["git", "clean", "-fdx"], cwd=submod_path)
+ elif status == '-':
+ self.run(["git", "submodule", "init", path], cwd=self.rust_root)
+ self.run(["git", "submodule", "update", path], cwd=self.rust_root)
+ else:
+ raise ValueError('unknown submodule status: ' + status)
+
def bootstrap():
parser = argparse.ArgumentParser(description='Build rust')
parser.add_argument('--config')
else:
rb._download_url = 'https://static.rust-lang.org'
+ rb.update_submodules()
+
# Fetch/build the bootstrap
rb.build = rb.build_triple()
rb.download_stage0()
// Fallback musl-root for all targets
pub musl_root: Option<PathBuf>,
pub prefix: Option<PathBuf>,
+ pub sysconfdir: Option<PathBuf>,
pub docdir: Option<PathBuf>,
+ pub bindir: Option<PathBuf>,
pub libdir: Option<PathBuf>,
pub libdir_relative: Option<PathBuf>,
pub mandir: Option<PathBuf>,
#[derive(RustcDecodable, Default, Clone)]
struct Install {
prefix: Option<String>,
- mandir: Option<String>,
+ sysconfdir: Option<String>,
docdir: Option<String>,
+ bindir: Option<String>,
libdir: Option<String>,
+ mandir: Option<String>,
}
/// TOML representation of how the LLVM build is configured.
let table = match p.parse() {
Some(table) => table,
None => {
- println!("failed to parse TOML configuration:");
+ println!("failed to parse TOML configuration '{}':", file.to_str().unwrap());
for err in p.errors.iter() {
let (loline, locol) = p.to_linecol(err.lo);
let (hiline, hicol) = p.to_linecol(err.hi);
if let Some(ref install) = toml.install {
config.prefix = install.prefix.clone().map(PathBuf::from);
- config.mandir = install.mandir.clone().map(PathBuf::from);
+ config.sysconfdir = install.sysconfdir.clone().map(PathBuf::from);
config.docdir = install.docdir.clone().map(PathBuf::from);
+ config.bindir = install.bindir.clone().map(PathBuf::from);
config.libdir = install.libdir.clone().map(PathBuf::from);
+ config.mandir = install.mandir.clone().map(PathBuf::from);
}
if let Some(ref llvm) = toml.llvm {
"CFG_PREFIX" => {
self.prefix = Some(PathBuf::from(value));
}
+ "CFG_SYSCONFDIR" => {
+ self.sysconfdir = Some(PathBuf::from(value));
+ }
"CFG_DOCDIR" => {
self.docdir = Some(PathBuf::from(value));
}
+ "CFG_BINDIR" => {
+ self.bindir = Some(PathBuf::from(value));
+ }
"CFG_LIBDIR" => {
self.libdir = Some(PathBuf::from(value));
}
# Instead of installing to /usr/local, install to this path instead.
#prefix = "/usr/local"
+# Where to install system configuration files
+# If this is a relative path, it will get installed in `prefix` above
+#sysconfdir = "/etc"
+
+# Where to install documentation in `prefix` above
+#docdir = "share/doc/rust"
+
+# Where to install binaries in `prefix` above
+#bindir = "bin"
+
# Where to install libraries in `prefix` above
#libdir = "lib"
# Where to install man pages in `prefix` above
#mandir = "share/man"
-# Where to install documentation in `prefix` above
-#docdir = "share/doc/rust"
-
# =============================================================================
# Options for compiling Rust code itself
# =============================================================================
[rust]
# Whether or not to optimize the compiler and standard library
+# Note: the slowness of a non-optimized compiler compiling itself usually
+# outweighs the time saved by skipping optimizations, so a full bootstrap
+# takes much longer with optimize set to false.
#optimize = true
# Number of codegen units to use for each compiler invocation. A value of 0
use build_helper::output;
-#[cfg(not(target_os = "solaris"))]
-const SH_CMD: &'static str = "sh";
-// On Solaris, sh is the historical bourne shell, not a POSIX shell, or bash.
-#[cfg(target_os = "solaris")]
-const SH_CMD: &'static str = "bash";
-
use {Build, Compiler, Mode};
use channel;
use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};
build.out.join("tmp/dist")
}
+fn rust_installer(build: &Build) -> Command {
+ build.tool_cmd(&Compiler::new(0, &build.config.build), "rust-installer")
+}
+
/// Builds the `rust-docs` installer component.
///
/// Slurps up documentation from the `stage`'s `host`.
let src = build.out.join(host).join("doc");
cp_r(&src, &dst);
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust-Documentation")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-documentation-is-installed.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rust-docs")
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg(host);
build.run(&mut cmd);
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust-MinGW")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-MinGW-is-installed.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rust-mingw")
.arg("--legacy-manifest-dirs=rustlib,cargo");
}
// Finally, wrap everything up in a nice tarball!
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
- .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
+ .arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rustc")
.arg("--legacy-manifest-dirs=rustlib,cargo");
let src = build.sysroot(compiler).join("lib/rustlib");
cp_r(&src.join(target), &dst);
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=std-is-standing-at-the-ready.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-std-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
println!("image_src: {:?}, dst: {:?}", image_src, dst);
cp_r(&image_src, &dst);
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=save-analysis-saved.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-analysis-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
// Create plain source tarball
- let tarball = rust_src_location(build);
+ let mut tarball = rust_src_location(build);
+ tarball.set_extension(""); // strip .gz
+ tarball.set_extension(""); // strip .tar
if let Some(dir) = tarball.parent() {
t!(fs::create_dir_all(dir));
}
- let mut cmd = Command::new("tar");
- cmd.arg("-czf").arg(sanitize_sh(&tarball))
- .arg(&plain_name)
+ let mut cmd = rust_installer(build);
+ cmd.arg("tarball")
+ .arg("--input").arg(&plain_name)
+ .arg("--output").arg(&tarball)
+ .arg("--work-dir=.")
.current_dir(tmpdir(build));
build.run(&mut cmd);
}
// Create source tarball in rust-installer format
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Awesome-Source.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}", name))
.arg("--component-name=rust-src")
.arg("--legacy-manifest-dirs=rustlib,cargo");
// Prepare the image directory
t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
- t!(fs::create_dir_all(image.join("etc/bash_completions.d")));
+ t!(fs::create_dir_all(image.join("etc/bash_completion.d")));
let cargo = build.cargo_out(&compiler, Mode::Tool, target)
.join(exe("cargo", target));
install(&cargo, &image.join("bin"), 0o755);
}
install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
copy(&etc.join("cargo.bashcomp.sh"),
- &image.join("etc/bash_completions.d/cargo"));
+ &image.join("etc/bash_completion.d/cargo"));
let doc = image.join("share/doc/cargo");
install(&src.join("README.md"), &doc, 0o644);
install(&src.join("LICENSE-MIT"), &doc, 0o644);
t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
// Generate the installer tarball
- let mut cmd = Command::new("sh");
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
- .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
+ .arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--component-name=cargo")
.arg("--legacy-manifest-dirs=rustlib,cargo");
t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
// Generate the installer tarball
- let mut cmd = Command::new("sh");
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=RLS-ready-to-serve.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
- .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
+ .arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--component-name=rls")
.arg("--legacy-manifest-dirs=rustlib,cargo");
// upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
// the std files during uninstall. To do this ensure that rustc comes
// before rust-std in the list below.
- let mut input_tarballs = format!("{},{},{},{},{},{}",
- sanitize_sh(&rustc_installer),
- sanitize_sh(&cargo_installer),
- sanitize_sh(&rls_installer),
- sanitize_sh(&analysis_installer),
- sanitize_sh(&docs_installer),
- sanitize_sh(&std_installer));
+ let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
+ analysis_installer, docs_installer, std_installer];
if target.contains("pc-windows-gnu") {
- input_tarballs.push_str(",");
- input_tarballs.push_str(&sanitize_sh(&mingw_installer));
+ tarballs.push(mingw_installer);
+ }
+ let mut input_tarballs = tarballs[0].as_os_str().to_owned();
+ for tarball in &tarballs[1..] {
+ input_tarballs.push(",");
+ input_tarballs.push(tarball);
}
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/combine-installers.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("combine")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
- .arg(format!("--work-dir={}", sanitize_sh(&work)))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--work-dir").arg(&work)
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
.arg("--legacy-manifest-dirs=rustlib,cargo")
- .arg(format!("--input-tarballs={}", input_tarballs))
- .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)));
+ .arg("--input-tarballs").arg(input_tarballs)
+ .arg("--non-installed-overlay").arg(&overlay);
build.run(&mut cmd);
let mut license = String::new();
/// Installs everything.
pub fn install(build: &Build, stage: u32, host: &str) {
let prefix_default = PathBuf::from("/usr/local");
+ let sysconfdir_default = PathBuf::from("/etc");
let docdir_default = PathBuf::from("share/doc/rust");
- let mandir_default = PathBuf::from("share/man");
+ let bindir_default = PathBuf::from("bin");
let libdir_default = PathBuf::from("lib");
+ let mandir_default = PathBuf::from("share/man");
let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
+ let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
+ let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
+ let sysconfdir = prefix.join(sysconfdir);
let docdir = prefix.join(docdir);
+ let bindir = prefix.join(bindir);
let libdir = prefix.join(libdir);
let mandir = prefix.join(mandir);
let destdir = env::var_os("DESTDIR").map(PathBuf::from);
let prefix = add_destdir(&prefix, &destdir);
+ let sysconfdir = add_destdir(&sysconfdir, &destdir);
let docdir = add_destdir(&docdir, &destdir);
+ let bindir = add_destdir(&bindir, &destdir);
let libdir = add_destdir(&libdir, &destdir);
let mandir = add_destdir(&mandir, &destdir);
t!(fs::create_dir_all(&empty_dir));
if build.config.docs {
install_sh(&build, "docs", "rust-docs", &build.rust_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
for target in build.config.target.iter() {
install_sh(&build, "std", "rust-std", &build.rust_package_vers(),
- stage, target, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, target, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
if build.config.extended {
install_sh(&build, "cargo", "cargo", &build.cargo_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
install_sh(&build, "rls", "rls", &build.rls_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
install_sh(&build, "rustc", "rustc", &build.rust_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
t!(fs::remove_dir_all(&empty_dir));
}
fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u32, host: &str,
- prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) {
+ prefix: &Path, sysconfdir: &Path, docdir: &Path, bindir: &Path, libdir: &Path,
+ mandir: &Path, empty_dir: &Path) {
println!("Install {} stage{} ({})", package, stage, host);
let package_name = format!("{}-{}-{}", name, version, host);
cmd.current_dir(empty_dir)
.arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
.arg(format!("--prefix={}", sanitize_sh(prefix)))
+ .arg(format!("--sysconfdir={}", sanitize_sh(sysconfdir)))
.arg(format!("--docdir={}", sanitize_sh(docdir)))
+ .arg(format!("--bindir={}", sanitize_sh(bindir)))
.arg(format!("--libdir={}", sanitize_sh(libdir)))
.arg(format!("--mandir={}", sanitize_sh(mandir)))
.arg("--disable-ldconfig");
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::Read;
-use std::path::{Component, PathBuf, Path};
+use std::path::{PathBuf, Path};
use std::process::Command;
use build_helper::{run_silent, run_suppressed, output, mtime};
self.verbose(&format!("auto-detected local-rebuild {}", local_release));
self.local_rebuild = true;
}
- self.verbose("updating submodules");
- self.update_submodules();
self.verbose("learning about cargo");
metadata::build(self);
step::run(self);
}
- /// Updates all git submodules that we have.
- ///
- /// This will detect if any submodules are out of date an run the necessary
- /// commands to sync them all with upstream.
- fn update_submodules(&self) {
- struct Submodule<'a> {
- path: &'a Path,
- state: State,
- }
-
- enum State {
- // The submodule may have staged/unstaged changes
- MaybeDirty,
- // Or could be initialized but never updated
- NotInitialized,
- // The submodule, itself, has extra commits but those changes haven't been commited to
- // the (outer) git repository
- OutOfSync,
- }
-
- if !self.src_is_git || !self.config.submodules {
- return
- }
- let git = || {
- let mut cmd = Command::new("git");
- cmd.current_dir(&self.src);
- return cmd
- };
- let git_submodule = || {
- let mut cmd = Command::new("git");
- cmd.current_dir(&self.src).arg("submodule");
- return cmd
- };
-
- // FIXME: this takes a seriously long time to execute on Windows and a
- // nontrivial amount of time on Unix, we should have a better way
- // of detecting whether we need to run all the submodule commands
- // below.
- let out = output(git_submodule().arg("status"));
- let mut submodules = vec![];
- for line in out.lines() {
- // NOTE `git submodule status` output looks like this:
- //
- // -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
- // +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
- // e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
- //
- // The first character can be '-', '+' or ' ' and denotes the `State` of the submodule
- // Right next to this character is the SHA-1 of the submodule HEAD
- // And after that comes the path to the submodule
- let path = Path::new(line[1..].split(' ').skip(1).next().unwrap());
- let state = if line.starts_with('-') {
- State::NotInitialized
- } else if line.starts_with('+') {
- State::OutOfSync
- } else if line.starts_with(' ') {
- State::MaybeDirty
- } else {
- panic!("unexpected git submodule state: {:?}", line.chars().next());
- };
-
- submodules.push(Submodule { path: path, state: state })
- }
-
- self.run(git_submodule().arg("sync"));
-
- for submodule in submodules {
- // If using llvm-root then don't touch the llvm submodule.
- if submodule.path.components().any(|c| c == Component::Normal("llvm".as_ref())) &&
- self.config.target_config.get(&self.config.build)
- .and_then(|c| c.llvm_config.as_ref()).is_some()
- {
- continue
- }
-
- if submodule.path.components().any(|c| c == Component::Normal("jemalloc".as_ref())) &&
- !self.config.use_jemalloc
- {
- continue
- }
-
- // `submodule.path` is the relative path to a submodule (from the repository root)
- // `submodule_path` is the path to a submodule from the cwd
-
- // use `submodule.path` when e.g. executing a submodule specific command from the
- // repository root
- // use `submodule_path` when e.g. executing a normal git command for the submodule
- // (set via `current_dir`)
- let submodule_path = self.src.join(submodule.path);
-
- match submodule.state {
- State::MaybeDirty => {
- // drop staged changes
- self.run(git().current_dir(&submodule_path)
- .args(&["reset", "--hard"]));
- // drops unstaged changes
- self.run(git().current_dir(&submodule_path)
- .args(&["clean", "-fdx"]));
- },
- State::NotInitialized => {
- self.run(git_submodule().arg("init").arg(submodule.path));
- self.run(git_submodule().arg("update").arg(submodule.path));
- },
- State::OutOfSync => {
- // drops submodule commits that weren't reported to the (outer) git repository
- self.run(git_submodule().arg("update").arg(submodule.path));
- self.run(git().current_dir(&submodule_path)
- .args(&["reset", "--hard"]));
- self.run(git().current_dir(&submodule_path)
- .args(&["clean", "-fdx"]));
- },
- }
- }
- }
-
/// Clear out `dir` if `input` is newer.
///
/// After this executes, it will also ensure that `dir` exists.
.env("RUSTDOC_REAL", self.rustdoc(compiler))
.env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
- // Tools don't get debuginfo right now, e.g. cargo and rls don't get
- // compiled with debuginfo.
if mode != Mode::Tool {
- cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
- .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string());
+ // Tools don't get debuginfo right now, e.g. cargo and rls don't
+ // get compiled with debuginfo.
+ cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
+ .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
+ .env("RUSTC_FORCE_UNSTABLE", "1");
+
+ // Currently the compiler depends on crates from crates.io, and
+ // then other crates can depend on the compiler (e.g. proc-macro
+ // crates). Let's say, for example that rustc itself depends on the
+ // bitflags crate. If an external crate then depends on the
+ // bitflags crate as well, we need to make sure they don't
+ // conflict, even if they pick the same version of bitflags. We'll
+ // want to make sure that e.g. a plugin and rustc each get their
+ // own copy of bitflags.
+
+ // Cargo ensures that this works in general through the -C metadata
+ // flag. This flag will frob the symbols in the binary to make sure
+ // they're different, even though the source code is the exact
+ // same. To solve this problem for the compiler we extend Cargo's
+ // already-passed -C metadata flag with our own. Our rustc.rs
+ // wrapper around the actual rustc will detect -C metadata being
+ // passed and frob it with this extra string we're passing in.
+ cargo.env("RUSTC_METADATA_SUFFIX", "rustc");
}
// Enable usage of unstable features
// the compiler, libs, and tests are stable and we don't want to make
// their deps unstable (since this would break the first invariant
// above).
- if mode != Mode::Tool {
+ //
+ // FIXME: remove this after next stage0
+ if mode != Mode::Tool && stage == 0 {
cargo.env("RUSTBUILD_UNSTABLE", "1");
}
cfg.define("LLVM_USE_CRT_DEBUG", "MT");
cfg.define("LLVM_USE_CRT_RELEASE", "MT");
cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT");
+ cfg.static_crt(true);
}
if target.starts_with("i686") {
configure.arg("no-ssl3");
let os = match target {
+ "aarch64-linux-android" => "linux-aarch64",
"aarch64-unknown-linux-gnu" => "linux-aarch64",
+ "arm-linux-androideabi" => "android",
"arm-unknown-linux-gnueabi" => "linux-armv4",
"arm-unknown-linux-gnueabihf" => "linux-armv4",
+ "armv7-linux-androideabi" => "android-armv7",
"armv7-unknown-linux-gnueabihf" => "linux-armv4",
"i686-apple-darwin" => "darwin-i386-cc",
+ "i686-linux-android" => "android-x86",
"i686-unknown-freebsd" => "BSD-x86-elf",
"i686-unknown-linux-gnu" => "linux-elf",
"i686-unknown-linux-musl" => "linux-elf",
"powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
"s390x-unknown-linux-gnu" => "linux64-s390x",
"x86_64-apple-darwin" => "darwin64-x86_64-cc",
+ "x86_64-linux-android" => "linux-x86_64",
"x86_64-unknown-freebsd" => "BSD-x86_64",
"x86_64-unknown-linux-gnu" => "linux-x86_64",
"x86_64-unknown-linux-musl" => "linux-x86_64",
for flag in build.cflags(target) {
configure.arg(flag);
}
+ // There is no specific os target for android aarch64 or x86_64,
+ // so we need to pass some extra cflags
+ if target == "aarch64-linux-android" || target == "x86_64-linux-android" {
+ configure.arg("-mandroid");
+ configure.arg("-fomit-frame-pointer");
+ }
+ // Make PIE binaries
+ // Non-PIE linker support was removed in Lollipop
+ // https://source.android.com/security/enhancements/enhancements50
+ if target == "i686-linux-android" {
+ configure.arg("no-asm");
+ }
configure.current_dir(&obj);
println!("Configuring openssl for {}", target);
build.run_quiet(&mut configure);
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
+ rules.build("tool-rust-installer", "src/tools/rust-installer")
+ .dep(|s| s.name("maybe-clean-tools"))
+ .dep(|s| s.name("libstd-tool"))
+ .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
rules.build("tool-cargo", "src/tools/cargo")
.host(true)
.default(build.config.extended)
.host(true)
.only_host_build(true)
.default(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::rustc(build, s.stage, s.target));
rules.dist("dist-std", "src/libstd")
.dep(move |s| {
})
.default(true)
.only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::std(build, &s.compiler(), s.target));
rules.dist("dist-mingw", "path/to/nowhere")
.default(true)
.only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| {
if s.target.contains("pc-windows-gnu") {
dist::mingw(build, s.target)
.host(true)
.only_build(true)
.only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |_| dist::rust_src(build));
rules.dist("dist-docs", "src/doc")
.default(true)
.only_host_build(true)
.dep(|s| s.name("default:doc"))
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::docs(build, s.stage, s.target));
rules.dist("dist-analysis", "analysis")
.default(build.config.extended)
.dep(|s| s.name("dist-std"))
.only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::analysis(build, &s.compiler(), s.target));
rules.dist("dist-rls", "rls")
.host(true)
.only_host_build(true)
.dep(|s| s.name("tool-rls"))
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::rls(build, s.stage, s.target));
rules.dist("install", "path/to/nowhere")
.dep(|s| s.name("default:dist"))
.host(true)
.only_host_build(true)
.dep(|s| s.name("tool-cargo"))
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::cargo(build, s.stage, s.target));
rules.dist("dist-extended", "extended")
.default(build.config.extended)
.dep(|d| d.name("dist-cargo"))
.dep(|d| d.name("dist-rls"))
.dep(|d| d.name("dist-analysis"))
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::extended(build, s.stage, s.target));
rules.dist("dist-sign", "hash-and-sign")
rules.verify();
return rules;
+
+ /// Helper to depend on a stage0 build-only rust-installer tool.
+ fn tool_rust_installer<'a>(build: &'a Build, step: &Step<'a>) -> Step<'a> {
+ step.name("tool-rust-installer")
+ .host(&build.config.build)
+ .target(&build.config.build)
+ .stage(0)
+ }
}
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
--- /dev/null
+#!/bin/sh
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+URL=https://dl.google.com/android/repository
+
+download_ndk() {
+ mkdir -p /android/ndk
+ cd /android/ndk
+ curl -O $URL/$1
+ unzip -q $1
+ rm $1
+ mv android-ndk-* ndk
+}
+
+make_standalone_toolchain() {
+ # See https://developer.android.com/ndk/guides/standalone_toolchain.html
+ python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
+ --install-dir /android/ndk/$1-$2 \
+ --arch $1 \
+ --api $2
+}
+
+remove_ndk() {
+ rm -rf /android/ndk/ndk
+}
FROM ubuntu:16.04
-RUN dpkg --add-architecture i386 && \
- apt-get update && \
+RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
git \
cmake \
unzip \
- expect \
- openjdk-9-jre-headless \
sudo \
- libstdc++6:i386 \
xz-utils \
libssl-dev \
pkg-config
-WORKDIR /android/
-ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools
-
-COPY install-ndk.sh install-sdk.sh accept-licenses.sh /android/
-RUN sh /android/install-ndk.sh
-RUN sh /android/install-sdk.sh
-
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
-COPY start-emulator.sh /android/
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
-ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
+# Install NDK
+COPY install-ndk.sh /tmp
+RUN . /tmp/install-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain arm 9 && \
+ remove_ndk
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-24-sccache-x86_64-unknown-linux-gnu && \
- chmod +x /usr/local/bin/sccache
+# Install SDK
+RUN dpkg --add-architecture i386 && \
+ apt-get update && \
+ apt-get install -y --no-install-recommends \
+ openjdk-9-jre-headless \
+ tzdata \
+ libstdc++6:i386 \
+ libgl1-mesa-glx \
+ libpulse0
+
+COPY install-sdk.sh /tmp
+RUN . /tmp/install-sdk.sh && \
+ download_sdk tools_r25.2.5-linux.zip && \
+ download_sysimage armeabi-v7a 18 && \
+ create_avd armeabi-v7a 18
+
+# Setup env
+ENV PATH=$PATH:/android/sdk/tools
+ENV PATH=$PATH:/android/sdk/platform-tools
+
+ENV TARGETS=arm-linux-androideabi
ENV RUST_CONFIGURE_ARGS \
- --target=arm-linux-androideabi \
- --arm-linux-androideabi-ndk=/android/ndk-arm-9
+ --target=$TARGETS \
+ --arm-linux-androideabi-ndk=/android/ndk/arm-9
+
+ENV SCRIPT python2.7 ../x.py test --target $TARGETS --verbose
-ENV SCRIPT python2.7 ../x.py test --target arm-linux-androideabi
+# Entrypoint
+COPY start-emulator.sh /android/
+ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
+++ /dev/null
-#!/usr/bin/expect -f
-# ignore-license
-
-set timeout 1800
-set cmd [lindex $argv 0]
-set licenses [lindex $argv 1]
-
-spawn {*}$cmd
-expect {
- "Do you accept the license '*'*" {
- exp_send "y\r"
- exp_continue
- }
- eof
-}
set -ex
-cpgdb() {
- cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb /android/$1/bin/$2-gdb
- cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb-orig /android/$1/bin/gdb-orig
- cp -r android-ndk-r11c/prebuilt/linux-x86_64/share /android/$1/share
+URL=https://dl.google.com/android/repository
+
+download_ndk() {
+ mkdir -p /android/ndk
+ cd /android/ndk
+ curl -O $URL/$1
+ unzip -q $1
+ rm $1
+ mv android-ndk-* ndk
}
-# Prep the Android NDK
-#
-# See https://github.com/servo/servo/wiki/Building-for-Android
-curl -O https://dl.google.com/android/repository/android-ndk-r11c-linux-x86_64.zip
-unzip -q android-ndk-r11c-linux-x86_64.zip
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-9 \
- --toolchain=arm-linux-androideabi-4.9 \
- --install-dir=/android/ndk-arm-9 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=arm
-cpgdb ndk-arm-9 arm-linux-androideabi
+make_standalone_toolchain() {
+ # See https://developer.android.com/ndk/guides/standalone_toolchain.html
+ python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
+ --install-dir /android/ndk/$1-$2 \
+ --arch $1 \
+ --api $2
+}
-rm -rf ./android-ndk-r11c-linux-x86_64.zip ./android-ndk-r11c
+remove_ndk() {
+ rm -rf /android/ndk/ndk
+}
#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
set -ex
-# Prep the SDK and emulator
-#
-# Note that the update process requires that we accept a bunch of licenses, and
-# we can't just pipe `yes` into it for some reason, so we take the same strategy
-# located in https://github.com/appunite/docker by just wrapping it in a script
-# which apparently magically accepts the licenses.
+URL=https://dl.google.com/android/repository
+
+download_sdk() {
+ mkdir -p /android/sdk
+ cd /android/sdk
+ curl -O $URL/$1
+ unzip -q $1
+ rm -rf $1
+}
+
+download_sysimage() {
+ # See https://developer.android.com/studio/tools/help/android.html
+ abi=$1
+ api=$2
+
+ filter="platform-tools,android-$api"
+ filter="$filter,sys-img-$abi-android-$api"
-mkdir sdk
-curl https://dl.google.com/android/android-sdk_r24.4-linux.tgz | \
- tar xzf - -C sdk --strip-components=1
+ # Keep printing yes to accept the licenses
+ while true; do echo yes; sleep 10; done | \
+ /android/sdk/tools/android update sdk -a --no-ui \
+ --filter "$filter"
+}
-filter="platform-tools,android-18"
-filter="$filter,sys-img-armeabi-v7a-android-18"
+create_avd() {
+ # See https://developer.android.com/studio/tools/help/android.html
+ abi=$1
+ api=$2
-./accept-licenses.sh "android - update sdk -a --no-ui --filter $filter"
+ echo no | \
+ /android/sdk/tools/android create avd \
+ --name $abi-$api \
+ --target android-$api \
+ --abi $abi
+}
-echo "no" | android create avd \
- --name arm-18 \
- --target android-18 \
- --abi armeabi-v7a
# Setting SHELL to a file instead on a symlink helps android
# emulator identify the system
export SHELL=/bin/bash
-nohup nohup emulator @arm-18 -no-window -partition-size 2047 0<&- &>/dev/null &
+
+# Using the default qemu2 engine makes time::tests::since_epoch fail because
+# the emulator date is set to the unix epoch (in the armeabi-v7a-18 image). Using
+# the classic engine the emulator starts with the current date and the tests run
+# fine. If another image is used, this needs to be evaluated again.
+nohup nohup emulator @armeabi-v7a-18 \
+ -engine classic -no-window -partition-size 2047 0<&- &>/dev/null &
+
exec "$@"
RUN curl -O http://ftp.nl.debian.org/debian/dists/jessie/main/installer-armhf/current/images/device-tree/vexpress-v2p-ca15-tc1.dtb
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ unzip \
+ sudo \
+ xz-utils \
+ libssl-dev \
+ pkg-config
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+COPY android-ndk.sh /
+RUN . /android-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain arm64 21 && \
+ remove_ndk
+
+ENV PATH=$PATH:/android/ndk/arm64-21/bin
+
+ENV DEP_Z_ROOT=/android/ndk/arm64-21/sysroot/usr/
+
+ENV HOSTS=aarch64-linux-android
+
+ENV RUST_CONFIGURE_ARGS \
+ --host=$HOSTS \
+ --target=$HOSTS \
+ --aarch64-linux-android-ndk=/android/ndk/arm64-21 \
+ --disable-rpath \
+ --enable-extended \
+ --enable-cargo-openssl-static
+
+ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ unzip \
+ sudo \
+ xz-utils \
+ libssl-dev \
+ pkg-config
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+COPY android-ndk.sh /
+RUN . /android-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain arm 9 && \
+ make_standalone_toolchain arm 21 && \
+ remove_ndk
+
+ENV PATH=$PATH:/android/ndk/arm-9/bin
+
+ENV DEP_Z_ROOT=/android/ndk/arm-9/sysroot/usr/
+
+ENV HOSTS=armv7-linux-androideabi
+
+ENV RUST_CONFIGURE_ARGS \
+ --host=$HOSTS \
+ --target=$HOSTS \
+ --armv7-linux-androideabi-ndk=/android/ndk/arm \
+ --disable-rpath \
+ --enable-extended \
+ --enable-cargo-openssl-static
+
+# We support api level 9, but api level 21 is required to build llvm. To
+# overcome this problem we use an NDK with api level 21 to build llvm and then
+# switch to an NDK with api level 9 to complete the build. When the linker is
+# invoked there are missing symbols (like sigemptyset, not available with api
+# level 9); the default linker behavior is to generate an error. To allow the
+# build to finish we use --warn-unresolved-symbols. Note that the missing
+# symbols do not affect std, only the compiler (llvm) and cargo (openssl).
+RUN chmod 777 /android/ndk && \
+ ln -s /android/ndk/arm-21 /android/ndk/arm
+
+ENV SCRIPT \
+ python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
+ (export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
+ rm /android/ndk/arm && \
+ ln -s /android/ndk/arm-9 /android/ndk/arm && \
+ python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ unzip \
+ sudo \
+ xz-utils \
+ libssl-dev \
+ pkg-config
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+COPY android-ndk.sh /
+RUN . /android-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain x86 9 && \
+ make_standalone_toolchain x86 21 && \
+ remove_ndk
+
+ENV PATH=$PATH:/android/ndk/x86-9/bin
+
+ENV DEP_Z_ROOT=/android/ndk/x86-9/sysroot/usr/
+
+ENV HOSTS=i686-linux-android
+
+ENV RUST_CONFIGURE_ARGS \
+ --host=$HOSTS \
+ --target=$HOSTS \
+ --i686-linux-android-ndk=/android/ndk/x86 \
+ --disable-rpath \
+ --enable-extended \
+ --enable-cargo-openssl-static
+
+# We support api level 9, but api level 21 is required to build llvm. To
+# overcome this problem we use an NDK with api level 21 to build llvm and then
+# switch to an NDK with api level 9 to complete the build. When the linker is
+# invoked there are missing symbols (like sigemptyset, not available with api
+# level 9); the default linker behavior is to generate an error. To allow the
+# build to finish we use --warn-unresolved-symbols. Note that the missing
+# symbols do not affect std, only the compiler (llvm) and cargo (openssl).
+RUN chmod 777 /android/ndk && \
+ ln -s /android/ndk/x86-21 /android/ndk/x86
+
+ENV SCRIPT \
+ python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
+ (export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
+ rm /android/ndk/x86 && \
+ ln -s /android/ndk/x86-9 /android/ndk/x86 && \
+ python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ unzip \
+ sudo \
+ xz-utils \
+ libssl-dev \
+ pkg-config
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+COPY android-ndk.sh /
+RUN . /android-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain x86_64 21 && \
+ remove_ndk
+
+ENV PATH=$PATH:/android/ndk/x86_64-21/bin
+
+ENV DEP_Z_ROOT=/android/ndk/x86_64-21/sysroot/usr/
+
+ENV HOSTS=x86_64-linux-android
+
+ENV RUST_CONFIGURE_ARGS \
+ --host=$HOSTS \
+ --target=$HOSTS \
+ --x86_64-linux-android-ndk=/android/ndk/x86_64-21 \
+ --disable-rpath \
+ --enable-extended \
+ --enable-cargo-openssl-static
+
+ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/aarch64-unknown-linux-gnueabi/bin
FROM ubuntu:16.04
-RUN dpkg --add-architecture i386 && \
- apt-get update && \
+RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
git \
cmake \
unzip \
- expect \
- openjdk-9-jre \
sudo \
- libstdc++6:i386 \
xz-utils \
libssl-dev \
pkg-config
-WORKDIR /android/
-ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools
-
-COPY install-ndk.sh /android/
-RUN sh /android/install-ndk.sh
-
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+# Install NDK
+COPY install-ndk.sh /tmp
+RUN . /tmp/install-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain arm 9 && \
+ make_standalone_toolchain x86 9 && \
+ make_standalone_toolchain arm64 21 && \
+ make_standalone_toolchain x86_64 21 && \
+ remove_ndk
ENV TARGETS=arm-linux-androideabi
ENV TARGETS=$TARGETS,armv7-linux-androideabi
ENV RUST_CONFIGURE_ARGS \
--target=$TARGETS \
--enable-extended \
- --arm-linux-androideabi-ndk=/android/ndk-arm-9 \
- --armv7-linux-androideabi-ndk=/android/ndk-arm-9 \
- --i686-linux-android-ndk=/android/ndk-x86-9 \
- --aarch64-linux-android-ndk=/android/ndk-arm64-21 \
- --x86_64-linux-android-ndk=/android/ndk-x86_64-21
+ --arm-linux-androideabi-ndk=/android/ndk/arm-9 \
+ --armv7-linux-androideabi-ndk=/android/ndk/arm-9 \
+ --i686-linux-android-ndk=/android/ndk/x86-9 \
+ --aarch64-linux-android-ndk=/android/ndk/arm64-21 \
+ --x86_64-linux-android-ndk=/android/ndk/x86_64-21
ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
set -ex
-# Prep the Android NDK
-#
-# See https://github.com/servo/servo/wiki/Building-for-Android
-curl -O https://dl.google.com/android/repository/android-ndk-r11c-linux-x86_64.zip
-unzip -q android-ndk-r11c-linux-x86_64.zip
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-9 \
- --toolchain=arm-linux-androideabi-4.9 \
- --install-dir=/android/ndk-arm-9 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=arm
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-21 \
- --toolchain=aarch64-linux-android-4.9 \
- --install-dir=/android/ndk-arm64-21 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=arm64
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-9 \
- --toolchain=x86-4.9 \
- --install-dir=/android/ndk-x86-9 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=x86
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-21 \
- --toolchain=x86_64-4.9 \
- --install-dir=/android/ndk-x86_64-21 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=x86_64
+URL=https://dl.google.com/android/repository
+
+download_ndk() {
+ mkdir -p /android/ndk
+ cd /android/ndk
+ curl -O $URL/$1
+ unzip -q $1
+ rm $1
+ mv android-ndk-* ndk
+}
+
+make_standalone_toolchain() {
+ # See https://developer.android.com/ndk/guides/standalone_toolchain.html
+ python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
+ --install-dir /android/ndk/$1-$2 \
+ --arch $1 \
+ --api $2
+}
-rm -rf ./android-ndk-r11c-linux-x86_64.zip ./android-ndk-r11c
+remove_ndk() {
+ rm -rf /android/ndk/ndk
+}
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabihf/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/armv7-unknown-linux-gnueabihf/bin
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV RUST_CONFIGURE_ARGS \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/rustroot/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV HOSTS=i686-unknown-linux-gnu
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/powerpc-unknown-linux-gnu/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/powerpc64-unknown-linux-gnu/bin
RUN ./build-powerpc64le-toolchain.sh
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/s390x-ibm-linux-gnu/bin
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/rustroot/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV HOSTS=x86_64-unknown-linux-gnu
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV RUST_CONFIGURE_ARGS \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/x86_64-unknown-netbsd/bin
lib32stdc++6
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
source "$ci_dir/shared.sh"
-retry docker \
- build \
- --rm \
- -t rust-ci \
- "`dirname "$script"`/$image"
+if [ -f "$docker_dir/$image/Dockerfile" ]; then
+ retry docker \
+ build \
+ --rm \
+ -t rust-ci \
+ "$docker_dir/$image"
+elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then
+ if [ -n "$TRAVIS_OS_NAME" ]; then
+ echo Cannot run disabled images on travis!
+ exit 1
+ fi
+ retry docker \
+ build \
+ --rm \
+ -t rust-ci \
+ -f "$docker_dir/disabled/$image/Dockerfile" \
+ "$docker_dir"
+else
+ echo Invalid image: $image
+ exit 1
+fi
objdir=$root_dir/obj
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-Subproject commit ad7de198561b3a12217ea2da76d796d9c7fc0ed3
+Subproject commit 97422981c53a00f7c3d6584d363443117f179fff
-Subproject commit 6b0de90d87dda15e323ef24cdf7ed873ac5cf4d3
+Subproject commit f7a108dfa9e90b07821700c55d01f08a9adf005c
- [peek](library-features/peek.md)
- [placement_in](library-features/placement-in.md)
- [placement_new_protocol](library-features/placement-new-protocol.md)
- - [print](library-features/print.md)
+ - [print_internals](library-features/print-internals.md)
- [proc_macro_internals](library-features/proc-macro-internals.md)
- [process_try_wait](library-features/process-try-wait.md)
- [question_mark_carrier](library-features/question-mark-carrier.md)
- [unique](library-features/unique.md)
- [unsize](library-features/unsize.md)
- [utf8_error_error_len](library-features/utf8-error-error-len.md)
+ - [vec_resize_default](library-features/vec-resize-default.md)
- [vec_remove_item](library-features/vec-remove-item.md)
- [windows_c](library-features/windows-c.md)
- [windows_handle](library-features/windows-handle.md)
[#23121]: https://github.com/rust-lang/rust/issues/23121
-See also [`slice_patterns`](slice-patterns.html).
+See also [`slice_patterns`](language-features/slice-patterns.html).
------------------------
[llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions
If you need more power and don't mind losing some of the niceties of
-`asm!`, check out [global_asm](global_asm.html).
+`asm!`, check out [global_asm](language-features/global_asm.html).
[#29641]: https://github.com/rust-lang/rust/issues/29641
-See also [`box_syntax`](box-syntax.html)
+See also [`box_syntax`](language-features/box-syntax.html)
------------------------
[#27779]: https://github.com/rust-lang/rust/issues/27779
-See also [`box_patterns`](box-patterns.html)
+See also [`box_patterns`](language-features/box-patterns.html)
------------------------
If you don't need quite as much power and flexibility as
`global_asm!` provides, and you don't mind restricting your inline
-assembly to `fn` bodies only, you might try the [asm](asm.html)
-feature instead.
+assembly to `fn` bodies only, you might try the
+[asm](language-features/asm.html) feature instead.
[#37339]: https://github.com/rust-lang/rust/issues/37339
+Documentation to be appended to section G of the book.
+
------------------------
+### Loops as expressions
+
+Like most things in Rust, loops are expressions, and have a value; normally `()` unless the loop
+never exits.
+A `loop` can instead evaluate to a useful value via *break with value*:
+
+```rust
+#![feature(loop_break_value)]
+
+// Find the first square number over 1000:
+let mut n = 1;
+let square = loop {
+ if n * n > 1000 {
+ break n * n;
+ }
+ n += 1;
+};
+```
+
+The evaluation type may be specified externally:
+
+```rust
+#![feature(loop_break_value)]
+
+// Declare that value returned is unsigned 64-bit:
+let n: u64 = loop {
+ break 1;
+};
+```
+
+It is an error if types do not agree, either between a "break" value and an external requirement,
+or between multiple "break" values:
+
+```no_compile
+#![feature(loop_break_value)]
+
+loop {
+ if true {
+ break 1u32;
+ } else {
+ break 0u8; // error: types do not agree
+ }
+};
+
+let n: i32 = loop {
+ break 0u32; // error: type does not agree with external requirement
+};
+```
+
+#### Break: label, value
+
+Four forms of `break` are available, where EXPR is some expression which evaluates to a value:
+
+1. `break;`
+2. `break 'label;`
+3. `break EXPR;`
+4. `break 'label EXPR;`
+
+When no value is given, the value `()` is assumed, thus `break;` is equivalent to `break ();`.
+
+Using a label allows returning a value from an inner loop:
+```rust
+#![feature(loop_break_value)]
+let result = 'outer: loop {
+ for n in 1..10 {
+ if n > 4 {
+ break 'outer n;
+ }
+ }
+};
+```
[`plugin`] and `rustc_private` features as well. For more details, see
their docs.
-[`plugin`]: plugin.html
+[`plugin`]: language-features/plugin.html
------------------------
This feature is part of "compiler plugins." It will often be used with the
[`plugin_registrar`] and `rustc_private` features.
-[`plugin_registrar`]: plugin-registrar.html
+[`plugin_registrar`]: language-features/plugin-registrar.html
------------------------
------------------------
+This feature flag guards the new procedural macro features as laid out by [RFC 1566], which alongside the now-stable
+[custom derives], provide stabilizable alternatives to the compiler plugin API (which requires the use of
+perma-unstable internal APIs) for programmatically modifying Rust code at compile-time.
+The two new procedural macro kinds are:
+
+* Function-like procedural macros which are invoked like regular declarative macros, and:
+* Attribute-like procedural macros which can be applied to any item which built-in attributes can
+be applied to, and which can take arguments in their invocation as well.
+
+Additionally, this feature flag implicitly enables the [`use_extern_macros`](language-features/use-extern-macros.html) feature,
+which allows macros to be imported like any other item with `use` statements, as compared to
+applying `#[macro_use]` to an `extern crate` declaration. It is important to note that procedural macros may
+**only** be imported in this manner, and will throw an error otherwise.
+
+You **must** declare the `proc_macro` feature in both the crate declaring these new procedural macro kinds as well as
+in any crates that use them.
+
+### Common Concepts
+
+As with custom derives, procedural macros may only be declared in crates of the `proc-macro` type, and must be public
+functions. No other public items may be declared in `proc-macro` crates, but private items are fine.
+
+To declare your crate as a `proc-macro` crate, simply add:
+
+```toml
+[lib]
+proc-macro = true
+```
+
+to your `Cargo.toml`.
+
+Unlike custom derives, however, the name of the function implementing the procedural macro is used directly as the
+procedural macro's name, so choose carefully.
+
+Additionally, both new kinds of procedural macros return a `TokenStream` which *wholly* replaces the original
+invocation and its input.
+
+#### Importing
+
+As referenced above, the new procedural macros are not meant to be imported via `#[macro_use]` and will throw an
+error if they are. Instead, they are meant to be imported like any other item in Rust, with `use` statements:
+
+```rust,ignore
+#![feature(proc_macro)]
+
+// Where `my_proc_macros` is some crate of type `proc-macro`
+extern crate my_proc_macros;
+
+// And declares a `#[proc_macro] pub fn my_bang_macro()` at its root.
+use my_proc_macros::my_bang_macro;
+
+fn main() {
+ println!("{}", my_bang_macro!());
+}
+```
+
+#### Error Reporting
+
+Any panics in a procedural macro implementation will be caught by the compiler and turned into an error message pointing
+to the problematic invocation. Thus, it is important to make your panic messages as informative as possible: use
+`Option::expect` instead of `Option::unwrap` and `Result::expect` instead of `Result::unwrap`, and inform the user of
+the error condition as unambiguously as you can.
+
+#### `TokenStream`
+
+The `proc_macro::TokenStream` type is hardcoded into the signatures of procedural macro functions for both input and
+output. It is a wrapper around the compiler's internal representation for a given chunk of Rust code.
+
+### Function-like Procedural Macros
+
+These are procedural macros that are invoked like regular declarative macros. They are declared as public functions in
+crates of the `proc-macro` type and using the `#[proc_macro]` attribute. The name of the declared function becomes the
+name of the macro as it is to be imported and used. The function must be of the kind `fn(TokenStream) -> TokenStream`
+where the sole argument is the input to the macro and the return type is the macro's output.
+
+This kind of macro can expand to anything that is valid for the context it is invoked in, including expressions and
+statements, as well as items.
+
+**Note**: invocations of this kind of macro require a wrapping `[]`, `{}` or `()` like regular macros, but these do not
+appear in the input, only the tokens between them. The tokens between the braces do not need to be valid Rust syntax.
+
+<span class="filename">my_macro_crate/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+// This is always necessary to get the `TokenStream` typedef.
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro]
+pub fn say_hello(_input: TokenStream) -> TokenStream {
+ // This macro will accept any input because it ignores it.
+ // To enforce correctness in macros which don't take input,
+ // you may want to add `assert!(_input.to_string().is_empty());`.
+ "println!(\"Hello, world!\")".parse().unwrap()
+}
+```
+
+<span class="filename">my_macro_user/Cargo.toml</span>
+
+```toml
+[dependencies]
+my_macro_crate = { path = "<relative path to my_macro_crate>" }
+```
+
+<span class="filename">my_macro_user/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate my_macro_crate;
+
+use my_macro_crate::say_hello;
+
+fn main() {
+ say_hello!();
+}
+```
+
+As expected, this prints `Hello, world!`.
+
+### Attribute-like Procedural Macros
+
+These are arguably the most powerful flavor of procedural macro as they can be applied anywhere attributes are allowed.
+
+They are declared as public functions in crates of the `proc-macro` type, using the `#[proc_macro_attribute]` attribute.
+The name of the function becomes the name of the attribute as it is to be imported and used. The function must be of the
+kind `fn(TokenStream, TokenStream) -> TokenStream` where:
+
+The first argument represents any metadata for the attribute (see [the reference chapter on attributes][refr-attr]).
+Only the metadata itself will appear in this argument, for example:
+
+ * `#[my_macro]` will get an empty string.
+ * `#[my_macro = "string"]` will get `= "string"`.
+ * `#[my_macro(ident)]` will get `(ident)`.
+ * etc.
+
+The second argument is the item that the attribute is applied to. It can be a function, a type definition,
+an impl block, an `extern` block, or a module—attribute invocations can take the inner form (`#![my_attr]`)
+or outer form (`#[my_attr]`).
+
+The return type is the output of the macro which *wholly* replaces the item it was applied to. Thus, if your intention
+is to merely modify an item, it *must* be copied to the output. The output must be an item; expressions, statements
+and bare blocks are not allowed.
+
+There is no restriction on how many items an attribute-like procedural macro can emit as long as they are valid in
+the given context.
+
+<span class="filename">my_macro_crate/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+/// Adds a `/// ### Panics` docstring to the end of the input's documentation
+///
+/// Does not assert that its receiver is a function or method.
+#[proc_macro_attribute]
+pub fn panics_note(args: TokenStream, input: TokenStream) -> TokenStream {
+ let args = args.to_string();
+ let mut input = input.to_string();
+
+ assert!(args.starts_with("= \""), "`#[panics_note]` requires an argument of the form \
+ `#[panics_note = \"panic note here\"]`");
+
+ // Get just the bare note string
+ let panics_note = args.trim_matches(&['=', ' ', '"'][..]);
+
+ // The input will include all docstrings regardless of where the attribute is placed,
+ // so we need to find the last index before the start of the item
+ let insert_idx = idx_after_last_docstring(&input);
+
+ // And insert our `### Panics` note there so it always appears at the end of an item's docs
+ input.insert_str(insert_idx, &format!("/// # Panics \n/// {}\n", panics_note));
+
+ input.parse().unwrap()
+}
+
+// `proc-macro` crates can contain any kind of private item still
+fn idx_after_last_docstring(input: &str) -> usize {
+ // Skip docstring lines to find the start of the item proper
+ input.lines().skip_while(|line| line.trim_left().starts_with("///")).next()
+ // Find the index of the first non-docstring line in the input
+ // Note: assumes this exact line is unique in the input
+ .and_then(|line_after| input.find(line_after))
+ // No docstrings in the input
+ .unwrap_or(0)
+}
+```
+
+<span class="filename">my_macro_user/Cargo.toml</span>
+
+```toml
+[dependencies]
+my_macro_crate = { path = "<relative path to my_macro_crate>" }
+```
+
+<span class="filename">my_macro_user/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate my_macro_crate;
+
+use my_macro_crate::panics_note;
+
+/// Do the `foo` thing.
+#[panics_note = "Always."]
+pub fn foo() {
+ panic!()
+}
+```
+
+Then the rendered documentation for `pub fn foo` will look like this:
+
+> `pub fn foo()`
+>
+> ----
+> Do the `foo` thing.
+> # Panics
+> Always.
+
+[RFC 1566]: https://github.com/rust-lang/rfcs/blob/master/text/1566-proc-macros.md
+[custom derives]: https://doc.rust-lang.org/book/procedural-macros.html
+[rust-lang/rust#41430]: https://github.com/rust-lang/rust/issues/41430
+[refr-attr]: https://doc.rust-lang.org/reference/attributes.html
[#23121]: https://github.com/rust-lang/rust/issues/23121
-See also [`advanced_slice_patterns`](advanced-slice-patterns.html).
+See also
+[`advanced_slice_patterns`](language-features/advanced-slice-patterns.html).
------------------------
[#33082]: https://github.com/rust-lang/rust/issues/33082
-See also [`alloc_system`](alloc-system.html).
+See also [`alloc_system`](library-features/alloc-system.html).
------------------------
[#33082]: https://github.com/rust-lang/rust/issues/33082
-See also [`alloc_jemalloc`](alloc-jemalloc.html).
+See also [`alloc_jemalloc`](library-features/alloc-jemalloc.html).
------------------------
--- /dev/null
+# `print_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+++ /dev/null
-# `print`
-
-This feature is internal to the Rust compiler and is not intended for general use.
-
-------------------------
--- /dev/null
+# `vec_resize_default`
+
+The tracking issue for this feature is: [#41758]
+
+[#41758]: https://github.com/rust-lang/rust/issues/41758
+
+------------------------
-Subproject commit 11bfb0dcf85f7aa92abd30524bb1e42e18d108c6
+Subproject commit 3288e0659c08fb5006f6d6dd4b5675ed0c2c432a
.env("AR", &ar)
.env("RANLIB", format!("{} s", ar.display()));
- if target.contains("windows") {
- // A bit of history here, this used to be --enable-lazy-lock added in
- // #14006 which was filed with jemalloc in jemalloc/jemalloc#83 which
- // was also reported to MinGW:
- //
- // http://sourceforge.net/p/mingw-w64/bugs/395/
- //
- // When updating jemalloc to 4.0, however, it was found that binaries
- // would exit with the status code STATUS_RESOURCE_NOT_OWNED indicating
- // that a thread was unlocking a mutex it never locked. Disabling this
- // "lazy lock" option seems to fix the issue, but it was enabled by
- // default for MinGW targets in 13473c7 for jemalloc.
- //
- // As a result of all that, force disabling lazy lock on Windows, and
- // after reading some code it at least *appears* that the initialization
- // of mutexes is otherwise ok in jemalloc, so shouldn't cause problems
- // hopefully...
- //
- // tl;dr: make windows behave like other platforms by disabling lazy
- // locking, but requires passing an option due to a historical
- // default with jemalloc.
- cmd.arg("--disable-lazy-lock");
- } else if target.contains("ios") {
+ if target.contains("ios") {
cmd.arg("--disable-tls");
} else if target.contains("android") {
// We force android to have prefixed symbols because apparently
//! objects of a single type.
#![crate_name = "arena"]
-#![unstable(feature = "rustc_private", issue = "27812")]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(generic_param_attrs)]
-#![feature(staged_api)]
+#![cfg_attr(stage0, feature(staged_api))]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]
#[unstable(feature = "fused", issue = "35602")]
impl<'a> FusedIterator for EncodeUtf16<'a> {}
-// Return the initial codepoint accumulator for the first byte.
-// The first byte is special, only want bottom 5 bits for width 2, 4 bits
-// for width 3, and 3 bits for width 4
-macro_rules! utf8_first_byte {
- ($byte:expr, $width:expr) => (($byte & (0x7F >> $width)) as u32)
-}
-
-// return the value of $ch updated with continuation byte $byte
-macro_rules! utf8_acc_cont_byte {
- ($ch:expr, $byte:expr) => (($ch << 6) | ($byte & 63) as u32)
-}
-
#[stable(feature = "rust1", since = "1.0.0")]
impl Borrow<str> for String {
#[inline]
}
impl<T: Clone> Vec<T> {
- /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
+ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
///
- /// If `new_len` is greater than `len()`, the `Vec` is extended by the
+ /// If `new_len` is greater than `len`, the `Vec` is extended by the
/// difference, with each additional slot filled with `value`.
- /// If `new_len` is less than `len()`, the `Vec` is simply truncated.
+ /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+ ///
+ /// This method requires `Clone` to clone the passed value. If you'd
+ /// rather create a value with `Default` instead, see [`resize_default`].
///
/// # Examples
///
/// vec.resize(2, 0);
/// assert_eq!(vec, [1, 2]);
/// ```
+ ///
+ /// [`resize_default`]: #method.resize_default
#[stable(feature = "vec_resize", since = "1.5.0")]
pub fn resize(&mut self, new_len: usize, value: T) {
let len = self.len();
if new_len > len {
- self.extend_with_element(new_len - len, value);
+ self.extend_with(new_len - len, ExtendElement(value))
+ } else {
+ self.truncate(new_len);
+ }
+ }
+
+ /// Clones and appends all elements in a slice to the `Vec`.
+ ///
+ /// Iterates over the slice `other`, clones each element, and then appends
+ /// it to this `Vec`. The `other` vector is traversed in-order.
+ ///
+/// Note that this function is the same as `extend` except that it is
+ /// specialized to work with slices instead. If and when Rust gets
+ /// specialization this function will likely be deprecated (but still
+ /// available).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1];
+ /// vec.extend_from_slice(&[2, 3, 4]);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// ```
+ #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
+ pub fn extend_from_slice(&mut self, other: &[T]) {
+ self.spec_extend(other.iter())
+ }
+}
+
+impl<T: Default> Vec<T> {
+ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+ ///
+ /// If `new_len` is greater than `len`, the `Vec` is extended by the
+ /// difference, with each additional slot filled with `Default::default()`.
+ /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+ ///
+ /// This method uses `Default` to create new values on every push. If
+ /// you'd rather `Clone` a given value, use [`resize`].
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(vec_resize_default)]
+ ///
+ /// let mut vec = vec![1, 2, 3];
+ /// vec.resize_default(5);
+ /// assert_eq!(vec, [1, 2, 3, 0, 0]);
+ ///
+ /// let mut vec = vec![1, 2, 3, 4];
+ /// vec.resize_default(2);
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ ///
+ /// [`resize`]: #method.resize
+ #[unstable(feature = "vec_resize_default", issue = "41758")]
+ pub fn resize_default(&mut self, new_len: usize) {
+ let len = self.len();
+
+ if new_len > len {
+ self.extend_with(new_len - len, ExtendDefault);
} else {
self.truncate(new_len);
}
}
+}
- /// Extend the vector by `n` additional clones of `value`.
- fn extend_with_element(&mut self, n: usize, value: T) {
+// This code generalises `extend_with_{element,default}`.
+trait ExtendWith<T> {
+ fn next(&self) -> T;
+ fn last(self) -> T;
+}
+
+struct ExtendElement<T>(T);
+impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
+ fn next(&self) -> T { self.0.clone() }
+ fn last(self) -> T { self.0 }
+}
+
+struct ExtendDefault;
+impl<T: Default> ExtendWith<T> for ExtendDefault {
+ fn next(&self) -> T { Default::default() }
+ fn last(self) -> T { Default::default() }
+}
+impl<T> Vec<T> {
+ /// Extend the vector by `n` values, using the given generator.
+ fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, value: E) {
self.reserve(n);
unsafe {
// Write all elements except the last one
for _ in 1..n {
- ptr::write(ptr, value.clone());
+ ptr::write(ptr, value.next());
ptr = ptr.offset(1);
- // Increment the length in every step in case clone() panics
+ // Increment the length in every step in case next() panics
local_len.increment_len(1);
}
if n > 0 {
// We can write the last element directly without cloning needlessly
- ptr::write(ptr, value);
+ ptr::write(ptr, value.last());
local_len.increment_len(1);
}
// len set by scope guard
}
}
-
- /// Clones and appends all elements in a slice to the `Vec`.
- ///
- /// Iterates over the slice `other`, clones each element, and then appends
- /// it to this `Vec`. The `other` vector is traversed in-order.
- ///
- /// Note that this function is same as `extend` except that it is
- /// specialized to work with slices instead. If and when Rust gets
- /// specialization this function will likely be deprecated (but still
- /// available).
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1];
- /// vec.extend_from_slice(&[2, 3, 4]);
- /// assert_eq!(vec, [1, 2, 3, 4]);
- /// ```
- #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
- pub fn extend_from_slice(&mut self, other: &[T]) {
- self.spec_extend(other.iter())
- }
}
// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
impl<T: Clone> SpecFromElem for T {
default fn from_elem(elem: Self, n: usize) -> Vec<Self> {
let mut v = Vec::with_capacity(n);
- v.extend_with_element(n, elem);
+ v.extend_with(n, ExtendElement(elem));
v
}
}
}
}
let mut v = Vec::with_capacity(n);
- v.extend_with_element(n, elem);
+ v.extend_with(n, ExtendElement(elem));
v
}
}
{
unsafe {
let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
- let mut parts: [flt2dec::Part; 5] = mem::uninitialized();
+ let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
let formatted = flt2dec::to_exact_fixed_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
false, &mut buf, &mut parts);
unsafe {
// enough for f32 and f64
let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
- let mut parts: [flt2dec::Part; 5] = mem::uninitialized();
+ let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest,
*num, sign, 0, false, &mut buf, &mut parts);
fmt.pad_formatted_parts(&formatted)
{
unsafe {
let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
- let mut parts: [flt2dec::Part; 7] = mem::uninitialized();
+ let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
upper, &mut buf, &mut parts);
unsafe {
// enough for f32 and f64
let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
- let mut parts: [flt2dec::Part; 7] = mem::uninitialized();
+ let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest,
*num, sign, (0, 0), upper,
&mut buf, &mut parts);
/// // exactly wouldn't be possible without executing filter().
/// assert_eq!((0, Some(10)), iter.size_hint());
///
- /// // Let's add one five more numbers with chain()
+ /// // Let's add five more numbers with chain()
/// let iter = (0..10).filter(|x| x % 2 == 0).chain(15..20);
///
/// // now both bounds are increased by five
#[macro_use]
mod internal_macros;
-#[path = "num/float_macros.rs"]
-#[macro_use]
-mod float_macros;
-
#[path = "num/int_macros.rs"]
#[macro_use]
mod int_macros;
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![doc(hidden)]
-
-macro_rules! assert_approx_eq {
- ($a:expr, $b:expr) => ({
- use num::Float;
- let (a, b) = (&$a, &$b);
- assert!((*a - *b).abs() < 1.0e-6,
- "{} is not approximately equal to {}", *a, *b);
- })
-}
/// it will only print given digits and nothing else.
///
/// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long.
-/// There should be at least 5 parts available, due to the worst case like
-/// `[+][0.][0000][45][0000]` with `frac_digits = 10`.
+/// There should be at least 4 parts available, due to the worst case like
+/// `[+][0.][0000][2][0000]` with `frac_digits = 10`.
pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T,
sign: Sign, frac_digits: usize, _upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
/// cannot be in this range, avoiding any confusion.
///
/// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long.
-/// There should be at least 7 parts available, due to the worst case like
-/// `[+][1][.][2345][e][-][67]`.
+/// There should be at least 6 parts available, due to the worst case like
+/// `[+][1][.][2345][e][-][6]`.
pub fn to_shortest_exp_str<'a, T, F>(mut format_shortest: F, v: T,
sign: Sign, dec_bounds: (i16, i16), upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
/// The byte buffer should be at least `ndigits` bytes long unless `ndigits` is
/// so large that only the fixed number of digits will be ever written.
/// (The tipping point for `f64` is about 800, so 1000 bytes should be enough.)
-/// There should be at least 7 parts available, due to the worst case like
-/// `[+][1][.][2345][e][-][67]`.
+/// There should be at least 6 parts available, due to the worst case like
+/// `[+][1][.][2345][e][-][6]`.
pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T,
sign: Sign, ndigits: usize, upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
/// The byte buffer should be enough for the output unless `frac_digits` is
/// so large that only the fixed number of digits will be ever written.
/// (The tipping point for `f64` is about 800, and 1000 bytes should be enough.)
-/// There should be at least 5 parts available, due to the worst case like
-/// `[+][0.][0000][45][0000]` with `frac_digits = 10`.
+/// There should be at least 4 parts available, due to the worst case like
+/// `[+][0.][0000][2][0000]` with `frac_digits = 10`.
pub fn to_exact_fixed_str<'a, T, F>(mut format_exact: F, v: T,
sign: Sign, frac_digits: usize, _upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
pub mod bignum;
pub mod diy_float;
-macro_rules! checked_op {
- ($U:ty, $op:path, $x:expr, $y:expr) => {{
- let (result, overflowed) = unsafe { $op($x as $U, $y as $U) };
- if overflowed { None } else { Some(result as Self) }
- }}
-}
-
// `Int` + `SignedInt` implemented for signed integers
macro_rules! int_impl {
($SelfT:ty, $ActualT:ident, $UnsignedT:ty, $BITS:expr,
use ops::*;
+#[allow(unused_macros)]
macro_rules! sh_impl_signed {
($t:ident, $f:ident) => (
#[stable(feature = "rust1", since = "1.0.0")]
/// }
/// ```
///
+/// Here is an example of the same `Point` struct implementing the `Add` trait
+/// using generics.
+///
+/// ```
+/// use std::ops::Add;
+///
+/// #[derive(Debug)]
+/// struct Point<T> {
+/// x: T,
+/// y: T,
+/// }
+///
+/// // Notice that the implementation uses the `Output` associated type
+/// impl<T: Add<Output=T>> Add for Point<T> {
+/// type Output = Point<T>;
+///
+/// fn add(self, other: Point<T>) -> Point<T> {
+/// Point {
+/// x: self.x + other.x,
+/// y: self.y + other.y,
+/// }
+/// }
+/// }
+///
+/// impl<T: PartialEq> PartialEq for Point<T> {
+/// fn eq(&self, other: &Self) -> bool {
+/// self.x == other.x && self.y == other.y
+/// }
+/// }
+///
+/// fn main() {
+/// assert_eq!(Point { x: 1, y: 0 } + Point { x: 2, y: 3 },
+/// Point { x: 3, y: 3 });
+/// }
+/// ```
+///
/// Note that `RHS = Self` by default, but this is not mandatory. For example,
/// [std::time::SystemTime] implements `Add<Duration>`, which permits
/// operations of the form `SystemTime = SystemTime + Duration`.
($($t:ty)*) => { neg_impl_core!{ x => -x, $($t)*} }
}
+#[allow(unused_macros)]
macro_rules! neg_impl_unsigned {
($($t:ty)*) => {
neg_impl_core!{ x => {
/// invalid pointers, types, and double drops.
#[stable(feature = "drop_in_place", since = "1.8.0")]
#[lang="drop_in_place"]
-#[inline]
#[allow(unconditional_recursion)]
pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
// Code here does not matter - this is replaced by the
//! [mz]: https://code.google.com/p/miniz/
#![crate_name = "flate"]
-#![unstable(feature = "rustc_private", issue = "27812")]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![deny(warnings)]
#![feature(libc)]
-#![feature(staged_api)]
+#![cfg_attr(stage0, feature(staged_api))]
#![feature(unique)]
#![cfg_attr(test, feature(rand))]
//! generated instead.
#![crate_name = "fmt_macros"]
-#![unstable(feature = "rustc_private", issue = "27812")]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
test(attr(deny(warnings))))]
#![deny(warnings)]
-#![feature(staged_api)]
+#![cfg_attr(stage0, feature(staged_api))]
#![feature(unicode)]
pub use self::Piece::*;
//! ```
#![crate_name = "getopts"]
-#![unstable(feature = "rustc_private",
+#![cfg_attr(stage0, unstable(feature = "rustc_private",
reason = "use the crates.io `getopts` library instead",
- issue = "27812")]
+ issue = "27812"))]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![deny(missing_docs)]
#![deny(warnings)]
-#![feature(staged_api)]
+#![cfg_attr(stage0, feature(staged_api))]
use self::Name::*;
use self::HasArg::*;
//! * [DOT language](http://www.graphviz.org/doc/info/lang.html)
#![crate_name = "graphviz"]
-#![unstable(feature = "rustc_private", issue = "27812")]
-#![feature(staged_api)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(staged_api))]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
//! }
//! ```
#![crate_name = "proc_macro_plugin"]
-#![unstable(feature = "rustc_private", issue = "27812")]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![feature(plugin_registrar)]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
-#![feature(staged_api)]
+#![cfg_attr(stage0, feature(staged_api))]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
+#![cfg_attr(stage0, feature(rustc_private))]
extern crate rustc_plugin;
extern crate syntax;
}
}
+impl Quote for usize {
+ fn quote(&self) -> TokenStream {
+ let integer_symbol = Symbol::intern(&self.to_string());
+ TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None))
+ .into()
+ }
+}
+
impl Quote for Ident {
fn quote(&self) -> TokenStream {
// FIXME(jseyfried) quote hygiene
impl Quote for Lit {
fn quote(&self) -> TokenStream {
macro_rules! gen_match {
- ($($i:ident),*) => {
+ ($($i:ident),*; $($raw:ident),*) => {
match *self {
$( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
- _ => panic!("Unsupported literal"),
+ $( Lit::$raw(lit, n) => {
+ quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n)))
+ })*
}
}
}
- gen_match!(Byte, Char, Float, Str_, Integer, ByteStr)
+ gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw)
}
}
fmt_macros = { path = "../libfmt_macros" }
graphviz = { path = "../libgraphviz" }
log = "0.3"
+owning_ref = "0.3.3"
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
-rustc_llvm = { path = "../librustc_llvm" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
use rustc_data_structures::graph;
use cfg::*;
+use middle::region::CodeExtent;
use ty::{self, TyCtxt};
use syntax::ast;
use syntax::ptr::P;
scope_id: ast::NodeId,
to_index: CFGIndex) {
let mut data = CFGEdgeData { exiting_scopes: vec![] };
- let mut scope = self.tcx.node_extent(from_expr.id);
- let target_scope = self.tcx.node_extent(scope_id);
+ let mut scope = CodeExtent::Misc(from_expr.id);
+ let target_scope = CodeExtent::Misc(scope_id);
let region_maps = self.tcx.region_maps(self.owner_def_id);
while scope != target_scope {
data.exiting_scopes.push(scope.node_id());
UsedTraitImports(D),
ConstEval(D),
SymbolName(D),
+ SpecializationGraph(D),
+ ObjectSafety(D),
// The set of impls for a given trait. Ultimately, it would be
// nice to get more fine-grained here (e.g., to include a
// than changes in the impl body.
TraitImpls(D),
+ AllLocalTraitImpls,
+
// Nodes representing caches. To properly handle a true cache, we
// don't use a DepTrackingMap, but rather we push a task node.
// Otherwise the write into the map would be incorrectly
DefSpan(D),
Stability(D),
Deprecation(D),
+ ItemBodyNestedBodies(D),
+ ConstIsRvaluePromotableToStatic(D),
+ ImplParent(D),
+ TraitOfItem(D),
+ IsExportedSymbol(D),
+ IsMirAvailable(D),
+ ItemAttrs(D),
+ FnArgNames(D),
FileMap(D, Arc<String>),
}
UsedTraitImports(ref d) => op(d).map(UsedTraitImports),
ConstEval(ref d) => op(d).map(ConstEval),
SymbolName(ref d) => op(d).map(SymbolName),
+ SpecializationGraph(ref d) => op(d).map(SpecializationGraph),
+ ObjectSafety(ref d) => op(d).map(ObjectSafety),
TraitImpls(ref d) => op(d).map(TraitImpls),
+ AllLocalTraitImpls => Some(AllLocalTraitImpls),
TraitItems(ref d) => op(d).map(TraitItems),
ReprHints(ref d) => op(d).map(ReprHints),
TraitSelect { ref trait_def_id, ref input_def_id } => {
DefSpan(ref d) => op(d).map(DefSpan),
Stability(ref d) => op(d).map(Stability),
Deprecation(ref d) => op(d).map(Deprecation),
+ ItemAttrs(ref d) => op(d).map(ItemAttrs),
+ FnArgNames(ref d) => op(d).map(FnArgNames),
+ ImplParent(ref d) => op(d).map(ImplParent),
+ TraitOfItem(ref d) => op(d).map(TraitOfItem),
+ IsExportedSymbol(ref d) => op(d).map(IsExportedSymbol),
+ ItemBodyNestedBodies(ref d) => op(d).map(ItemBodyNestedBodies),
+ ConstIsRvaluePromotableToStatic(ref d) => op(d).map(ConstIsRvaluePromotableToStatic),
+ IsMirAvailable(ref d) => op(d).map(IsMirAvailable),
GlobalMetaData(ref d, kind) => op(d).map(|d| GlobalMetaData(d, kind)),
FileMap(ref d, ref file_name) => op(d).map(|d| FileMap(d, file_name.clone())),
}
[iss15872]: https://github.com/rust-lang/rust/issues/15872
"##,
+E0119: r##"
+There are conflicting trait implementations for the same type.
+Example of erroneous code:
+
+```compile_fail,E0119
+trait MyTrait {
+ fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+ fn get(&self) -> usize { 0 }
+}
+
+struct Foo {
+ value: usize
+}
+
+impl MyTrait for Foo { // error: conflicting implementations of trait
+ // `MyTrait` for type `Foo`
+ fn get(&self) -> usize { self.value }
+}
+```
+
+When looking for the implementation of the trait, the compiler finds
+both the `impl<T> MyTrait for T`, which applies to every type `T`, and the
+`impl MyTrait for Foo`. Since a trait cannot be implemented multiple times,
+this is an error. So, when you write:
+
+```
+trait MyTrait {
+ fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+ fn get(&self) -> usize { 0 }
+}
+```
+
+This implements the trait for all types in scope. So if you
+try to implement it on another one after that, the implementations will
+conflict. Example:
+
+```
+trait MyTrait {
+ fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+ fn get(&self) -> usize { 0 }
+}
+
+struct Foo;
+
+fn main() {
+ let f = Foo;
+
+ f.get(); // the trait is implemented so we can use it
+}
+```
+"##,
+
E0133: r##"
Unsafe code was used outside of an unsafe function or block.
}
pub fn trait_impls(&self, trait_did: DefId) -> &'hir [NodeId] {
- self.dep_graph.read(DepNode::TraitImpls(trait_did));
+ self.dep_graph.read(DepNode::AllLocalTraitImpls);
// NB: intentionally bypass `self.forest.krate()` so that we
// do not trigger a read of the whole krate here
}
pub fn trait_default_impl(&self, trait_did: DefId) -> Option<NodeId> {
- self.dep_graph.read(DepNode::TraitImpls(trait_did));
+ self.dep_graph.read(DepNode::AllLocalTraitImpls);
// NB: intentionally bypass `self.forest.krate()` so that we
// do not trigger a read of the whole krate here
fingerprint
}
}
+
+impl<CTX> stable_hasher::HashStable<CTX> for Fingerprint {
+ fn hash_stable<W: stable_hasher::StableHasherResult>(&self,
+ _: &mut CTX,
+ hasher: &mut stable_hasher::StableHasher<W>) {
+ ::std::hash::Hash::hash(&self.0, hasher);
+ }
+}
use util::nodemap::NodeMap;
use std::hash as std_hash;
-use std::collections::{HashMap, HashSet};
+use std::collections::{HashMap, HashSet, BTreeMap};
use syntax::ast;
use syntax::attr;
hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
});
}
+
+
+pub fn hash_stable_btreemap<'a, 'tcx, K, V, SK, F, W>(hcx: &mut StableHashingContext<'a, 'tcx>,
+ hasher: &mut StableHasher<W>,
+ map: &BTreeMap<K, V>,
+ extract_stable_key: F)
+ where K: Eq + Ord,
+ V: HashStable<StableHashingContext<'a, 'tcx>>,
+ SK: HashStable<StableHashingContext<'a, 'tcx>> + Ord + Clone,
+ F: Fn(&mut StableHashingContext<'a, 'tcx>, &K) -> SK,
+ W: StableHasherResult,
+{
+ let mut keys: Vec<_> = map.keys()
+ .map(|k| (extract_stable_key(hcx, k), k))
+ .collect();
+ keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
+ keys.len().hash_stable(hcx, hasher);
+ for (stable_key, key) in keys {
+ stable_key.hash_stable(hcx, hasher);
+ map[key].hash_stable(hcx, hasher);
+ }
+}
}
}
-impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::RegionKind<'tcx> {
+impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::RegionKind {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a, 'tcx>,
hasher: &mut StableHasher<W>) {
db.depth.hash_stable(hcx, hasher);
i.hash_stable(hcx, hasher);
}
- ty::ReEarlyBound(ty::EarlyBoundRegion { index, name }) => {
+ ty::ReEarlyBound(ty::EarlyBoundRegion { def_id, index, name }) => {
+ def_id.hash_stable(hcx, hasher);
index.hash_stable(hcx, hasher);
name.hash_stable(hcx, hasher);
}
Free(call_site_scope_data, decl)
});
-impl_stable_hash_for!(struct ::middle::region::CallSiteScopeData {
- fn_id,
- body_id
-});
-
impl_stable_hash_for!(struct ty::DebruijnIndex {
depth
});
FnPtrAddrCast
});
-impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ::middle::region::CodeExtentData
+impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ::middle::region::CodeExtent
{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a, 'tcx>,
hasher: &mut StableHasher<W>) {
- use middle::region::CodeExtentData;
+ use middle::region::CodeExtent;
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
- CodeExtentData::Misc(node_id) |
- CodeExtentData::DestructionScope(node_id) => {
+ CodeExtent::Misc(node_id) |
+ CodeExtent::DestructionScope(node_id) => {
node_id.hash_stable(hcx, hasher);
}
- CodeExtentData::CallSiteScope { fn_id, body_id } |
- CodeExtentData::ParameterScope { fn_id, body_id } => {
- fn_id.hash_stable(hcx, hasher);
+ CodeExtent::CallSiteScope(body_id) |
+ CodeExtent::ParameterScope(body_id) => {
body_id.hash_stable(hcx, hasher);
}
- CodeExtentData::Remainder(block_remainder) => {
+ CodeExtent::Remainder(block_remainder) => {
block_remainder.hash_stable(hcx, hasher);
}
}
custom_kind
});
-impl_stable_hash_for!(struct ty::FreeRegion<'tcx> {
+impl_stable_hash_for!(struct ty::FreeRegion {
scope,
bound_region
});
pub use self::fingerprint::Fingerprint;
pub use self::caching_codemap_view::CachingCodemapView;
pub use self::hcx::{StableHashingContext, NodeIdHashingMode, hash_stable_hashmap,
- hash_stable_hashset, hash_stable_nodemap};
+ hash_stable_hashset, hash_stable_nodemap,
+ hash_stable_btreemap};
mod fingerprint;
mod caching_codemap_view;
mod hcx;
use super::InferCtxt;
use super::{MiscVariable, TypeTrace};
+use hir::def_id::DefId;
use ty::{IntType, UintType};
use ty::{self, Ty, TyCtxt};
use ty::error::TypeError;
-use ty::fold::TypeFoldable;
-use ty::relate::{RelateResult, TypeRelation};
-use traits::PredicateObligations;
+use ty::relate::{self, Relate, RelateResult, TypeRelation};
+use ty::subst::Substs;
+use traits::{Obligation, PredicateObligations};
use syntax::ast;
use syntax_pos::Span;
// `'?2` and `?3` are fresh region/type inference
// variables. (Down below, we will relate `a_ty <: b_ty`,
// adding constraints like `'x: '?2` and `?1 <: ?3`.)
- let b_ty = self.generalize(a_ty, b_vid, dir == EqTo)?;
+ let Generalization { ty: b_ty, needs_wf } = self.generalize(a_ty, b_vid, dir)?;
debug!("instantiate(a_ty={:?}, dir={:?}, b_vid={:?}, generalized b_ty={:?})",
a_ty, dir, b_vid, b_ty);
self.infcx.type_variables.borrow_mut().instantiate(b_vid, b_ty);
+ if needs_wf {
+ self.obligations.push(Obligation::new(self.trace.cause.clone(),
+ ty::Predicate::WellFormed(b_ty)));
+ }
+
// Finally, relate `b_ty` to `a_ty`, as described in previous comment.
//
// FIXME(#16847): This code is non-ideal because all these subtype
/// Attempts to generalize `ty` for the type variable `for_vid`.
/// This checks for cycle -- that is, whether the type `ty`
- /// references `for_vid`. If `is_eq_relation` is false, it will
- /// also replace all regions/unbound-type-variables with fresh
- /// variables. Returns `TyError` in the case of a cycle, `Ok`
- /// otherwise.
+ /// references `for_vid`. The `dir` is the "direction" for which we
+ /// are performing the generalization (i.e., are we producing a type
+ /// that can be used as a supertype etc).
///
/// Preconditions:
///
fn generalize(&self,
ty: Ty<'tcx>,
for_vid: ty::TyVid,
- is_eq_relation: bool)
- -> RelateResult<'tcx, Ty<'tcx>>
+ dir: RelationDir)
+ -> RelateResult<'tcx, Generalization<'tcx>>
{
+ // Determine the ambient variance within which `ty` appears.
+ // The surrounding equation is:
+ //
+ // ty [op] ty2
+ //
+ // where `op` is either `==`, `<:`, or `:>`. This maps quite
+ // naturally.
+ let ambient_variance = match dir {
+ RelationDir::EqTo => ty::Invariant,
+ RelationDir::SubtypeOf => ty::Covariant,
+ RelationDir::SupertypeOf => ty::Contravariant,
+ };
+
let mut generalize = Generalizer {
infcx: self.infcx,
span: self.trace.cause.span,
for_vid_sub_root: self.infcx.type_variables.borrow_mut().sub_root_var(for_vid),
- is_eq_relation: is_eq_relation,
- cycle_detected: false
+ ambient_variance: ambient_variance,
+ needs_wf: false,
};
- let u = ty.fold_with(&mut generalize);
- if generalize.cycle_detected {
- Err(TypeError::CyclicTy)
- } else {
- Ok(u)
- }
+
+ let ty = generalize.relate(&ty, &ty)?;
+ let needs_wf = generalize.needs_wf;
+ Ok(Generalization { ty, needs_wf })
}
}
infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
span: Span,
for_vid_sub_root: ty::TyVid,
- is_eq_relation: bool,
- cycle_detected: bool,
+ ambient_variance: ty::Variance,
+ needs_wf: bool, // see the field `needs_wf` in `Generalization`
}
-impl<'cx, 'gcx, 'tcx> ty::fold::TypeFolder<'gcx, 'tcx> for Generalizer<'cx, 'gcx, 'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx> {
+/// Result from a generalization operation. This includes
+/// not only the generalized type, but also a bool flag
+/// indicating whether further WF checks are needed.
+struct Generalization<'tcx> {
+ ty: Ty<'tcx>,
+
+ /// If true, then the generalized type may not be well-formed,
+ /// even if the source type is well-formed, so we should add an
+ /// additional check to enforce that it is. This arises in
+ /// particular around 'bivariant' type parameters that are only
+ /// constrained by a where-clause. As an example, imagine a type:
+ ///
+ /// struct Foo<A, B> where A: Iterator<Item=B> {
+ /// data: A
+ /// }
+ ///
+ /// here, `A` will be covariant, but `B` is
+ /// unconstrained. However, whatever it is, for `Foo` to be WF, it
+ /// must be equal to `A::Item`. If we have an input `Foo<?A, ?B>`,
+ /// then after generalization we will wind up with a type like
+ /// `Foo<?C, ?D>`. When we enforce that `Foo<?A, ?B> <: Foo<?C,
+ /// ?D>` (or `>:`), we will wind up with the requirement that `?A
+ /// <: ?C`, but no particular relationship between `?B` and `?D`
+ /// (after all, we do not know the variance of the normalized form
+ /// of `A::Item` with respect to `A`). If we do nothing else, this
+ /// may mean that `?D` goes unconstrained (as in #41677). So, in
+ /// this scenario where we create a new type variable in a
+ /// bivariant context, we set the `needs_wf` flag to true. This
+ /// will force the calling code to check that `WF(Foo<?C, ?D>)`
+ /// holds, which in turn implies that `?C::Item == ?D`. So once
+ /// `?C` is constrained, that should suffice to restrict `?D`.
+ needs_wf: bool,
+}
+
+impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
self.infcx.tcx
}
- fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+ fn tag(&self) -> &'static str {
+ "Generalizer"
+ }
+
+ fn a_is_expected(&self) -> bool {
+ true
+ }
+
+ fn binders<T>(&mut self, a: &ty::Binder<T>, b: &ty::Binder<T>)
+ -> RelateResult<'tcx, ty::Binder<T>>
+ where T: Relate<'tcx>
+ {
+ Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?))
+ }
+
+ fn relate_item_substs(&mut self,
+ item_def_id: DefId,
+ a_subst: &'tcx Substs<'tcx>,
+ b_subst: &'tcx Substs<'tcx>)
+ -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+ {
+ if self.ambient_variance == ty::Variance::Invariant {
+ // Avoid fetching the variance if we are in an invariant
+ // context; no need, and it can induce dependency cycles
+ // (e.g. #41849).
+ relate::relate_substs(self, None, a_subst, b_subst)
+ } else {
+ let opt_variances = self.tcx().variances_of(item_def_id);
+ relate::relate_substs(self, Some(&opt_variances), a_subst, b_subst)
+ }
+ }
+
+ fn relate_with_variance<T: Relate<'tcx>>(&mut self,
+ variance: ty::Variance,
+ a: &T,
+ b: &T)
+ -> RelateResult<'tcx, T>
+ {
+ let old_ambient_variance = self.ambient_variance;
+ self.ambient_variance = self.ambient_variance.xform(variance);
+
+ let result = self.relate(a, b);
+ self.ambient_variance = old_ambient_variance;
+ result
+ }
+
+ fn tys(&mut self, t: Ty<'tcx>, t2: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+ assert_eq!(t, t2); // we are abusing TypeRelation here; both LHS and RHS ought to be ==
+
// Check to see whether the type we are genealizing references
// any other type variable related to `vid` via
// subtyping. This is basically our "occurs check", preventing
if sub_vid == self.for_vid_sub_root {
// If sub-roots are equal, then `for_vid` and
// `vid` are related via subtyping.
- self.cycle_detected = true;
- self.tcx().types.err
+ return Err(TypeError::CyclicTy);
} else {
match variables.probe_root(vid) {
Some(u) => {
drop(variables);
- self.fold_ty(u)
+ self.relate(&u, &u)
}
None => {
- if !self.is_eq_relation {
- let origin = variables.origin(vid);
- let new_var_id = variables.new_var(false, origin, None);
- let u = self.tcx().mk_var(new_var_id);
- debug!("generalize: replacing original vid={:?} with new={:?}",
- vid, u);
- u
- } else {
- t
+ match self.ambient_variance {
+ // Invariant: no need to make a fresh type variable.
+ ty::Invariant => return Ok(t),
+
+ // Bivariant: make a fresh var, but we
+ // may need a WF predicate. See
+ // comment on `needs_wf` field for
+ // more info.
+ ty::Bivariant => self.needs_wf = true,
+
+ // Co/contravariant: this will be
+ // sufficiently constrained later on.
+ ty::Covariant | ty::Contravariant => (),
}
+
+ let origin = variables.origin(vid);
+ let new_var_id = variables.new_var(false, origin, None);
+ let u = self.tcx().mk_var(new_var_id);
+ debug!("generalize: replacing original vid={:?} with new={:?}",
+ vid, u);
+ return Ok(u);
}
}
}
}
+ ty::TyInfer(ty::IntVar(_)) |
+ ty::TyInfer(ty::FloatVar(_)) => {
+ // No matter what mode we are in,
+ // integer/floating-point types must be equal to be
+ // relatable.
+ Ok(t)
+ }
_ => {
- t.super_fold_with(self)
+ relate::super_relate_tys(self, t, t)
}
}
}
- fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+ fn regions(&mut self, r: ty::Region<'tcx>, r2: ty::Region<'tcx>)
+ -> RelateResult<'tcx, ty::Region<'tcx>> {
+ assert_eq!(r, r2); // we are abusing TypeRelation here; both LHS and RHS ought to be ==
+
match *r {
// Never make variables for regions bound within the type itself,
// nor for erased regions.
ty::ReLateBound(..) |
- ty::ReErased => { return r; }
-
- // Early-bound regions should really have been substituted away before
- // we get to this point.
- ty::ReEarlyBound(..) => {
- span_bug!(
- self.span,
- "Encountered early bound region when generalizing: {:?}",
- r);
+ ty::ReErased => {
+ return Ok(r);
}
// Always make a fresh region variable for skolemized regions;
ty::ReStatic |
ty::ReScope(..) |
ty::ReVar(..) |
+ ty::ReEarlyBound(..) |
ty::ReFree(..) => {
- if self.is_eq_relation {
- return r;
+ match self.ambient_variance {
+ ty::Invariant => return Ok(r),
+ ty::Bivariant | ty::Covariant | ty::Contravariant => (),
}
}
}
// FIXME: This is non-ideal because we don't give a
// very descriptive origin for this region variable.
- self.infcx.next_region_var(MiscVariable(self.span))
+ Ok(self.infcx.next_region_var(MiscVariable(self.span)))
}
}
use super::combine::{CombineFields, RelationDir};
use super::{Subtype};
+use hir::def_id::DefId;
+
use ty::{self, Ty, TyCtxt};
use ty::TyVar;
-use ty::relate::{Relate, RelateResult, TypeRelation};
+use ty::subst::Substs;
+use ty::relate::{self, Relate, RelateResult, TypeRelation};
/// Ensures `a` is made equal to `b`. Returns `a` on success.
pub struct Equate<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> {
fn a_is_expected(&self) -> bool { self.a_is_expected }
+ fn relate_item_substs(&mut self,
+ _item_def_id: DefId,
+ a_subst: &'tcx Substs<'tcx>,
+ b_subst: &'tcx Substs<'tcx>)
+ -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+ {
+ // NB: Once we are equating types, we don't care about
+ // variance, so don't try to lookup the variance here. This
+ // also avoids some cycles (e.g. #41849) since looking up
+ // variance requires computing types which can require
+ // performing trait matching (which then performs equality
+ // unification).
+
+ relate::relate_substs(self, None, a_subst, b_subst)
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
_: ty::Variance,
a: &T,
return;
}
};
- let scope_decorated_tag = match *scope {
- region::CodeExtentData::Misc(_) => tag,
- region::CodeExtentData::CallSiteScope { .. } => {
+ let scope_decorated_tag = match scope {
+ region::CodeExtent::Misc(_) => tag,
+ region::CodeExtent::CallSiteScope(_) => {
"scope of call-site for function"
}
- region::CodeExtentData::ParameterScope { .. } => {
+ region::CodeExtent::ParameterScope(_) => {
"scope of function body"
}
- region::CodeExtentData::DestructionScope(_) => {
+ region::CodeExtent::DestructionScope(_) => {
new_string = format!("destruction scope surrounding {}", tag);
&new_string[..]
}
- region::CodeExtentData::Remainder(r) => {
+ region::CodeExtent::Remainder(r) => {
new_string = format!("block suffix following statement {}",
r.first_statement_index);
&new_string[..]
explain_span(self, scope_decorated_tag, span)
}
- ty::ReFree(ref fr) => {
- let prefix = match fr.bound_region {
- ty::BrAnon(idx) => {
- format!("the anonymous lifetime #{} defined on", idx + 1)
+ ty::ReEarlyBound(_) |
+ ty::ReFree(_) => {
+ let scope = match *region {
+ ty::ReEarlyBound(ref br) => {
+ self.parent_def_id(br.def_id).unwrap()
}
- ty::BrFresh(_) => "an anonymous lifetime defined on".to_owned(),
- _ => {
- format!("the lifetime {} as defined on",
- fr.bound_region)
+ ty::ReFree(ref fr) => fr.scope,
+ _ => bug!()
+ };
+ let prefix = match *region {
+ ty::ReEarlyBound(ref br) => {
+ format!("the lifetime {} as defined on", br.name)
+ }
+ ty::ReFree(ref fr) => {
+ match fr.bound_region {
+ ty::BrAnon(idx) => {
+ format!("the anonymous lifetime #{} defined on", idx + 1)
+ }
+ ty::BrFresh(_) => "an anonymous lifetime defined on".to_owned(),
+ _ => {
+ format!("the lifetime {} as defined on",
+ fr.bound_region)
+ }
+ }
}
+ _ => bug!()
};
- let node = fr.scope.map(|s| s.node_id())
+ let node = self.hir.as_local_node_id(scope)
.unwrap_or(DUMMY_NODE_ID);
let unknown;
let tag = match self.hir.find(node) {
Some(_) => {
unknown = format!("unexpected node ({}) for scope {:?}. \
Please report a bug.",
- self.hir.node_to_string(node), fr.scope);
+ self.hir.node_to_string(node), scope);
&unknown
}
None => {
unknown = format!("unknown node for scope {:?}. \
- Please report a bug.", fr.scope);
+ Please report a bug.", scope);
&unknown
}
};
ty::ReEmpty => ("the empty lifetime".to_owned(), None),
- ty::ReEarlyBound(ref data) => (data.name.to_string(), None),
-
// FIXME(#13998) ReSkolemized should probably print like
// ReFree rather than dumping Debug output on the user.
//
}
let mut err = match *sub {
+ ty::ReEarlyBound(_) |
ty::ReFree(ty::FreeRegion {bound_region: ty::BrNamed(..), ..}) => {
// Does the required lifetime have a nice name we can print?
let mut err = struct_span_err!(self.tcx.sess,
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
match *r {
- ty::ReEarlyBound(..) |
ty::ReLateBound(..) => {
// leave bound regions alone
r
}
ty::ReStatic |
+ ty::ReEarlyBound(..) |
ty::ReFree(_) |
ty::ReScope(_) |
ty::ReVar(_) |
-> ty::Region<'tcx> {
// Regions that pre-dated the LUB computation stay as they are.
if !is_var_in_set(new_vars, r0) {
- assert!(!r0.is_bound());
+ assert!(!r0.is_late_bound());
debug!("generalize_region(r0={:?}): not new variable", r0);
return r0;
}
debug!("generalize_region(r0={:?}): \
non-new-variables found in {:?}",
r0, tainted);
- assert!(!r0.is_bound());
+ assert!(!r0.is_late_bound());
return r0;
}
r0: ty::Region<'tcx>)
-> ty::Region<'tcx> {
if !is_var_in_set(new_vars, r0) {
- assert!(!r0.is_bound());
+ assert!(!r0.is_late_bound());
return r0;
}
return rev_lookup(infcx, span, a_map, a_r.unwrap());
} else if a_r.is_none() && b_r.is_none() {
// Not related to bound variables from either fn:
- assert!(!r0.is_bound());
+ assert!(!r0.is_late_bound());
return r0;
} else {
// Other:
-> (Option<&'a ty::TypeckTables<'tcx>>,
Option<ty::TypeckTables<'tcx>>,
Option<ty::ParameterEnvironment<'tcx>>) {
- let item_id = tcx.hir.body_owner(self);
- (Some(tcx.typeck_tables_of(tcx.hir.local_def_id(item_id))),
+ let def_id = tcx.hir.body_owner_def_id(self);
+ (Some(tcx.typeck_tables_of(def_id)),
None,
- Some(ty::ParameterEnvironment::for_item(tcx, item_id)))
+ Some(tcx.parameter_environment(def_id)))
}
}
}
pub fn add_given(&self,
- sub: ty::FreeRegion<'tcx>,
+ sub: ty::Region<'tcx>,
sup: ty::RegionVid)
{
self.region_vars.add_given(sub, sup);
pub fn resolve_regions_and_report_errors(&self,
region_context: DefId,
- region_map: &RegionMaps<'tcx>,
+ region_map: &RegionMaps,
free_regions: &FreeRegionMap<'tcx>) {
let region_rels = RegionRelations::new(self.tcx,
region_context,
graph_name: String,
region_rels: &'a RegionRelations<'a, 'gcx, 'tcx>,
map: &'a FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>,
- node_ids: FxHashMap<Node<'tcx>, usize>,
+ node_ids: FxHashMap<Node, usize>,
}
#[derive(Clone, Hash, PartialEq, Eq, Debug, Copy)]
-enum Node<'tcx> {
+enum Node {
RegionVid(ty::RegionVid),
- Region(ty::RegionKind<'tcx>),
+ Region(ty::RegionKind),
}
// type Edge = Constraint;
#[derive(Clone, PartialEq, Eq, Debug, Copy)]
enum Edge<'tcx> {
Constraint(Constraint<'tcx>),
- EnclScope(CodeExtent<'tcx>, CodeExtent<'tcx>),
+ EnclScope(CodeExtent, CodeExtent),
}
impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> {
}
impl<'a, 'gcx, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
- type Node = Node<'tcx>;
+ type Node = Node;
type Edge = Edge<'tcx>;
fn graph_id(&self) -> dot::Id {
dot::Id::new(&*self.graph_name).unwrap()
}
}
-fn constraint_to_nodes<'tcx>(c: &Constraint<'tcx>) -> (Node<'tcx>, Node<'tcx>) {
+fn constraint_to_nodes(c: &Constraint) -> (Node, Node) {
match *c {
Constraint::ConstrainVarSubVar(rv_1, rv_2) =>
(Node::RegionVid(rv_1), Node::RegionVid(rv_2)),
}
}
-fn edge_to_nodes<'tcx>(e: &Edge<'tcx>) -> (Node<'tcx>, Node<'tcx>) {
+fn edge_to_nodes(e: &Edge) -> (Node, Node) {
match *e {
Edge::Constraint(ref c) => constraint_to_nodes(c),
Edge::EnclScope(sub, sup) => {
}
impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
- type Node = Node<'tcx>;
+ type Node = Node;
type Edge = Edge<'tcx>;
- fn nodes(&self) -> dot::Nodes<Node<'tcx>> {
+ fn nodes(&self) -> dot::Nodes<Node> {
let mut set = FxHashSet();
for node in self.node_ids.keys() {
set.insert(*node);
debug!("region graph has {} edges", v.len());
Cow::Owned(v)
}
- fn source(&self, edge: &Edge<'tcx>) -> Node<'tcx> {
+ fn source(&self, edge: &Edge<'tcx>) -> Node {
let (n1, _) = edge_to_nodes(edge);
debug!("edge {:?} has source {:?}", edge, n1);
n1
}
- fn target(&self, edge: &Edge<'tcx>) -> Node<'tcx> {
+ fn target(&self, edge: &Edge<'tcx>) -> Node {
let (_, n2) = edge_to_nodes(edge);
debug!("edge {:?} has target {:?}", edge, n2);
n2
use ty::{ReLateBound, ReScope, ReVar, ReSkolemized, BrFresh};
use std::cell::{Cell, RefCell};
-use std::cmp::Ordering::{self, Less, Greater, Equal};
use std::fmt;
use std::mem;
use std::u32;
AddVerify(usize),
/// We added the given `given`
- AddGiven(ty::FreeRegion<'tcx>, ty::RegionVid),
+ AddGiven(Region<'tcx>, ty::RegionVid),
/// We added a GLB/LUB "combinaton variable"
AddCombination(CombineMapType, TwoRegions<'tcx>),
// record the fact that `'a <= 'b` is implied by the fn signature,
// and then ignore the constraint when solving equations. This is
// a bit of a hack but seems to work.
- givens: RefCell<FxHashSet<(ty::FreeRegion<'tcx>, ty::RegionVid)>>,
+ givens: RefCell<FxHashSet<(Region<'tcx>, ty::RegionVid)>>,
lubs: RefCell<CombineMap<'tcx>>,
glbs: RefCell<CombineMap<'tcx>>,
self.add_edge(a, b);
}
&AddGiven(a, b) => {
- self.add_edge(tcx.mk_region(ReFree(a)),
- tcx.mk_region(ReVar(b)));
+ self.add_edge(a, tcx.mk_region(ReVar(b)));
}
&AddVerify(i) => {
verifys[i].bound.for_each_region(&mut |b| {
}
}
- pub fn add_given(&self, sub: ty::FreeRegion<'tcx>, sup: ty::RegionVid) {
+ pub fn add_given(&self, sub: Region<'tcx>, sup: ty::RegionVid) {
// cannot add givens once regions are resolved
assert!(self.values_are_none());
origin);
match (sub, sup) {
- (&ReEarlyBound(..), _) |
(&ReLateBound(..), _) |
- (_, &ReEarlyBound(..)) |
(_, &ReLateBound(..)) => {
span_bug!(origin.span(),
"cannot relate bound region: {:?} <= {:?}",
match (a, b) {
(&ReLateBound(..), _) |
(_, &ReLateBound(..)) |
- (&ReEarlyBound(..), _) |
- (_, &ReEarlyBound(..)) |
(&ReErased, _) |
(_, &ReErased) => {
bug!("cannot relate region: LUB({:?}, {:?})", a, b);
b);
}
- (&ReFree(fr), &ReScope(s_id)) |
- (&ReScope(s_id), &ReFree(fr)) => {
+ (&ReEarlyBound(_), &ReScope(s_id)) |
+ (&ReScope(s_id), &ReEarlyBound(_)) |
+ (&ReFree(_), &ReScope(s_id)) |
+ (&ReScope(s_id), &ReFree(_)) => {
// A "free" region can be interpreted as "some region
- // at least as big as the block fr.scope_id". So, we can
+ // at least as big as fr.scope". So, we can
// reasonably compare free regions and scopes:
- if let Some(fr_scope) = fr.scope {
- let r_id = region_rels.region_maps.nearest_common_ancestor(fr_scope, s_id);
- if r_id == fr_scope {
- // if the free region's scope `fr.scope_id` is bigger than
- // the scope region `s_id`, then the LUB is the free
- // region itself:
- return self.tcx.mk_region(ReFree(fr));
+ let fr_scope = match (a, b) {
+ (&ReEarlyBound(ref br), _) | (_, &ReEarlyBound(ref br)) => {
+ region_rels.region_maps.early_free_extent(self.tcx, br)
+ }
+ (&ReFree(ref fr), _) | (_, &ReFree(ref fr)) => {
+ region_rels.region_maps.free_extent(self.tcx, fr)
+ }
+ _ => bug!()
+ };
+ let r_id = region_rels.region_maps.nearest_common_ancestor(fr_scope, s_id);
+ if r_id == fr_scope {
+ // if the free region's scope `fr.scope` is bigger than
+ // the scope region `s_id`, then the LUB is the free
+ // region itself:
+ match (a, b) {
+ (_, &ReScope(_)) => return a,
+ (&ReScope(_), _) => return b,
+ _ => bug!()
}
}
self.tcx.mk_region(ReScope(lub))
}
+ (&ReEarlyBound(_), &ReEarlyBound(_)) |
+ (&ReFree(_), &ReEarlyBound(_)) |
+ (&ReEarlyBound(_), &ReFree(_)) |
(&ReFree(_), &ReFree(_)) => {
region_rels.lub_free_regions(a, b)
}
let mut givens = self.givens.borrow_mut();
let seeds: Vec<_> = givens.iter().cloned().collect();
- for (fr, vid) in seeds {
+ for (r, vid) in seeds {
let seed_index = NodeIndex(vid.index as usize);
for succ_index in graph.depth_traverse(seed_index, OUTGOING) {
let succ_index = succ_index.0 as u32;
if succ_index < self.num_vars() {
let succ_vid = RegionVid { index: succ_index };
- givens.insert((fr, succ_vid));
+ givens.insert((r, succ_vid));
}
}
}
// Check if this relationship is implied by a given.
match *a_region {
- ty::ReFree(fr) => {
- if self.givens.borrow().contains(&(fr, b_vid)) {
+ ty::ReEarlyBound(_) |
+ ty::ReFree(_) => {
+ if self.givens.borrow().contains(&(a_region, b_vid)) {
debug!("given");
return false;
}
// We place free regions first because we are special casing
// SubSupConflict(ReFree, ReFree) when reporting error, and so
// the user will more likely get a specific suggestion.
- fn free_regions_first(a: &RegionAndOrigin, b: &RegionAndOrigin) -> Ordering {
- match (a.region, b.region) {
- (&ReFree(..), &ReFree(..)) => Equal,
- (&ReFree(..), _) => Less,
- (_, &ReFree(..)) => Greater,
- (..) => Equal,
+ fn region_order_key(x: &RegionAndOrigin) -> u8 {
+ match *x.region {
+ ReEarlyBound(_) => 0,
+ ReFree(_) => 1,
+ _ => 2
}
}
- lower_bounds.sort_by(|a, b| free_regions_first(a, b));
- upper_bounds.sort_by(|a, b| free_regions_first(a, b));
+ lower_bounds.sort_by_key(region_order_key);
+ upper_bounds.sort_by_key(region_order_key);
for lower_bound in &lower_bounds {
for upper_bound in &upper_bounds {
//! This API is completely unstable and subject to change.
#![crate_name = "rustc"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(nonzero)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
#![feature(slice_patterns)]
#![feature(specialization)]
-#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(discriminant_value)]
#![feature(sort_unstable)]
#![feature(trace_macros)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
#![recursion_limit="128"]
extern crate arena;
extern crate getopts;
extern crate graphviz;
extern crate libc;
-extern crate rustc_llvm as llvm;
+extern crate owning_ref;
extern crate rustc_back;
extern crate rustc_data_structures;
extern crate serialize;
"detects unreachable patterns"
}
+declare_lint! {
+ pub UNUSED_MACROS,
+ Warn,
+ "detects macros that were not used"
+}
+
declare_lint! {
pub WARNINGS,
Warn,
DEAD_CODE,
UNREACHABLE_CODE,
UNREACHABLE_PATTERNS,
+ UNUSED_MACROS,
WARNINGS,
UNUSED_FEATURES,
STABLE_FEATURES,
use hir::def_id::LOCAL_CRATE;
use hir::intravisit as hir_visit;
use syntax::visit as ast_visit;
+use syntax::tokenstream::ThinTokenStream;
/// Information about the registered lints.
///
run_lints!(self, check_ident, early_passes, sp, id);
}
- fn visit_mod(&mut self, m: &'a ast::Mod, s: Span, n: ast::NodeId) {
+ fn visit_mod(&mut self, m: &'a ast::Mod, s: Span, _a: &[ast::Attribute], n: ast::NodeId) {
run_lints!(self, check_mod, early_passes, m, s, n);
ast_visit::walk_mod(self, m);
run_lints!(self, check_mod_post, early_passes, m, s, n);
fn visit_attribute(&mut self, attr: &'a ast::Attribute) {
run_lints!(self, check_attribute, early_passes, attr);
}
+
+ fn visit_mac_def(&mut self, _mac: &'a ThinTokenStream, id: ast::NodeId) {
+ let lints = self.sess.lints.borrow_mut().take(id);
+ for early_lint in lints {
+ self.early_lint(&early_lint);
+ }
+ }
}
enum CheckLintNameResult {
use util::nodemap::{NodeSet, DefIdMap};
use std::any::Any;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use std::rc::Rc;
+use owning_ref::ErasedBoxRef;
use syntax::ast;
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
}
}
+/// The backend's way to give the crate store access to the metadata in a library.
+/// Note that it returns the raw metadata bytes stored in the library file, whether
+/// it is compressed, uncompressed, some weird mix, etc.
+/// rmeta files are backend independent and not handled here.
+///
+/// At the time of this writing, there is only one backend and one way to store
+/// metadata in library -- this trait just serves to decouple rustc_metadata from
+/// the archive reader, which depends on LLVM.
+pub trait MetadataLoader {
+ fn get_rlib_metadata(&self,
+ target: &Target,
+ filename: &Path)
+ -> Result<ErasedBoxRef<[u8]>, String>;
+ fn get_dylib_metadata(&self,
+ target: &Target,
+ filename: &Path)
+ -> Result<ErasedBoxRef<[u8]>, String>;
+}
+
/// A store of Rust crates, through with their metadata
/// can be accessed.
pub trait CrateStore {
fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc<Any>;
+ // access to the metadata loader
+ fn metadata_loader(&self) -> &MetadataLoader;
+
// item info
fn visibility(&self, def: DefId) -> ty::Visibility;
fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>>;
fn item_generics_cloned(&self, def: DefId) -> ty::Generics;
- fn item_attrs(&self, def_id: DefId) -> Rc<[ast::Attribute]>;
- fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>;
// trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>;
// impl info
fn impl_defaultness(&self, def: DefId) -> hir::Defaultness;
- fn impl_parent(&self, impl_def_id: DefId) -> Option<DefId>;
// trait/impl-item info
- fn trait_of_item(&self, def_id: DefId) -> Option<DefId>;
fn associated_item_cloned(&self, def: DefId) -> ty::AssociatedItem;
// flags
fn is_const_fn(&self, did: DefId) -> bool;
fn is_default_impl(&self, impl_did: DefId) -> bool;
- fn is_foreign_item(&self, did: DefId) -> bool;
fn is_dllimport_foreign_item(&self, def: DefId) -> bool;
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool;
- fn is_exported_symbol(&self, def_id: DefId) -> bool;
// crate metadata
fn dylib_dependency_formats(&self, cnum: CrateNum)
fn export_macros(&self, cnum: CrateNum);
fn lang_items(&self, cnum: CrateNum) -> Vec<(DefIndex, usize)>;
fn missing_lang_items(&self, cnum: CrateNum) -> Vec<lang_items::LangItem>;
- fn is_staged_api(&self, cnum: CrateNum) -> bool;
fn is_allocator(&self, cnum: CrateNum) -> bool;
fn is_panic_runtime(&self, cnum: CrateNum) -> bool;
fn is_compiler_builtins(&self, cnum: CrateNum) -> bool;
fn used_link_args(&self) -> Vec<String>;
// utility functions
- fn metadata_filename(&self) -> &str;
- fn metadata_section_name(&self, target: &Target) -> &str;
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>;
fn used_crate_source(&self, cnum: CrateNum) -> CrateSource;
fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>;
}
fn item_generics_cloned(&self, def: DefId) -> ty::Generics
{ bug!("item_generics_cloned") }
- fn item_attrs(&self, def_id: DefId) -> Rc<[ast::Attribute]> { bug!("item_attrs") }
- fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name> { bug!("fn_arg_names") }
// trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] }
// impl info
fn impl_defaultness(&self, def: DefId) -> hir::Defaultness { bug!("impl_defaultness") }
- fn impl_parent(&self, def: DefId) -> Option<DefId> { bug!("impl_parent") }
// trait/impl-item info
- fn trait_of_item(&self, def_id: DefId) -> Option<DefId> { bug!("trait_of_item") }
fn associated_item_cloned(&self, def: DefId) -> ty::AssociatedItem
{ bug!("associated_item_cloned") }
// flags
fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") }
fn is_default_impl(&self, impl_did: DefId) -> bool { bug!("is_default_impl") }
- fn is_foreign_item(&self, did: DefId) -> bool { bug!("is_foreign_item") }
fn is_dllimport_foreign_item(&self, id: DefId) -> bool { false }
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool { false }
- fn is_exported_symbol(&self, def_id: DefId) -> bool { false }
// crate metadata
fn dylib_dependency_formats(&self, cnum: CrateNum)
{ bug!("lang_items") }
fn missing_lang_items(&self, cnum: CrateNum) -> Vec<lang_items::LangItem>
{ bug!("missing_lang_items") }
- fn is_staged_api(&self, cnum: CrateNum) -> bool { bug!("is_staged_api") }
fn dep_kind(&self, cnum: CrateNum) -> DepKind { bug!("is_explicitly_linked") }
fn export_macros(&self, cnum: CrateNum) { bug!("export_macros") }
fn is_allocator(&self, cnum: CrateNum) -> bool { bug!("is_allocator") }
fn used_link_args(&self) -> Vec<String> { vec![] }
// utility functions
- fn metadata_filename(&self) -> &str { bug!("metadata_filename") }
- fn metadata_section_name(&self, target: &Target) -> &str { bug!("metadata_section_name") }
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>
{ vec![] }
fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") }
bug!("encode_metadata")
}
fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
+
+ // access to the metadata loader
+ fn metadata_loader(&self) -> &MetadataLoader { bug!("metadata_loader") }
}
pub trait CrateLoader {
} else if match self.tcx.hir.get_if_local(def_id) {
Some(hir::map::NodeForeignItem(..)) => true,
Some(..) => false,
- None => self.tcx.sess.cstore.is_foreign_item(def_id),
+ None => self.tcx.is_foreign_item(def_id),
} {
self.require_unsafe_ext(expr.id, expr.span, "use of extern static", true);
}
impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
- region_maps: &'a RegionMaps<'tcx>,
+ region_maps: &'a RegionMaps,
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
-> Self
{
pub fn with_options(delegate: &'a mut (Delegate<'tcx>+'a),
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- region_maps: &'a RegionMaps<'tcx>,
+ region_maps: &'a RegionMaps,
options: mc::MemCategorizationOptions)
-> Self
{
pub context: DefId,
/// region maps for the given context
- pub region_maps: &'a RegionMaps<'tcx>,
+ pub region_maps: &'a RegionMaps,
/// free-region relationships
pub free_regions: &'a FreeRegionMap<'tcx>,
pub fn new(
tcx: TyCtxt<'a, 'gcx, 'tcx>,
context: DefId,
- region_maps: &'a RegionMaps<'tcx>,
+ region_maps: &'a RegionMaps,
free_regions: &'a FreeRegionMap<'tcx>,
) -> Self {
Self {
(&ty::ReScope(sub_scope), &ty::ReScope(super_scope)) =>
self.region_maps.is_subscope_of(sub_scope, super_scope),
- (&ty::ReScope(sub_scope), &ty::ReFree(fr)) => {
- // 1. It is safe to unwrap `fr.scope` because we
- // should only ever wind up comparing against
- // `ReScope` in the context of a method or
- // body, where `fr.scope` should be `Some`.
- self.region_maps.is_subscope_of(sub_scope, fr.scope.unwrap() /*1*/) ||
- self.is_static(super_region)
+ (&ty::ReScope(sub_scope), &ty::ReEarlyBound(ref br)) => {
+ let fr_scope = self.region_maps.early_free_extent(self.tcx, br);
+ self.region_maps.is_subscope_of(sub_scope, fr_scope)
}
- (&ty::ReFree(_), &ty::ReFree(_)) =>
- self.free_regions.relation.contains(&sub_region, &super_region) ||
- self.is_static(super_region),
+ (&ty::ReScope(sub_scope), &ty::ReFree(ref fr)) => {
+ let fr_scope = self.region_maps.free_extent(self.tcx, fr);
+ self.region_maps.is_subscope_of(sub_scope, fr_scope)
+ }
- (&ty::ReStatic, &ty::ReFree(_)) =>
- self.is_static(super_region),
+ (&ty::ReEarlyBound(_), &ty::ReEarlyBound(_)) |
+ (&ty::ReFree(_), &ty::ReEarlyBound(_)) |
+ (&ty::ReEarlyBound(_), &ty::ReFree(_)) |
+ (&ty::ReFree(_), &ty::ReFree(_)) =>
+ self.free_regions.relation.contains(&sub_region, &super_region),
_ =>
false,
}
};
+ let result = result || self.is_static(super_region);
debug!("is_subregion_of(sub_region={:?}, super_region={:?}) = {:?}",
sub_region, super_region, result);
result
debug!("is_static(super_region={:?})", super_region);
match *super_region {
ty::ReStatic => true,
- ty::ReFree(_) => {
+ ty::ReEarlyBound(_) | ty::ReFree(_) => {
let re_static = self.tcx.mk_region(ty::ReStatic);
self.free_regions.relation.contains(&re_static, &super_region)
}
- _ => bug!("only free regions should be given to `is_static`")
+ _ => false
}
}
for implied_bound in implied_bounds {
debug!("implied bound: {:?}", implied_bound);
match *implied_bound {
- ImpliedBound::RegionSubRegion(a @ &ty::ReFree(_), b @ &ty::ReFree(_)) |
- ImpliedBound::RegionSubRegion(a @ &ty::ReStatic, b @ &ty::ReFree(_)) => {
+ ImpliedBound::RegionSubRegion(a, b) => {
self.relate_regions(a, b);
}
- ImpliedBound::RegionSubRegion(..) |
ImpliedBound::RegionSubParam(..) |
ImpliedBound::RegionSubProjection(..) => {
}
// No region bounds here
}
ty::Predicate::RegionOutlives(ty::Binder(ty::OutlivesPredicate(r_a, r_b))) => {
- match (r_a, r_b) {
- // `'static: 'x` is not notable
- (&ty::ReStatic, &ty::ReFree(_)) => {},
-
- (&ty::ReFree(_), &ty::ReStatic) |
- (&ty::ReFree(_), &ty::ReFree(_)) => {
- // Record that `'a:'b`. Or, put another way, `'b <= 'a`.
- self.relate_regions(r_b, r_a);
- }
-
- _ => {
- // All named regions are instantiated with free regions.
- bug!("record_region_bounds: non free region: {:?} / {:?}",
- r_a,
- r_b);
- }
- }
+ self.relate_regions(r_b, r_a);
}
}
}
}
+ // Record that `'sup:'sub`. Or, put another way, `'sub <= 'sup`.
+ // (with the exception that `'static: 'x` is not notable)
fn relate_regions(&mut self, sub: Region<'tcx>, sup: Region<'tcx>) {
- assert!(match *sub { ty::ReFree(_) | ty::ReStatic => true, _ => false });
- assert!(match *sup { ty::ReFree(_) | ty::ReStatic => true, _ => false });
- self.relation.add(sub, sup)
+ if (is_free(sub) || *sub == ty::ReStatic) && is_free(sup) {
+ self.relation.add(sub, sup)
+ }
}
pub fn lub_free_regions<'a, 'gcx>(&self,
r_a: Region<'tcx>,
r_b: Region<'tcx>)
-> Region<'tcx> {
- assert!(match *r_a { ty::ReFree(_) => true, _ => false });
- assert!(match *r_b { ty::ReFree(_) => true, _ => false });
+ assert!(is_free(r_a));
+ assert!(is_free(r_b));
let result = if r_a == r_b { r_a } else {
match self.relation.postdom_upper_bound(&r_a, &r_b) {
None => tcx.mk_region(ty::ReStatic),
}
}
+fn is_free(r: Region) -> bool {
+ match *r {
+ ty::ReEarlyBound(_) | ty::ReFree(_) => true,
+ _ => false
+ }
+}
+
impl_stable_hash_for!(struct FreeRegionMap<'tcx> {
relation
});
//!
//! - `fallthrough_ln`: a live node that represents a fallthrough
//!
-//! - `no_ret_var`: a synthetic variable that is only 'read' from, the
-//! fallthrough node. This allows us to detect functions where we fail
-//! to return explicitly.
//! - `clean_exit_var`: a synthetic variable that is only 'read' from the
//! fallthrough node. It is only live if the function could converge
//! via means other than an explicit `return` expression. That is, it is
use self::VarKind::*;
use hir::def::*;
-use ty::{self, TyCtxt, ParameterEnvironment};
-use traits::{self, Reveal};
-use ty::subst::Subst;
+use ty::{self, TyCtxt};
use lint;
use util::nodemap::NodeMap;
enum VarKind {
Arg(NodeId, ast::Name),
Local(LocalInfo),
- ImplicitRet,
CleanExit
}
Local(LocalInfo { id: node_id, .. }) | Arg(node_id, _) => {
self.variable_map.insert(node_id, v);
},
- ImplicitRet | CleanExit => {}
+ CleanExit => {}
}
debug!("{:?} is {:?}", v, vk);
Local(LocalInfo { name, .. }) | Arg(_, name) => {
name.to_string()
},
- ImplicitRet => "<implicit-ret>".to_string(),
CleanExit => "<clean-exit>".to_string()
}
}
// check for various error conditions
lsets.visit_body(body);
- lsets.check_ret(id, sp, entry_ln, body);
lsets.warn_about_unused_args(body, entry_ln);
}
struct Specials {
exit_ln: LiveNode,
fallthrough_ln: LiveNode,
- no_ret_var: Variable,
clean_exit_var: Variable
}
let specials = Specials {
exit_ln: ir.add_live_node(ExitNode),
fallthrough_ln: ir.add_live_node(ExitNode),
- no_ret_var: ir.add_variable(ImplicitRet),
clean_exit_var: ir.add_variable(CleanExit)
};
}
impl<'a, 'tcx> Liveness<'a, 'tcx> {
- fn check_ret(&self,
- id: NodeId,
- sp: Span,
- entry_ln: LiveNode,
- body: &hir::Body)
- {
- let fn_ty = self.ir.tcx.type_of(self.ir.tcx.hir.local_def_id(id));
- let fn_sig = match fn_ty.sty {
- ty::TyClosure(closure_def_id, substs) => {
- self.ir.tcx.closure_type(closure_def_id)
- .subst(self.ir.tcx, substs.substs)
- }
- _ => fn_ty.fn_sig()
- };
-
- let fn_ret = fn_sig.output();
-
- // within the fn body, late-bound regions are liberated
- // and must outlive the *call-site* of the function.
- let fn_ret =
- self.ir.tcx.liberate_late_bound_regions(
- Some(self.ir.tcx.call_site_extent(id, body.value.id)),
- &fn_ret);
-
- if !fn_ret.is_never() && self.live_on_entry(entry_ln, self.s.no_ret_var).is_some() {
- let param_env = ParameterEnvironment::for_item(self.ir.tcx, id);
- let t_ret_subst = fn_ret.subst(self.ir.tcx, ¶m_env.free_substs);
- let is_nil = self.ir.tcx.infer_ctxt(param_env, Reveal::All).enter(|infcx| {
- let cause = traits::ObligationCause::dummy();
- traits::fully_normalize(&infcx, cause, &t_ret_subst).unwrap().is_nil()
- });
-
- // for nil return types, it is ok to not return a value expl.
- if !is_nil {
- span_bug!(sp, "not all control paths return a value");
- }
- }
- }
-
fn check_lvalue(&mut self, expr: &'tcx Expr) {
match expr.node {
hir::ExprPath(hir::QPath::Resolved(_, ref path)) => {
#[derive(Clone)]
pub struct MemCategorizationContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
pub infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- pub region_maps: &'a RegionMaps<'tcx>,
+ pub region_maps: &'a RegionMaps,
options: MemCategorizationOptions,
}
impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
/// Context should be the `DefId` we use to fetch region-maps.
pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- region_maps: &'a RegionMaps<'tcx>)
+ region_maps: &'a RegionMaps)
-> MemCategorizationContext<'a, 'gcx, 'tcx> {
MemCategorizationContext::with_options(infcx,
region_maps,
}
pub fn with_options(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- region_maps: &'a RegionMaps<'tcx>,
+ region_maps: &'a RegionMaps,
options: MemCategorizationOptions)
-> MemCategorizationContext<'a, 'gcx, 'tcx> {
MemCategorizationContext {
cmt_result: cmt_<'tcx>)
-> cmt_<'tcx>
{
- // Look up the node ID of the closure body so we can construct
- // a free region within it
- let fn_body_id = {
- let fn_expr = match self.tcx().hir.find(upvar_id.closure_expr_id) {
- Some(hir_map::NodeExpr(e)) => e,
- _ => bug!()
- };
-
- match fn_expr.node {
- hir::ExprClosure(.., body_id, _) => body_id,
- _ => bug!()
- }
- };
-
// Region of environment pointer
let env_region = self.tcx().mk_region(ty::ReFree(ty::FreeRegion {
// The environment of a closure is guaranteed to
// outlive any bindings introduced in the body of the
// closure itself.
- scope: Some(self.tcx().item_extent(fn_body_id.node_id)),
+ scope: self.tcx().hir.local_def_id(upvar_id.closure_expr_id),
bound_region: ty::BrEnv
}));
pub fn temporary_scope(&self, id: ast::NodeId) -> (ty::Region<'tcx>, ty::Region<'tcx>)
{
let (scope, old_scope) =
- self.region_maps.old_and_new_temporary_scope(self.tcx(), id);
+ self.region_maps.old_and_new_temporary_scope(id);
(self.tcx().mk_region(match scope {
Some(scope) => ty::ReScope(scope),
None => ty::ReStatic
use std::mem;
use std::rc::Rc;
-use serialize;
use syntax::codemap;
use syntax::ast;
use syntax_pos::Span;
use hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local};
use mir::transform::MirSource;
-pub type CodeExtent<'tcx> = &'tcx CodeExtentData;
-
-impl<'tcx> serialize::UseSpecializedEncodable for CodeExtent<'tcx> {}
-impl<'tcx> serialize::UseSpecializedDecodable for CodeExtent<'tcx> {}
-
/// CodeExtent represents a statically-describable extent that can be
/// used to bound the lifetime/region for values.
///
/// actually attach a more meaningful ordering to scopes than the one
/// generated via deriving here.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, RustcEncodable, RustcDecodable)]
-pub enum CodeExtentData {
+pub enum CodeExtent {
Misc(ast::NodeId),
// extent of the call-site for a function or closure (outlives
// the parameters as well as the body).
- CallSiteScope { fn_id: ast::NodeId, body_id: ast::NodeId },
+ CallSiteScope(hir::BodyId),
// extent of parameters passed to a function or closure (they
// outlive its body)
- ParameterScope { fn_id: ast::NodeId, body_id: ast::NodeId },
+ ParameterScope(hir::BodyId),
// extent of destructors for temporaries of node-id
DestructionScope(ast::NodeId),
Remainder(BlockRemainder)
}
-/// extent of call-site for a function/method.
-#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable,
- RustcDecodable, Debug, Copy)]
-pub struct CallSiteScopeData {
- pub fn_id: ast::NodeId, pub body_id: ast::NodeId,
-}
-
-impl CallSiteScopeData {
- pub fn to_code_extent<'a, 'tcx, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> CodeExtent<'tcx> {
- tcx.intern_code_extent(
- match *self {
- CallSiteScopeData { fn_id, body_id } =>
- CodeExtentData::CallSiteScope { fn_id: fn_id, body_id: body_id },
- })
- }
-}
-
/// Represents a subscope of `block` for a binding that is introduced
/// by `block.stmts[first_statement_index]`. Such subscopes represent
/// a suffix of the block. Note that each subscope does not include
/// * the subscope with `first_statement_index == 0` is scope of both
/// `a` and `b`; it does not include EXPR_1, but does include
/// everything after that first `let`. (If you want a scope that
-/// includes EXPR_1 as well, then do not use `CodeExtentData::Remainder`,
+/// includes EXPR_1 as well, then do not use `CodeExtent::Remainder`,
/// but instead another `CodeExtent` that encompasses the whole block,
-/// e.g. `CodeExtentData::Misc`.
+/// e.g. `CodeExtent::Misc`.
///
/// * the subscope with `first_statement_index == 1` is scope of `c`,
/// and thus does not include EXPR_2, but covers the `...`.
pub first_statement_index: u32,
}
-impl CodeExtentData {
+impl CodeExtent {
/// Returns a node id associated with this scope.
///
/// NB: likely to be replaced as API is refined; e.g. pnkfelix
/// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
pub fn node_id(&self) -> ast::NodeId {
match *self {
- CodeExtentData::Misc(node_id) => node_id,
+ CodeExtent::Misc(node_id) => node_id,
// These cases all return rough approximations to the
// precise extent denoted by `self`.
- CodeExtentData::Remainder(br) => br.block,
- CodeExtentData::DestructionScope(node_id) => node_id,
- CodeExtentData::CallSiteScope { fn_id: _, body_id } |
- CodeExtentData::ParameterScope { fn_id: _, body_id } => body_id,
+ CodeExtent::Remainder(br) => br.block,
+ CodeExtent::DestructionScope(node_id) => node_id,
+ CodeExtent::CallSiteScope(body_id) |
+ CodeExtent::ParameterScope(body_id) => body_id.node_id,
}
}
match hir_map.find(self.node_id()) {
Some(hir_map::NodeBlock(ref blk)) => {
match *self {
- CodeExtentData::CallSiteScope { .. } |
- CodeExtentData::ParameterScope { .. } |
- CodeExtentData::Misc(_) |
- CodeExtentData::DestructionScope(_) => Some(blk.span),
+ CodeExtent::CallSiteScope(_) |
+ CodeExtent::ParameterScope(_) |
+ CodeExtent::Misc(_) |
+ CodeExtent::DestructionScope(_) => Some(blk.span),
- CodeExtentData::Remainder(r) => {
+ CodeExtent::Remainder(r) => {
assert_eq!(r.block, blk.id);
// Want span for extent starting after the
// indexed statement and ending at end of
}
/// The region maps encode information about region relationships.
-pub struct RegionMaps<'tcx> {
+pub struct RegionMaps {
+ /// If not empty, this body is the root of this region hierarchy.
+ root_body: Option<hir::BodyId>,
+
+ /// The parent of the root body owner, if the latter is an
+ /// an associated const or method, as impls/traits can also
+ /// have lifetime parameters free in this body.
+ root_parent: Option<ast::NodeId>,
+
/// `scope_map` maps from a scope id to the enclosing scope id;
/// this is usually corresponding to the lexical nesting, though
/// in the case of closures the parent scope is the innermost
/// conditional expression or repeating block. (Note that the
/// enclosing scope id for the block associated with a closure is
/// the closure itself.)
- scope_map: FxHashMap<CodeExtent<'tcx>, CodeExtent<'tcx>>,
+ scope_map: FxHashMap<CodeExtent, CodeExtent>,
/// `var_map` maps from a variable or binding id to the block in
/// which that variable is declared.
- var_map: NodeMap<CodeExtent<'tcx>>,
+ var_map: NodeMap<CodeExtent>,
/// maps from a node-id to the associated destruction scope (if any)
- destruction_scopes: NodeMap<CodeExtent<'tcx>>,
+ destruction_scopes: NodeMap<CodeExtent>,
/// `rvalue_scopes` includes entries for those expressions whose cleanup scope is
/// larger than the default. The map goes from the expression id
/// table, the appropriate cleanup scope is the innermost
/// enclosing statement, conditional expression, or repeating
/// block (see `terminating_scopes`).
- rvalue_scopes: NodeMap<CodeExtent<'tcx>>,
+ rvalue_scopes: NodeMap<CodeExtent>,
/// Records the value of rvalue scopes before they were shrunk by
/// #36082, for error reporting.
///
/// FIXME: this should be temporary. Remove this by 1.18.0 or
/// so.
- shrunk_rvalue_scopes: NodeMap<CodeExtent<'tcx>>,
+ shrunk_rvalue_scopes: NodeMap<CodeExtent>,
/// Encodes the hierarchy of fn bodies. Every fn body (including
/// closures) forms its own distinct region hierarchy, rooted in
}
#[derive(Debug, Copy, Clone)]
-pub struct Context<'tcx> {
+pub struct Context {
/// the root of the current region tree. This is typically the id
/// of the innermost fn body. Each fn forms its own disjoint tree
/// in the region hierarchy. These fn bodies are themselves
root_id: Option<ast::NodeId>,
/// the scope that contains any new variables declared
- var_parent: Option<CodeExtent<'tcx>>,
+ var_parent: Option<CodeExtent>,
/// region parent of expressions etc
- parent: Option<CodeExtent<'tcx>>,
+ parent: Option<CodeExtent>,
}
struct RegionResolutionVisitor<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Generated maps:
- region_maps: &'a mut RegionMaps<'tcx>,
-
- cx: Context<'tcx>,
+ region_maps: RegionMaps,
- map: &'a hir_map::Map<'tcx>,
+ cx: Context,
/// `terminating_scopes` is a set containing the ids of each
/// statement, or conditional/repeating expression. These scopes
}
-impl<'tcx> RegionMaps<'tcx> {
+impl<'tcx> RegionMaps {
pub fn new() -> Self {
RegionMaps {
+ root_body: None,
+ root_parent: None,
scope_map: FxHashMap(),
destruction_scopes: FxHashMap(),
var_map: NodeMap(),
}
pub fn record_code_extent(&mut self,
- child: CodeExtent<'tcx>,
- parent: Option<CodeExtent<'tcx>>) {
+ child: CodeExtent,
+ parent: Option<CodeExtent>) {
debug!("{:?}.parent = {:?}", child, parent);
if let Some(p) = parent {
}
// record the destruction scopes for later so we can query them
- if let &CodeExtentData::DestructionScope(n) = child {
+ if let CodeExtent::DestructionScope(n) = child {
self.destruction_scopes.insert(n, child);
}
}
- pub fn each_encl_scope<E>(&self, mut e:E) where E: FnMut(CodeExtent<'tcx>, CodeExtent<'tcx>) {
+ pub fn each_encl_scope<E>(&self, mut e:E) where E: FnMut(CodeExtent, CodeExtent) {
for (&child, &parent) in &self.scope_map {
e(child, parent)
}
}
- pub fn each_var_scope<E>(&self, mut e:E) where E: FnMut(&ast::NodeId, CodeExtent<'tcx>) {
- for (child, parent) in self.var_map.iter() {
+ pub fn each_var_scope<E>(&self, mut e:E) where E: FnMut(&ast::NodeId, CodeExtent) {
+ for (child, &parent) in self.var_map.iter() {
e(child, parent)
}
}
- pub fn opt_destruction_extent(&self, n: ast::NodeId) -> Option<CodeExtent<'tcx>> {
+ pub fn opt_destruction_extent(&self, n: ast::NodeId) -> Option<CodeExtent> {
self.destruction_scopes.get(&n).cloned()
}
}
}
- fn record_var_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent<'tcx>) {
+ fn record_var_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) {
debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime);
assert!(var != lifetime.node_id());
self.var_map.insert(var, lifetime);
}
- fn record_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent<'tcx>) {
+ fn record_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) {
debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime);
assert!(var != lifetime.node_id());
self.rvalue_scopes.insert(var, lifetime);
}
- fn record_shrunk_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent<'tcx>) {
+ fn record_shrunk_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) {
debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime);
assert!(var != lifetime.node_id());
self.shrunk_rvalue_scopes.insert(var, lifetime);
}
- pub fn opt_encl_scope(&self, id: CodeExtent<'tcx>) -> Option<CodeExtent<'tcx>> {
+ pub fn opt_encl_scope(&self, id: CodeExtent) -> Option<CodeExtent> {
//! Returns the narrowest scope that encloses `id`, if any.
self.scope_map.get(&id).cloned()
}
#[allow(dead_code)] // used in cfg
- pub fn encl_scope(&self, id: CodeExtent<'tcx>) -> CodeExtent<'tcx> {
+ pub fn encl_scope(&self, id: CodeExtent) -> CodeExtent {
//! Returns the narrowest scope that encloses `id`, if any.
self.opt_encl_scope(id).unwrap()
}
/// Returns the lifetime of the local variable `var_id`
- pub fn var_scope(&self, var_id: ast::NodeId) -> CodeExtent<'tcx> {
+ pub fn var_scope(&self, var_id: ast::NodeId) -> CodeExtent {
match self.var_map.get(&var_id) {
Some(&r) => r,
None => { bug!("no enclosing scope for id {:?}", var_id); }
}
}
- pub fn temporary_scope2<'a, 'gcx: 'tcx>(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- expr_id: ast::NodeId)
- -> (Option<CodeExtent<'tcx>>, bool) {
- let temporary_scope = self.temporary_scope(tcx, expr_id);
+ pub fn temporary_scope2(&self, expr_id: ast::NodeId)
+ -> (Option<CodeExtent>, bool) {
+ let temporary_scope = self.temporary_scope(expr_id);
let was_shrunk = match self.shrunk_rvalue_scopes.get(&expr_id) {
Some(&s) => {
info!("temporary_scope2({:?}, scope={:?}, shrunk={:?})",
(temporary_scope, was_shrunk)
}
- pub fn old_and_new_temporary_scope<'a, 'gcx: 'tcx>(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- expr_id: ast::NodeId)
- -> (Option<CodeExtent<'tcx>>,
- Option<CodeExtent<'tcx>>)
+ pub fn old_and_new_temporary_scope(&self, expr_id: ast::NodeId)
+ -> (Option<CodeExtent>,
+ Option<CodeExtent>)
{
- let temporary_scope = self.temporary_scope(tcx, expr_id);
+ let temporary_scope = self.temporary_scope(expr_id);
(temporary_scope,
self.shrunk_rvalue_scopes
.get(&expr_id).cloned()
.or(temporary_scope))
}
- pub fn temporary_scope<'a, 'gcx: 'tcx>(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- expr_id: ast::NodeId)
- -> Option<CodeExtent<'tcx>> {
+ pub fn temporary_scope(&self, expr_id: ast::NodeId) -> Option<CodeExtent> {
//! Returns the scope when temp created by expr_id will be cleaned up
// check for a designated rvalue scope
// if there's one. Static items, for instance, won't
// have an enclosing scope, hence no scope will be
// returned.
- let mut id = tcx.node_extent(expr_id);
+ let mut id = CodeExtent::Misc(expr_id);
- while let Some(&p) = self.scope_map.get(id) {
- match *p {
- CodeExtentData::DestructionScope(..) => {
+ while let Some(&p) = self.scope_map.get(&id) {
+ match p {
+ CodeExtent::DestructionScope(..) => {
debug!("temporary_scope({:?}) = {:?} [enclosing]",
expr_id, id);
return Some(id);
return None;
}
- pub fn var_region(&self, id: ast::NodeId) -> ty::RegionKind<'tcx> {
+ pub fn var_region(&self, id: ast::NodeId) -> ty::RegionKind {
//! Returns the lifetime of the variable `id`.
let scope = ty::ReScope(self.var_scope(id));
/// Finds the nearest common ancestor (if any) of two scopes. That is, finds the smallest
/// scope which is greater than or equal to both `scope_a` and `scope_b`.
pub fn nearest_common_ancestor(&self,
- scope_a: CodeExtent<'tcx>,
- scope_b: CodeExtent<'tcx>)
- -> CodeExtent<'tcx> {
+ scope_a: CodeExtent,
+ scope_b: CodeExtent)
+ -> CodeExtent {
if scope_a == scope_b { return scope_a; }
/// [1] The initial values for `a_buf` and `b_buf` are not used.
/// is re-initialized with new values (or else fallback to a
/// heap-allocated vector).
let mut a_buf: [CodeExtent; 32] = [scope_a /* [1] */; 32];
- let mut a_vec: Vec<CodeExtent<'tcx>> = vec![];
+ let mut a_vec: Vec<CodeExtent> = vec![];
let mut b_buf: [CodeExtent; 32] = [scope_b /* [1] */; 32];
- let mut b_vec: Vec<CodeExtent<'tcx>> = vec![];
+ let mut b_vec: Vec<CodeExtent> = vec![];
let scope_map = &self.scope_map;
let a_ancestors = ancestors_of(scope_map, scope_a, &mut a_buf, &mut a_vec);
let b_ancestors = ancestors_of(scope_map, scope_b, &mut b_buf, &mut b_vec);
let a_root_scope = a_ancestors[a_index];
let b_root_scope = a_ancestors[a_index];
return match (a_root_scope, b_root_scope) {
- (&CodeExtentData::DestructionScope(a_root_id),
- &CodeExtentData::DestructionScope(b_root_id)) => {
+ (CodeExtent::DestructionScope(a_root_id),
+ CodeExtent::DestructionScope(b_root_id)) => {
if self.fn_is_enclosed_by(a_root_id, b_root_id) {
// `a` is enclosed by `b`, hence `b` is the ancestor of everything in `a`
scope_b
}
}
- fn ancestors_of<'a, 'tcx>(scope_map: &FxHashMap<CodeExtent<'tcx>, CodeExtent<'tcx>>,
- scope: CodeExtent<'tcx>,
- buf: &'a mut [CodeExtent<'tcx>; 32],
- vec: &'a mut Vec<CodeExtent<'tcx>>)
- -> &'a [CodeExtent<'tcx>] {
+ fn ancestors_of<'a, 'tcx>(scope_map: &FxHashMap<CodeExtent, CodeExtent>,
+ scope: CodeExtent,
+ buf: &'a mut [CodeExtent; 32],
+ vec: &'a mut Vec<CodeExtent>)
+ -> &'a [CodeExtent] {
// debug!("ancestors_of(scope={:?})", scope);
let mut scope = scope;
while i < 32 {
buf[i] = scope;
match scope_map.get(&scope) {
- Some(superscope) => scope = superscope,
+ Some(&superscope) => scope = superscope,
_ => return &buf[..i+1]
}
i += 1;
loop {
vec.push(scope);
match scope_map.get(&scope) {
- Some(superscope) => scope = superscope,
+ Some(&superscope) => scope = superscope,
_ => return &*vec
}
}
}
}
+
+ /// Assuming that the provided region was defined within this `RegionMaps`,
+ /// returns the outermost `CodeExtent` that the region outlives.
+ pub fn early_free_extent<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ br: &ty::EarlyBoundRegion)
+ -> CodeExtent {
+ let param_owner = tcx.parent_def_id(br.def_id).unwrap();
+
+ let param_owner_id = tcx.hir.as_local_node_id(param_owner).unwrap();
+ let body_id = tcx.hir.maybe_body_owned_by(param_owner_id).unwrap_or_else(|| {
+ // The lifetime was defined on node that doesn't own a body,
+ // which in practice can only mean a trait or an impl, that
+ // is the parent of a method, and that is enforced below.
+ assert_eq!(Some(param_owner_id), self.root_parent,
+ "free_extent: {:?} not recognized by the region maps for {:?}",
+ param_owner,
+ self.root_body.map(|body| tcx.hir.body_owner_def_id(body)));
+
+ // The trait/impl lifetime is in scope for the method's body.
+ self.root_body.unwrap()
+ });
+
+ CodeExtent::CallSiteScope(body_id)
+ }
+
+ /// Assuming that the provided region was defined within this `RegionMaps`,
+ /// returns the outermost `CodeExtent` that the region outlives.
+ pub fn free_extent<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, fr: &ty::FreeRegion)
+ -> CodeExtent {
+ let param_owner = match fr.bound_region {
+ ty::BoundRegion::BrNamed(def_id, _) => {
+ tcx.parent_def_id(def_id).unwrap()
+ }
+ _ => fr.scope
+ };
+
+ // Ensure that the named late-bound lifetimes were defined
+ // on the same function that they ended up being freed in.
+ assert_eq!(param_owner, fr.scope);
+
+ let param_owner_id = tcx.hir.as_local_node_id(param_owner).unwrap();
+ CodeExtent::CallSiteScope(tcx.hir.body_owned_by(param_owner_id))
+ }
}
/// Records the lifetime of a local variable as `cx.var_parent`
debug!("resolve_block(blk.id={:?})", blk.id);
let prev_cx = visitor.cx;
- let block_extent = visitor.new_node_extent_with_dtor(blk.id);
// We treat the tail expression in the block (if any) somewhat
// differently from the statements. The issue has to do with
// `other_argument()` has run and also the call to `quux(..)`
// itself has returned.
- visitor.cx = Context {
- root_id: prev_cx.root_id,
- var_parent: Some(block_extent),
- parent: Some(block_extent),
- };
+ visitor.enter_node_extent_with_dtor(blk.id);
+ visitor.cx.var_parent = visitor.cx.parent;
{
// This block should be kept approximately in sync with
// has the previous subscope in the block as a parent,
// except for the first such subscope, which has the
// block itself as a parent.
- let stmt_extent = visitor.new_code_extent(
- CodeExtentData::Remainder(BlockRemainder {
+ visitor.enter_code_extent(
+ CodeExtent::Remainder(BlockRemainder {
block: blk.id,
first_statement_index: i as u32
})
);
- visitor.cx = Context {
- root_id: prev_cx.root_id,
- var_parent: Some(stmt_extent),
- parent: Some(stmt_extent),
- };
+ visitor.cx.var_parent = visitor.cx.parent;
}
visitor.visit_stmt(statement)
}
}
fn resolve_pat<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, pat: &'tcx hir::Pat) {
- visitor.new_node_extent(pat.id);
+ visitor.record_code_extent(CodeExtent::Misc(pat.id));
// If this is a binding then record the lifetime of that binding.
if let PatKind::Binding(..) = pat.node {
// statement plus its destructors, and thus the extent for which
// regions referenced by the destructors need to survive.
visitor.terminating_scopes.insert(stmt_id);
- let stmt_extent = visitor.new_node_extent_with_dtor(stmt_id);
let prev_parent = visitor.cx.parent;
- visitor.cx.parent = Some(stmt_extent);
+ visitor.enter_node_extent_with_dtor(stmt_id);
+
intravisit::walk_stmt(visitor, stmt);
+
visitor.cx.parent = prev_parent;
}
fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &'tcx hir::Expr) {
debug!("resolve_expr(expr.id={:?})", expr.id);
- let expr_extent = visitor.new_node_extent_with_dtor(expr.id);
let prev_cx = visitor.cx;
- visitor.cx.parent = Some(expr_extent);
+ visitor.enter_node_extent_with_dtor(expr.id);
{
let terminating_scopes = &mut visitor.terminating_scopes;
}
hir::ExprMatch(..) => {
- visitor.cx.var_parent = Some(expr_extent);
+ visitor.cx.var_parent = visitor.cx.parent;
}
hir::ExprAssignOp(..) | hir::ExprIndex(..) |
fn record_rvalue_scope_if_borrow_expr<'a, 'tcx>(
visitor: &mut RegionResolutionVisitor<'a, 'tcx>,
expr: &hir::Expr,
- blk_id: CodeExtent<'tcx>)
+ blk_id: CodeExtent)
{
match expr.node {
hir::ExprAddrOf(_, ref subexpr) => {
/// Note: ET is intended to match "rvalues or lvalues based on rvalues".
fn record_rvalue_scope<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>,
expr: &hir::Expr,
- blk_scope: CodeExtent<'tcx>,
+ blk_scope: CodeExtent,
is_shrunk: bool) {
let mut expr = expr;
loop {
}
impl<'a, 'tcx> RegionResolutionVisitor<'a, 'tcx> {
- pub fn intern_code_extent(&mut self,
- data: CodeExtentData,
- parent: Option<CodeExtent<'tcx>>)
- -> CodeExtent<'tcx> {
- let code_extent = self.tcx.intern_code_extent(data);
- self.region_maps.record_code_extent(code_extent, parent);
- code_extent
- }
-
- pub fn intern_node(&mut self,
- n: ast::NodeId,
- parent: Option<CodeExtent<'tcx>>) -> CodeExtent<'tcx> {
- self.intern_code_extent(CodeExtentData::Misc(n), parent)
- }
-
/// Records the current parent (if any) as the parent of `child_scope`.
- fn new_code_extent(&mut self, child_scope: CodeExtentData) -> CodeExtent<'tcx> {
+ fn record_code_extent(&mut self, child_scope: CodeExtent) {
let parent = self.cx.parent;
- self.intern_code_extent(child_scope, parent)
+ self.region_maps.record_code_extent(child_scope, parent);
}
- fn new_node_extent(&mut self, child_scope: ast::NodeId) -> CodeExtent<'tcx> {
- self.new_code_extent(CodeExtentData::Misc(child_scope))
+ /// Records the current parent (if any) as the parent of `child_scope`,
+ /// and sets `child_scope` as the new current parent.
+ fn enter_code_extent(&mut self, child_scope: CodeExtent) {
+ self.record_code_extent(child_scope);
+ self.cx.parent = Some(child_scope);
}
- fn new_node_extent_with_dtor(&mut self, id: ast::NodeId) -> CodeExtent<'tcx> {
+ fn enter_node_extent_with_dtor(&mut self, id: ast::NodeId) {
// If node was previously marked as a terminating scope during the
// recursive visit of its parent node in the AST, then we need to
// account for the destruction scope representing the extent of
// the destructors that run immediately after it completes.
if self.terminating_scopes.contains(&id) {
- let ds = self.new_code_extent(
- CodeExtentData::DestructionScope(id));
- self.intern_node(id, Some(ds))
- } else {
- self.new_node_extent(id)
+ self.enter_code_extent(CodeExtent::DestructionScope(id));
}
+ self.enter_code_extent(CodeExtent::Misc(id));
}
}
fn visit_body(&mut self, body: &'tcx hir::Body) {
let body_id = body.id();
- let owner_id = self.map.body_owner(body_id);
+ let owner_id = self.tcx.hir.body_owner(body_id);
debug!("visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})",
owner_id,
}
self.cx.root_id = Some(body_id.node_id);
- self.cx.parent = Some(self.new_code_extent(
- CodeExtentData::CallSiteScope { fn_id: owner_id, body_id: body_id.node_id }));
- self.cx.parent = Some(self.new_code_extent(
- CodeExtentData::ParameterScope { fn_id: owner_id, body_id: body_id.node_id }));
+ self.enter_code_extent(CodeExtent::CallSiteScope(body_id));
+ self.enter_code_extent(CodeExtent::ParameterScope(body_id));
// The arguments and `self` are parented to the fn.
self.cx.var_parent = self.cx.parent.take();
}
fn region_maps<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
- -> Rc<RegionMaps<'tcx>>
+ -> Rc<RegionMaps>
{
let closure_base_def_id = tcx.closure_base_def_id(def_id);
if closure_base_def_id != def_id {
return tcx.region_maps(closure_base_def_id);
}
- let mut maps = RegionMaps::new();
-
let id = tcx.hir.as_local_node_id(def_id).unwrap();
- if let Some(body) = tcx.hir.maybe_body_owned_by(id) {
+ let maps = if let Some(body) = tcx.hir.maybe_body_owned_by(id) {
let mut visitor = RegionResolutionVisitor {
- tcx: tcx,
- region_maps: &mut maps,
- map: &tcx.hir,
+ tcx,
+ region_maps: RegionMaps::new(),
cx: Context {
root_id: None,
parent: None,
terminating_scopes: NodeSet(),
};
+ visitor.region_maps.root_body = Some(body);
+
+ // If the item is an associated const or a method,
+ // record its impl/trait parent, as it can also have
+ // lifetime parameters free in this body.
+ match tcx.hir.get(id) {
+ hir::map::NodeImplItem(_) |
+ hir::map::NodeTraitItem(_) => {
+ visitor.region_maps.root_parent = Some(tcx.hir.get_parent(id));
+ }
+ _ => {}
+ }
+
visitor.visit_body(tcx.hir.body(body));
- }
+
+ visitor.region_maps
+ } else {
+ RegionMaps::new()
+ };
Rc::new(maps)
}
use session::Session;
use hir::def::Def;
use hir::def_id::DefId;
-use middle::region;
use ty;
use std::cell::Cell;
EarlyBound(/* index */ u32, /* lifetime decl */ ast::NodeId),
LateBound(ty::DebruijnIndex, /* lifetime decl */ ast::NodeId),
LateBoundAnon(ty::DebruijnIndex, /* anon index */ u32),
- Free(region::CallSiteScopeData, /* lifetime decl */ ast::NodeId),
+ Free(DefId, /* lifetime decl */ ast::NodeId),
}
impl Region {
};
if let Some(mut def) = result {
- if let Some(body_id) = outermost_body {
+ if let Region::EarlyBound(..) = def {
+ // Do not free early-bound regions, only late-bound ones.
+ } else if let Some(body_id) = outermost_body {
let fn_id = self.hir_map.body_owner(body_id);
- let scope_data = region::CallSiteScopeData {
- fn_id: fn_id, body_id: body_id.node_id
- };
match self.hir_map.get(fn_id) {
hir::map::NodeItem(&hir::Item {
node: hir::ItemFn(..), ..
hir::map::NodeImplItem(&hir::ImplItem {
node: hir::ImplItemKind::Method(..), ..
}) => {
- def = Region::Free(scope_data, def.id().unwrap());
+ let scope = self.hir_map.local_def_id(fn_id);
+ def = Region::Free(scope, def.id().unwrap());
}
_ => {}
}
pub use self::StabilityLevel::*;
-use hir::map as hir_map;
use lint;
use hir::def::Def;
use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE};
use ty::{self, TyCtxt};
use middle::privacy::AccessLevels;
+use session::Session;
use syntax::symbol::Symbol;
use syntax_pos::{Span, DUMMY_SP};
use syntax::ast;
item_sp: Span, kind: AnnotationKind, visit_children: F)
where F: FnOnce(&mut Self)
{
- if self.index.staged_api[&LOCAL_CRATE] && self.tcx.sess.features.borrow().staged_api {
+ if self.index.staged_api[&LOCAL_CRATE] {
debug!("annotate(id = {:?}, attrs = {:?})", id, attrs);
if let Some(..) = attr::find_deprecation(self.tcx.sess.diagnostic(), attrs, item_sp) {
self.tcx.sess.span_err(item_sp, "`#[deprecated]` cannot be used in staged api, \
parent_depr: None,
in_trait_impl: false,
};
+
+ // If the `-Z force-unstable-if-unmarked` flag is passed then we provide
+ // a parent stability annotation which indicates that this is private
+ // with the `rustc_private` feature. This is intended for use when
+ // compiling librustc crates themselves so we can leverage crates.io
+ // while maintaining the invariant that all sysroot crates are unstable
+ // by default and are unable to be used.
+ if tcx.sess.opts.debugging_opts.force_unstable_if_unmarked {
+ let reason = "this crate is being loaded from the sysroot, and \
+ unstable location; did you mean to load this crate \
+ from crates.io via `Cargo.toml` instead?";
+ let stability = tcx.intern_stability(Stability {
+ level: attr::StabilityLevel::Unstable {
+ reason: Some(Symbol::intern(reason)),
+ issue: 27812,
+ },
+ feature: Symbol::intern("rustc_private"),
+ rustc_depr: None,
+ });
+ annotator.parent_stab = Some(stability);
+ }
+
annotator.annotate(ast::CRATE_NODE_ID, &krate.attrs, krate.span, AnnotationKind::Required,
|v| intravisit::walk_crate(v, krate));
}
- pub fn new(hir_map: &hir_map::Map) -> Index<'tcx> {
- let krate = hir_map.krate();
-
- let mut is_staged_api = false;
- for attr in &krate.attrs {
- if attr.path == "stable" || attr.path == "unstable" {
- is_staged_api = true;
- break
- }
- }
+ pub fn new(sess: &Session) -> Index<'tcx> {
+ let is_staged_api =
+ sess.opts.debugging_opts.force_unstable_if_unmarked ||
+ sess.features.borrow().staged_api;
let mut staged_api = FxHashMap();
staged_api.insert(LOCAL_CRATE, is_staged_api);
}
}
- let is_staged_api = *self.stability.borrow_mut().staged_api.entry(def_id.krate)
- .or_insert_with(|| self.sess.cstore.is_staged_api(def_id.krate));
+ let is_staged_api = self.lookup_stability(DefId {
+ index: CRATE_DEF_INDEX,
+ ..def_id
+ }).is_some();
if !is_staged_api {
return;
}
match stability {
Some(&Stability { level: attr::Unstable {ref reason, issue}, ref feature, .. }) => {
- if !self.stability.borrow().active_features.contains(feature) {
- let msg = match *reason {
- Some(ref r) => format!("use of unstable library feature '{}': {}",
- feature.as_str(), &r),
- None => format!("use of unstable library feature '{}'", &feature)
- };
- emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span,
- GateIssue::Library(Some(issue)), &msg);
+ if self.stability.borrow().active_features.contains(feature) {
+ return
}
+
+ // When we're compiling the compiler itself we may pull in
+ // crates from crates.io, but those crates may depend on other
+ // crates also pulled in from crates.io. We want to ideally be
+ // able to compile everything without requiring upstream
+ // modifications, so in the case that this looks like a
+ // rustc_private crate (e.g. a compiler crate) and we also have
+ // the `-Z force-unstable-if-unmarked` flag present (we're
+ // compiling a compiler crate), then let this missing feature
+ // annotation slide.
+ if *feature == "rustc_private" && issue == 27812 {
+ if self.sess.opts.debugging_opts.force_unstable_if_unmarked {
+ return
+ }
+ }
+
+ let msg = match *reason {
+ Some(ref r) => format!("use of unstable library feature '{}': {}",
+ feature.as_str(), &r),
+ None => format!("use of unstable library feature '{}'", &feature)
+ };
+ emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span,
+ GateIssue::Library(Some(issue)), &msg);
}
Some(_) => {
// Stable APIs are always ok to call and deprecated APIs are
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
- if tcx.stability.borrow().staged_api[&LOCAL_CRATE] && tcx.sess.features.borrow().staged_api {
+ if tcx.stability.borrow().staged_api[&LOCAL_CRATE] {
let krate = tcx.hir.krate();
let mut missing = MissingStabilityAnnotations {
tcx: tcx,
StorageDead(Lvalue<'tcx>),
InlineAsm {
- asm: InlineAsm,
+ asm: Box<InlineAsm>,
outputs: Vec<Lvalue<'tcx>>,
inputs: Vec<Operand<'tcx>>
},
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum Operand<'tcx> {
Consume(Lvalue<'tcx>),
- Constant(Constant<'tcx>),
+ Constant(Box<Constant<'tcx>>),
}
impl<'tcx> Debug for Operand<'tcx> {
substs: &'tcx Substs<'tcx>,
span: Span,
) -> Self {
- Operand::Constant(Constant {
+ Operand::Constant(box Constant {
span: span,
ty: tcx.type_of(def_id).subst(tcx, substs),
literal: Literal::Value { value: ConstVal::Function(def_id, substs) },
/// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case
/// that `Foo` has a destructor. These rvalues can be optimized
/// away after type-checking and before lowering.
- Aggregate(AggregateKind<'tcx>, Vec<Operand<'tcx>>),
+ Aggregate(Box<AggregateKind<'tcx>>, Vec<Operand<'tcx>>),
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
tuple_fmt.finish()
}
- match *kind {
+ match **kind {
AggregateKind::Array(_) => write!(fmt, "{:?}", lvs),
AggregateKind::Tuple => {
Discriminant(ref lval) => Discriminant(lval.fold_with(folder)),
Box(ty) => Box(ty.fold_with(folder)),
Aggregate(ref kind, ref fields) => {
- let kind = match *kind {
+ let kind = box match **kind {
AggregateKind::Array(ty) => AggregateKind::Array(ty.fold_with(folder)),
AggregateKind::Tuple => AggregateKind::Tuple,
AggregateKind::Adt(def, v, substs, n) =>
Discriminant(ref lval) => lval.visit_with(visitor),
Box(ty) => ty.visit_with(visitor),
Aggregate(ref kind, ref fields) => {
- (match *kind {
+ (match **kind {
AggregateKind::Array(ty) => ty.visit_with(visitor),
AggregateKind::Tuple => false,
AggregateKind::Adt(_, _, substs, _) => substs.visit_with(visitor),
tcx.mk_box(t)
}
Rvalue::Aggregate(ref ak, ref ops) => {
- match *ak {
+ match **ak {
AggregateKind::Array(ty) => {
tcx.mk_array(ty, ops.len())
}
Rvalue::Aggregate(ref $($mutability)* kind,
ref $($mutability)* operands) => {
+ let kind = &$($mutability)* **kind;
match *kind {
AggregateKind::Array(ref $($mutability)* ty) => {
self.visit_ty(ty);
}
);
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PrintRequest {
FileNames,
Sysroot,
"attempt to recover from parse errors (experimental)"),
incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
"enable incremental compilation (experimental)"),
- incremental_cc: bool = (false, parse_bool, [UNTRACKED],
+ incremental_cc: bool = (true, parse_bool, [UNTRACKED],
"enable cross-crate incremental compilation (even more experimental)"),
incremental_info: bool = (false, parse_bool, [UNTRACKED],
"print high-level information about incremental reuse (or the lack thereof)"),
"add a source pattern to the file path remapping config"),
remap_path_prefix_to: Vec<String> = (vec![], parse_string_push, [TRACKED],
"add a mapping target to the file path remapping config"),
+ force_unstable_if_unmarked: bool = (false, parse_bool, [TRACKED],
+ "force all crates to be `rustc_private` unstable"),
}
pub fn default_lib_output() -> CrateType {
use rustc_back::{LinkerFlavor, PanicStrategy};
use rustc_back::target::Target;
use rustc_data_structures::flock;
-use llvm;
use std::path::{Path, PathBuf};
use std::cell::{self, Cell, RefCell};
use std::collections::HashMap;
use std::env;
-use std::ffi::CString;
use std::io::Write;
use std::rc::Rc;
use std::fmt;
use std::time::Duration;
use std::sync::Arc;
-use libc::c_int;
mod code_stats;
pub mod config;
out_of_fuel: Cell::new(false),
};
- init_llvm(&sess);
-
sess
}
}
}
-fn init_llvm(sess: &Session) {
- unsafe {
- // Before we touch LLVM, make sure that multithreading is enabled.
- use std::sync::Once;
- static INIT: Once = Once::new();
- static mut POISONED: bool = false;
- INIT.call_once(|| {
- if llvm::LLVMStartMultithreaded() != 1 {
- // use an extra bool to make sure that all future usage of LLVM
- // cannot proceed despite the Once not running more than once.
- POISONED = true;
- }
-
- configure_llvm(sess);
- });
-
- if POISONED {
- bug!("couldn't enable multi-threaded LLVM");
- }
- }
-}
-
-unsafe fn configure_llvm(sess: &Session) {
- let mut llvm_c_strs = Vec::new();
- let mut llvm_args = Vec::new();
-
- {
- let mut add = |arg: &str| {
- let s = CString::new(arg).unwrap();
- llvm_args.push(s.as_ptr());
- llvm_c_strs.push(s);
- };
- add("rustc"); // fake program name
- if sess.time_llvm_passes() { add("-time-passes"); }
- if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
-
- for arg in &sess.opts.cg.llvm_args {
- add(&(*arg));
- }
- }
-
- llvm::LLVMInitializePasses();
-
- llvm::initialize_available_targets();
-
- llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,
- llvm_args.as_ptr());
-}
-
pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
let emitter: Box<Emitter> = match output {
config::ErrorOutputType::HumanReadable(color_config) => {
debug!("get_vtable_methods({:?})", trait_ref);
supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
- tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id());
-
let trait_methods = tcx.associated_items(trait_ref.def_id())
.filter(|item| item.kind == ty::AssociatedKind::Method);
ty::Binder(self.predicate.skip_binder().self_ty())
}
}
+
+pub fn provide(providers: &mut ty::maps::Providers) {
+ *providers = ty::maps::Providers {
+ is_object_safe: object_safety::is_object_safe_provider,
+ specialization_graph_of: specialize::specialization_graph_provider,
+ ..*providers
+ };
+}
+
+pub fn provide_extern(providers: &mut ty::maps::Providers) {
+ *providers = ty::maps::Providers {
+ is_object_safe: object_safety::is_object_safe_provider,
+ specialization_graph_of: specialize::specialization_graph_provider,
+ ..*providers
+ };
+}
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- pub fn is_object_safe(self, trait_def_id: DefId) -> bool {
- // Because we query yes/no results frequently, we keep a cache:
- let def = self.trait_def(trait_def_id);
-
- let result = def.object_safety().unwrap_or_else(|| {
- let result = self.object_safety_violations(trait_def_id).is_empty();
-
- // Record just a yes/no result in the cache; this is what is
- // queried most frequently. Note that this may overwrite a
- // previous result, but always with the same thing.
- def.set_object_safety(result);
-
- result
- });
-
- debug!("is_object_safe({:?}) = {}", trait_def_id, result);
-
- result
- }
/// Returns the object safety violations that affect
/// astconv - currently, Self in supertraits. This is needed
};
// Search for a predicate like `Self : Sized` amongst the trait bounds.
- let free_substs = self.construct_free_substs(def_id, None);
let predicates = self.predicates_of(def_id);
- let predicates = predicates.instantiate(self, free_substs).predicates;
+ let predicates = predicates.instantiate_identity(self).predicates;
elaborate_predicates(self, predicates)
.any(|predicate| {
match predicate {
error
}
}
+
+pub(super) fn is_object_safe_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ trait_def_id: DefId)
+ -> bool {
+ tcx.object_safety_violations(trait_def_id).is_empty()
+}
// In either case, we handle this by not adding a
// candidate for an impl if it contains a `default`
// type.
- let opt_node_item = assoc_ty_def(selcx,
- impl_data.impl_def_id,
- obligation.predicate.item_name);
- let new_candidate = if let Some(node_item) = opt_node_item {
- let is_default = if node_item.node.is_from_trait() {
- // If true, the impl inherited a `type Foo = Bar`
- // given in the trait, which is implicitly default.
- // Otherwise, the impl did not specify `type` and
- // neither did the trait:
- //
- // ```rust
- // trait Foo { type T; }
- // impl Foo for Bar { }
- // ```
- //
- // This is an error, but it will be
- // reported in `check_impl_items_against_trait`.
- // We accept it here but will flag it as
- // an error when we confirm the candidate
- // (which will ultimately lead to `normalize_to_error`
- // being invoked).
- node_item.item.defaultness.has_value()
- } else {
- node_item.item.defaultness.is_default() ||
- selcx.tcx().impl_is_default(node_item.node.def_id())
- };
-
- // Only reveal a specializable default if we're past type-checking
- // and the obligations is monomorphic, otherwise passes such as
- // transmute checking and polymorphic MIR optimizations could
- // get a result which isn't correct for all monomorphizations.
- if !is_default {
+ let node_item = assoc_ty_def(selcx,
+ impl_data.impl_def_id,
+ obligation.predicate.item_name);
+
+ let is_default = if node_item.node.is_from_trait() {
+ // If true, the impl inherited a `type Foo = Bar`
+ // given in the trait, which is implicitly default.
+ // Otherwise, the impl did not specify `type` and
+ // neither did the trait:
+ //
+ // ```rust
+ // trait Foo { type T; }
+ // impl Foo for Bar { }
+ // ```
+ //
+ // This is an error, but it will be
+ // reported in `check_impl_items_against_trait`.
+ // We accept it here but will flag it as
+ // an error when we confirm the candidate
+ // (which will ultimately lead to `normalize_to_error`
+ // being invoked).
+ node_item.item.defaultness.has_value()
+ } else {
+ node_item.item.defaultness.is_default() ||
+ selcx.tcx().impl_is_default(node_item.node.def_id())
+ };
+
+ // Only reveal a specializable default if we're past type-checking
+ // and the obligations is monomorphic, otherwise passes such as
+ // transmute checking and polymorphic MIR optimizations could
+ // get a result which isn't correct for all monomorphizations.
+ let new_candidate = if !is_default {
+ Some(ProjectionTyCandidate::Select)
+ } else if selcx.projection_mode() == Reveal::All {
+ assert!(!poly_trait_ref.needs_infer());
+ if !poly_trait_ref.needs_subst() {
Some(ProjectionTyCandidate::Select)
- } else if selcx.projection_mode() == Reveal::All {
- assert!(!poly_trait_ref.needs_infer());
- if !poly_trait_ref.needs_subst() {
- Some(ProjectionTyCandidate::Select)
- } else {
- None
- }
} else {
None
}
} else {
- // This is saying that neither the trait nor
- // the impl contain a definition for this
- // associated type. Normally this situation
- // could only arise through a compiler bug --
- // if the user wrote a bad item name, it
- // should have failed in astconv. **However**,
- // at coherence-checking time, we only look at
- // the topmost impl (we don't even consider
- // the trait itself) for the definition -- and
- // so in that case it may be that the trait
- // *DOES* have a declaration, but we don't see
- // it, and we end up in this branch.
- //
- // This is kind of tricky to handle actually.
- // For now, we just unconditionally ICE,
- // because otherwise, examples like the
- // following will succeed:
- //
- // ```
- // trait Assoc {
- // type Output;
- // }
- //
- // impl<T> Assoc for T {
- // default type Output = bool;
- // }
- //
- // impl Assoc for u8 {}
- // impl Assoc for u16 {}
- //
- // trait Foo {}
- // impl Foo for <u8 as Assoc>::Output {}
- // impl Foo for <u16 as Assoc>::Output {}
- // return None;
- // }
- // ```
- //
- // The essential problem here is that the
- // projection fails, leaving two unnormalized
- // types, which appear not to unify -- so the
- // overlap check succeeds, when it should
- // fail.
- span_bug!(obligation.cause.span,
- "Tried to project an inherited associated type during \
- coherence checking, which is currently not supported.");
+ None
};
+
candidate_set.vec.extend(new_candidate);
}
super::VtableParam(..) => {
let VtableImplData { substs, nested, impl_def_id } = impl_vtable;
let tcx = selcx.tcx();
- let trait_ref = obligation.predicate.trait_ref;
let assoc_ty = assoc_ty_def(selcx, impl_def_id, obligation.predicate.item_name);
- match assoc_ty {
- Some(node_item) => {
- let ty = if !node_item.item.defaultness.has_value() {
- // This means that the impl is missing a definition for the
- // associated type. This error will be reported by the type
- // checker method `check_impl_items_against_trait`, so here we
- // just return TyError.
- debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
- node_item.item.name,
- obligation.predicate.trait_ref);
- tcx.types.err
- } else {
- tcx.type_of(node_item.item.def_id)
- };
- let substs = translate_substs(selcx.infcx(), impl_def_id, substs, node_item.node);
- Progress {
- ty: ty.subst(tcx, substs),
- obligations: nested,
- cacheable: true
- }
- }
- None => {
- span_bug!(obligation.cause.span,
- "No associated type for {:?}",
- trait_ref);
- }
+ let ty = if !assoc_ty.item.defaultness.has_value() {
+ // This means that the impl is missing a definition for the
+ // associated type. This error will be reported by the type
+ // checker method `check_impl_items_against_trait`, so here we
+ // just return TyError.
+ debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
+ assoc_ty.item.name,
+ obligation.predicate.trait_ref);
+ tcx.types.err
+ } else {
+ tcx.type_of(assoc_ty.item.def_id)
+ };
+ let substs = translate_substs(selcx.infcx(), impl_def_id, substs, assoc_ty.node);
+ Progress {
+ ty: ty.subst(tcx, substs),
+ obligations: nested,
+ cacheable: true
}
}
selcx: &SelectionContext<'cx, 'gcx, 'tcx>,
impl_def_id: DefId,
assoc_ty_name: ast::Name)
- -> Option<specialization_graph::NodeItem<ty::AssociatedItem>>
+ -> specialization_graph::NodeItem<ty::AssociatedItem>
{
- let trait_def_id = selcx.tcx().impl_trait_ref(impl_def_id).unwrap().def_id;
- let trait_def = selcx.tcx().trait_def(trait_def_id);
-
- if !trait_def.is_complete(selcx.tcx()) {
- let impl_node = specialization_graph::Node::Impl(impl_def_id);
- for item in impl_node.items(selcx.tcx()) {
- if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name {
- return Some(specialization_graph::NodeItem {
- node: specialization_graph::Node::Impl(impl_def_id),
- item: item,
- });
- }
+ let tcx = selcx.tcx();
+ let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id;
+ let trait_def = tcx.trait_def(trait_def_id);
+
+ // This function may be called while we are still building the
+ // specialization graph that is queried below (via TraidDef::ancestors()),
+ // so, in order to avoid unnecessary infinite recursion, we manually look
+ // for the associated item at the given impl.
+ // If there is no such item in that impl, this function will fail with a
+ // cycle error if the specialization graph is currently being built.
+ let impl_node = specialization_graph::Node::Impl(impl_def_id);
+ for item in impl_node.items(tcx) {
+ if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name {
+ return specialization_graph::NodeItem {
+ node: specialization_graph::Node::Impl(impl_def_id),
+ item: item,
+ };
}
- None
+ }
+
+ if let Some(assoc_item) = trait_def
+ .ancestors(tcx, impl_def_id)
+ .defs(tcx, assoc_ty_name, ty::AssociatedKind::Type)
+ .next() {
+ assoc_item
} else {
- trait_def
- .ancestors(impl_def_id)
- .defs(selcx.tcx(), assoc_ty_name, ty::AssociatedKind::Type)
- .next()
+ // This is saying that neither the trait nor
+ // the impl contain a definition for this
+ // associated type. Normally this situation
+ // could only arise through a compiler bug --
+ // if the user wrote a bad item name, it
+ // should have failed in astconv.
+ bug!("No associated type `{}` for {}",
+ assoc_ty_name,
+ tcx.item_path_str(impl_def_id))
}
}
use traits::{self, Reveal, ObligationCause};
use ty::{self, TyCtxt, TypeFoldable};
use syntax_pos::DUMMY_SP;
+use std::rc::Rc;
pub mod specialization_graph;
let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();
let trait_def = tcx.trait_def(trait_def_id);
- let ancestors = trait_def.ancestors(impl_data.impl_def_id);
+ let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
match ancestors.defs(tcx, item.name, item.kind).next() {
Some(node_item) => {
let substs = tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
}
// create a parameter environment corresponding to a (skolemized) instantiation of impl1
- let penv = tcx.construct_parameter_environment(DUMMY_SP,
- impl1_def_id,
- None);
- let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id)
- .unwrap()
- .subst(tcx, &penv.free_substs);
+ let penv = tcx.parameter_environment(impl1_def_id);
+ let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id).unwrap();
// Create a infcx, taking the predicates of impl1 as assumptions:
let result = tcx.infer_ctxt(penv, Reveal::UserFacing).enter(|infcx| {
self.map.insert((a, b), result);
}
}
+
+// Query provider for `specialization_graph_of`.
+pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ trait_id: DefId)
+ -> Rc<specialization_graph::Graph> {
+ let mut sg = specialization_graph::Graph::new();
+
+ let mut trait_impls: Vec<DefId> = tcx.trait_impls_of(trait_id).iter().collect();
+
+ // The coherence checking implementation seems to rely on impls being
+ // iterated over (roughly) in definition order, so we are sorting by
+ // negated CrateNum (so remote definitions are visited first) and then
+ // by a flattend version of the DefIndex.
+ trait_impls.sort_unstable_by_key(|def_id| {
+ (-(def_id.krate.as_u32() as i64),
+ def_id.index.address_space().index(),
+ def_id.index.as_array_index())
+ });
+
+ for impl_def_id in trait_impls {
+ if impl_def_id.is_local() {
+ // This is where impl overlap checking happens:
+ let insert_result = sg.insert(tcx, impl_def_id);
+ // Report error if there was one.
+ if let Err(overlap) = insert_result {
+ let mut err = struct_span_err!(tcx.sess,
+ tcx.span_of_impl(impl_def_id).unwrap(),
+ E0119,
+ "conflicting implementations of trait `{}`{}:",
+ overlap.trait_desc,
+ overlap.self_desc.clone().map_or(String::new(),
+ |ty| {
+ format!(" for type `{}`", ty)
+ }));
+
+ match tcx.span_of_impl(overlap.with_impl) {
+ Ok(span) => {
+ err.span_label(span, format!("first implementation here"));
+ err.span_label(tcx.span_of_impl(impl_def_id).unwrap(),
+ format!("conflicting implementation{}",
+ overlap.self_desc
+ .map_or(String::new(),
+ |ty| format!(" for `{}`", ty))));
+ }
+ Err(cname) => {
+ err.note(&format!("conflicting implementation in crate `{}`", cname));
+ }
+ }
+
+ err.emit();
+ }
+ } else {
+ let parent = tcx.impl_parent(impl_def_id).unwrap_or(trait_id);
+ sg.record_impl_from_cstore(tcx, parent, impl_def_id)
+ }
+ }
+
+ Rc::new(sg)
+}
use hir::def_id::DefId;
use traits::{self, Reveal};
-use ty::{self, TyCtxt, TraitDef, TypeFoldable};
+use ty::{self, TyCtxt, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType};
+use std::rc::Rc;
use syntax::ast::Name;
use util::nodemap::{DefIdMap, FxHashMap};
}
}
-pub struct Ancestors<'a> {
- trait_def: &'a TraitDef,
+pub struct Ancestors {
+ trait_def_id: DefId,
+ specialization_graph: Rc<Graph>,
current_source: Option<Node>,
}
-impl<'a> Iterator for Ancestors<'a> {
+impl Iterator for Ancestors {
type Item = Node;
fn next(&mut self) -> Option<Node> {
let cur = self.current_source.take();
if let Some(Node::Impl(cur_impl)) = cur {
- let parent = self.trait_def.specialization_graph.borrow().parent(cur_impl);
- if parent == self.trait_def.def_id {
+ let parent = self.specialization_graph.parent(cur_impl);
+ if parent == self.trait_def_id {
self.current_source = Some(Node::Trait(parent));
} else {
self.current_source = Some(Node::Impl(parent));
}
}
-impl<'a, 'gcx, 'tcx> Ancestors<'a> {
+impl<'a, 'gcx, 'tcx> Ancestors {
/// Search the items from the given ancestors, returning each definition
/// with the given name and the given kind.
#[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait.
/// Walk up the specialization ancestors of a given impl, starting with that
/// impl itself.
-pub fn ancestors<'a>(trait_def: &'a TraitDef, start_from_impl: DefId) -> Ancestors<'a> {
+pub fn ancestors(tcx: TyCtxt,
+ trait_def_id: DefId,
+ start_from_impl: DefId)
+ -> Ancestors {
+ let specialization_graph = tcx.specialization_graph_of(trait_def_id);
Ancestors {
- trait_def: trait_def,
+ trait_def_id,
+ specialization_graph,
current_source: Some(Node::Impl(start_from_impl)),
}
}
// I want to be conservative. --nmatsakis
let ty_max = data.skip_binder().0;
let r_min = data.skip_binder().1;
- if r_min.is_bound() {
+ if r_min.is_late_bound() {
return;
}
tcx.outlives_components(ty_max)
.into_iter()
.filter_map(|component| match component {
- Component::Region(r) => if r.is_bound() {
+ Component::Region(r) => if r.is_late_bound() {
None
} else {
Some(ty::Predicate::RegionOutlives(
use hir::map::DisambiguatedDefPathData;
use middle::free_region::FreeRegionMap;
use middle::lang_items;
-use middle::region::{CodeExtent, CodeExtentData};
use middle::resolve_lifetime;
use middle::stability;
use mir::Mir;
type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
- region: RefCell<FxHashSet<Interned<'tcx, RegionKind<'tcx>>>>,
+ region: RefCell<FxHashSet<Interned<'tcx, RegionKind>>>,
existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
predicates: RefCell<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
}
layout_interner: RefCell<FxHashSet<&'tcx Layout>>,
- code_extent_interner: RefCell<FxHashSet<CodeExtent<'tcx>>>,
-
/// A vector of every trait accessible in the whole crate
/// (i.e. including those from subcrates). This is used only for
/// error reporting, and so is lazily initialised and generally
interned
}
- pub fn node_extent(self, n: ast::NodeId) -> CodeExtent<'gcx> {
- self.intern_code_extent(CodeExtentData::Misc(n))
- }
-
- // Returns the code extent for an item - the destruction scope.
- pub fn item_extent(self, n: ast::NodeId) -> CodeExtent<'gcx> {
- self.intern_code_extent(CodeExtentData::DestructionScope(n))
- }
-
- pub fn call_site_extent(self, fn_id: ast::NodeId, body_id: ast::NodeId) -> CodeExtent<'gcx> {
- assert!(fn_id != body_id);
- self.intern_code_extent(CodeExtentData::CallSiteScope { fn_id: fn_id, body_id: body_id })
- }
-
- pub fn intern_code_extent(self, data: CodeExtentData) -> CodeExtent<'gcx> {
- if let Some(st) = self.code_extent_interner.borrow().get(&data) {
- return st;
- }
-
- let interned = self.global_interners.arena.alloc(data);
- if let Some(prev) = self.code_extent_interner.borrow_mut().replace(interned) {
- bug!("Tried to overwrite interned code-extent: {:?}", prev)
- }
- interned
- }
-
pub fn intern_layout(self, layout: Layout) -> &'gcx Layout {
if let Some(layout) = self.layout_interner.borrow().get(&layout) {
return layout;
data_layout: data_layout,
layout_cache: RefCell::new(FxHashMap()),
layout_interner: RefCell::new(FxHashSet()),
- code_extent_interner: RefCell::new(FxHashSet()),
layout_depth: Cell::new(0),
derive_macros: RefCell::new(NodeMap()),
stability_interner: RefCell::new(FxHashSet()),
}
}
-impl<'a, 'tcx> Lift<'tcx> for ty::FreeRegion<'a> {
- type Lifted = ty::FreeRegion<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
- let scope = self.scope.map(|code_extent| tcx.intern_code_extent(*code_extent));
- let bound_region = self.bound_region;
- Some(ty::FreeRegion { scope, bound_region })
- }
-}
-
impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
type Lifted = Region<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
}
}
-impl<'tcx> Borrow<RegionKind<'tcx>> for Interned<'tcx, RegionKind<'tcx>> {
- fn borrow<'a>(&'a self) -> &'a RegionKind<'tcx> {
+impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
+ fn borrow<'a>(&'a self) -> &'a RegionKind {
&self.0
}
}
&ty::ReVar(_) | &ty::ReSkolemized(..) => true,
_ => false
}
- }) -> RegionKind<'tcx>
+ }) -> RegionKind
);
macro_rules! slice_interners {
//! These methods return true to indicate that the visitor has found what it is looking for
//! and does not need to visit anything else.
-use middle::region;
use ty::subst::Substs;
use ty::adjustment;
use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
(result, replacer.map)
}
-
- /// Replace any late-bound regions bound in `value` with free variants attached to scope-id
- /// `scope_id`.
- pub fn liberate_late_bound_regions<T>(self,
- all_outlive_scope: Option<region::CodeExtent<'tcx>>,
- value: &Binder<T>)
- -> T
- where T : TypeFoldable<'tcx>
- {
- self.replace_late_bound_regions(value, |br| {
- self.mk_region(ty::ReFree(ty::FreeRegion {
- scope: all_outlive_scope,
- bound_region: br
- }))
- }).0
- }
-
/// Flattens two binding levels into one. So `for<'a> for<'b> Foo`
/// becomes `for<'a,'b> Foo`.
pub fn flatten_late_bound_regions<T>(self, bound2_value: &Binder<Binder<T>>)
// regions. See comment on `shift_regions_through_binders` method in
// `subst.rs` for more details.
-pub fn shift_region<'tcx>(region: ty::RegionKind<'tcx>, amount: u32) -> ty::RegionKind<'tcx> {
+pub fn shift_region(region: ty::RegionKind, amount: u32) -> ty::RegionKind {
match region {
ty::ReLateBound(debruijn, br) => {
ty::ReLateBound(debruijn.shifted(amount), br)
let kind = if def.is_enum() || def.variants[0].fields.len() == 0{
StructKind::AlwaysSizedUnivariant
} else {
- let param_env = tcx.construct_parameter_environment(DUMMY_SP,
- def.did, None);
+ let param_env = tcx.parameter_environment(def.did);
let fields = &def.variants[0].fields;
let last_field = &fields[fields.len()-1];
- let always_sized = last_field.ty(tcx, param_env.free_substs)
+ let always_sized = tcx.type_of(last_field.did)
.is_sized(tcx, ¶m_env, DUMMY_SP);
if !always_sized { StructKind::MaybeUnsizedUnivariant }
else { StructKind::AlwaysSizedUnivariant }
use mir;
use mir::transform::{MirSuite, MirPassIndex};
use session::CompileResult;
+use traits::specialization_graph;
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
use ty::item_path;
use ty::steal::Steal;
use ty::subst::Substs;
+use ty::fast_reject::SimplifiedType;
use util::nodemap::{DefIdSet, NodeSet};
use rustc_data_structures::indexed_vec::IndexVec;
use std::rc::Rc;
use syntax_pos::{Span, DUMMY_SP};
use syntax::attr;
+use syntax::ast;
use syntax::symbol::Symbol;
pub trait Key: Clone + Hash + Eq + Debug {
}
}
+impl Key for (DefId, SimplifiedType) {
+ fn map_crate(&self) -> CrateNum {
+ self.0.krate
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.0.default_span(tcx)
+ }
+}
+
impl<'tcx> Key for (DefId, &'tcx Substs<'tcx>) {
fn map_crate(&self) -> CrateNum {
self.0.krate
}
}
+impl<'tcx> QueryDescription for queries::item_attrs<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("item_attrs")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_exported_symbol<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("is_exported_symbol")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::fn_arg_names<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("fn_arg_names")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::impl_parent<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("impl_parent")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::trait_of_item<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("trait_of_item")
+ }
+}
+
impl<'tcx> QueryDescription for queries::item_body_nested_bodies<'tcx> {
fn describe(tcx: TyCtxt, def_id: DefId) -> String {
format!("nested item bodies of `{}`", tcx.item_path_str(def_id))
}
}
+impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("trait impls of `{}`", tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::relevant_trait_impls_for<'tcx> {
+ fn describe(tcx: TyCtxt, (def_id, ty): (DefId, SimplifiedType)) -> String {
+ format!("relevant impls for: `({}, {:?})`", tcx.item_path_str(def_id), ty)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("determine object safety of trait `{}`", tcx.item_path_str(def_id))
+ }
+}
+
macro_rules! define_maps {
(<$tcx:tt>
$($(#[$attr:meta])*
output: $output:tt) => {
define_map_struct! {
tcx: $tcx,
- ready: ([pub] $attrs $name),
+ ready: ([] $attrs $name),
input: ($($input)*),
output: $output
}
/// Per-function `RegionMaps`. The `DefId` should be the owner-def-id for the fn body;
/// in the case of closures or "inline" expressions, this will be redirected to the enclosing
/// fn item.
- [] region_maps: RegionMaps(DefId) -> Rc<RegionMaps<'tcx>>,
+ [] region_maps: RegionMaps(DefId) -> Rc<RegionMaps>,
[] mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx mir::Mir<'tcx>,
[] def_span: DefSpan(DefId) -> Span,
[] stability: Stability(DefId) -> Option<attr::Stability>,
[] deprecation: Deprecation(DefId) -> Option<attr::Deprecation>,
- [] item_body_nested_bodies: metadata_dep_node(DefId) -> Rc<BTreeMap<hir::BodyId, hir::Body>>,
- [] const_is_rvalue_promotable_to_static: metadata_dep_node(DefId) -> bool,
- [] is_mir_available: metadata_dep_node(DefId) -> bool,
+ [] item_attrs: ItemAttrs(DefId) -> Rc<[ast::Attribute]>,
+ [] fn_arg_names: FnArgNames(DefId) -> Vec<ast::Name>,
+ [] impl_parent: ImplParent(DefId) -> Option<DefId>,
+ [] trait_of_item: TraitOfItem(DefId) -> Option<DefId>,
+ [] is_exported_symbol: IsExportedSymbol(DefId) -> bool,
+ [] item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> Rc<BTreeMap<hir::BodyId, hir::Body>>,
+ [] const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
+ [] is_mir_available: IsMirAvailable(DefId) -> bool,
+
+ [] trait_impls_of: TraitImpls(DefId) -> ty::trait_def::TraitImpls,
+    // Note that TraitDef::for_each_relevant_impl() will do type simplification for you.
+ [] relevant_trait_impls_for: relevant_trait_impls_for((DefId, SimplifiedType))
+ -> ty::trait_def::TraitImpls,
+ [] specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
+ [] is_object_safe: ObjectSafety(DefId) -> bool,
}
fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
DepNode::Reachability
}
-fn metadata_dep_node(def_id: DefId) -> DepNode<DefId> {
- DepNode::MetaData(def_id)
-}
-
fn mir_shim_dep_node(instance: ty::InstanceDef) -> DepNode<DefId> {
instance.dep_node()
}
fn crate_variances(_: CrateNum) -> DepNode<DefId> {
DepNode::CrateVariances
}
+
+fn relevant_trait_impls_for((def_id, _): (DefId, SimplifiedType)) -> DepNode<DefId> {
+ DepNode::TraitImpls(def_id)
+}
use middle::const_val::ConstVal;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::privacy::AccessLevels;
-use middle::region::CodeExtent;
use middle::resolve_lifetime::ObjectLifetimeDefault;
+use middle::region::CodeExtent;
use mir::Mir;
use traits;
use ty;
pub use self::instance::{Instance, InstanceDef};
-pub use self::trait_def::{TraitDef, TraitFlags};
+pub use self::trait_def::TraitDef;
pub use self::maps::queries;
pub empty_variance: Rc<Vec<ty::Variance>>,
}
+impl Variance {
+ /// `a.xform(b)` combines the variance of a context with the
+ /// variance of a type with the following meaning. If we are in a
+ /// context with variance `a`, and we encounter a type argument in
+ /// a position with variance `b`, then `a.xform(b)` is the new
+ /// variance with which the argument appears.
+ ///
+ /// Example 1:
+ ///
+ /// *mut Vec<i32>
+ ///
+ /// Here, the "ambient" variance starts as covariant. `*mut T` is
+ /// invariant with respect to `T`, so the variance in which the
+ /// `Vec<i32>` appears is `Covariant.xform(Invariant)`, which
+ /// yields `Invariant`. Now, the type `Vec<T>` is covariant with
+ /// respect to its type argument `T`, and hence the variance of
+ /// the `i32` here is `Invariant.xform(Covariant)`, which results
+ /// (again) in `Invariant`.
+ ///
+ /// Example 2:
+ ///
+    ///       fn(*const Vec<i32>, *mut Vec<i32>)
+ ///
+ /// The ambient variance is covariant. A `fn` type is
+ /// contravariant with respect to its parameters, so the variance
+ /// within which both pointer types appear is
+ /// `Covariant.xform(Contravariant)`, or `Contravariant`. `*const
+ /// T` is covariant with respect to `T`, so the variance within
+ /// which the first `Vec<i32>` appears is
+ /// `Contravariant.xform(Covariant)` or `Contravariant`. The same
+ /// is true for its `i32` argument. In the `*mut T` case, the
+ /// variance of `Vec<i32>` is `Contravariant.xform(Invariant)`,
+ /// and hence the outermost type is `Invariant` with respect to
+ /// `Vec<i32>` (and its `i32` argument).
+ ///
+ /// Source: Figure 1 of "Taming the Wildcards:
+ /// Combining Definition- and Use-Site Variance" published in PLDI'11.
+ pub fn xform(self, v: ty::Variance) -> ty::Variance {
+ match (self, v) {
+ // Figure 1, column 1.
+ (ty::Covariant, ty::Covariant) => ty::Covariant,
+ (ty::Covariant, ty::Contravariant) => ty::Contravariant,
+ (ty::Covariant, ty::Invariant) => ty::Invariant,
+ (ty::Covariant, ty::Bivariant) => ty::Bivariant,
+
+ // Figure 1, column 2.
+ (ty::Contravariant, ty::Covariant) => ty::Contravariant,
+ (ty::Contravariant, ty::Contravariant) => ty::Covariant,
+ (ty::Contravariant, ty::Invariant) => ty::Invariant,
+ (ty::Contravariant, ty::Bivariant) => ty::Bivariant,
+
+ // Figure 1, column 3.
+ (ty::Invariant, _) => ty::Invariant,
+
+ // Figure 1, column 4.
+ (ty::Bivariant, _) => ty::Bivariant,
+ }
+ }
+}
+
#[derive(Clone, Copy, Debug, RustcDecodable, RustcEncodable)]
pub struct MethodCallee<'tcx> {
/// Impl method ID, for inherent methods, or trait method ID, otherwise.
impl RegionParameterDef {
pub fn to_early_bound_region_data(&self) -> ty::EarlyBoundRegion {
ty::EarlyBoundRegion {
+ def_id: self.def_id,
index: self.index,
name: self.name,
}
}
+ pub fn to_bound_region(&self) -> ty::BoundRegion {
+ self.to_early_bound_region_data().to_bound_region()
+ }
+}
+
+impl ty::EarlyBoundRegion {
pub fn to_bound_region(&self) -> ty::BoundRegion {
ty::BoundRegion::BrNamed(self.def_id, self.name)
}
instantiated.predicates.extend(self.predicates.iter().map(|p| p.subst(tcx, substs)))
}
+ pub fn instantiate_identity(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>)
+ -> InstantiatedPredicates<'tcx> {
+ let mut instantiated = InstantiatedPredicates::empty();
+ self.instantiate_identity_into(tcx, &mut instantiated);
+ instantiated
+ }
+
+ fn instantiate_identity_into(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ instantiated: &mut InstantiatedPredicates<'tcx>) {
+ if let Some(def_id) = self.parent {
+ tcx.predicates_of(def_id).instantiate_identity_into(tcx, instantiated);
+ }
+ instantiated.predicates.extend(&self.predicates)
+ }
+
pub fn instantiate_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
poly_trait_ref: &ty::PolyTraitRef<'tcx>)
-> InstantiatedPredicates<'tcx>
/// more distinctions clearer.
#[derive(Clone)]
pub struct ParameterEnvironment<'tcx> {
- /// See `construct_free_substs` for details.
- pub free_substs: &'tcx Substs<'tcx>,
-
- /// Each type parameter has an implicit region bound that
- /// indicates it must outlive at least the function body (the user
- /// may specify stronger requirements). This field indicates the
- /// region of the callee. If it is `None`, then the parameter
- /// environment is for an item or something where the "callee" is
- /// not clear.
- pub implicit_region_bound: Option<ty::Region<'tcx>>,
-
/// Obligations that the caller must satisfy. This is basically
/// the set of bounds on the in-scope type parameters, translated
/// into Obligations, and elaborated and normalized.
pub caller_bounds: &'tcx [ty::Predicate<'tcx>],
- /// Scope that is attached to free regions for this scope. This is
- /// usually the id of the fn body, but for more abstract scopes
- /// like structs we use None or the item extent.
- ///
- /// FIXME(#3696). It would be nice to refactor so that free
- /// regions don't have this implicit scope and instead introduce
- /// relationships in the environment.
- pub free_id_outlive: Option<CodeExtent<'tcx>>,
-
/// A cache for `moves_by_default`.
pub is_copy_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
-> ParameterEnvironment<'tcx>
{
ParameterEnvironment {
- free_substs: self.free_substs,
- implicit_region_bound: self.implicit_region_bound,
caller_bounds: caller_bounds,
- free_id_outlive: self.free_id_outlive,
is_copy_cache: RefCell::new(FxHashMap()),
is_sized_cache: RefCell::new(FxHashMap()),
is_freeze_cache: RefCell::new(FxHashMap()),
}
}
-
- /// Construct a parameter environment given an item, impl item, or trait item
- pub fn for_item(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: NodeId)
- -> ParameterEnvironment<'tcx> {
- match tcx.hir.find(id) {
- Some(hir_map::NodeImplItem(ref impl_item)) => {
- match impl_item.node {
- hir::ImplItemKind::Type(_) => {
- // associated types don't have their own entry (for some reason),
- // so for now just grab environment for the impl
- let impl_id = tcx.hir.get_parent(id);
- let impl_def_id = tcx.hir.local_def_id(impl_id);
- tcx.construct_parameter_environment(impl_item.span,
- impl_def_id,
- Some(tcx.item_extent(id)))
- }
- hir::ImplItemKind::Const(_, body) |
- hir::ImplItemKind::Method(_, body) => {
- tcx.construct_parameter_environment(
- impl_item.span,
- tcx.hir.local_def_id(id),
- Some(tcx.call_site_extent(id, body.node_id)))
- }
- }
- }
- Some(hir_map::NodeTraitItem(trait_item)) => {
- match trait_item.node {
- hir::TraitItemKind::Type(..) |
- hir::TraitItemKind::Const(_, None) |
- hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_))=> {
- tcx.construct_parameter_environment(trait_item.span,
- tcx.hir.local_def_id(id),
- Some(tcx.item_extent(id)))
- }
- hir::TraitItemKind::Const(_, Some(body)) |
- hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body)) => {
- tcx.construct_parameter_environment(
- trait_item.span,
- tcx.hir.local_def_id(id),
- Some(tcx.call_site_extent(id, body.node_id)))
- }
- }
- }
- Some(hir_map::NodeItem(item)) => {
- match item.node {
- hir::ItemConst(_, body) |
- hir::ItemStatic(.., body) |
- hir::ItemFn(.., body) => {
- tcx.construct_parameter_environment(
- item.span,
- tcx.hir.local_def_id(id),
- Some(tcx.call_site_extent(id, body.node_id)))
- }
- hir::ItemEnum(..) |
- hir::ItemStruct(..) |
- hir::ItemUnion(..) |
- hir::ItemTy(..) |
- hir::ItemImpl(..) |
- hir::ItemTrait(..) => {
- let def_id = tcx.hir.local_def_id(id);
- tcx.construct_parameter_environment(item.span,
- def_id,
- Some(tcx.item_extent(id)))
- }
- _ => {
- span_bug!(item.span,
- "ParameterEnvironment::for_item():
- can't create a parameter \
- environment for this kind of item")
- }
- }
- }
- Some(hir_map::NodeExpr(expr)) => {
- // This is a convenience to allow closures to work.
- if let hir::ExprClosure(.., body, _) = expr.node {
- let def_id = tcx.hir.local_def_id(id);
- let base_def_id = tcx.closure_base_def_id(def_id);
- tcx.construct_parameter_environment(
- expr.span,
- base_def_id,
- Some(tcx.call_site_extent(id, body.node_id)))
- } else {
- tcx.empty_parameter_environment()
- }
- }
- Some(hir_map::NodeForeignItem(item)) => {
- let def_id = tcx.hir.local_def_id(id);
- tcx.construct_parameter_environment(item.span,
- def_id,
- None)
- }
- Some(hir_map::NodeStructCtor(..)) |
- Some(hir_map::NodeVariant(..)) => {
- let def_id = tcx.hir.local_def_id(id);
- tcx.construct_parameter_environment(tcx.hir.span(id),
- def_id,
- None)
- }
- it => {
- bug!("ParameterEnvironment::from_item(): \
- `{}` = {:?} is unsupported",
- tcx.hir.node_to_string(id), it)
- }
- }
- }
}
#[derive(Copy, Clone, Debug)]
if let Some(id) = self.hir.as_local_node_id(did) {
Attributes::Borrowed(self.hir.attrs(id))
} else {
- Attributes::Owned(self.sess.cstore.item_attrs(did))
+ Attributes::Owned(self.item_attrs(did))
}
}
}
pub fn trait_has_default_impl(self, trait_def_id: DefId) -> bool {
- let def = self.trait_def(trait_def_id);
- def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL)
- }
-
- /// Populates the type context with all the implementations for the given
- /// trait if necessary.
- pub fn populate_implementations_for_trait_if_necessary(self, trait_id: DefId) {
- if trait_id.is_local() {
- return
- }
-
- // The type is not local, hence we are reading this out of
- // metadata and don't need to track edges.
- let _ignore = self.dep_graph.in_ignore();
-
- let def = self.trait_def(trait_id);
- if def.flags.get().intersects(TraitFlags::HAS_REMOTE_IMPLS) {
- return;
- }
-
- debug!("populate_implementations_for_trait_if_necessary: searching for {:?}", def);
-
- for impl_def_id in self.sess.cstore.implementations_of_trait(Some(trait_id)) {
- let trait_ref = self.impl_trait_ref(impl_def_id).unwrap();
-
- // Record the trait->implementation mapping.
- let parent = self.sess.cstore.impl_parent(impl_def_id).unwrap_or(trait_id);
- def.record_remote_impl(self, impl_def_id, trait_ref, parent);
- }
-
- def.flags.set(def.flags.get() | TraitFlags::HAS_REMOTE_IMPLS);
+ self.trait_def(trait_def_id).has_default_impl
}
/// Given the def_id of an impl, return the def_id of the trait it implements.
}
}
- /// If the given def ID describes an item belonging to a trait,
- /// return the ID of the trait that the trait item belongs to.
- /// Otherwise, return `None`.
- pub fn trait_of_item(self, def_id: DefId) -> Option<DefId> {
- if def_id.krate != LOCAL_CRATE {
- return self.sess.cstore.trait_of_item(def_id);
- }
- self.opt_associated_item(def_id)
- .and_then(|associated_item| {
- match associated_item.container {
- TraitContainer(def_id) => Some(def_id),
- ImplContainer(_) => None
- }
- })
- }
-
/// Construct a parameter environment suitable for static contexts or other contexts where there
/// are no free type/lifetime parameters in scope.
pub fn empty_parameter_environment(self) -> ParameterEnvironment<'tcx> {
ty::ParameterEnvironment {
- free_substs: self.intern_substs(&[]),
caller_bounds: Slice::empty(),
- implicit_region_bound: None,
- free_id_outlive: None,
is_copy_cache: RefCell::new(FxHashMap()),
is_sized_cache: RefCell::new(FxHashMap()),
is_freeze_cache: RefCell::new(FxHashMap()),
}
}
- /// Constructs and returns a substitution that can be applied to move from
- /// the "outer" view of a type or method to the "inner" view.
- /// In general, this means converting from bound parameters to
- /// free parameters. Since we currently represent bound/free type
- /// parameters in the same way, this only has an effect on regions.
- pub fn construct_free_substs(self,
- def_id: DefId,
- free_id_outlive: Option<CodeExtent<'gcx>>)
- -> &'gcx Substs<'gcx> {
-
- let substs = Substs::for_item(self.global_tcx(), def_id, |def, _| {
- // map bound 'a => free 'a
- self.global_tcx().mk_region(ReFree(FreeRegion {
- scope: free_id_outlive,
- bound_region: def.to_bound_region()
- }))
- }, |def, _| {
- // map T => T
- self.global_tcx().mk_param_from_def(def)
- });
-
- debug!("construct_parameter_environment: {:?}", substs);
- substs
- }
-
/// See `ParameterEnvironment` struct def'n for details.
- /// If you were using `free_id: NodeId`, you might try `self.region_maps().item_extent(free_id)`
- /// for the `free_id_outlive` parameter. (But note that this is not always quite right.)
- pub fn construct_parameter_environment(self,
- span: Span,
- def_id: DefId,
- free_id_outlive: Option<CodeExtent<'gcx>>)
- -> ParameterEnvironment<'gcx>
- {
- //
- // Construct the free substs.
- //
-
- let free_substs = self.construct_free_substs(def_id, free_id_outlive);
-
+ pub fn parameter_environment(self, def_id: DefId) -> ParameterEnvironment<'gcx> {
//
// Compute the bounds on Self and the type parameters.
//
let tcx = self.global_tcx();
- let generic_predicates = tcx.predicates_of(def_id);
- let bounds = generic_predicates.instantiate(tcx, free_substs);
- let bounds = tcx.liberate_late_bound_regions(free_id_outlive, &ty::Binder(bounds));
+ let bounds = tcx.predicates_of(def_id).instantiate_identity(tcx);
let predicates = bounds.predicates;
// Finally, we have to normalize the bounds in the environment, in
//
let unnormalized_env = ty::ParameterEnvironment {
- free_substs: free_substs,
- implicit_region_bound: free_id_outlive.map(|f| tcx.mk_region(ty::ReScope(f))),
caller_bounds: tcx.intern_predicates(&predicates),
- free_id_outlive: free_id_outlive,
is_copy_cache: RefCell::new(FxHashMap()),
is_sized_cache: RefCell::new(FxHashMap()),
is_freeze_cache: RefCell::new(FxHashMap()),
};
- let body_id = free_id_outlive.map(|f| f.node_id())
- .unwrap_or(DUMMY_NODE_ID);
- let cause = traits::ObligationCause::misc(span, body_id);
+ let body_id = self.hir.as_local_node_id(def_id).map_or(DUMMY_NODE_ID, |id| {
+ self.hir.maybe_body_owned_by(id).map_or(id, |body| body.node_id)
+ });
+ let cause = traits::ObligationCause::misc(tcx.def_span(def_id), body_id);
traits::normalize_param_env_or_error(tcx, def_id, unnormalized_env, cause)
}
pub fn node_scope_region(self, id: NodeId) -> Region<'tcx> {
- self.mk_region(ty::ReScope(self.node_extent(id)))
+ self.mk_region(ty::ReScope(CodeExtent::Misc(id)))
}
/// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err`
tcx.hir.span_if_local(def_id).unwrap()
}
+/// If the given def ID describes an item belonging to a trait,
+/// return the ID of the trait that the trait item belongs to.
+/// Otherwise, return `None`.
+fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> {
+ tcx.opt_associated_item(def_id)
+ .and_then(|associated_item| {
+ match associated_item.container {
+ TraitContainer(def_id) => Some(def_id),
+ ImplContainer(_) => None
+ }
+ })
+}
+
+
pub fn provide(providers: &mut ty::maps::Providers) {
*providers = ty::maps::Providers {
associated_item,
adt_sized_constraint,
adt_dtorck_constraint,
def_span,
+ trait_of_item,
+ trait_impls_of: trait_def::trait_impls_of_provider,
+ relevant_trait_impls_for: trait_def::relevant_trait_impls_provider,
..*providers
};
}
*providers = ty::maps::Providers {
adt_sized_constraint,
adt_dtorck_constraint,
+ trait_impls_of: trait_def::trait_impls_of_provider,
+ relevant_trait_impls_for: trait_def::relevant_trait_impls_provider,
..*providers
};
}
fn push_region_constraints<'tcx>(out: &mut Vec<Component<'tcx>>, regions: Vec<ty::Region<'tcx>>) {
for r in regions {
- if !r.is_bound() {
+ if !r.is_late_bound() {
out.push(Component::Region(r));
}
}
Relate::relate(self, a, b)
}
+ /// Relate the two substitutions for the given item. The default
+ /// is to look up the variance for the item and proceed
+ /// accordingly.
+ fn relate_item_substs(&mut self,
+ item_def_id: DefId,
+ a_subst: &'tcx Substs<'tcx>,
+ b_subst: &'tcx Substs<'tcx>)
+ -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+ {
+ debug!("relate_item_substs(item_def_id={:?}, a_subst={:?}, b_subst={:?})",
+ item_def_id,
+ a_subst,
+ b_subst);
+
+ let opt_variances = self.tcx().variances_of(item_def_id);
+ relate_substs(self, Some(&opt_variances), a_subst, b_subst)
+ }
+
/// Switch variance for the purpose of relating `a` and `b`.
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
variance: ty::Variance,
}
}
-// substitutions are not themselves relatable without more context,
-// but they is an important subroutine for things that ARE relatable,
-// like traits etc.
-fn relate_item_substs<'a, 'gcx, 'tcx, R>(relation: &mut R,
- item_def_id: DefId,
- a_subst: &'tcx Substs<'tcx>,
- b_subst: &'tcx Substs<'tcx>)
- -> RelateResult<'tcx, &'tcx Substs<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
-{
- debug!("substs: item_def_id={:?} a_subst={:?} b_subst={:?}",
- item_def_id,
- a_subst,
- b_subst);
-
- let opt_variances = relation.tcx().variances_of(item_def_id);
- relate_substs(relation, Some(&opt_variances), a_subst, b_subst)
-}
-
pub fn relate_substs<'a, 'gcx, 'tcx, R>(relation: &mut R,
variances: Option<&Vec<ty::Variance>>,
a_subst: &'tcx Substs<'tcx>,
if a.def_id != b.def_id {
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
} else {
- let substs = relate_item_substs(relation, a.def_id, a.substs, b.substs)?;
+ let substs = relation.relate_item_substs(a.def_id, a.substs, b.substs)?;
Ok(ty::TraitRef { def_id: a.def_id, substs: substs })
}
}
if a.def_id != b.def_id {
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
} else {
- let substs = relate_item_substs(relation, a.def_id, a.substs, b.substs)?;
+ let substs = relation.relate_item_substs(a.def_id, a.substs, b.substs)?;
Ok(ty::ExistentialTraitRef { def_id: a.def_id, substs: substs })
}
}
(&ty::TyAdt(a_def, a_substs), &ty::TyAdt(b_def, b_substs))
if a_def == b_def =>
{
- let substs = relate_item_substs(relation, a_def.did, a_substs, b_substs)?;
+ let substs = relation.relate_item_substs(a_def.did, a_substs, b_substs)?;
Ok(tcx.mk_adt(a_def, substs))
}
RustcEncodable, RustcDecodable, Copy)]
/// A "free" region `fr` can be interpreted as "some region
/// at least as big as the scope `fr.scope`".
-///
-/// If `fr.scope` is None, then this is in some context (e.g., an
-/// impl) where lifetimes are more abstract and the notion of the
-/// caller/callee stack frames are not applicable.
-pub struct FreeRegion<'tcx> {
- pub scope: Option<region::CodeExtent<'tcx>>,
+pub struct FreeRegion {
+ pub scope: DefId,
pub bound_region: BoundRegion,
}
/// Fresh bound identifiers created during GLB computations.
BrFresh(u32),
- // Anonymous region for the implicit env pointer parameter
- // to a closure
+ /// Anonymous region for the implicit env pointer parameter
+ /// to a closure
BrEnv,
}
pub region_name: ast::Name,
}
-// NB: If you change this, you'll probably want to change the corresponding
-// AST structure in libsyntax/ast.rs as well.
+/// NB: If you change this, you'll probably want to change the corresponding
+/// AST structure in libsyntax/ast.rs as well.
#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub enum TypeVariants<'tcx> {
/// The primitive boolean type. Written as `bool`.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum ExistentialPredicate<'tcx> {
- // e.g. Iterator
+ /// e.g. Iterator
Trait(ExistentialTraitRef<'tcx>),
- // e.g. Iterator::Item = T
+ /// e.g. Iterator::Item = T
Projection(ExistentialProjection<'tcx>),
- // e.g. Send
+ /// e.g. Send
AutoTrait(DefId),
}
/// [dbi]: http://en.wikipedia.org/wiki/De_Bruijn_index
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, Copy)]
pub struct DebruijnIndex {
- // We maintain the invariant that this is never 0. So 1 indicates
- // the innermost binder. To ensure this, create with `DebruijnIndex::new`.
+ /// We maintain the invariant that this is never 0. So 1 indicates
+ /// the innermost binder. To ensure this, create with `DebruijnIndex::new`.
pub depth: u32,
}
-pub type Region<'tcx> = &'tcx RegionKind<'tcx>;
+pub type Region<'tcx> = &'tcx RegionKind;
/// Representation of regions.
///
/// [1] http://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/
/// [2] http://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/
#[derive(Clone, PartialEq, Eq, Hash, Copy, RustcEncodable, RustcDecodable)]
-pub enum RegionKind<'tcx> {
+pub enum RegionKind {
// Region bound in a type or fn declaration which will be
// substituted 'early' -- that is, at the same time when type
// parameters are substituted.
/// When checking a function body, the types of all arguments and so forth
/// that refer to bound region parameters are modified to refer to free
/// region parameters.
- ReFree(FreeRegion<'tcx>),
+ ReFree(FreeRegion),
/// A concrete region naming some statically determined extent
/// (e.g. an expression or sequence of statements) within the
/// current function.
- ReScope(region::CodeExtent<'tcx>),
+ ReScope(region::CodeExtent),
/// Static data that has an "infinite" lifetime. Top in the region lattice.
ReStatic,
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub struct EarlyBoundRegion {
+ pub def_id: DefId,
pub index: u32,
pub name: Name,
}
}
}
-// Region utilities
-impl<'tcx> RegionKind<'tcx> {
- pub fn is_bound(&self) -> bool {
+/// Region utilities
+impl RegionKind {
+ pub fn is_late_bound(&self) -> bool {
match *self {
- ty::ReEarlyBound(..) => true,
ty::ReLateBound(..) => true,
_ => false,
}
}
/// Returns the depth of `self` from the (1-based) binding level `depth`
- pub fn from_depth(&self, depth: u32) -> RegionKind<'tcx> {
+ pub fn from_depth(&self, depth: u32) -> RegionKind {
match *self {
ty::ReLateBound(debruijn, r) => ty::ReLateBound(DebruijnIndex {
depth: debruijn.depth - (depth - 1)
}
}
-// Type utilities
+/// Type utilities
impl<'a, 'gcx, 'tcx> TyS<'tcx> {
pub fn as_opt_param_ty(&self) -> Option<ty::ParamTy> {
match self.sty {
}
}
- // Test whether this is a `()` which was produced by defaulting a
- // diverging type variable with feature(never_type) disabled.
+ /// Test whether this is a `()` which was produced by defaulting a
+ /// diverging type variable with feature(never_type) disabled.
pub fn is_defaulted_unit(&self) -> bool {
match self.sty {
TyTuple(_, true) => true,
}
}
+ /// panics if called on any type other than `Box<T>`
pub fn boxed_ty(&self) -> Ty<'tcx> {
match self.sty {
TyAdt(def, substs) if def.is_box() => substs.type_at(0),
}
}
- /*
- A scalar type is one that denotes an atomic datum, with no sub-components.
- (A TyRawPtr is scalar because it represents a non-managed pointer, so its
- contents are abstract to rustc.)
- */
+ /// A scalar type is one that denotes an atomic datum, with no sub-components.
+ /// (A TyRawPtr is scalar because it represents a non-managed pointer, so its
+ /// contents are abstract to rustc.)
pub fn is_scalar(&self) -> bool {
match self.sty {
TyBool | TyChar | TyInt(_) | TyFloat(_) | TyUint(_) |
}
}
- // Returns the type and mutability of *ty.
- //
- // The parameter `explicit` indicates if this is an *explicit* dereference.
- // Some types---notably unsafe ptrs---can only be dereferenced explicitly.
+ /// Returns the type and mutability of *ty.
+ ///
+ /// The parameter `explicit` indicates if this is an *explicit* dereference.
+ /// Some types---notably unsafe ptrs---can only be dereferenced explicitly.
pub fn builtin_deref(&self, explicit: bool, pref: ty::LvaluePreference)
-> Option<TypeAndMut<'tcx>>
{
}
}
- // Returns the type of ty[i]
+ /// Returns the type of ty[i]
pub fn builtin_index(&self) -> Option<Ty<'tcx>> {
match self.sty {
TyArray(ty, _) | TySlice(ty) => Some(ty),
}
}
- // Type accessors for substructures of types
+ /// Type accessors for substructures of types
pub fn fn_args(&self) -> ty::Binder<&'tcx [Ty<'tcx>]> {
self.fn_sig().inputs()
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use dep_graph::DepNode;
-use hir::def_id::{DefId, LOCAL_CRATE};
-use traits::{self, specialization_graph};
-use ty;
+use hir::def_id::DefId;
+use traits::specialization_graph;
use ty::fast_reject;
-use ty::{Ty, TyCtxt, TraitRef};
-use std::cell::{Cell, RefCell};
+use ty::fold::TypeFoldable;
+use ty::{Ty, TyCtxt};
+use std::rc::Rc;
use hir;
-use util::nodemap::FxHashMap;
-
-use syntax::ast;
-use syntax_pos::DUMMY_SP;
/// A trait's definition with type information.
pub struct TraitDef {
/// be usable with the sugar (or without it).
pub paren_sugar: bool,
- // Impls of a trait. To allow for quicker lookup, the impls are indexed by a
- // simplified version of their `Self` type: impls with a simplifiable `Self`
- // are stored in `nonblanket_impls` keyed by it, while all other impls are
- // stored in `blanket_impls`.
- //
- // A similar division is used within `specialization_graph`, but the ones
- // here are (1) stored as a flat list for the trait and (2) populated prior
- // to -- and used while -- determining specialization order.
- //
- // FIXME: solve the reentrancy issues and remove these lists in favor of the
- // ones in `specialization_graph`.
- //
- // These lists are tracked by `DepNode::TraitImpls`; we don't use
- // a DepTrackingMap but instead have the `TraitDef` insert the
- // required reads/writes.
-
- /// Impls of the trait.
- nonblanket_impls: RefCell<
- FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>
- >,
-
- /// Blanket impls associated with the trait.
- blanket_impls: RefCell<Vec<DefId>>,
-
- /// The specialization order for impls of this trait.
- pub specialization_graph: RefCell<traits::specialization_graph::Graph>,
-
- /// Various flags
- pub flags: Cell<TraitFlags>,
-
- /// The number of impls we've added from the local crate.
- /// When this number matches up the list in the HIR map,
- /// we're done, and the specialization graph is correct.
- local_impl_count: Cell<usize>,
+ pub has_default_impl: bool,
/// The ICH of this trait's DefPath, cached here so it doesn't have to be
/// recomputed all the time.
pub def_path_hash: u64,
}
-impl<'a, 'gcx, 'tcx> TraitDef {
- pub fn new(def_id: DefId,
- unsafety: hir::Unsafety,
- paren_sugar: bool,
- def_path_hash: u64)
- -> TraitDef {
- TraitDef {
- def_id: def_id,
- paren_sugar: paren_sugar,
- unsafety: unsafety,
- nonblanket_impls: RefCell::new(FxHashMap()),
- blanket_impls: RefCell::new(vec![]),
- flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS),
- local_impl_count: Cell::new(0),
- specialization_graph: RefCell::new(traits::specialization_graph::Graph::new()),
- def_path_hash: def_path_hash,
- }
- }
+// We don't store the list of impls in a single flat list because then each
+// cached list returned by `relevant_impls_for` would have to duplicate all of
+// the blanket impls. By keeping blanket and non-blanket impls separate, every
+// cached list can share the one list of blanket impls.
+#[derive(Clone)]
+pub struct TraitImpls {
+ blanket_impls: Rc<Vec<DefId>>,
+ non_blanket_impls: Rc<Vec<DefId>>,
+}
- // returns None if not yet calculated
- pub fn object_safety(&self) -> Option<bool> {
- if self.flags.get().intersects(TraitFlags::OBJECT_SAFETY_VALID) {
- Some(self.flags.get().intersects(TraitFlags::IS_OBJECT_SAFE))
- } else {
- None
+impl TraitImpls {
+ pub fn iter(&self) -> TraitImplsIter {
+ TraitImplsIter {
+ blanket_impls: self.blanket_impls.clone(),
+ non_blanket_impls: self.non_blanket_impls.clone(),
+ index: 0
}
}
+}
- pub fn set_object_safety(&self, is_safe: bool) {
- assert!(self.object_safety().map(|cs| cs == is_safe).unwrap_or(true));
- self.flags.set(
- self.flags.get() | if is_safe {
- TraitFlags::OBJECT_SAFETY_VALID | TraitFlags::IS_OBJECT_SAFE
- } else {
- TraitFlags::OBJECT_SAFETY_VALID
- }
- );
- }
-
- fn write_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) {
- tcx.dep_graph.write(DepNode::TraitImpls(self.def_id));
- }
-
- fn read_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) {
- tcx.dep_graph.read(DepNode::TraitImpls(self.def_id));
- }
-
- /// Records a basic trait-to-implementation mapping.
- ///
- /// Returns `true` iff the impl has not previously been recorded.
- fn record_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId,
- impl_trait_ref: TraitRef<'tcx>)
- -> bool {
- debug!("TraitDef::record_impl for {:?}, from {:?}",
- self, impl_trait_ref);
+#[derive(Clone)]
+pub struct TraitImplsIter {
+ blanket_impls: Rc<Vec<DefId>>,
+ non_blanket_impls: Rc<Vec<DefId>>,
+ index: usize,
+}
- // Record the write into the impl set, but only for local
- // impls: external impls are handled differently.
- if impl_def_id.is_local() {
- self.write_trait_impls(tcx);
- }
+impl Iterator for TraitImplsIter {
+ type Item = DefId;
- // We don't want to borrow_mut after we already populated all impls,
- // so check if an impl is present with an immutable borrow first.
- if let Some(sty) = fast_reject::simplify_type(tcx,
- impl_trait_ref.self_ty(), false) {
- if let Some(is) = self.nonblanket_impls.borrow().get(&sty) {
- if is.contains(&impl_def_id) {
- return false; // duplicate - skip
- }
- }
-
- self.nonblanket_impls.borrow_mut().entry(sty).or_insert(vec![]).push(impl_def_id)
+ fn next(&mut self) -> Option<DefId> {
+ if self.index < self.blanket_impls.len() {
+ let bi_index = self.index;
+ self.index += 1;
+ Some(self.blanket_impls[bi_index])
} else {
- if self.blanket_impls.borrow().contains(&impl_def_id) {
- return false; // duplicate - skip
+ let nbi_index = self.index - self.blanket_impls.len();
+ if nbi_index < self.non_blanket_impls.len() {
+ self.index += 1;
+ Some(self.non_blanket_impls[nbi_index])
+ } else {
+ None
}
- self.blanket_impls.borrow_mut().push(impl_def_id)
}
-
- true
- }
-
- /// Records a trait-to-implementation mapping for a crate-local impl.
- pub fn record_local_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId,
- impl_trait_ref: TraitRef<'tcx>) {
- assert!(impl_def_id.is_local());
- let was_new = self.record_impl(tcx, impl_def_id, impl_trait_ref);
- assert!(was_new);
-
- self.local_impl_count.set(self.local_impl_count.get() + 1);
}
- /// Records a trait-to-implementation mapping.
- pub fn record_has_default_impl(&self) {
- self.flags.set(self.flags.get() | TraitFlags::HAS_DEFAULT_IMPL);
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let items_left = (self.blanket_impls.len() + self.non_blanket_impls.len()) - self.index;
+ (items_left, Some(items_left))
}
+}
- /// Records a trait-to-implementation mapping for a non-local impl.
- ///
- /// The `parent_impl` is the immediately-less-specialized impl, or the
- /// trait's def ID if the impl is not a specialization -- information that
- /// should be pulled from the metadata.
- pub fn record_remote_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId,
- impl_trait_ref: TraitRef<'tcx>,
- parent_impl: DefId) {
- assert!(!impl_def_id.is_local());
+impl ExactSizeIterator for TraitImplsIter {}
- // if the impl has not previously been recorded
- if self.record_impl(tcx, impl_def_id, impl_trait_ref) {
- // if the impl is non-local, it's placed directly into the
- // specialization graph using parent information drawn from metadata.
- self.specialization_graph.borrow_mut()
- .record_impl_from_cstore(tcx, parent_impl, impl_def_id)
+impl<'a, 'gcx, 'tcx> TraitDef {
+ pub fn new(def_id: DefId,
+ unsafety: hir::Unsafety,
+ paren_sugar: bool,
+ has_default_impl: bool,
+ def_path_hash: u64)
+ -> TraitDef {
+ TraitDef {
+ def_id,
+ paren_sugar,
+ unsafety,
+ has_default_impl,
+ def_path_hash,
}
}
- /// Adds a local impl into the specialization graph, returning an error with
- /// overlap information if the impl overlaps but does not specialize an
- /// existing impl.
- pub fn add_impl_for_specialization(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId)
- -> Result<(), traits::OverlapError> {
- assert!(impl_def_id.is_local());
-
- self.specialization_graph.borrow_mut()
- .insert(tcx, impl_def_id)
- }
-
- pub fn ancestors(&'a self, of_impl: DefId) -> specialization_graph::Ancestors<'a> {
- specialization_graph::ancestors(self, of_impl)
- }
-
- /// Whether the impl set and specialization graphs are complete.
- pub fn is_complete(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
- tcx.populate_implementations_for_trait_if_necessary(self.def_id);
- ty::queries::coherent_trait::try_get(tcx, DUMMY_SP, (LOCAL_CRATE, self.def_id)).is_ok()
- }
-
- /// If any local impls haven't been added yet, returns
- /// Some(list of local impls for this trait).
- fn missing_local_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>)
- -> Option<&'gcx [ast::NodeId]> {
- if self.flags.get().intersects(TraitFlags::HAS_LOCAL_IMPLS) {
- return None;
- }
-
- if self.is_complete(tcx) {
- self.flags.set(self.flags.get() | TraitFlags::HAS_LOCAL_IMPLS);
- return None;
- }
-
- let impls = tcx.hir.trait_impls(self.def_id);
- assert!(self.local_impl_count.get() <= impls.len());
- if self.local_impl_count.get() == impls.len() {
- self.flags.set(self.flags.get() | TraitFlags::HAS_LOCAL_IMPLS);
- return None;
- }
-
- Some(impls)
+ pub fn ancestors(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ of_impl: DefId)
+ -> specialization_graph::Ancestors {
+ specialization_graph::ancestors(tcx, self.def_id, of_impl)
}
pub fn for_each_impl<F: FnMut(DefId)>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, mut f: F) {
- self.read_trait_impls(tcx);
- tcx.populate_implementations_for_trait_if_necessary(self.def_id);
-
- let local_impls = self.missing_local_impls(tcx);
- if let Some(impls) = local_impls {
- for &id in impls {
- f(tcx.hir.local_def_id(id));
- }
- }
- let mut f = |def_id: DefId| {
- if !(local_impls.is_some() && def_id.is_local()) {
- f(def_id);
- }
- };
-
- for &impl_def_id in self.blanket_impls.borrow().iter() {
+ for impl_def_id in tcx.trait_impls_of(self.def_id).iter() {
f(impl_def_id);
}
-
- for v in self.nonblanket_impls.borrow().values() {
- for &impl_def_id in v {
- f(impl_def_id);
- }
- }
}
/// Iterate over every impl that could possibly match the
self_ty: Ty<'tcx>,
mut f: F)
{
- self.read_trait_impls(tcx);
- tcx.populate_implementations_for_trait_if_necessary(self.def_id);
-
- let local_impls = self.missing_local_impls(tcx);
- if let Some(impls) = local_impls {
- for &id in impls {
- f(tcx.hir.local_def_id(id));
- }
- }
- let mut f = |def_id: DefId| {
- if !(local_impls.is_some() && def_id.is_local()) {
- f(def_id);
- }
- };
-
- for &impl_def_id in self.blanket_impls.borrow().iter() {
- f(impl_def_id);
- }
-
// simplify_type(.., false) basically replaces type parameters and
// projections with infer-variables. This is, of course, done on
// the impl trait-ref when it is instantiated, but not on the
// replace `S` with anything - this impl of course can't be
// selected, and as there are hundreds of similar impls,
// considering them would significantly harm performance.
- if let Some(simp) = fast_reject::simplify_type(tcx, self_ty, true) {
- if let Some(impls) = self.nonblanket_impls.borrow().get(&simp) {
- for &impl_def_id in impls {
- f(impl_def_id);
- }
- }
+ let relevant_impls = if let Some(simplified_self_ty) =
+ fast_reject::simplify_type(tcx, self_ty, true) {
+ tcx.relevant_trait_impls_for((self.def_id, simplified_self_ty))
} else {
- for v in self.nonblanket_impls.borrow().values() {
- for &impl_def_id in v {
- f(impl_def_id);
- }
- }
+ tcx.trait_impls_of(self.def_id)
+ };
+
+ for impl_def_id in relevant_impls.iter() {
+ f(impl_def_id);
}
}
}
-bitflags! {
- flags TraitFlags: u32 {
- const NO_TRAIT_FLAGS = 0,
- const HAS_DEFAULT_IMPL = 1 << 0,
- const IS_OBJECT_SAFE = 1 << 1,
- const OBJECT_SAFETY_VALID = 1 << 2,
- const HAS_REMOTE_IMPLS = 1 << 3,
- const HAS_LOCAL_IMPLS = 1 << 4,
+// Query provider for `trait_impls_of`.
+pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ trait_id: DefId)
+ -> TraitImpls {
+ let remote_impls = if trait_id.is_local() {
+ // Traits defined in the current crate can't have impls in upstream
+ // crates, so we don't bother querying the cstore.
+ Vec::new()
+ } else {
+ tcx.sess.cstore.implementations_of_trait(Some(trait_id))
+ };
+
+ let mut blanket_impls = Vec::new();
+ let mut non_blanket_impls = Vec::new();
+
+ let local_impls = tcx.hir
+ .trait_impls(trait_id)
+ .into_iter()
+ .map(|&node_id| tcx.hir.local_def_id(node_id));
+
+ for impl_def_id in local_impls.chain(remote_impls.into_iter()) {
+ let impl_self_ty = tcx.type_of(impl_def_id);
+ if impl_def_id.is_local() && impl_self_ty.references_error() {
+ continue
+ }
+
+ if fast_reject::simplify_type(tcx, impl_self_ty, false).is_some() {
+ non_blanket_impls.push(impl_def_id);
+ } else {
+ blanket_impls.push(impl_def_id);
+ }
+ }
+
+ TraitImpls {
+ blanket_impls: Rc::new(blanket_impls),
+ non_blanket_impls: Rc::new(non_blanket_impls),
+ }
+}
+
+// Query provider for `relevant_trait_impls_for`.
+pub(super) fn relevant_trait_impls_provider<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ (trait_id, self_ty): (DefId, fast_reject::SimplifiedType))
+ -> TraitImpls
+{
+ let all_trait_impls = tcx.trait_impls_of(trait_id);
+
+ let relevant: Vec<DefId> = all_trait_impls
+ .non_blanket_impls
+ .iter()
+ .cloned()
+ .filter(|&impl_def_id| {
+ let impl_self_ty = tcx.type_of(impl_def_id);
+ let impl_simple_self_ty = fast_reject::simplify_type(tcx,
+ impl_self_ty,
+ false).unwrap();
+ impl_simple_self_ty == self_ty
+ })
+ .collect();
+
+ if all_trait_impls.non_blanket_impls.len() == relevant.len() {
+ // If we didn't filter anything out, re-use the existing vec.
+ all_trait_impls
+ } else {
+ TraitImpls {
+ blanket_impls: all_trait_impls.blanket_impls.clone(),
+ non_blanket_impls: Rc::new(relevant),
+ }
}
}
self.hash(db.depth);
self.hash(i);
}
- ty::ReEarlyBound(ty::EarlyBoundRegion { index, name }) => {
- self.hash(index);
- self.hash(name.as_str());
+ ty::ReEarlyBound(ty::EarlyBoundRegion { def_id, .. }) => {
+ self.def_id(def_id);
}
ty::ReLateBound(..) |
ty::ReFree(..) |
}
// Like std::macros::try!, but for Option<>.
+#[cfg(unix)]
macro_rules! option_try(
($e:expr) => (match $e { Some(e) => e, None => return None })
);
}
}
-impl<'tcx> fmt::Debug for ty::RegionKind<'tcx> {
+impl fmt::Debug for ty::RegionKind {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ty::ReEarlyBound(ref data) => {
impl<'tcx> fmt::Debug for ty::ParameterEnvironment<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "ParameterEnvironment(\
- free_substs={:?}, \
- implicit_region_bound={:?}, \
- caller_bounds={:?})",
- self.free_substs,
- self.implicit_region_bound,
- self.caller_bounds)
+ write!(f, "ParameterEnvironment({:?})", self.caller_bounds)
}
}
-impl<'tcx> fmt::Display for ty::RegionKind<'tcx> {
+impl<'tcx> fmt::Display for ty::RegionKind {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if verbose() {
return write!(f, "{:?}", *self);
}
}
-impl<'tcx> fmt::Debug for ty::FreeRegion<'tcx> {
+impl fmt::Debug for ty::FreeRegion {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ReFree({:?}, {:?})",
self.scope, self.bound_region)
//! build speedups.
#![crate_name = "rustc_back"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(const_fn)]
#![feature(libc)]
#![feature(rand)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![cfg_attr(test, feature(rand))]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
extern crate syntax;
extern crate libc;
extern crate serialize;
#![crate_name = "rustc_bitflags"]
#![feature(associated_consts)]
-#![feature(staged_api)]
#![crate_type = "rlib"]
#![no_std]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![deny(warnings)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(staged_api))]
//! A typesafe bitmask flag generator.
})
}
- pub fn each_in_scope_loan<F>(&self, scope: region::CodeExtent<'tcx>, mut op: F) -> bool where
+ pub fn each_in_scope_loan<F>(&self, scope: region::CodeExtent, mut op: F) -> bool where
F: FnMut(&Loan<'tcx>) -> bool,
{
//! Like `each_issued_loan()`, but only considers loans that are
}
fn each_in_scope_loan_affecting_path<F>(&self,
- scope: region::CodeExtent<'tcx>,
+ scope: region::CodeExtent,
loan_path: &LoanPath<'tcx>,
mut op: F)
-> bool where
let mut ret = UseOk;
self.each_in_scope_loan_affecting_path(
- self.tcx().node_extent(expr_id), use_path, |loan| {
+ region::CodeExtent::Misc(expr_id), use_path, |loan| {
if !compatible_borrow_kinds(loan.kind, borrow_kind) {
ret = UseWhileBorrowed(loan.loan_path.clone(), loan.span);
false
// Check that we don't invalidate any outstanding loans
if let Some(loan_path) = opt_loan_path(&assignee_cmt) {
- let scope = self.tcx().node_extent(assignment_id);
+ let scope = region::CodeExtent::Misc(assignment_id);
self.each_in_scope_loan_affecting_path(scope, &loan_path, |loan| {
self.report_illegal_mutation(assignment_span, &loan_path, loan);
false
type R = Result<(),()>;
pub fn guarantee_lifetime<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
- item_scope: region::CodeExtent<'tcx>,
+ item_scope: region::CodeExtent,
span: Span,
cause: euv::LoanCause,
cmt: mc::cmt<'tcx>,
bccx: &'a BorrowckCtxt<'a, 'tcx>,
// the scope of the function body for the enclosing item
- item_scope: region::CodeExtent<'tcx>,
+ item_scope: region::CodeExtent,
span: Span,
cause: euv::LoanCause,
bccx: bccx,
infcx: &infcx,
all_loans: Vec::new(),
- item_ub: bccx.tcx.node_extent(body.node_id),
+ item_ub: region::CodeExtent::Misc(body.node_id),
move_data: MoveData::new(),
move_error_collector: move_error::MoveErrorCollector::new(),
};
all_loans: Vec<Loan<'tcx>>,
/// `item_ub` is used as an upper-bound on the lifetime whenever we
/// ask for the scope of an expression categorized as an upvar.
- item_ub: region::CodeExtent<'tcx>,
+ item_ub: region::CodeExtent,
}
impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> {
let loan_scope = match *loan_region {
ty::ReScope(scope) => scope,
- ty::ReFree(ref fr) => fr.scope.unwrap_or(self.item_ub),
+ ty::ReEarlyBound(ref br) => {
+ self.bccx.region_maps.early_free_extent(self.tcx(), br)
+ }
+
+ ty::ReFree(ref fr) => {
+ self.bccx.region_maps.free_extent(self.tcx(), fr)
+ }
ty::ReStatic => self.item_ub,
ty::ReEmpty |
ty::ReLateBound(..) |
- ty::ReEarlyBound(..) |
ty::ReVar(..) |
ty::ReSkolemized(..) |
ty::ReErased => {
};
debug!("loan_scope = {:?}", loan_scope);
- let borrow_scope = self.tcx().node_extent(borrow_id);
+ let borrow_scope = region::CodeExtent::Misc(borrow_id);
let gen_scope = self.compute_gen_scope(borrow_scope, loan_scope);
debug!("gen_scope = {:?}", gen_scope);
}
pub fn compute_gen_scope(&self,
- borrow_scope: region::CodeExtent<'tcx>,
- loan_scope: region::CodeExtent<'tcx>)
- -> region::CodeExtent<'tcx> {
+ borrow_scope: region::CodeExtent,
+ loan_scope: region::CodeExtent)
+ -> region::CodeExtent {
//! Determine when to introduce the loan. Typically the loan
//! is introduced at the point of the borrow, but in some cases,
//! notably method arguments, the loan may be introduced only
}
}
- pub fn compute_kill_scope(&self, loan_scope: region::CodeExtent<'tcx>, lp: &LoanPath<'tcx>)
- -> region::CodeExtent<'tcx> {
+ pub fn compute_kill_scope(&self, loan_scope: region::CodeExtent, lp: &LoanPath<'tcx>)
+ -> region::CodeExtent {
//! Determine when the loan restrictions go out of scope.
//! This is either when the lifetime expires or when the
//! local variable which roots the loan-path goes out of scope,
_ => return
}
let id = src.item_id();
- let param_env = ty::ParameterEnvironment::for_item(tcx, id);
+ let param_env = tcx.parameter_environment(tcx.hir.local_def_id(id));
let move_data = MoveData::gather_moves(mir, tcx, ¶m_env);
let elaborate_patch = {
let mir = &*mir;
}
fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
- Rvalue::Use(Operand::Constant(Constant {
+ Rvalue::Use(Operand::Constant(Box::new(Constant {
span: span,
ty: self.tcx.types.bool,
literal: Literal::Value { value: ConstVal::Bool(val) }
- }))
+ })))
}
fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
// steals it, but it forces the `borrowck` query.
let mir = &tcx.mir_validated(def_id).borrow();
- let param_env = ty::ParameterEnvironment::for_item(tcx, id);
+ let param_env = tcx.parameter_environment(def_id);
let move_data = MoveData::gather_moves(mir, tcx, ¶m_env);
let mdpe = MoveDataParamEnv { move_data: move_data, param_env: param_env };
let dead_unwinds = IdxSetBuf::new_empty(mir.basic_blocks().len());
// Some in `borrowck_fn` and cleared later
tables: &'a ty::TypeckTables<'tcx>,
- region_maps: Rc<RegionMaps<'tcx>>,
+ region_maps: Rc<RegionMaps>,
owner_def_id: DefId,
}
/// cases, notably method arguments, the loan may be introduced
/// only later, once it comes into scope. See also
/// `GatherLoanCtxt::compute_gen_scope`.
- gen_scope: region::CodeExtent<'tcx>,
+ gen_scope: region::CodeExtent,
/// kill_scope indicates when the loan goes out of scope. This is
/// either when the lifetime expires or when the local variable
/// which roots the loan-path goes out of scope, whichever happens
/// faster. See also `GatherLoanCtxt::compute_kill_scope`.
- kill_scope: region::CodeExtent<'tcx>,
+ kill_scope: region::CodeExtent,
span: Span,
cause: euv::LoanCause,
}
}
impl<'a, 'tcx> LoanPath<'tcx> {
- pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::CodeExtent<'tcx> {
+ pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::CodeExtent {
match self.kind {
LpVar(local_id) => bccx.region_maps.var_scope(local_id),
LpUpvar(upvar_id) => {
let block_id = closure_to_block(upvar_id.closure_expr_id, bccx.tcx);
- bccx.tcx.node_extent(block_id)
+ region::CodeExtent::Misc(block_id)
}
LpDowncast(ref base, _) |
LpExtend(ref base, ..) => base.kill_scope(bccx),
match (&err.code, &err.cause) {
(&err_out_of_scope(&ty::ReScope(_), &ty::ReStatic, _),
&BorrowViolation(euv::ClosureCapture(span))) |
+ (&err_out_of_scope(&ty::ReScope(_), &ty::ReEarlyBound(..), _),
+ &BorrowViolation(euv::ClosureCapture(span))) |
(&err_out_of_scope(&ty::ReScope(_), &ty::ReFree(..), _),
&BorrowViolation(euv::ClosureCapture(span))) => {
return self.report_out_of_scope_escaping_closure_capture(&err, span);
// except according to those terms.
#![crate_name = "rustc_borrowck"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(associated_consts)]
#![feature(nonzero)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
extern crate syntax_pos;
b: hir::BodyId, s: Span, id: ast::NodeId) {
intravisit::walk_fn(self, fk, fd, b, s, id);
- let region_context = self.tcx.hir.local_def_id(id);
- let region_maps = self.tcx.region_maps(region_context);
+ let def_id = self.tcx.hir.local_def_id(id);
MatchVisitor {
tcx: self.tcx,
tables: self.tcx.body_tables(b),
- region_maps: ®ion_maps,
- param_env: &ty::ParameterEnvironment::for_item(self.tcx, id)
+ region_maps: &self.tcx.region_maps(def_id),
+ param_env: &self.tcx.parameter_environment(def_id)
}.visit_body(self.tcx.hir.body(b));
}
}
tcx: TyCtxt<'a, 'tcx, 'tcx>,
tables: &'a ty::TypeckTables<'tcx>,
param_env: &'a ty::ParameterEnvironment<'tcx>,
- region_maps: &'a RegionMaps<'tcx>,
+ region_maps: &'a RegionMaps,
}
impl<'a, 'tcx> Visitor<'tcx> for MatchVisitor<'a, 'tcx> {
// constants, we only try to find the expression for a
// trait-associated const if the caller gives us the
// substitutions for the reference to it.
- if tcx.sess.cstore.trait_of_item(def_id).is_some() {
+ if tcx.trait_of_item(def_id).is_some() {
resolve_trait_associated_const(tcx, def_id, substs)
} else {
Some((def_id, substs))
//! This API is completely unstable and subject to change.
#![crate_name = "rustc_const_eval"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_patterns)]
#![feature(box_patterns)]
#![feature(const_fn)]
#![feature(i128_type)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
extern crate arena;
#[macro_use] extern crate syntax;
#[macro_use] extern crate log;
//! This API is completely unstable and subject to change.
#![crate_name = "rustc_const_math"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(const_fn)]
#![feature(i128)]
#![feature(i128_type)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
extern crate syntax;
extern crate serialize as rustc_serialize; // used by deriving
//! This API is completely unstable and subject to change.
#![crate_name = "rustc_data_structures"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(shared)]
#![feature(collections_range)]
#![feature(nonzero)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(fn_traits)]
#![feature(untagged_unions)]
#![feature(manually_drop)]
#![feature(struct_field_attributes)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
#![cfg_attr(unix, feature(libc))]
#![cfg_attr(test, feature(test))]
rustc_errors = { path = "../librustc_errors" }
rustc_incremental = { path = "../librustc_incremental" }
rustc_lint = { path = "../librustc_lint" }
-rustc_llvm = { path = "../librustc_llvm" }
rustc_metadata = { path = "../librustc_metadata" }
rustc_mir = { path = "../librustc_mir" }
rustc_passes = { path = "../librustc_passes" }
use rustc::middle::privacy::AccessLevels;
use rustc::mir::transform::{MIR_CONST, MIR_VALIDATED, MIR_OPTIMIZED, Passes};
use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas};
+use rustc::traits;
use rustc::util::common::time;
use rustc::util::nodemap::NodeSet;
use rustc::util::fs::rename_or_copy_remove;
let krate = ecx.monotonic_expander().expand_crate(krate);
+ ecx.check_unused_macros();
+
let mut missing_fragment_specifiers: Vec<_> =
ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
missing_fragment_specifiers.sort();
"static item recursion checking",
|| static_recursion::check_crate(sess, &hir_map))?;
- let index = stability::Index::new(&hir_map);
+ let index = stability::Index::new(&sess);
let mut local_providers = ty::maps::Providers::default();
borrowck::provide(&mut local_providers);
trans::provide(&mut local_providers);
typeck::provide(&mut local_providers);
ty::provide(&mut local_providers);
+ traits::provide(&mut local_providers);
reachable::provide(&mut local_providers);
rustc_const_eval::provide(&mut local_providers);
middle::region::provide(&mut local_providers);
cstore::provide(&mut extern_providers);
trans::provide(&mut extern_providers);
ty::provide_extern(&mut extern_providers);
+ traits::provide_extern(&mut extern_providers);
// FIXME(eddyb) get rid of this once we replace const_eval with miri.
rustc_const_eval::provide(&mut extern_providers);
//! This API is completely unstable and subject to change.
#![crate_name = "rustc_driver"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(libc)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
#![feature(set_stdio)]
-#![feature(staged_api)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
extern crate arena;
extern crate getopts;
extern crate rustc_trans;
extern crate rustc_typeck;
extern crate serialize;
-extern crate rustc_llvm as llvm;
#[macro_use]
extern crate log;
extern crate syntax;
use rustc_save_analysis as save;
use rustc_save_analysis::DumpHandler;
use rustc_trans::back::link;
-use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
+use rustc_trans::back::write::{RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
use rustc::dep_graph::DepGraph;
use rustc::session::{self, config, Session, build_session, CompileResult};
use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType};
let (sopts, cfg) = config::build_session_options_and_crate_config(&matches);
if sopts.debugging_opts.debug_llvm {
- unsafe { llvm::LLVMRustSetDebug(1); }
+ rustc_trans::enable_llvm_debug();
}
let descriptions = diagnostics_registry();
};
let dep_graph = DepGraph::new(sopts.build_dep_graph());
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let loader = file_loader.unwrap_or(box RealFileLoader);
let codemap = Rc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping()));
let mut sess = session::build_session_with_codemap(
sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest,
);
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let mut cfg = config::build_configuration(&sess, cfg);
return None;
}
let dep_graph = DepGraph::new(sopts.build_dep_graph());
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let mut sess = build_session(sopts.clone(),
&dep_graph,
None,
descriptions.clone(),
cstore.clone());
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let mut cfg = config::build_configuration(&sess, cfg.clone());
target_features::add_configuration(&mut cfg, &sess);
&Input::File(ref ifile) => {
let path = &(*ifile);
let mut v = Vec::new();
- locator::list_file_metadata(&sess.target.target, path, &mut v).unwrap();
+ locator::list_file_metadata(&sess.target.target,
+ path,
+ sess.cstore.metadata_loader(),
+ &mut v)
+ .unwrap();
println!("{}", String::from_utf8(v).unwrap());
}
&Input::Str { .. } => {
node: ast::MetaItemKind::Word,
span: DUMMY_SP,
});
- if !allow_unstable_cfg && gated_cfg.is_some() {
- continue;
+
+ // Note that crt-static is a specially recognized cfg
+ // directive that's printed out here as part of
+ // rust-lang/rust#37406, but in general the
+ // `target_feature` cfg is gated under
+ // rust-lang/rust#29717. For now this is just
+ // specifically allowing the crt-static cfg and that's
+ // it, this is intended to get into Cargo and then go
+ // through to build scripts.
+ let value = value.as_ref().map(|s| s.as_str());
+ let value = value.as_ref().map(|s| s.as_ref());
+ if name != "target_feature" || value != Some("crt-static") {
+ if !allow_unstable_cfg && gated_cfg.is_some() {
+ continue;
+ }
}
- cfgs.push(if let &Some(ref value) = value {
+ cfgs.push(if let Some(value) = value {
format!("{}=\"{}\"", name, value)
} else {
format!("{}", name)
println!("{}", cfg);
}
}
- PrintRequest::TargetCPUs => {
- let tm = create_target_machine(sess);
- unsafe { llvm::LLVMRustPrintTargetCPUs(tm); }
- }
- PrintRequest::TargetFeatures => {
- let tm = create_target_machine(sess);
- unsafe { llvm::LLVMRustPrintTargetFeatures(tm); }
- }
PrintRequest::RelocationModels => {
println!("Available relocation models:");
for &(name, _) in RELOC_MODEL_ARGS.iter() {
}
println!("");
}
+ PrintRequest::TargetCPUs | PrintRequest::TargetFeatures => {
+ rustc_trans::print(*req, sess);
+ }
}
}
return Compilation::Stop;
println!("commit-date: {}", unw(commit_date_str()));
println!("host: {}", config::host_triple());
println!("release: {}", unw(release_str()));
- unsafe {
- println!("LLVM version: {}.{}",
- llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());
- }
+ rustc_trans::print_version();
}
}
}
if cg_flags.contains(&"passes=list".to_string()) {
- unsafe {
- ::llvm::LLVMRustPrintPasses();
- }
+ rustc_trans::print_passes();
return None;
}
// except according to those terms.
use syntax::ast;
-use llvm::LLVMRustHasFeature;
use rustc::session::Session;
-use rustc_trans::back::write::create_target_machine;
-use syntax::feature_gate::UnstableFeatures;
use syntax::symbol::Symbol;
-use libc::c_char;
-
-// WARNING: the features must be known to LLVM or the feature
-// detection code will walk past the end of the feature array,
-// leading to crashes.
-
-const ARM_WHITELIST: &'static [&'static str] = &["neon\0", "vfp2\0", "vfp3\0", "vfp4\0"];
-
-const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
- "sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
- "ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
- "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
-
-const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx\0", "hvx-double\0"];
+use rustc_trans;
/// Add `target_feature = "..."` cfgs for a variety of platform
/// specific features (SSE, NEON etc.).
/// This is performed by checking whether a whitelisted set of
/// features is available on the target machine, by querying LLVM.
pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) {
- let target_machine = create_target_machine(sess);
-
- let whitelist = match &*sess.target.target.arch {
- "arm" => ARM_WHITELIST,
- "x86" | "x86_64" => X86_WHITELIST,
- "hexagon" => HEXAGON_WHITELIST,
- _ => &[],
- };
-
let tf = Symbol::intern("target_feature");
- for feat in whitelist {
- assert_eq!(feat.chars().last(), Some('\0'));
- if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {
- cfg.insert((tf, Some(Symbol::intern(&feat[..feat.len() - 1]))));
- }
+
+ for feat in rustc_trans::target_features(sess) {
+ cfg.insert((tf, Some(feat)));
}
let requested_features = sess.opts.cg.target_feature.split(',');
- let unstable_options = sess.opts.debugging_opts.unstable_options;
- let is_nightly = UnstableFeatures::from_environment().is_nightly_build();
let found_negative = requested_features.clone().any(|r| r == "-crt-static");
let found_positive = requested_features.clone().any(|r| r == "+crt-static");
found_positive
};
- // If we switched from the default then that's only allowed on nightly, so
- // gate that here.
- if (found_positive || found_negative) && (!is_nightly || !unstable_options) {
- sess.fatal("specifying the `crt-static` target feature is only allowed \
- on the nightly channel with `-Z unstable-options` passed \
- as well");
- }
-
if crt_static {
cfg.insert((tf, Some(Symbol::intern("crt-static"))));
}
use rustc::dep_graph::DepGraph;
use rustc_lint;
use rustc_resolve::MakeGlobMap;
+use rustc_trans;
use rustc::middle::lang_items;
use rustc::middle::free_region::FreeRegionMap;
use rustc::middle::region::{CodeExtent, RegionMaps};
-use rustc::middle::region::CodeExtentData;
use rustc::middle::resolve_lifetime;
use rustc::middle::stability;
use rustc::ty::subst::{Kind, Subst};
struct Env<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
infcx: &'a infer::InferCtxt<'a, 'gcx, 'tcx>,
- region_maps: &'a mut RegionMaps<'tcx>,
+ region_maps: &'a mut RegionMaps,
}
struct RH<'a> {
let dep_graph = DepGraph::new(false);
let _ignore = dep_graph.in_ignore();
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let sess = session::build_session_(options,
&dep_graph,
None,
diagnostic_handler,
Rc::new(CodeMap::new(FilePathMapping::empty())),
cstore.clone());
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let input = config::Input::Str {
name: driver::anon_src(),
// run just enough stuff to build a tcx:
let lang_items = lang_items::collect_language_items(&sess, &hir_map);
let named_region_map = resolve_lifetime::krate(&sess, &hir_map);
- let index = stability::Index::new(&hir_map);
+ let index = stability::Index::new(&sess);
TyCtxt::create_and_enter(&sess,
ty::maps::Providers::default(),
ty::maps::Providers::default(),
self.infcx.tcx
}
- pub fn create_region_hierarchy(&mut self, rh: &RH, parent: CodeExtent<'tcx>) {
- let me = self.tcx().intern_code_extent(CodeExtentData::Misc(rh.id));
+ pub fn create_region_hierarchy(&mut self, rh: &RH, parent: CodeExtent) {
+ let me = CodeExtent::Misc(rh.id);
self.region_maps.record_code_extent(me, Some(parent));
for child_rh in rh.sub {
self.create_region_hierarchy(child_rh, me);
// children of 1, etc
let node = ast::NodeId::from_u32;
- let dscope = self.tcx().intern_code_extent(CodeExtentData::DestructionScope(node(1)));
+ let dscope = CodeExtent::DestructionScope(node(1));
self.region_maps.record_code_extent(dscope, None);
self.create_region_hierarchy(&RH {
id: node(1),
pub fn re_early_bound(&self, index: u32, name: &'static str) -> ty::Region<'tcx> {
let name = Symbol::intern(name);
self.infcx.tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
- index: index,
- name: name,
+ def_id: self.infcx.tcx.hir.local_def_id(ast::CRATE_NODE_ID),
+ index,
+ name,
}))
}
}
pub fn t_rptr_scope(&self, id: u32) -> Ty<'tcx> {
- let r = ty::ReScope(self.tcx().node_extent(ast::NodeId::from_u32(id)));
+ let r = ty::ReScope(CodeExtent::Misc(ast::NodeId::from_u32(id)));
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize)
}
- pub fn re_free(&self, nid: ast::NodeId, id: u32) -> ty::Region<'tcx> {
+ pub fn re_free(&self, id: u32) -> ty::Region<'tcx> {
self.infcx.tcx.mk_region(ty::ReFree(ty::FreeRegion {
- scope: Some(self.tcx().node_extent(nid)),
+ scope: self.infcx.tcx.hir.local_def_id(ast::CRATE_NODE_ID),
bound_region: ty::BrAnon(id),
}))
}
- pub fn t_rptr_free(&self, nid: u32, id: u32) -> Ty<'tcx> {
- let r = self.re_free(ast::NodeId::from_u32(nid), id);
+ pub fn t_rptr_free(&self, id: u32) -> Ty<'tcx> {
+ let r = self.re_free(id);
self.infcx.tcx.mk_imm_ref(r, self.tcx().types.isize)
}
test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
env.create_simple_region_hierarchy();
- let t_rptr_free1 = env.t_rptr_free(1, 1);
+ let t_rptr_free1 = env.t_rptr_free(1);
let t_rptr_bound1 = env.t_rptr_late_bound(1);
env.check_not_sub(env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
env.create_simple_region_hierarchy();
let t_rptr_bound1 = env.t_rptr_late_bound(1);
- let t_rptr_free1 = env.t_rptr_free(1, 1);
+ let t_rptr_free1 = env.t_rptr_free(1);
env.check_sub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize));
})
env.create_simple_region_hierarchy();
let t_infer1 = env.infcx.next_ty_var(TypeVariableOrigin::MiscVariable(DUMMY_SP));
let t_rptr_bound1 = env.t_rptr_late_bound(1);
- let t_rptr_free1 = env.t_rptr_free(1, 1);
+ let t_rptr_free1 = env.t_rptr_free(1);
env.check_lub(env.t_fn(&[t_infer1], env.tcx().types.isize),
env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize));
test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
env.create_simple_region_hierarchy();
let t_rptr_bound1 = env.t_rptr_late_bound(1);
- let t_rptr_free1 = env.t_rptr_free(1, 1);
+ let t_rptr_free1 = env.t_rptr_free(1);
env.check_lub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize));
fn lub_free_free() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
env.create_simple_region_hierarchy();
- let t_rptr_free1 = env.t_rptr_free(1, 1);
- let t_rptr_free2 = env.t_rptr_free(1, 2);
+ let t_rptr_free1 = env.t_rptr_free(1);
+ let t_rptr_free2 = env.t_rptr_free(2);
let t_rptr_static = env.t_rptr_static();
env.check_lub(env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free2], env.tcx().types.isize),
fn glb_free_free_with_common_scope() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
env.create_simple_region_hierarchy();
- let t_rptr_free1 = env.t_rptr_free(1, 1);
- let t_rptr_free2 = env.t_rptr_free(1, 2);
+ let t_rptr_free1 = env.t_rptr_free(1);
+ let t_rptr_free2 = env.t_rptr_free(2);
let t_rptr_scope = env.t_rptr_scope(1);
env.check_glb(env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free2], env.tcx().types.isize),
test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
env.create_simple_region_hierarchy();
let t_rptr_bound1 = env.t_rptr_late_bound(1);
- let t_rptr_free1 = env.t_rptr_free(1, 1);
+ let t_rptr_free1 = env.t_rptr_free(1);
env.check_glb(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
assert!(!env.t_nil().has_escaping_regions());
- let t_rptr_free1 = env.t_rptr_free(1, 1);
+ let t_rptr_free1 = env.t_rptr_free(1);
assert!(!t_rptr_free1.has_escaping_regions());
let t_rptr_bound1 = env.t_rptr_late_bound_with_debruijn(1, ty::DebruijnIndex::new(1));
// except according to those terms.
use CodeSuggestion;
+use Substitution;
use Level;
use RenderSpan;
use std::fmt;
pub code: Option<String>,
pub span: MultiSpan,
pub children: Vec<SubDiagnostic>,
- pub suggestion: Option<CodeSuggestion>,
+ pub suggestions: Vec<CodeSuggestion>,
}
/// For example a note attached to an error.
code: code,
span: MultiSpan::new(),
children: vec![],
- suggestion: None,
+ suggestions: vec![],
}
}
///
/// See `diagnostic::CodeSuggestion` for more information.
pub fn span_suggestion(&mut self, sp: Span, msg: &str, suggestion: String) -> &mut Self {
- assert!(self.suggestion.is_none());
- self.suggestion = Some(CodeSuggestion {
- msp: sp.into(),
- substitutes: vec![suggestion],
+ self.suggestions.push(CodeSuggestion {
+ substitution_parts: vec![Substitution {
+ span: sp,
+ substitutions: vec![suggestion],
+ }],
+ msg: msg.to_owned(),
+ });
+ self
+ }
+
+ pub fn span_suggestions(&mut self, sp: Span, msg: &str, suggestions: Vec<String>) -> &mut Self {
+ self.suggestions.push(CodeSuggestion {
+ substitution_parts: vec![Substitution {
+ span: sp,
+ substitutions: suggestions,
+ }],
msg: msg.to_owned(),
});
self
self.handler.emitter.borrow_mut().emit(&self);
self.cancel();
- self.handler.panic_if_treat_err_as_bug();
+
+ if self.level == Level::Error {
+ self.handler.panic_if_treat_err_as_bug();
+ }
// if self.is_fatal() {
// panic!(FatalError);
msg: &str,
suggestion: String)
-> &mut Self);
+ forward!(pub fn span_suggestions(&mut self,
+ sp: Span,
+ msg: &str,
+ suggestions: Vec<String>)
+ -> &mut Self);
forward!(pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self);
forward!(pub fn code(&mut self, s: String) -> &mut Self);
let mut primary_span = db.span.clone();
let mut children = db.children.clone();
- if let Some(sugg) = db.suggestion.clone() {
- assert_eq!(sugg.msp.primary_spans().len(), sugg.substitutes.len());
- // don't display multispans as labels
- if sugg.substitutes.len() == 1 &&
+ if let Some((sugg, rest)) = db.suggestions.split_first() {
+ if rest.is_empty() &&
+ // don't display multipart suggestions as labels
+ sugg.substitution_parts.len() == 1 &&
+ // don't display multi-suggestions as labels
+ sugg.substitutions() == 1 &&
// don't display long messages as labels
sugg.msg.split_whitespace().count() < 10 &&
// don't display multiline suggestions as labels
- sugg.substitutes[0].find('\n').is_none() {
- let msg = format!("help: {} `{}`", sugg.msg, sugg.substitutes[0]);
- primary_span.push_span_label(sugg.msp.primary_spans()[0], msg);
+ sugg.substitution_parts[0].substitutions[0].find('\n').is_none() {
+ let substitution = &sugg.substitution_parts[0].substitutions[0];
+ let msg = format!("help: {} `{}`", sugg.msg, substitution);
+ primary_span.push_span_label(sugg.substitution_spans().next().unwrap(), msg);
} else {
- children.push(SubDiagnostic {
- level: Level::Help,
- message: Vec::new(),
- span: MultiSpan::new(),
- render_span: Some(Suggestion(sugg)),
- });
+ // if there are multiple suggestions, print them all in full
+ // to be consistent. We could try to figure out if we can
+ // make one (or the first one) inline, but that would give
+ // undue importance to a semi-random suggestion
+ for sugg in &db.suggestions {
+ children.push(SubDiagnostic {
+ level: Level::Help,
+ message: Vec::new(),
+ span: MultiSpan::new(),
+ render_span: Some(Suggestion(sugg.clone())),
+ });
+ }
}
}
/// maximum number of lines we will print for each error; arbitrary.
pub const MAX_HIGHLIGHT_LINES: usize = 6;
+/// maximum number of suggestions to be shown
+///
+/// Arbitrary, but taken from trait import suggestion limit
+pub const MAX_SUGGESTIONS: usize = 4;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ColorConfig {
multiline_depth: usize,
}
-
-/// Do not use this for messages that end in `\n` – use `println_maybe_styled` instead. See
-/// `EmitterWriter::print_maybe_styled` for details.
-macro_rules! print_maybe_styled {
- ($dst: expr, $style: expr, $($arg: tt)*) => {
- $dst.print_maybe_styled(format_args!($($arg)*), $style, false)
- }
-}
-
-macro_rules! println_maybe_styled {
- ($dst: expr, $style: expr, $($arg: tt)*) => {
- $dst.print_maybe_styled(format_args!($($arg)*), $style, true)
- }
-}
-
impl EmitterWriter {
pub fn stderr(color_config: ColorConfig, code_map: Option<Rc<CodeMapper>>) -> EmitterWriter {
if color_config.use_color() {
-> io::Result<()> {
use std::borrow::Borrow;
- let primary_span = suggestion.msp.primary_span().unwrap();
+ let primary_span = suggestion.substitution_spans().next().unwrap();
if let Some(ref cm) = self.cm {
let mut buffer = StyledBuffer::new();
- buffer.append(0, &level.to_string(), Style::Level(level.clone()));
- buffer.append(0, ": ", Style::HeaderMsg);
- self.msg_to_buffer(&mut buffer,
- &[(suggestion.msg.to_owned(), Style::NoStyle)],
- max_line_num_len,
- "suggestion",
- Some(Style::HeaderMsg));
-
let lines = cm.span_to_lines(primary_span).unwrap();
assert!(!lines.lines.is_empty());
- let complete = suggestion.splice_lines(cm.borrow());
+ buffer.append(0, &level.to_string(), Style::Level(level.clone()));
+ buffer.append(0, ": ", Style::HeaderMsg);
+ self.msg_to_buffer(&mut buffer,
+ &[(suggestion.msg.to_owned(), Style::NoStyle)],
+ max_line_num_len,
+ "suggestion",
+ Some(Style::HeaderMsg));
- // print the suggestion without any line numbers, but leave
- // space for them. This helps with lining up with previous
- // snippets from the actual error being reported.
- let mut lines = complete.lines();
+ let suggestions = suggestion.splice_lines(cm.borrow());
let mut row_num = 1;
- for line in lines.by_ref().take(MAX_HIGHLIGHT_LINES) {
- draw_col_separator(&mut buffer, row_num, max_line_num_len + 1);
- buffer.append(row_num, line, Style::NoStyle);
- row_num += 1;
- }
+ for complete in suggestions.iter().take(MAX_SUGGESTIONS) {
+
+ // print the suggestion without any line numbers, but leave
+ // space for them. This helps with lining up with previous
+ // snippets from the actual error being reported.
+ let mut lines = complete.lines();
+ for line in lines.by_ref().take(MAX_HIGHLIGHT_LINES) {
+ draw_col_separator(&mut buffer, row_num, max_line_num_len + 1);
+ buffer.append(row_num, line, Style::NoStyle);
+ row_num += 1;
+ }
- // if we elided some lines, add an ellipsis
- if let Some(_) = lines.next() {
- buffer.append(row_num, "...", Style::NoStyle);
+ // if we elided some lines, add an ellipsis
+ if let Some(_) = lines.next() {
+ buffer.append(row_num, "...", Style::NoStyle);
+ }
+ }
+ if suggestions.len() > MAX_SUGGESTIONS {
+ let msg = format!("and {} other candidates", suggestions.len() - MAX_SUGGESTIONS);
+ buffer.append(row_num, &msg, Style::NoStyle);
}
emit_to_destination(&buffer.render(), level, &mut self.dst)?;
}
// except according to those terms.
#![crate_name = "rustc_errors"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(custom_attribute)]
#![allow(unused_attributes)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(range_contains)]
#![feature(libc)]
+#![feature(conservative_impl_trait)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
extern crate term;
extern crate libc;
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub struct CodeSuggestion {
- pub msp: MultiSpan,
- pub substitutes: Vec<String>,
+ /// Each substitute can have multiple variants due to multiple
+ /// applicable suggestions
+ ///
+ /// `foo.bar` might be replaced with `a.b` or `x.y` by replacing
+ /// `foo` and `bar` on their own:
+ ///
+ /// ```
+ /// vec![
+ /// (0..3, vec!["a", "x"]),
+ /// (4..7, vec!["b", "y"]),
+ /// ]
+ /// ```
+ ///
+ /// or by replacing the entire span:
+ ///
+ /// ```
+ /// vec![(0..7, vec!["a.b", "x.y"])]
+ /// ```
+ pub substitution_parts: Vec<Substitution>,
pub msg: String,
}
+#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
+/// See the docs on `CodeSuggestion::substitutions`
+pub struct Substitution {
+ pub span: Span,
+ pub substitutions: Vec<String>,
+}
+
pub trait CodeMapper {
fn lookup_char_pos(&self, pos: BytePos) -> Loc;
fn span_to_lines(&self, sp: Span) -> FileLinesResult;
}
impl CodeSuggestion {
- /// Returns the assembled code suggestion.
- pub fn splice_lines(&self, cm: &CodeMapper) -> String {
+ /// Returns the number of substitutions
+ fn substitutions(&self) -> usize {
+ self.substitution_parts[0].substitutions.len()
+ }
+
+ /// Returns the number of substitutions
+ pub fn substitution_spans<'a>(&'a self) -> impl Iterator<Item = Span> + 'a {
+ self.substitution_parts.iter().map(|sub| sub.span)
+ }
+
+ /// Returns the assembled code suggestions.
+ pub fn splice_lines(&self, cm: &CodeMapper) -> Vec<String> {
use syntax_pos::{CharPos, Loc, Pos};
fn push_trailing(buf: &mut String,
}
}
- let mut primary_spans = self.msp.primary_spans().to_owned();
-
- assert_eq!(primary_spans.len(), self.substitutes.len());
- if primary_spans.is_empty() {
- return format!("");
+ if self.substitution_parts.is_empty() {
+ return vec![String::new()];
}
+ let mut primary_spans: Vec<_> = self.substitution_parts
+ .iter()
+ .map(|sub| (sub.span, &sub.substitutions))
+ .collect();
+
// Assumption: all spans are in the same file, and all spans
// are disjoint. Sort in ascending order.
- primary_spans.sort_by_key(|sp| sp.lo);
+ primary_spans.sort_by_key(|sp| sp.0.lo);
// Find the bounding span.
- let lo = primary_spans.iter().map(|sp| sp.lo).min().unwrap();
- let hi = primary_spans.iter().map(|sp| sp.hi).min().unwrap();
+ let lo = primary_spans.iter().map(|sp| sp.0.lo).min().unwrap();
+ let hi = primary_spans.iter().map(|sp| sp.0.hi).min().unwrap();
let bounding_span = Span {
lo: lo,
hi: hi,
prev_hi.col = CharPos::from_usize(0);
let mut prev_line = fm.get_line(lines.lines[0].line_index);
- let mut buf = String::new();
+ let mut bufs = vec![String::new(); self.substitutions()];
- for (sp, substitute) in primary_spans.iter().zip(self.substitutes.iter()) {
+ for (sp, substitutes) in primary_spans {
let cur_lo = cm.lookup_char_pos(sp.lo);
- if prev_hi.line == cur_lo.line {
- push_trailing(&mut buf, prev_line, &prev_hi, Some(&cur_lo));
- } else {
- push_trailing(&mut buf, prev_line, &prev_hi, None);
- // push lines between the previous and current span (if any)
- for idx in prev_hi.line..(cur_lo.line - 1) {
- if let Some(line) = fm.get_line(idx) {
- buf.push_str(line);
- buf.push('\n');
+ for (buf, substitute) in bufs.iter_mut().zip(substitutes) {
+ if prev_hi.line == cur_lo.line {
+ push_trailing(buf, prev_line, &prev_hi, Some(&cur_lo));
+ } else {
+ push_trailing(buf, prev_line, &prev_hi, None);
+ // push lines between the previous and current span (if any)
+ for idx in prev_hi.line..(cur_lo.line - 1) {
+ if let Some(line) = fm.get_line(idx) {
+ buf.push_str(line);
+ buf.push('\n');
+ }
+ }
+ if let Some(cur_line) = fm.get_line(cur_lo.line - 1) {
+ buf.push_str(&cur_line[..cur_lo.col.to_usize()]);
}
}
- if let Some(cur_line) = fm.get_line(cur_lo.line - 1) {
- buf.push_str(&cur_line[..cur_lo.col.to_usize()]);
- }
+ buf.push_str(substitute);
}
- buf.push_str(substitute);
prev_hi = cm.lookup_char_pos(sp.hi);
prev_line = fm.get_line(prev_hi.line - 1);
}
- push_trailing(&mut buf, prev_line, &prev_hi, None);
- // remove trailing newline
- buf.pop();
- buf
+ for buf in &mut bufs {
+ // if the replacement already ends with a newline, don't print the next line
+ if !buf.ends_with('\n') {
+ push_trailing(buf, prev_line, &prev_hi, None);
+ }
+ // remove trailing newline
+ buf.pop();
+ }
+ bufs
}
}
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ich::{Fingerprint, StableHashingContext};
use rustc::ty::TyCtxt;
+use rustc::util::common::record_time;
use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
use rustc_data_structures::fx::FxHashMap;
-use rustc::util::common::record_time;
+use rustc_data_structures::accumulate_vec::AccumulateVec;
pub type IchHasher = StableHasher<Fingerprint>;
// difference, filter them out.
return None
}
+ DepNode::AllLocalTraitImpls => {
+ // These are already covered by hashing
+ // the HIR.
+ return None
+ }
ref other => {
bug!("Found unexpected DepNode during \
SVH computation: {:?}",
true,
(module, (span, attrs)));
}
+
+ fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate)
+ {
+ let tcx = self.hcx.tcx();
+
+ let mut impls: Vec<(u64, Fingerprint)> = krate
+ .trait_impls
+ .iter()
+ .map(|(&trait_id, impls)| {
+ let trait_id = tcx.def_path_hash(trait_id);
+ let mut impls: AccumulateVec<[_; 32]> = impls
+ .iter()
+ .map(|&node_id| {
+ let def_id = tcx.hir.local_def_id(node_id);
+ tcx.def_path_hash(def_id)
+ })
+ .collect();
+
+ impls.sort_unstable();
+ let mut hasher = StableHasher::new();
+ impls.hash_stable(&mut self.hcx, &mut hasher);
+ (trait_id, hasher.finish())
+ })
+ .collect();
+
+ impls.sort_unstable();
+
+ let mut default_impls: AccumulateVec<[_; 32]> = krate
+ .trait_default_impl
+ .iter()
+ .map(|(&trait_def_id, &impl_node_id)| {
+ let impl_def_id = tcx.hir.local_def_id(impl_node_id);
+ (tcx.def_path_hash(trait_def_id), tcx.def_path_hash(impl_def_id))
+ })
+ .collect();
+
+ default_impls.sort_unstable();
+
+ let mut hasher = StableHasher::new();
+ impls.hash_stable(&mut self.hcx, &mut hasher);
+
+ self.hashes.insert(DepNode::AllLocalTraitImpls, hasher.finish());
+ }
}
impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> {
}
}
+
+
pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> IncrementalHashesMap {
let _ignore = tcx.dep_graph.in_ignore();
let fingerprint = hasher.finish();
visitor.hashes.insert(dep_node, fingerprint);
}
+
+ visitor.compute_and_store_ich_for_trait_impls(krate);
});
tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
//! Support for serializing the dep-graph and reloading it.
#![crate_name = "rustc_incremental"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(rand)]
#![feature(conservative_impl_trait)]
#![feature(sort_unstable)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
extern crate graphviz;
#[macro_use] extern crate rustc;
extern crate rustc_data_structures;
DepNode::FileMap(def_id, ref name) => {
if def_id.is_local() {
- Some(self.incremental_hashes_map[dep_node])
+ // We will have been able to retrace the DefId (which is
+ // always the local CRATE_DEF_INDEX), but the file with the
+ // given name might have been removed, so we use get() in
+ // order to allow for that case.
+ self.incremental_hashes_map.get(dep_node).map(|x| *x)
} else {
Some(self.metadata_hash(DepNode::FileMap(def_id, name.clone()),
def_id.krate,
}
}
}
-
-macro_rules! set {
- ($( $value:expr ),*) => {
- {
- use $crate::rustc_data_structures::fx::FxHashSet;
- let mut set = FxHashSet();
- $(set.insert($value);)*
- set
- }
- }
-}
cx.span_lint(ILLEGAL_FLOATING_POINT_LITERAL_PATTERN,
l.span,
"floating-point literals cannot be used in patterns");
- error!("span mc spanspam");
},
_ => (),
}
traits::Obligation::new(traits::ObligationCause::misc(span, expr_id),
trait_ref.to_poly_trait_predicate());
- // unwrap() is ok here b/c `method` is the method
- // defined in this crate whose body we are
- // checking, so it's always local
- let node_id = tcx.hir.as_local_node_id(method.def_id).unwrap();
-
- let param_env = ty::ParameterEnvironment::for_item(tcx, node_id);
+ let param_env = tcx.parameter_environment(method.def_id);
tcx.infer_ctxt(param_env, Reveal::UserFacing).enter(|infcx| {
let mut selcx = traits::SelectionContext::new(&infcx);
match selcx.select(&obligation) {
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnionsWithDropFields {
fn check_item(&mut self, ctx: &LateContext, item: &hir::Item) {
if let hir::ItemUnion(ref vdata, _) = item.node {
- let param_env = &ty::ParameterEnvironment::for_item(ctx.tcx, item.id);
+ let param_env = &ctx.tcx.parameter_environment(ctx.tcx.hir.local_def_id(item.id));
for field in vdata.fields() {
let field_ty = ctx.tcx.type_of(ctx.tcx.hir.local_def_id(field.id));
if field_ty.needs_drop(ctx.tcx, param_env) {
//! This API is completely unstable and subject to change.
#![crate_name = "rustc_lint"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(i128_type)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
#![feature(slice_patterns)]
-#![feature(staged_api)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
#[macro_use]
extern crate syntax;
UNUSED_MUST_USE,
UNUSED_UNSAFE,
PATH_STATEMENTS,
- UNUSED_ATTRIBUTES);
+ UNUSED_ATTRIBUTES,
+ UNUSED_MACROS);
// Guidelines for creating a future incompatibility lint:
//
#![allow(dead_code)]
#![crate_name = "rustc_llvm"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(concat_idents)]
#![feature(libc)]
#![feature(link_args)]
-#![feature(staged_api)]
-#![feature(rustc_private)]
#![feature(static_nobundle)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
extern crate libc;
#[macro_use]
#[no_link]
[dependencies]
flate = { path = "../libflate" }
log = "0.3"
+owning_ref = "0.3.3"
proc_macro = { path = "../libproc_macro" }
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
-rustc_llvm = { path = "../librustc_llvm" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_ext = { path = "../libsyntax_ext" }
let exported_symbols = crate_root.exported_symbols
.map(|x| x.decode(&metadata).collect());
+ let trait_impls = crate_root
+ .impls
+ .map(|impls| {
+ impls.decode(&metadata)
+ .map(|trait_impls| (trait_impls.trait_id, trait_impls.impls))
+ .collect()
+ });
+
let mut cmeta = cstore::CrateMetadata {
name: name,
extern_crate: Cell::new(None),
def_path_table: def_path_table,
exported_symbols: exported_symbols,
+ trait_impls: trait_impls,
proc_macros: crate_root.macro_derive_registrar.map(|_| {
self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span)
}),
rejected_via_filename: vec![],
should_match_name: true,
is_proc_macro: Some(false),
+ metadata_loader: &*self.cstore.metadata_loader,
};
self.load(&mut locate_ctxt).or_else(|| {
rejected_via_filename: vec![],
should_match_name: true,
is_proc_macro: None,
+ metadata_loader: &*self.cstore.metadata_loader,
};
let library = self.load(&mut locate_ctxt).or_else(|| {
if !is_cross {
// The crate store - a central repo for information collected about external
// crates and libraries
-use locator;
use schema::{self, Tracked};
use rustc::dep_graph::{DepGraph, DepNode, GlobalMetaDataKind};
use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefIndex, DefId};
use rustc::hir::map::definitions::DefPathTable;
use rustc::hir::svh::Svh;
-use rustc::middle::cstore::{DepKind, ExternCrate};
+use rustc::middle::cstore::{DepKind, ExternCrate, MetadataLoader};
use rustc_back::PanicStrategy;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc::util::nodemap::{FxHashMap, FxHashSet, NodeMap, DefIdMap};
use std::cell::{RefCell, Cell};
use std::rc::Rc;
-use flate::Bytes;
+use owning_ref::ErasedBoxRef;
use syntax::{ast, attr};
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
// own crate numbers.
pub type CrateNumMap = IndexVec<CrateNum, CrateNum>;
-pub enum MetadataBlob {
- Inflated(Bytes),
- Archive(locator::ArchiveMetadata),
- Raw(Vec<u8>),
-}
+pub struct MetadataBlob(pub ErasedBoxRef<[u8]>);
/// Holds information about a syntax_pos::FileMap imported from another crate.
/// See `imported_filemaps()` for more information.
pub exported_symbols: Tracked<FxHashSet<DefIndex>>,
+ pub trait_impls: Tracked<FxHashMap<(u32, DefIndex), schema::LazySeq<DefIndex>>>,
+
pub dep_kind: Cell<DepKind>,
pub source: CrateSource,
statically_included_foreign_items: RefCell<FxHashSet<DefIndex>>,
pub dllimport_foreign_items: RefCell<FxHashSet<DefIndex>>,
pub visible_parent_map: RefCell<DefIdMap<DefId>>,
+ pub metadata_loader: Box<MetadataLoader>,
}
impl CStore {
- pub fn new(dep_graph: &DepGraph) -> CStore {
+ pub fn new(dep_graph: &DepGraph, metadata_loader: Box<MetadataLoader>) -> CStore {
CStore {
dep_graph: dep_graph.clone(),
metas: RefCell::new(FxHashMap()),
statically_included_foreign_items: RefCell::new(FxHashSet()),
dllimport_foreign_items: RefCell::new(FxHashSet()),
visible_parent_map: RefCell::new(FxHashMap()),
+ metadata_loader: metadata_loader,
}
}
self.root.disambiguator
}
- pub fn is_staged_api(&self, dep_graph: &DepGraph) -> bool {
- for attr in self.get_item_attrs(CRATE_DEF_INDEX, dep_graph).iter() {
- if attr.path == "stable" || attr.path == "unstable" {
- return true;
- }
- }
- false
- }
-
pub fn is_allocator(&self, dep_graph: &DepGraph) -> bool {
let attrs = self.get_item_attrs(CRATE_DEF_INDEX, dep_graph);
attr::contains_name(&attrs, "allocator")
use cstore;
use encoder;
-use locator;
use schema;
use rustc::dep_graph::DepTrackingMapConfig;
use rustc::middle::cstore::{CrateStore, CrateSource, LibSource, DepKind,
- ExternCrate, NativeLibrary, LinkMeta,
+ ExternCrate, NativeLibrary, MetadataLoader, LinkMeta,
LinkagePreference, LoadedMacro, EncodedMetadata};
use rustc::hir::def;
use rustc::middle::lang_items;
use syntax::symbol::Symbol;
use syntax_pos::{Span, NO_EXPANSION};
use rustc::hir::svh::Svh;
-use rustc_back::target::Target;
use rustc::hir;
-use std::collections::BTreeMap;
-
macro_rules! provide {
(<$lt:tt> $tcx:ident, $def_id:ident, $cdata:ident $($name:ident => $compute:block)*) => {
pub fn provide<$lt>(providers: &mut Providers<$lt>) {
def_span => { cdata.get_span(def_id.index, &tcx.sess) }
stability => { cdata.get_stability(def_id.index) }
deprecation => { cdata.get_deprecation(def_id.index) }
- item_body_nested_bodies => {
- let map: BTreeMap<_, _> = cdata.entry(def_id.index).ast.into_iter().flat_map(|ast| {
- ast.decode(cdata).nested_bodies.decode(cdata).map(|body| (body.id(), body))
- }).collect();
-
- Rc::new(map)
- }
+ item_attrs => { cdata.get_item_attrs(def_id.index, &tcx.dep_graph) }
+ // FIXME(#38501) We've skipped a `read` on the `HirBody` of
+ // a `fn` when encoding, so the dep-tracking wouldn't work.
+ // This is only used by rustdoc anyway, which shouldn't have
+ // incremental recompilation ever enabled.
+ fn_arg_names => { cdata.get_fn_arg_names(def_id.index) }
+ impl_parent => { cdata.get_parent_impl(def_id.index) }
+ trait_of_item => { cdata.get_trait_of_item(def_id.index) }
+ is_exported_symbol => {
+ let dep_node = cdata.metadata_dep_node(GlobalMetaDataKind::ExportedSymbols);
+ cdata.exported_symbols.get(&tcx.dep_graph, dep_node).contains(&def_id.index)
+ }
+ item_body_nested_bodies => { Rc::new(cdata.item_body_nested_bodies(def_id.index)) }
const_is_rvalue_promotable_to_static => {
- cdata.entry(def_id.index).ast.expect("const item missing `ast`")
- .decode(cdata).rvalue_promotable_to_static
- }
- is_mir_available => {
- !cdata.is_proc_macro(def_id.index) &&
- cdata.maybe_entry(def_id.index).and_then(|item| item.decode(cdata).mir).is_some()
+ cdata.const_is_rvalue_promotable_to_static(def_id.index)
}
+ is_mir_available => { cdata.is_item_mir_available(def_id.index) }
}
impl CrateStore for cstore::CStore {
self.get_crate_data(krate)
}
+ fn metadata_loader(&self) -> &MetadataLoader {
+ &*self.metadata_loader
+ }
+
fn visibility(&self, def: DefId) -> ty::Visibility {
self.dep_graph.read(DepNode::MetaData(def));
self.get_crate_data(def.krate).get_visibility(def.index)
self.get_crate_data(def.krate).get_generics(def.index)
}
- fn item_attrs(&self, def_id: DefId) -> Rc<[ast::Attribute]>
- {
- self.get_crate_data(def_id.krate)
- .get_item_attrs(def_id.index, &self.dep_graph)
- }
-
- fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>
- {
- // FIXME(#38501) We've skipped a `read` on the `HirBody` of
- // a `fn` when encoding, so the dep-tracking wouldn't work.
- // This is only used by rustdoc anyway, which shouldn't have
- // incremental recompilation ever enabled.
- assert!(!self.dep_graph.is_fully_enabled());
- self.get_crate_data(did.krate).get_fn_arg_names(did.index)
- }
-
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
{
- if let Some(def_id) = filter {
- self.dep_graph.read(DepNode::MetaData(def_id));
- }
let mut result = vec![];
+
self.iter_crate_data(|_, cdata| {
cdata.get_implementations_for_trait(filter, &self.dep_graph, &mut result)
});
self.get_crate_data(def.krate).get_impl_defaultness(def.index)
}
- fn impl_parent(&self, impl_def: DefId) -> Option<DefId> {
- self.dep_graph.read(DepNode::MetaData(impl_def));
- self.get_crate_data(impl_def.krate).get_parent_impl(impl_def.index)
- }
-
- fn trait_of_item(&self, def_id: DefId) -> Option<DefId> {
- self.dep_graph.read(DepNode::MetaData(def_id));
- self.get_crate_data(def_id.krate).get_trait_of_item(def_id.index)
- }
-
fn associated_item_cloned(&self, def: DefId) -> ty::AssociatedItem
{
self.dep_graph.read(DepNode::MetaData(def));
self.get_crate_data(impl_did.krate).is_default_impl(impl_did.index)
}
- fn is_foreign_item(&self, did: DefId) -> bool {
- self.get_crate_data(did.krate).is_foreign_item(did.index)
- }
-
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool
{
self.do_is_statically_included_foreign_item(def_id)
}
- fn is_exported_symbol(&self, def_id: DefId) -> bool {
- let data = self.get_crate_data(def_id.krate);
- let dep_node = data.metadata_dep_node(GlobalMetaDataKind::ExportedSymbols);
- data.exported_symbols
- .get(&self.dep_graph, dep_node)
- .contains(&def_id.index)
- }
-
fn is_dllimport_foreign_item(&self, def_id: DefId) -> bool {
if def_id.krate == LOCAL_CRATE {
self.dllimport_foreign_items.borrow().contains(&def_id.index)
self.get_crate_data(cnum).get_missing_lang_items(&self.dep_graph)
}
- fn is_staged_api(&self, cnum: CrateNum) -> bool
- {
- self.get_crate_data(cnum).is_staged_api(&self.dep_graph)
- }
-
fn is_allocator(&self, cnum: CrateNum) -> bool
{
self.get_crate_data(cnum).is_allocator(&self.dep_graph)
{
self.get_used_link_args().borrow().clone()
}
-
- fn metadata_filename(&self) -> &str
- {
- locator::METADATA_FILENAME
- }
-
- fn metadata_section_name(&self, target: &Target) -> &str
- {
- locator::meta_section_name(target)
- }
-
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>
{
self.do_get_used_crates(prefer)
drop(visible_parent_map);
self.visible_parent_map.borrow()
}
-}
+}
\ No newline at end of file
use rustc::hir::def::{self, Def, CtorKind};
use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::middle::lang_items;
-use rustc::middle::region;
use rustc::session::Session;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::Substs;
use std::borrow::Cow;
use std::cell::Ref;
+use std::collections::BTreeMap;
use std::io;
use std::mem;
use std::rc::Rc;
impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob {
fn raw_bytes(self) -> &'a [u8] {
- match *self {
- MetadataBlob::Inflated(ref vec) => vec,
- MetadataBlob::Archive(ref ar) => ar.as_slice(),
- MetadataBlob::Raw(ref vec) => vec,
- }
+ &self.0
}
}
}
}
-impl<'a, 'tcx> SpecializedDecoder<region::CodeExtent<'tcx>> for DecodeContext<'a, 'tcx> {
- fn specialized_decode(&mut self) -> Result<region::CodeExtent<'tcx>, Self::Error> {
- Ok(self.tcx().intern_code_extent(Decodable::decode(self)?))
- }
-}
-
impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice<Ty<'tcx>>> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice<Ty<'tcx>>, Self::Error> {
Ok(self.tcx().mk_type_list((0..self.read_usize()?).map(|_| Decodable::decode(self)))?)
}
impl<'a, 'tcx> CrateMetadata {
- pub fn is_proc_macro(&self, id: DefIndex) -> bool {
+ fn is_proc_macro(&self, id: DefIndex) -> bool {
self.proc_macros.is_some() && id != CRATE_DEF_INDEX
}
- pub fn maybe_entry(&self, item_id: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
+ fn maybe_entry(&self, item_id: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
assert!(!self.is_proc_macro(item_id));
self.root.index.lookup(self.blob.raw_bytes(), item_id)
}
- pub fn entry(&self, item_id: DefIndex) -> Entry<'tcx> {
+ fn entry(&self, item_id: DefIndex) -> Entry<'tcx> {
match self.maybe_entry(item_id) {
None => {
bug!("entry: id not found: {:?} in crate {:?} with number {}",
_ => bug!(),
};
- let def = ty::TraitDef::new(self.local_def_id(item_id),
- data.unsafety,
- data.paren_sugar,
- self.def_path_table.def_path_hash(item_id));
-
- if data.has_default_impl {
- def.record_has_default_impl();
- }
-
- def
+ ty::TraitDef::new(self.local_def_id(item_id),
+ data.unsafety,
+ data.paren_sugar,
+ data.has_default_impl,
+ self.def_path_table.def_path_hash(item_id))
}
fn get_variant(&self, item: &Entry, index: DefIndex) -> ty::VariantDef {
tcx.alloc_tables(ast.tables.decode((self, tcx)))
}
+ pub fn item_body_nested_bodies(&self, id: DefIndex) -> BTreeMap<hir::BodyId, hir::Body> {
+ self.entry(id).ast.into_iter().flat_map(|ast| {
+ ast.decode(self).nested_bodies.decode(self).map(|body| (body.id(), body))
+ }).collect()
+ }
+
+ pub fn const_is_rvalue_promotable_to_static(&self, id: DefIndex) -> bool {
+ self.entry(id).ast.expect("const item missing `ast`")
+ .decode(self).rvalue_promotable_to_static
+ }
+
+ pub fn is_item_mir_available(&self, id: DefIndex) -> bool {
+ !self.is_proc_macro(id) &&
+ self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some()
+ }
+
pub fn maybe_get_optimized_mir(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
id: DefIndex)
None => None,
};
- // FIXME(eddyb) Make this O(1) instead of O(n).
let dep_node = self.metadata_dep_node(GlobalMetaDataKind::Impls);
- for trait_impls in self.root.impls.get(dep_graph, dep_node).decode(self) {
- if filter.is_some() && filter != Some(trait_impls.trait_id) {
- continue;
- }
-
- result.extend(trait_impls.impls.decode(self).map(|index| self.local_def_id(index)));
- if filter.is_some() {
- break;
+ if let Some(filter) = filter {
+ if let Some(impls) = self.trait_impls
+ .get(dep_graph, dep_node)
+ .get(&filter) {
+ result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
+ }
+ } else {
+ for impls in self.trait_impls.get(dep_graph, dep_node).values() {
+ result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
}
}
}
let trait_ref = tcx.impl_trait_ref(def_id);
let parent = if let Some(trait_ref) = trait_ref {
let trait_def = tcx.trait_def(trait_ref.def_id);
- trait_def.ancestors(def_id).skip(1).next().and_then(|node| {
+ trait_def.ancestors(tcx, def_id).skip(1).next().and_then(|node| {
match node {
specialization_graph::Node::Impl(parent) => Some(parent),
_ => None,
/// Encodes an index, mapping each trait to its (local) implementations.
fn encode_impls(&mut self, _: ()) -> LazySeq<TraitImpls> {
+ debug!("IsolatedEncoder::encode_impls()");
+ let tcx = self.tcx;
let mut visitor = ImplVisitor {
- tcx: self.tcx,
+ tcx: tcx,
impls: FxHashMap(),
};
- self.tcx.hir.krate().visit_all_item_likes(&mut visitor);
+ tcx.hir.krate().visit_all_item_likes(&mut visitor);
+
+ let mut all_impls: Vec<_> = visitor.impls.into_iter().collect();
- let all_impls: Vec<_> = visitor.impls
+ // Bring everything into deterministic order for hashing
+ all_impls.sort_unstable_by_key(|&(trait_def_id, _)| {
+ tcx.def_path_hash(trait_def_id)
+ });
+
+ let all_impls: Vec<_> = all_impls
.into_iter()
- .map(|(trait_def_id, impls)| {
+ .map(|(trait_def_id, mut impls)| {
+ // Bring everything into deterministic order for hashing
+ impls.sort_unstable_by_key(|&def_index| {
+ tcx.hir.definitions().def_path_hash(def_index)
+ });
+
TraitImpls {
trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
- impls: self.lazy_seq(impls),
+ impls: self.lazy_seq_from_slice(&impls[..]),
}
})
.collect();
- self.lazy_seq(all_impls)
+ self.lazy_seq_from_slice(&all_impls[..])
}
// Encodes all symbols exported from this crate into the metadata.
// except according to those terms.
#![crate_name = "rustc_metadata"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(proc_macro_internals)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
#![feature(specialization)]
-#![feature(staged_api)]
#![feature(discriminant_value)]
+#![feature(rustc_private)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(staged_api))]
+#![feature(sort_unstable)]
#[macro_use]
extern crate log;
extern crate syntax_pos;
extern crate flate;
extern crate serialize as rustc_serialize; // used by deriving
+extern crate owning_ref;
extern crate rustc_errors as errors;
extern crate syntax_ext;
extern crate proc_macro;
extern crate rustc_back;
extern crate rustc_const_math;
extern crate rustc_data_structures;
-extern crate rustc_llvm;
mod diagnostics;
use schema::{METADATA_HEADER, rustc_version};
use rustc::hir::svh::Svh;
+use rustc::middle::cstore::MetadataLoader;
use rustc::session::{config, Session};
use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch};
use rustc::session::search_paths::PathKind;
-use rustc::util::common;
use rustc::util::nodemap::FxHashMap;
-use rustc_llvm as llvm;
-use rustc_llvm::{False, ObjectFile, mk_section_iter};
-use rustc_llvm::archive_ro::ArchiveRO;
use errors::DiagnosticBuilder;
use syntax::symbol::Symbol;
use syntax_pos::Span;
use std::fs::{self, File};
use std::io::{self, Read};
use std::path::{Path, PathBuf};
-use std::ptr;
-use std::slice;
use std::time::Instant;
use flate;
+use owning_ref::{ErasedBoxRef, OwningRef};
pub struct CrateMismatch {
path: PathBuf,
pub rejected_via_filename: Vec<CrateMismatch>,
pub should_match_name: bool,
pub is_proc_macro: Option<bool>,
-}
-
-pub struct ArchiveMetadata {
- _archive: ArchiveRO,
- // points into self._archive
- data: *const [u8],
+ pub metadata_loader: &'a MetadataLoader,
}
pub struct CratePaths {
pub rmeta: Option<PathBuf>,
}
-pub const METADATA_FILENAME: &'static str = "rust.metadata.bin";
-
#[derive(Copy, Clone, PartialEq)]
enum CrateFlavor {
Rlib,
let mut err: Option<DiagnosticBuilder> = None;
for (lib, kind) in m {
info!("{} reading metadata from: {}", flavor, lib.display());
- let (hash, metadata) = match get_metadata_section(self.target, flavor, &lib) {
- Ok(blob) => {
- if let Some(h) = self.crate_matches(&blob, &lib) {
- (h, blob)
- } else {
- info!("metadata mismatch");
+ let (hash, metadata) =
+ match get_metadata_section(self.target, flavor, &lib, self.metadata_loader) {
+ Ok(blob) => {
+ if let Some(h) = self.crate_matches(&blob, &lib) {
+ (h, blob)
+ } else {
+ info!("metadata mismatch");
+ continue;
+ }
+ }
+ Err(err) => {
+ info!("no metadata found: {}", err);
continue;
}
- }
- Err(err) => {
- info!("no metadata found: {}", err);
- continue;
- }
- };
+ };
// If we see multiple hashes, emit an error about duplicate candidates.
if slot.as_ref().map_or(false, |s| s.0 != hash) {
let mut e = struct_span_err!(self.sess,
err.note(&format!("crate name: {}", name));
}
-impl ArchiveMetadata {
- fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
- let data = {
- let section = ar.iter()
- .filter_map(|s| s.ok())
- .find(|sect| sect.name() == Some(METADATA_FILENAME));
- match section {
- Some(s) => s.data() as *const [u8],
- None => {
- debug!("didn't find '{}' in the archive", METADATA_FILENAME);
- return None;
- }
- }
- };
-
- Some(ArchiveMetadata {
- _archive: ar,
- data: data,
- })
- }
-
- pub fn as_slice<'a>(&'a self) -> &'a [u8] {
- unsafe { &*self.data }
- }
-}
-
-fn verify_decompressed_encoding_version(blob: &MetadataBlob,
- filename: &Path)
- -> Result<(), String> {
- if !blob.is_compatible() {
- Err((format!("incompatible metadata version found: '{}'",
- filename.display())))
- } else {
- Ok(())
- }
-}
-
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(target: &Target,
flavor: CrateFlavor,
- filename: &Path)
+ filename: &Path,
+ loader: &MetadataLoader)
-> Result<MetadataBlob, String> {
let start = Instant::now();
- let ret = get_metadata_section_imp(target, flavor, filename);
+ let ret = get_metadata_section_imp(target, flavor, filename, loader);
info!("reading {:?} => {:?}",
filename.file_name().unwrap(),
start.elapsed());
fn get_metadata_section_imp(target: &Target,
flavor: CrateFlavor,
- filename: &Path)
+ filename: &Path,
+ loader: &MetadataLoader)
-> Result<MetadataBlob, String> {
if !filename.exists() {
return Err(format!("no such file: '{}'", filename.display()));
}
- if flavor == CrateFlavor::Rlib {
- // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
- // internally to read the file. We also avoid even using a memcpy by
- // just keeping the archive along while the metadata is in use.
- let archive = match ArchiveRO::open(filename) {
- Some(ar) => ar,
- None => {
- debug!("llvm didn't like `{}`", filename.display());
- return Err(format!("failed to read rlib metadata: '{}'", filename.display()));
+ let raw_bytes: ErasedBoxRef<[u8]> = match flavor {
+ CrateFlavor::Rlib => loader.get_rlib_metadata(target, filename)?,
+ CrateFlavor::Dylib => {
+ let buf = loader.get_dylib_metadata(target, filename)?;
+ // The header is uncompressed
+ let header_len = METADATA_HEADER.len();
+ debug!("checking {} bytes of metadata-version stamp", header_len);
+ let header = &buf[..cmp::min(header_len, buf.len())];
+ if header != METADATA_HEADER {
+ return Err(format!("incompatible metadata version found: '{}'",
+ filename.display()));
}
- };
- return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) {
- None => Err(format!("failed to read rlib metadata: '{}'", filename.display())),
- Some(blob) => {
- verify_decompressed_encoding_version(&blob, filename)?;
- Ok(blob)
- }
- };
- } else if flavor == CrateFlavor::Rmeta {
- let mut file = File::open(filename).map_err(|_|
- format!("could not open file: '{}'", filename.display()))?;
- let mut buf = vec![];
- file.read_to_end(&mut buf).map_err(|_|
- format!("failed to read rlib metadata: '{}'", filename.display()))?;
- let blob = MetadataBlob::Raw(buf);
- verify_decompressed_encoding_version(&blob, filename)?;
- return Ok(blob);
- }
- unsafe {
- let buf = common::path2cstr(filename);
- let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr());
- if mb as isize == 0 {
- return Err(format!("error reading library: '{}'", filename.display()));
- }
- let of = match ObjectFile::new(mb) {
- Some(of) => of,
- _ => {
- return Err((format!("provided path not an object file: '{}'", filename.display())))
- }
- };
- let si = mk_section_iter(of.llof);
- while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
- let mut name_buf = ptr::null();
- let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
- let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec();
- let name = String::from_utf8(name).unwrap();
- debug!("get_metadata_section: name {}", name);
- if read_meta_section_name(target) == name {
- let cbuf = llvm::LLVMGetSectionContents(si.llsi);
- let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
- let cvbuf: *const u8 = cbuf as *const u8;
- let vlen = METADATA_HEADER.len();
- debug!("checking {} bytes of metadata-version stamp", vlen);
- let minsz = cmp::min(vlen, csz);
- let buf0 = slice::from_raw_parts(cvbuf, minsz);
- let version_ok = buf0 == METADATA_HEADER;
- if !version_ok {
- return Err((format!("incompatible metadata version found: '{}'",
- filename.display())));
- }
- let cvbuf1 = cvbuf.offset(vlen as isize);
- debug!("inflating {} bytes of compressed metadata", csz - vlen);
- let bytes = slice::from_raw_parts(cvbuf1, csz - vlen);
- match flate::inflate_bytes(bytes) {
- Ok(inflated) => {
- let blob = MetadataBlob::Inflated(inflated);
- verify_decompressed_encoding_version(&blob, filename)?;
- return Ok(blob);
- }
- Err(_) => {}
+ // Header is okay -> inflate the actual metadata
+ let compressed_bytes = &buf[header_len..];
+ debug!("inflating {} bytes of compressed metadata", compressed_bytes.len());
+ match flate::inflate_bytes(compressed_bytes) {
+ Ok(inflated) => {
+ let buf = unsafe { OwningRef::new_assert_stable_address(inflated) };
+ buf.map_owner_box().erase_owner()
+ }
+ Err(_) => {
+ return Err(format!("failed to decompress metadata: {}", filename.display()));
}
}
- llvm::LLVMMoveToNextSection(si.llsi);
}
- Err(format!("metadata not found: '{}'", filename.display()))
- }
-}
-
-pub fn meta_section_name(target: &Target) -> &'static str {
- // Historical note:
- //
- // When using link.exe it was seen that the section name `.note.rustc`
- // was getting shortened to `.note.ru`, and according to the PE and COFF
- // specification:
- //
- // > Executable images do not use a string table and do not support
- // > section names longer than 8 characters
- //
- // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx
- //
- // As a result, we choose a slightly shorter name! As to why
- // `.note.rustc` works on MinGW, that's another good question...
-
- if target.options.is_like_osx {
- "__DATA,.rustc"
+ CrateFlavor::Rmeta => {
+ let mut file = File::open(filename).map_err(|_|
+ format!("could not open file: '{}'", filename.display()))?;
+ let mut buf = vec![];
+ file.read_to_end(&mut buf).map_err(|_|
+ format!("failed to read rmeta metadata: '{}'", filename.display()))?;
+ OwningRef::new(buf).map_owner_box().erase_owner()
+ }
+ };
+ let blob = MetadataBlob(raw_bytes);
+ if blob.is_compatible() {
+ Ok(blob)
} else {
- ".rustc"
+ Err(format!("incompatible metadata version found: '{}'", filename.display()))
}
}
-pub fn read_meta_section_name(_target: &Target) -> &'static str {
- ".rustc"
-}
-
// A diagnostic function for dumping crate metadata to an output stream
-pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> io::Result<()> {
+pub fn list_file_metadata(target: &Target,
+ path: &Path,
+ loader: &MetadataLoader,
+ out: &mut io::Write)
+ -> io::Result<()> {
let filename = path.file_name().unwrap().to_str().unwrap();
let flavor = if filename.ends_with(".rlib") {
CrateFlavor::Rlib
} else {
CrateFlavor::Dylib
};
- match get_metadata_section(target, flavor, path) {
+ match get_metadata_section(target, flavor, path, loader) {
Ok(metadata) => metadata.list_crate_metadata(out),
Err(msg) => write!(out, "{}\n", msg),
}
}
}
+impl<'a, 'tcx, T> HashStable<StableHashingContext<'a, 'tcx>> for Tracked<T>
+ where T: HashStable<StableHashingContext<'a, 'tcx>>
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'a, 'tcx>,
+ hasher: &mut StableHasher<W>) {
+ let Tracked {
+ ref state
+ } = *self;
+
+ state.hash_stable(hcx, hasher);
+ }
+}
+
#[derive(RustcEncodable, RustcDecodable)]
pub struct CrateRoot {
temp: &Lvalue<'tcx>,
constant: Constant<'tcx>) {
self.push_assign(block, source_info, temp,
- Rvalue::Use(Operand::Constant(constant)));
+ Rvalue::Use(Operand::Constant(box constant)));
}
pub fn push_assign_unit(&mut self,
source_info: SourceInfo,
lvalue: &Lvalue<'tcx>) {
self.push_assign(block, source_info, lvalue, Rvalue::Aggregate(
- AggregateKind::Tuple, vec![]
+ box AggregateKind::Tuple, vec![]
));
}
/// The operand is known to be live until the end of `scope`.
pub fn as_operand<M>(&mut self,
block: BasicBlock,
- scope: Option<CodeExtent<'tcx>>,
+ scope: Option<CodeExtent>,
expr: M) -> BlockAnd<Operand<'tcx>>
where M: Mirror<'tcx, Output = Expr<'tcx>>
{
fn expr_as_operand(&mut self,
mut block: BasicBlock,
- scope: Option<CodeExtent<'tcx>>,
+ scope: Option<CodeExtent>,
expr: Expr<'tcx>)
-> BlockAnd<Operand<'tcx>> {
debug!("expr_as_operand(block={:?}, expr={:?})", block, expr);
match category {
Category::Constant => {
let constant = this.as_constant(expr);
- block.and(Operand::Constant(constant))
+ block.and(Operand::Constant(box constant))
}
Category::Lvalue |
Category::Rvalue(..) => {
}
/// Compile `expr`, yielding an rvalue.
- pub fn as_rvalue<M>(&mut self, block: BasicBlock, scope: Option<CodeExtent<'tcx>>, expr: M)
+ pub fn as_rvalue<M>(&mut self, block: BasicBlock, scope: Option<CodeExtent>, expr: M)
-> BlockAnd<Rvalue<'tcx>>
where M: Mirror<'tcx, Output = Expr<'tcx>>
{
fn expr_as_rvalue(&mut self,
mut block: BasicBlock,
- scope: Option<CodeExtent<'tcx>>,
+ scope: Option<CodeExtent>,
expr: Expr<'tcx>)
-> BlockAnd<Rvalue<'tcx>> {
debug!("expr_as_rvalue(block={:?}, expr={:?})", block, expr);
.map(|f| unpack!(block = this.as_operand(block, scope, f)))
.collect();
- block.and(Rvalue::Aggregate(AggregateKind::Array(el_ty), fields))
+ block.and(Rvalue::Aggregate(box AggregateKind::Array(el_ty), fields))
}
ExprKind::Tuple { fields } => { // see (*) above
// first process the set of fields
.map(|f| unpack!(block = this.as_operand(block, scope, f)))
.collect();
- block.and(Rvalue::Aggregate(AggregateKind::Tuple, fields))
+ block.and(Rvalue::Aggregate(box AggregateKind::Tuple, fields))
}
ExprKind::Closure { closure_id, substs, upvars } => { // see (*) above
let upvars =
upvars.into_iter()
.map(|upvar| unpack!(block = this.as_operand(block, scope, upvar)))
.collect();
- block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars))
+ block.and(Rvalue::Aggregate(box AggregateKind::Closure(closure_id, substs), upvars))
}
ExprKind::Adt {
adt_def, variant_index, substs, fields, base
field_names.iter().filter_map(|n| fields_map.get(n).cloned()).collect()
};
- let adt = AggregateKind::Adt(adt_def, variant_index, substs, active_field_index);
+ let adt =
+ box AggregateKind::Adt(adt_def, variant_index, substs, active_field_index);
block.and(Rvalue::Aggregate(adt, fields))
}
ExprKind::Assign { .. } |
/// up rvalues so as to freeze the value that will be consumed.
pub fn as_temp<M>(&mut self,
block: BasicBlock,
- temp_lifetime: Option<CodeExtent<'tcx>>,
+ temp_lifetime: Option<CodeExtent>,
expr: M)
-> BlockAnd<Lvalue<'tcx>>
where M: Mirror<'tcx, Output = Expr<'tcx>>
fn expr_as_temp(&mut self,
mut block: BasicBlock,
- temp_lifetime: Option<CodeExtent<'tcx>>,
+ temp_lifetime: Option<CodeExtent>,
expr: Expr<'tcx>)
-> BlockAnd<Lvalue<'tcx>> {
debug!("expr_as_temp(block={:?}, expr={:?})", block, expr);
this.cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::InlineAsm {
- asm: asm.clone(),
+ asm: box asm.clone(),
outputs: outputs,
inputs: inputs
},
let eq_block = self.cfg.start_new_block();
let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, source_info, TerminatorKind::Call {
- func: Operand::Constant(Constant {
+ func: Operand::Constant(box Constant {
span: test.span,
ty: mty,
literal: method
ty: Ty<'tcx>,
literal: Literal<'tcx>)
-> Operand<'tcx> {
- let constant = Constant {
+ let constant = box Constant {
span: span,
ty: ty,
literal: literal,
}
pub fn unit_rvalue(&mut self) -> Rvalue<'tcx> {
- Rvalue::Aggregate(AggregateKind::Tuple, vec![])
+ Rvalue::Aggregate(box AggregateKind::Tuple, vec![])
}
// Returns a zero literal operand for the appropriate type, works for
use hair::Pattern;
use rustc::hir;
use rustc::hir::def_id::DefId;
-use rustc::middle::region::{CodeExtent, CodeExtentData};
+use rustc::middle::region::CodeExtent;
use rustc::mir::*;
use rustc::mir::transform::MirSource;
use rustc::mir::visit::MutVisitor;
{
let span = tcx.hir.span(ctor_id);
if let hir::VariantData::Tuple(ref fields, ctor_id) = *v {
- let pe = ty::ParameterEnvironment::for_item(tcx, ctor_id);
+ let pe = tcx.parameter_environment(tcx.hir.local_def_id(ctor_id));
tcx.infer_ctxt(pe, Reveal::UserFacing).enter(|infcx| {
let (mut mir, src) =
shim::build_adt_ctor(&infcx, ctor_id, fields, span);
-> Ty<'tcx> {
let closure_ty = tcx.body_tables(body_id).node_id_to_type(closure_expr_id);
+ let closure_def_id = tcx.hir.local_def_id(closure_expr_id);
let region = ty::ReFree(ty::FreeRegion {
- scope: Some(tcx.item_extent(body_id.node_id)),
+ scope: closure_def_id,
bound_region: ty::BoundRegion::BrEnv,
});
let region = tcx.mk_region(region);
- match tcx.closure_kind(tcx.hir.local_def_id(closure_expr_id)) {
+ match tcx.closure_kind(closure_def_id) {
ty::ClosureKind::Fn =>
tcx.mk_ref(region,
ty::TypeAndMut { ty: closure_ty,
let span = tcx.hir.span(fn_id);
let mut builder = Builder::new(hir.clone(), span, arguments.len(), return_ty);
- let call_site_extent =
- tcx.intern_code_extent(
- CodeExtentData::CallSiteScope { fn_id: fn_id, body_id: body.value.id });
- let arg_extent =
- tcx.intern_code_extent(
- CodeExtentData::ParameterScope { fn_id: fn_id, body_id: body.value.id });
+ let call_site_extent = CodeExtent::CallSiteScope(body.id());
+ let arg_extent = CodeExtent::ParameterScope(body.id());
let mut block = START_BLOCK;
unpack!(block = builder.in_scope(call_site_extent, block, |builder| {
unpack!(block = builder.in_scope(arg_extent, block, |builder| {
let span = tcx.hir.span(owner_id);
let mut builder = Builder::new(hir.clone(), span, 0, ty);
- let extent = hir.region_maps.temporary_scope(tcx, ast_expr.id)
- .unwrap_or(tcx.item_extent(owner_id));
let mut block = START_BLOCK;
- let _ = builder.in_scope(extent, block, |builder| {
- let expr = builder.hir.mirror(ast_expr);
- unpack!(block = builder.into(&Lvalue::Local(RETURN_POINTER), block, expr));
+ let expr = builder.hir.mirror(ast_expr);
+ unpack!(block = builder.into_expr(&Lvalue::Local(RETURN_POINTER), block, expr));
- let source_info = builder.source_info(span);
- let return_block = builder.return_block();
- builder.cfg.terminate(block, source_info,
- TerminatorKind::Goto { target: return_block });
- builder.cfg.terminate(return_block, source_info,
- TerminatorKind::Return);
+ let source_info = builder.source_info(span);
+ builder.cfg.terminate(block, source_info, TerminatorKind::Return);
- return_block.unit()
- });
+ // Constants can't `return` so a return block should not be created.
+ assert_eq!(builder.cached_return_block, None);
builder.finish(vec![], ty)
}
fn args_and_body(&mut self,
mut block: BasicBlock,
arguments: &[(Ty<'gcx>, Option<&'gcx hir::Pat>)],
- argument_extent: CodeExtent<'tcx>,
+ argument_extent: CodeExtent,
ast_body: &'gcx hir::Expr)
-> BlockAnd<()>
{
*/
use build::{BlockAnd, BlockAndExtension, Builder, CFG};
-use rustc::middle::region::{CodeExtent, CodeExtentData};
+use rustc::middle::region::CodeExtent;
use rustc::middle::lang_items;
use rustc::middle::const_val::ConstVal;
use rustc::ty::subst::{Kind, Subst};
visibility_scope: VisibilityScope,
/// the extent of this scope within source code.
- extent: CodeExtent<'tcx>,
+ extent: CodeExtent,
/// Whether there's anything to do for the cleanup path, that is,
/// when unwinding through this scope. This includes destructors,
free: Option<FreeData<'tcx>>,
/// The cache for drop chain on “normal” exit into a particular BasicBlock.
- cached_exits: FxHashMap<(BasicBlock, CodeExtent<'tcx>), BasicBlock>,
+ cached_exits: FxHashMap<(BasicBlock, CodeExtent), BasicBlock>,
}
struct DropData<'tcx> {
#[derive(Clone, Debug)]
pub struct BreakableScope<'tcx> {
/// Extent of the loop
- pub extent: CodeExtent<'tcx>,
+ pub extent: CodeExtent,
/// Where the body of the loop begins. `None` if block
pub continue_block: Option<BasicBlock>,
/// Block to branch into when the loop or block terminates (either by being `break`-en out
/// Convenience wrapper that pushes a scope and then executes `f`
/// to build its contents, popping the scope afterwards.
pub fn in_scope<F, R>(&mut self,
- extent: CodeExtent<'tcx>,
+ extent: CodeExtent,
mut block: BasicBlock,
f: F)
-> BlockAnd<R>
    /// scope and call `pop_scope` afterwards. Note that these two
    /// calls must be paired; using `in_scope` as a convenience
    /// wrapper may be preferable.
- pub fn push_scope(&mut self, extent: CodeExtent<'tcx>) {
+ pub fn push_scope(&mut self, extent: CodeExtent) {
debug!("push_scope({:?})", extent);
let vis_scope = self.visibility_scope;
self.scopes.push(Scope {
/// drops onto the end of `block` that are needed. This must
/// match 1-to-1 with `push_scope`.
pub fn pop_scope(&mut self,
- extent: CodeExtent<'tcx>,
+ extent: CodeExtent,
mut block: BasicBlock)
-> BlockAnd<()> {
debug!("pop_scope({:?}, {:?})", extent, block);
/// module comment for details.
pub fn exit_scope(&mut self,
span: Span,
- extent: CodeExtent<'tcx>,
+ extent: CodeExtent,
mut block: BasicBlock,
target: BasicBlock) {
debug!("exit_scope(extent={:?}, block={:?}, target={:?})", extent, block, target);
/// resolving `break` and `continue`.
pub fn find_breakable_scope(&mut self,
span: Span,
- label: CodeExtent<'tcx>)
+ label: CodeExtent)
-> &mut BreakableScope<'tcx> {
// find the loop-scope with the correct id
self.breakable_scopes.iter_mut()
/// Returns the extent of the scope which should be exited by a
/// return.
- pub fn extent_of_return_scope(&self) -> CodeExtent<'tcx> {
+ pub fn extent_of_return_scope(&self) -> CodeExtent {
// The outermost scope (`scopes[0]`) will be the `CallSiteScope`.
// We want `scopes[1]`, which is the `ParameterScope`.
assert!(self.scopes.len() >= 2);
- assert!(match *self.scopes[1].extent {
- CodeExtentData::ParameterScope { .. } => true,
+ assert!(match self.scopes[1].extent {
+ CodeExtent::ParameterScope(_) => true,
_ => false,
});
self.scopes[1].extent
/// Returns the topmost active scope, which is known to be alive until
/// the next scope expression.
- pub fn topmost_scope(&self) -> CodeExtent<'tcx> {
+ pub fn topmost_scope(&self) -> CodeExtent {
self.scopes.last().expect("topmost_scope: no scopes present").extent
}
/// `extent`.
pub fn schedule_drop(&mut self,
span: Span,
- extent: CodeExtent<'tcx>,
+ extent: CodeExtent,
lvalue: &Lvalue<'tcx>,
lvalue_ty: Ty<'tcx>) {
let needs_drop = self.hir.needs_drop(lvalue_ty);
/// There may only be one “free” scheduled in any given scope.
pub fn schedule_box_free(&mut self,
span: Span,
- extent: CodeExtent<'tcx>,
+ extent: CodeExtent,
value: &Lvalue<'tcx>,
item_ty: Ty<'tcx>) {
for scope in self.scopes.iter_mut().rev() {
let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
let substs = tcx.intern_substs(&[Kind::from(data.item_ty)]);
TerminatorKind::Call {
- func: Operand::Constant(Constant {
+ func: Operand::Constant(box Constant {
span: data.span,
ty: tcx.type_of(free_func).subst(tcx, substs),
literal: Literal::Value {
use hair::*;
use hair::cx::Cx;
use hair::cx::to_ref::ToRef;
-use rustc::middle::region::{BlockRemainder, CodeExtentData};
+use rustc::middle::region::{BlockRemainder, CodeExtent};
use rustc::hir;
use syntax::ast;
let stmts = mirror_stmts(cx, self.id, &*self.stmts);
Block {
targeted_by_break: self.targeted_by_break,
- extent: cx.tcx.node_extent(self.id),
+ extent: CodeExtent::Misc(self.id),
span: self.span,
stmts: stmts,
expr: self.expr.to_ref(),
result.push(StmtRef::Mirror(Box::new(Stmt {
span: stmt.span,
kind: StmtKind::Expr {
- scope: cx.tcx.node_extent(id),
+ scope: CodeExtent::Misc(id),
expr: expr.to_ref(),
},
})))
// ignore for purposes of the MIR
}
hir::DeclLocal(ref local) => {
- let remainder_extent = CodeExtentData::Remainder(BlockRemainder {
+ let remainder_extent = CodeExtent::Remainder(BlockRemainder {
block: block_id,
first_statement_index: index as u32,
});
- let remainder_extent =
- cx.tcx.intern_code_extent(remainder_extent);
let pattern = Pattern::from_hir(cx.tcx, cx.tables(), &local.pat);
result.push(StmtRef::Mirror(Box::new(Stmt {
span: stmt.span,
kind: StmtKind::Let {
remainder_scope: remainder_extent,
- init_scope: cx.tcx.node_extent(id),
+ init_scope: CodeExtent::Misc(id),
pattern: pattern,
initializer: local.init.to_ref(),
},
block: &'tcx hir::Block)
-> ExprRef<'tcx> {
let block_ty = cx.tables().node_id_to_type(block.id);
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, block.id);
+ let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(block.id);
let expr = Expr {
ty: block_ty,
temp_lifetime: temp_lifetime,
use hair::cx::Cx;
use hair::cx::block;
use hair::cx::to_ref::ToRef;
-use rustc::hir::map;
use rustc::hir::def::{Def, CtorKind};
use rustc::middle::const_val::ConstVal;
use rustc::ty::{self, AdtKind, VariantDef, Ty};
type Output = Expr<'tcx>;
fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> {
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, self.id);
- let expr_extent = cx.tcx.node_extent(self.id);
+ let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(self.id);
+ let expr_extent = CodeExtent::Misc(self.id);
debug!("Expr::make_mirror(): id={}, span={:?}", self.id, self.span);
expr: &'tcx hir::Expr)
-> Expr<'tcx> {
let expr_ty = cx.tables().expr_ty(expr);
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, expr.id);
+ let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id);
let kind = match expr.node {
// Here comes the interesting stuff:
match dest.target_id {
hir::ScopeTarget::Block(target_id) |
hir::ScopeTarget::Loop(hir::LoopIdResult::Ok(target_id)) => ExprKind::Break {
- label: cx.tcx.node_extent(target_id),
+ label: CodeExtent::Misc(target_id),
value: value.to_ref(),
},
hir::ScopeTarget::Loop(hir::LoopIdResult::Err(err)) =>
match dest.target_id {
hir::ScopeTarget::Block(_) => bug!("cannot continue to blocks"),
hir::ScopeTarget::Loop(hir::LoopIdResult::Ok(loop_id)) => ExprKind::Continue {
- label: cx.tcx.node_extent(loop_id),
+ label: CodeExtent::Misc(loop_id),
},
hir::ScopeTarget::Loop(hir::LoopIdResult::Err(err)) =>
bug!("invalid loop id for continue: {}", err)
hir::ExprBox(ref value) => {
ExprKind::Box {
value: value.to_ref(),
- value_extents: cx.tcx.node_extent(value.id),
+ value_extents: CodeExtent::Misc(value.id),
}
}
hir::ExprArray(ref fields) => ExprKind::Array { fields: fields.to_ref() },
method_call: ty::MethodCall)
-> Expr<'tcx> {
let callee = cx.tables().method_map[&method_call];
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, expr.id);
+ let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id);
Expr {
temp_lifetime: temp_lifetime,
temp_lifetime_was_shrunk: was_shrunk,
expr: &'tcx hir::Expr,
def: Def)
-> ExprKind<'tcx> {
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, expr.id);
+ let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id);
match def {
Def::Local(def_id) => {
closure_expr_id);
let var_ty = cx.tables().node_id_to_type(id_var);
- let body_id = match cx.tcx.hir.find(closure_expr_id) {
- Some(map::NodeExpr(expr)) => {
- match expr.node {
- hir::ExprClosure(.., body, _) => body.node_id,
- _ => {
- span_bug!(expr.span, "closure expr is not a closure expr");
- }
- }
- }
- _ => {
- span_bug!(expr.span, "ast-map has garbage for closure expr");
- }
- };
-
// FIXME free regions in closures are not right
let closure_ty = cx.tables().node_id_to_type(closure_expr_id);
// FIXME we're just hard-coding the idea that the
// signature will be &self or &mut self and hence will
// have a bound region with number 0
+ let closure_def_id = cx.tcx.hir.local_def_id(closure_expr_id);
let region = ty::ReFree(ty::FreeRegion {
- scope: Some(cx.tcx.node_extent(body_id)),
+ scope: closure_def_id,
bound_region: ty::BoundRegion::BrAnon(0),
});
let region = cx.tcx.mk_region(region);
- let self_expr = match cx.tcx.closure_kind(cx.tcx.hir.local_def_id(closure_expr_id)) {
+ let self_expr = match cx.tcx.closure_kind(closure_def_id) {
ty::ClosureKind::Fn => {
let ref_closure_ty = cx.tcx.mk_ref(region,
ty::TypeAndMut {
PassArgs::ByRef => {
let region = cx.tcx.node_scope_region(expr.id);
let (temp_lifetime, was_shrunk) =
- cx.region_maps.temporary_scope2(cx.tcx, expr.id);
+ cx.region_maps.temporary_scope2(expr.id);
argrefs.extend(args.iter()
.map(|arg| {
let arg_ty = cx.tables().expr_ty_adjusted(arg);
// construct the complete expression `foo()` for the overloaded call,
// which will yield the &T type
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, expr.id);
+ let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id);
let ref_kind = overloaded_operator(cx, expr, method_call, pass_args, receiver, args);
let ref_expr = Expr {
temp_lifetime: temp_lifetime,
closure_expr_id: closure_expr.id,
};
let upvar_capture = cx.tables().upvar_capture(upvar_id).unwrap();
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(cx.tcx, closure_expr.id);
+ let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(closure_expr.id);
let var_ty = cx.tables().node_id_to_type(id_var);
let captured_var = Expr {
temp_lifetime: temp_lifetime,
pub struct Cx<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>,
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- pub region_maps: Rc<RegionMaps<'tcx>>,
+ pub region_maps: Rc<RegionMaps>,
constness: hir::Constness,
/// True if this constant/function needs overflow checks.
#[derive(Clone, Debug)]
pub struct Block<'tcx> {
pub targeted_by_break: bool,
- pub extent: CodeExtent<'tcx>,
+ pub extent: CodeExtent,
pub span: Span,
pub stmts: Vec<StmtRef<'tcx>>,
pub expr: Option<ExprRef<'tcx>>,
pub enum StmtKind<'tcx> {
Expr {
/// scope for this statement; may be used as lifetime of temporaries
- scope: CodeExtent<'tcx>,
+ scope: CodeExtent,
/// expression being evaluated in this statement
expr: ExprRef<'tcx>,
Let {
/// scope for variables bound in this let; covers this and
/// remaining statements in block
- remainder_scope: CodeExtent<'tcx>,
+ remainder_scope: CodeExtent,
/// scope for the initialization itself; might be used as
/// lifetime of temporaries
- init_scope: CodeExtent<'tcx>,
+ init_scope: CodeExtent,
/// let <PAT> = ...
pattern: Pattern<'tcx>,
/// lifetime of this expression if it should be spilled into a
/// temporary; should be None only if in a constant context
- pub temp_lifetime: Option<CodeExtent<'tcx>>,
+ pub temp_lifetime: Option<CodeExtent>,
/// whether this temp lifetime was shrunk by #36082.
pub temp_lifetime_was_shrunk: bool,
#[derive(Clone, Debug)]
pub enum ExprKind<'tcx> {
Scope {
- extent: CodeExtent<'tcx>,
+ extent: CodeExtent,
value: ExprRef<'tcx>,
},
Box {
value: ExprRef<'tcx>,
- value_extents: CodeExtent<'tcx>,
+ value_extents: CodeExtent,
},
Call {
ty: ty::Ty<'tcx>,
arg: ExprRef<'tcx>,
},
Break {
- label: CodeExtent<'tcx>,
+ label: CodeExtent,
value: Option<ExprRef<'tcx>>,
},
Continue {
- label: CodeExtent<'tcx>,
+ label: CodeExtent,
},
Return {
value: Option<ExprRef<'tcx>>,
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![deny(warnings)]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![feature(associated_consts)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(i128_type)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(placement_in_syntax)]
#![feature(collection_placement)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
#[macro_use] extern crate log;
extern crate graphviz as dot;
#[macro_use]
use rustc::mir::*;
use rustc::mir::transform::MirSource;
use rustc::ty::{self, Ty};
-use rustc::ty::subst::{Kind, Subst};
+use rustc::ty::subst::{Kind, Subst, Substs};
use rustc::ty::maps::Providers;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
{
debug!("make_shim({:?})", instance);
let did = instance.def_id();
- let span = tcx.def_span(did);
- let param_env = tcx.construct_parameter_environment(span, did, None);
+ let param_env = tcx.parameter_environment(did);
let mut result = match instance {
ty::InstanceDef::Item(..) =>
build_call_shim(
tcx,
- ¶m_env,
def_id,
adjustment,
CallKind::Indirect,
// trans::mir knows to turn to an actual virtual call.
build_call_shim(
tcx,
- ¶m_env,
def_id,
Adjustment::Identity,
CallKind::Direct(def_id),
build_call_shim(
tcx,
- ¶m_env,
call_once,
Adjustment::RefMut,
CallKind::Direct(call_mut),
let substs = if let Some(ty) = ty {
tcx.mk_substs(iter::once(Kind::from(ty)))
} else {
- param_env.free_substs
+ Substs::identity_for_item(tcx, def_id)
};
let fn_ty = tcx.type_of(def_id).subst(tcx, substs);
let sig = tcx.erase_late_bound_regions(&fn_ty.fn_sig());
/// If `untuple_args` is a vec of types, the second argument of the
/// function will be untupled as these types.
fn build_call_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
- param_env: &ty::ParameterEnvironment<'tcx>,
def_id: DefId,
rcvr_adjustment: Adjustment,
call_kind: CallKind,
call_kind={:?}, untuple_args={:?})",
def_id, rcvr_adjustment, call_kind, untuple_args);
- let fn_ty = tcx.type_of(def_id).subst(tcx, param_env.free_substs);
+ let fn_ty = tcx.type_of(def_id);
let sig = tcx.erase_late_bound_regions(&fn_ty.fn_sig());
let span = tcx.def_span(def_id);
let (callee, mut args) = match call_kind {
CallKind::Indirect => (rcvr, vec![]),
CallKind::Direct(def_id) => (
- Operand::Constant(Constant {
+ Operand::Constant(box Constant {
span: span,
- ty: tcx.type_of(def_id).subst(tcx, param_env.free_substs),
+ ty: tcx.type_of(def_id),
literal: Literal::Value {
- value: ConstVal::Function(def_id, param_env.free_substs),
+ value: ConstVal::Function(def_id,
+ Substs::identity_for_item(tcx, def_id)),
},
}),
vec![rcvr]
kind: StatementKind::Assign(
Lvalue::Local(RETURN_POINTER),
Rvalue::Aggregate(
- AggregateKind::Adt(adt_def, variant_no, substs, None),
+ box AggregateKind::Adt(adt_def, variant_no, substs, None),
(1..sig.inputs().len()+1).map(|i| {
Operand::Consume(Lvalue::Local(Local::new(i)))
}).collect()
_ => return,
}
- *operand = Operand::Constant(self.constant.clone());
+ *operand = Operand::Constant(box self.constant.clone());
self.uses_replaced += 1
}
}
&Rvalue::Aggregate(ref agg_kind, ref operands) => (agg_kind, operands),
_ => span_bug!(src_info.span, "expected aggregate, not {:?}", rhs),
};
- let (adt_def, variant, substs) = match agg_kind {
- &AggregateKind::Adt(adt_def, variant, substs, None)
+ let (adt_def, variant, substs) = match **agg_kind {
+ AggregateKind::Adt(adt_def, variant, substs, None)
=> (adt_def, variant, substs),
_ => span_bug!(src_info.span, "expected struct, not {:?}", rhs),
};
&Rvalue::Aggregate(ref kind, ref operands) => (kind, operands),
_ => continue,
};
- let (adt_def, variant) = match kind {
- &AggregateKind::Adt(adt_def, variant, _, None) => (adt_def, variant),
+ let (adt_def, variant) = match **kind {
+ AggregateKind::Adt(adt_def, variant, _, None) => (adt_def, variant),
_ => continue,
};
if operands.len() == 0 {
// FIXME: Give a bonus to functions with only a single caller
- let param_env = ty::ParameterEnvironment::for_item(tcx, self.source.item_id());
+ let def_id = tcx.hir.local_def_id(self.source.item_id());
+ let param_env = tcx.parameter_environment(def_id);
let mut first_block = true;
let mut cost = 0;
(if self.keep_original {
rhs.clone()
} else {
- let unit = Rvalue::Aggregate(AggregateKind::Tuple, vec![]);
+ let unit = Rvalue::Aggregate(box AggregateKind::Tuple, vec![]);
mem::replace(rhs, unit)
}, statement.source_info)
};
fn promote_candidate(mut self, candidate: Candidate) {
let span = self.promoted.span;
- let new_operand = Operand::Constant(Constant {
+ let new_operand = Operand::Constant(box Constant {
span: span,
ty: self.promoted.return_ty,
literal: Literal::Promoted {
}
Rvalue::Aggregate(ref kind, _) => {
- if let AggregateKind::Adt(def, ..) = *kind {
+ if let AggregateKind::Adt(def, ..) = **kind {
if def.has_dtor(self.tcx) {
self.add(Qualif::NEEDS_DROP);
self.deny_drop();
return Qualif::NOT_CONST.bits();
}
- let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
- let param_env = ty::ParameterEnvironment::for_item(tcx, node_id);
+ let param_env = tcx.parameter_environment(def_id);
let mut qualifier = Qualifier::new(tcx, param_env, def_id, mir, Mode::Const);
qualifier.qualify_const().bits()
MirSource::Const(_) |
MirSource::Promoted(..) => return
};
- let param_env = ty::ParameterEnvironment::for_item(tcx, id);
+ let param_env = tcx.parameter_environment(def_id);
if mode == Mode::Fn || mode == Mode::ConstFn {
// This is ugly because Qualifier holds onto mir,
for block in mir.basic_blocks_mut() {
let terminator = block.terminator_mut();
terminator.kind = match terminator.kind {
- TerminatorKind::SwitchInt { discr: Operand::Constant(Constant {
+ TerminatorKind::SwitchInt { discr: Operand::Constant(box Constant {
literal: Literal::Value { ref value }, ..
}), ref values, ref targets, .. } => {
if let Some(ref constint) = value.to_const_int() {
continue
}
},
- TerminatorKind::Assert { target, cond: Operand::Constant(Constant {
+ TerminatorKind::Assert { target, cond: Operand::Constant(box Constant {
literal: Literal::Value {
value: ConstVal::Bool(cond)
}, ..
}
}
}
-
fn is_box_free(&self, operand: &Operand<'tcx>) -> bool {
match operand {
- &Operand::Constant(Constant {
+ &Operand::Constant(box Constant {
literal: Literal::Value {
value: ConstVal::Function(def_id, _), ..
}, ..
// broken MIR, so try not to report duplicate errors.
return;
}
- let param_env = ty::ParameterEnvironment::for_item(tcx, item_id);
+ let param_env = tcx.parameter_environment(def_id);
tcx.infer_ctxt(param_env, Reveal::UserFacing).enter(|infcx| {
let mut checker = TypeChecker::new(&infcx, item_id);
{
MirSource::Promoted(_, i) => write!(w, "{:?} in", i)?
}
- write!(w, " {}", tcx.node_path_str(src.item_id()))?;
+ item_path::with_forced_impl_filename_line(|| { // see notes on #41697 elsewhere
+ write!(w, " {}", tcx.node_path_str(src.item_id()))
+ })?;
if let MirSource::Fn(_) = src {
write!(w, "(")?;
impl<'v> ast_visit::Visitor<'v> for StatCollector<'v> {
- fn visit_mod(&mut self, m: &'v ast::Mod, _s: Span, _n: NodeId) {
+ fn visit_mod(&mut self, m: &'v ast::Mod, _s: Span, _a: &[ast::Attribute], _n: NodeId) {
self.record("Mod", Id::None, m);
ast_visit::walk_mod(self, m)
}
//! This API is completely unstable and subject to change.
#![crate_name = "rustc_passes"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
-#![feature(staged_api)]
-#![feature(rustc_private)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
#[macro_use]
extern crate rustc;
Rvalue::Aggregate(ref kind, ref _operands) => {
// AggregateKind is not distinguished by visit API, so
// record it. (`super_rvalue` handles `_operands`.)
- self.record(match *kind {
+ self.record(match **kind {
AggregateKind::Array(_) => "AggregateKind::Array",
AggregateKind::Tuple => "AggregateKind::Tuple",
AggregateKind::Adt(..) => "AggregateKind::Adt",
// except according to those terms.
#![crate_name = "rustc_platform_intrinsics"]
-#![unstable(feature = "rustc_private", issue = "27812")]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
-#![feature(staged_api)]
+#![cfg_attr(stage0, feature(staged_api))]
#![deny(warnings)]
#![allow(bad_style)]
//! for more examples.
#![crate_name = "rustc_plugin"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
-#![feature(staged_api)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
#[macro_use] extern crate syntax;
}
self.syntax_exts.push((name, match extension {
NormalTT(ext, _, allow_internal_unstable) => {
- NormalTT(ext, Some(self.krate_span), allow_internal_unstable)
+ let nid = ast::CRATE_NODE_ID;
+ NormalTT(ext, Some((nid, self.krate_span)), allow_internal_unstable)
}
IdentTT(ext, _, allow_internal_unstable) => {
IdentTT(ext, Some(self.krate_span), allow_internal_unstable)
// except according to those terms.
#![crate_name = "rustc_privacy"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
extern crate rustc;
#[macro_use] extern crate syntax;
view_path.span,
ResolutionError::SelfImportsOnlyAllowedWithin);
} else if source_name == "$crate" && full_path.segments.len() == 1 {
- let crate_root = self.resolve_crate_var(source.ctxt);
+ let crate_root = self.resolve_crate_var(source.ctxt, item.span);
let crate_name = match crate_root.kind {
ModuleKind::Def(_, name) => name,
ModuleKind::Block(..) => unreachable!(),
// n.b. we don't need to look at the path option here, because cstore already did
let crate_id = self.session.cstore.extern_mod_stmt_cnum(item.id).unwrap();
- let module = self.get_extern_crate_root(crate_id);
+ let module = self.get_extern_crate_root(crate_id, item.span);
self.populate_module_if_necessary(module);
let used = self.process_legacy_macro_imports(item, module, expansion);
let binding =
no_implicit_prelude: parent.no_implicit_prelude || {
attr::contains_name(&item.attrs, "no_implicit_prelude")
},
- ..ModuleData::new(Some(parent), module_kind, def_id)
+ ..ModuleData::new(Some(parent), module_kind, def_id, item.span)
});
self.define(parent, ident, TypeNS, (module, vis, sp, expansion));
self.module_map.insert(def_id, module);
ItemKind::Enum(ref enum_definition, _) => {
let def = Def::Enum(self.definitions.local_def_id(item.id));
let module_kind = ModuleKind::Def(def, ident.name);
- let module = self.new_module(parent, module_kind, parent.normal_ancestor_id);
+ let module = self.new_module(parent,
+ module_kind,
+ parent.normal_ancestor_id,
+ item.span);
self.define(parent, ident, TypeNS, (module, vis, sp, expansion));
for variant in &(*enum_definition).variants {
// Add all the items within to a new module.
let module_kind = ModuleKind::Def(Def::Trait(def_id), ident.name);
- let module = self.new_module(parent, module_kind, parent.normal_ancestor_id);
+ let module = self.new_module(parent,
+ module_kind,
+ parent.normal_ancestor_id,
+ item.span);
self.define(parent, ident, TypeNS, (module, vis, sp, expansion));
self.current_module = module;
}
fn build_reduced_graph_for_block(&mut self, block: &Block) {
let parent = self.current_module;
if self.block_needs_anonymous_module(block) {
- let module =
- self.new_module(parent, ModuleKind::Block(block.id), parent.normal_ancestor_id);
+ let module = self.new_module(parent,
+ ModuleKind::Block(block.id),
+ parent.normal_ancestor_id,
+ block.span);
self.block_map.insert(block.id, module);
self.current_module = module; // Descend into the block.
}
let def = child.def;
let def_id = def.def_id();
let vis = self.session.cstore.visibility(def_id);
+ let span = child.span;
match def {
Def::Mod(..) | Def::Enum(..) => {
- let module = self.new_module(parent, ModuleKind::Def(def, ident.name), def_id);
+ let module = self.new_module(parent,
+ ModuleKind::Def(def, ident.name),
+ def_id,
+ span);
self.define(parent, ident, TypeNS, (module, vis, DUMMY_SP, Mark::root()));
}
Def::Variant(..) | Def::TyAlias(..) => {
}
Def::Trait(..) => {
let module_kind = ModuleKind::Def(def, ident.name);
- let module = self.new_module(parent, module_kind, parent.normal_ancestor_id);
+ let module = self.new_module(parent,
+ module_kind,
+ parent.normal_ancestor_id,
+ span);
self.define(parent, ident, TypeNS, (module, vis, DUMMY_SP, Mark::root()));
for child in self.session.cstore.item_children(def_id) {
}
}
- fn get_extern_crate_root(&mut self, cnum: CrateNum) -> Module<'a> {
+ fn get_extern_crate_root(&mut self, cnum: CrateNum, span: Span) -> Module<'a> {
let def_id = DefId { krate: cnum, index: CRATE_DEF_INDEX };
let name = self.session.cstore.crate_name(cnum);
let macros_only = self.session.cstore.dep_kind(cnum).macros_only();
let module_kind = ModuleKind::Def(Def::Mod(def_id), name);
let arenas = self.arenas;
*self.extern_crate_roots.entry((cnum, macros_only)).or_insert_with(|| {
- arenas.alloc_module(ModuleData::new(None, module_kind, def_id))
+ arenas.alloc_module(ModuleData::new(None, module_kind, def_id, span))
})
}
- pub fn macro_def_scope(&mut self, expansion: Mark) -> Module<'a> {
+ pub fn macro_def_scope(&mut self, expansion: Mark, span: Span) -> Module<'a> {
let def_id = self.macro_defs[&expansion];
if let Some(id) = self.definitions.as_local_node_id(def_id) {
self.local_macro_def_scopes[&id]
self.graph_root
} else {
let module_def_id = ty::DefIdTree::parent(&*self, def_id).unwrap();
- self.get_extern_crate_root(module_def_id.krate)
+ self.get_extern_crate_root(module_def_id.krate, span)
}
}
} else {
for (name, span) in legacy_imports.imports {
let ident = Ident::with_empty_ctxt(name);
- let result = self.resolve_ident_in_module(module, ident, MacroNS, false, None);
+ let result = self.resolve_ident_in_module(module, ident, MacroNS,
+ false, false, span);
if let Ok(binding) = result {
let directive = macro_use_directive(span);
self.potentially_unused_imports.push(directive);
for (name, span) in legacy_imports.reexports {
self.session.cstore.export_macros(module.def_id().unwrap().krate);
let ident = Ident::with_empty_ctxt(name);
- let result = self.resolve_ident_in_module(module, ident, MacroNS, false, None);
+ let result = self.resolve_ident_in_module(module, ident, MacroNS, false, false, span);
if let Ok(binding) = result {
self.macro_exports.push(Export { name: name, def: binding.def(), span: span });
} else {
"##,
E0435: r##"
-A non-constant value was used to initialise a constant.
+A non-constant value was used in a constant expression.
Erroneous code example:
```compile_fail,E0435
-let foo = 42u32;
-const FOO : u32 = foo; // error: attempt to use a non-constant value in a
- // constant
+let foo = 42;
+let a: [u8; foo]; // error: attempt to use a non-constant value in a constant
```
To fix this error, please replace the value with a constant. Example:
```
-const FOO : u32 = 42u32; // ok!
+let a: [u8; 42]; // ok!
```
Or:
```
-const OTHER_FOO : u32 = 42u32;
-const FOO : u32 = OTHER_FOO; // ok!
+const FOO: usize = 42;
+let a: [u8; FOO]; // ok!
```
"##,
// E0157, unused error code
// E0257,
// E0258,
- E0402, // cannot use an outer type parameter in this context
+// E0402, // cannot use an outer type parameter in this context
// E0406, merged into 420
// E0410, merged into 408
// E0413, merged into 530
// except according to those terms.
#![crate_name = "rustc_resolve"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(associated_consts)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
#[macro_use]
extern crate log;
enum ResolutionError<'a> {
/// error E0401: can't use type parameters from outer function
TypeParametersFromOuterFunction,
- /// error E0402: cannot use an outer type parameter in this context
- OuterTypeParameterContext,
/// error E0403: the name is already used for a type parameter in this type parameter list
NameAlreadyUsedInTypeParameterList(Name, &'a Span),
/// error E0407: method is not a member of trait
err.span_label(span, "use of type variable from outer function");
err
}
- ResolutionError::OuterTypeParameterContext => {
- struct_span_err!(resolver.session,
- span,
- E0402,
- "cannot use an outer type parameter in this context")
- }
ResolutionError::NameAlreadyUsedInTypeParameterList(name, first_use_span) => {
let mut err = struct_span_err!(resolver.session,
span,
self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type);
} else if let TyKind::ImplicitSelf = ty.node {
let self_ty = keywords::SelfType.ident();
- let def = self.resolve_ident_in_lexical_scope(self_ty, TypeNS, Some(ty.span))
+ let def = self.resolve_ident_in_lexical_scope(self_ty, TypeNS, true, ty.span)
.map_or(Def::Err, |d| d.def());
self.record_def(ty.id, PathResolution::new(def));
} else if let TyKind::Array(ref element, ref length) = ty.node {
// access the children must be preceded with a
// `populate_module_if_necessary` call.
populated: Cell<bool>,
+
+ /// Span of the module itself. Used for error reporting.
+ span: Span,
}
pub type Module<'a> = &'a ModuleData<'a>;
impl<'a> ModuleData<'a> {
- fn new(parent: Option<Module<'a>>, kind: ModuleKind, normal_ancestor_id: DefId) -> Self {
+ fn new(parent: Option<Module<'a>>,
+ kind: ModuleKind,
+ normal_ancestor_id: DefId,
+ span: Span) -> Self {
ModuleData {
parent: parent,
kind: kind,
globs: RefCell::new((Vec::new())),
traits: RefCell::new(None),
populated: Cell::new(normal_ancestor_id.is_local()),
+ span: span,
}
}
pub whitelisted_legacy_custom_derives: Vec<Name>,
pub found_unresolved_macro: bool,
+ // List of crate local macros that we need to warn about as being unused.
+ // Right now this only includes macro_rules! macros.
+ unused_macros: FxHashSet<DefId>,
+
// Maps the `Mark` of an expansion to its containing module or block.
invocations: FxHashMap<Mark, &'a InvocationData<'a>>,
let namespace = if is_value { ValueNS } else { TypeNS };
let hir::Path { ref segments, span, ref mut def } = *path;
let path: Vec<_> = segments.iter().map(|seg| Ident::with_empty_ctxt(seg.name)).collect();
- match self.resolve_path(&path, Some(namespace), Some(span)) {
+ match self.resolve_path(&path, Some(namespace), true, span) {
PathResult::Module(module) => *def = module.def().unwrap(),
PathResult::NonModule(path_res) if path_res.unresolved_segments() == 0 =>
*def = path_res.base_def(),
- PathResult::NonModule(..) => match self.resolve_path(&path, None, Some(span)) {
+ PathResult::NonModule(..) => match self.resolve_path(&path, None, true, span) {
PathResult::Failed(msg, _) => {
resolve_error(self, span, ResolutionError::FailedToResolve(&msg));
}
let root_module_kind = ModuleKind::Def(Def::Mod(root_def_id), keywords::Invalid.name());
let graph_root = arenas.alloc_module(ModuleData {
no_implicit_prelude: attr::contains_name(&krate.attrs, "no_implicit_prelude"),
- ..ModuleData::new(None, root_module_kind, root_def_id)
+ ..ModuleData::new(None, root_module_kind, root_def_id, krate.span)
});
let mut module_map = FxHashMap();
module_map.insert(DefId::local(CRATE_DEF_INDEX), graph_root);
potentially_unused_imports: Vec::new(),
struct_constructors: DefIdMap(),
found_unresolved_macro: false,
+ unused_macros: FxHashSet(),
}
}
self.crate_loader.postprocess(krate);
}
- fn new_module(&self, parent: Module<'a>, kind: ModuleKind, normal_ancestor_id: DefId)
- -> Module<'a> {
- self.arenas.alloc_module(ModuleData::new(Some(parent), kind, normal_ancestor_id))
+ fn new_module(
+ &self,
+ parent: Module<'a>,
+ kind: ModuleKind,
+ normal_ancestor_id: DefId,
+ span: Span,
+ ) -> Module<'a> {
+ self.arenas.alloc_module(ModuleData::new(Some(parent), kind, normal_ancestor_id, span))
}
fn record_use(&mut self, ident: Ident, ns: Namespace, binding: &'a NameBinding<'a>, span: Span)
fn resolve_ident_in_lexical_scope(&mut self,
mut ident: Ident,
ns: Namespace,
- record_used: Option<Span>)
+ record_used: bool,
+ path_span: Span)
-> Option<LexicalScopeBinding<'a>> {
if ns == TypeNS {
ident = ident.unhygienize();
if let Some(def) = self.ribs[ns][i].bindings.get(&ident).cloned() {
// The ident resolves to a type parameter or local variable.
return Some(LexicalScopeBinding::Def(
- self.adjust_local_def(ns, i, def, record_used)
+ self.adjust_local_def(ns, i, def, record_used, path_span)
));
}
if let ModuleRibKind(module) = self.ribs[ns][i].kind {
- let item = self.resolve_ident_in_module(module, ident, ns, false, record_used);
+ let item = self.resolve_ident_in_module(module, ident, ns, false,
+ record_used, path_span);
if let Ok(binding) = item {
// The ident resolves to an item.
return Some(LexicalScopeBinding::Item(binding));
if let ModuleKind::Block(..) = module.kind { // We can see through blocks
} else if !module.no_implicit_prelude {
return self.prelude.and_then(|prelude| {
- self.resolve_ident_in_module(prelude, ident, ns, false, None).ok()
+ self.resolve_ident_in_module(prelude, ident, ns, false,
+ false, path_span).ok()
}).map(LexicalScopeBinding::Item)
} else {
return None;
None
}
- fn resolve_crate_var(&mut self, crate_var_ctxt: SyntaxContext) -> Module<'a> {
+ fn resolve_crate_var(&mut self, crate_var_ctxt: SyntaxContext, span: Span) -> Module<'a> {
let mut ctxt_data = crate_var_ctxt.data();
while ctxt_data.prev_ctxt != SyntaxContext::empty() {
ctxt_data = ctxt_data.prev_ctxt.data();
}
- let module = self.macro_def_scope(ctxt_data.outer_mark);
+ let module = self.macro_def_scope(ctxt_data.outer_mark, span);
if module.is_local() { self.graph_root } else { module }
}
this.check_proc_macro_attrs(&trait_item.attrs);
match trait_item.node {
- TraitItemKind::Const(_, ref default) => {
+ TraitItemKind::Const(ref ty, ref default) => {
+ this.visit_ty(ty);
+
// Only impose the restrictions of
- // ConstRibKind if there's an actual constant
+ // ConstRibKind for an actual constant
// expression in a provided default.
- if default.is_some() {
+ if let Some(ref expr) = *default{
this.with_constant_rib(|this| {
- visit::walk_trait_item(this, trait_item)
+ this.visit_expr(expr);
});
- } else {
- visit::walk_trait_item(this, trait_item)
}
}
TraitItemKind::Method(ref sig, _) => {
});
}
- ItemKind::Const(..) | ItemKind::Static(..) => {
- self.with_constant_rib(|this| {
- visit::walk_item(this, item);
+ ItemKind::Static(ref ty, _, ref expr) |
+ ItemKind::Const(ref ty, ref expr) => {
+ self.with_item_rib(|this| {
+ this.visit_ty(ty);
+ this.with_constant_rib(|this| {
+ this.visit_expr(expr);
+ });
});
}
self.label_ribs.pop();
}
+ fn with_item_rib<F>(&mut self, f: F)
+ where F: FnOnce(&mut Resolver)
+ {
+ self.ribs[ValueNS].push(Rib::new(ItemRibKind));
+ self.ribs[TypeNS].push(Rib::new(ItemRibKind));
+ f(self);
+ self.ribs[TypeNS].pop();
+ self.ribs[ValueNS].pop();
+ }
+
fn with_constant_rib<F>(&mut self, f: F)
where F: FnOnce(&mut Resolver)
{
self.ribs[ValueNS].push(Rib::new(ConstantItemRibKind));
- self.ribs[TypeNS].push(Rib::new(ConstantItemRibKind));
f(self);
- self.ribs[TypeNS].pop();
self.ribs[ValueNS].pop();
}
PatKind::Ident(bmode, ref ident, ref opt_pat) => {
// First try to resolve the identifier as some existing
// entity, then fall back to a fresh binding.
- let binding = self.resolve_ident_in_lexical_scope(ident.node, ValueNS, None)
+ let binding = self.resolve_ident_in_lexical_scope(ident.node, ValueNS,
+ false, pat.span)
.and_then(LexicalScopeBinding::item);
let resolution = binding.map(NameBinding::def).and_then(|def| {
let always_binding = !pat_src.is_refutable() || opt_pat.is_some() ||
(format!(""), format!("the crate root"))
} else {
let mod_path = &path[..path.len() - 1];
- let mod_prefix = match this.resolve_path(mod_path, Some(TypeNS), None) {
+ let mod_prefix = match this.resolve_path(mod_path, Some(TypeNS), false, span) {
PathResult::Module(module) => module.def(),
_ => None,
}.map_or(format!(""), |def| format!("{} ", def.kind_name()));
let name = path.last().unwrap().name;
let candidates = this.lookup_import_candidates(name, ns, is_expected);
if !candidates.is_empty() {
+ let mut module_span = this.current_module.span;
+ module_span.hi = module_span.lo;
// Report import candidates as help and proceed searching for labels.
- show_candidates(&mut err, &candidates, def.is_some());
+ show_candidates(&mut err, module_span, &candidates, def.is_some());
} else if is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
let enum_candidates = this.lookup_import_candidates(name, ns, is_enum_variant);
let mut enum_candidates = enum_candidates.iter()
}
}
}
- if path.len() == 1 && this.self_type_is_available() {
+ if path.len() == 1 && this.self_type_is_available(span) {
if let Some(candidate) = this.lookup_assoc_candidate(name, ns, is_expected) {
- let self_is_available = this.self_value_is_available(path[0].ctxt);
+ let self_is_available = this.self_value_is_available(path[0].ctxt, span);
match candidate {
AssocSuggestion::Field => {
err.span_label(span, format!("did you mean `self.{}`?", path_str));
let mut levenshtein_worked = false;
// Try Levenshtein.
- if let Some(candidate) = this.lookup_typo_candidate(path, ns, is_expected) {
+ if let Some(candidate) = this.lookup_typo_candidate(path, ns, is_expected, span) {
err.span_label(ident_span, format!("did you mean `{}`?", candidate));
levenshtein_worked = true;
}
resolution
}
- fn self_type_is_available(&mut self) -> bool {
- let binding = self.resolve_ident_in_lexical_scope(keywords::SelfType.ident(), TypeNS, None);
+ fn self_type_is_available(&mut self, span: Span) -> bool {
+ let binding = self.resolve_ident_in_lexical_scope(keywords::SelfType.ident(),
+ TypeNS, false, span);
if let Some(LexicalScopeBinding::Def(def)) = binding { def != Def::Err } else { false }
}
- fn self_value_is_available(&mut self, ctxt: SyntaxContext) -> bool {
+ fn self_value_is_available(&mut self, ctxt: SyntaxContext, span: Span) -> bool {
let ident = Ident { name: keywords::SelfValue.name(), ctxt: ctxt };
- let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, None);
+ let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, false, span);
if let Some(LexicalScopeBinding::Def(def)) = binding { def != Def::Err } else { false }
}
));
}
- let result = match self.resolve_path(&path, Some(ns), Some(span)) {
+ let result = match self.resolve_path(&path, Some(ns), true, span) {
PathResult::NonModule(path_res) => path_res,
PathResult::Module(module) if !module.is_normal() => {
PathResolution::new(module.def().unwrap())
if path.len() > 1 && !global_by_default && result.base_def() != Def::Err &&
path[0].name != keywords::CrateRoot.name() && path[0].name != "$crate" {
let unqualified_result = {
- match self.resolve_path(&[*path.last().unwrap()], Some(ns), None) {
+ match self.resolve_path(&[*path.last().unwrap()], Some(ns), false, span) {
PathResult::NonModule(path_res) => path_res.base_def(),
PathResult::Module(module) => module.def().unwrap(),
_ => return Some(result),
fn resolve_path(&mut self,
path: &[Ident],
opt_ns: Option<Namespace>, // `None` indicates a module path
- record_used: Option<Span>)
+ record_used: bool,
+ path_span: Span)
-> PathResult<'a> {
let mut module = None;
let mut allow_super = true;
module = Some(self.graph_root);
continue
} else if i == 0 && ns == TypeNS && ident.name == "$crate" {
- module = Some(self.resolve_crate_var(ident.ctxt));
+ module = Some(self.resolve_crate_var(ident.ctxt, path_span));
continue
}
let binding = if let Some(module) = module {
- self.resolve_ident_in_module(module, ident, ns, false, record_used)
+ self.resolve_ident_in_module(module, ident, ns, false, record_used, path_span)
} else if opt_ns == Some(MacroNS) {
- self.resolve_lexical_macro_path_segment(ident, ns, record_used)
+ self.resolve_lexical_macro_path_segment(ident, ns, record_used, path_span)
.map(MacroBinding::binding)
} else {
- match self.resolve_ident_in_lexical_scope(ident, ns, record_used) {
+ match self.resolve_ident_in_lexical_scope(ident, ns, record_used, path_span) {
Some(LexicalScopeBinding::Item(binding)) => Ok(binding),
Some(LexicalScopeBinding::Def(def))
if opt_ns == Some(TypeNS) || opt_ns == Some(ValueNS) => {
def, path.len() - 1
));
}
- _ => Err(if record_used.is_some() { Determined } else { Undetermined }),
+ _ => Err(if record_used { Determined } else { Undetermined }),
}
};
ns: Namespace,
rib_index: usize,
mut def: Def,
- record_used: Option<Span>) -> Def {
+ record_used: bool,
+ span: Span) -> Def {
let ribs = &self.ribs[ns][rib_index + 1..];
// An invalid forward use of a type parameter from a previous default.
if let ForwardTyParamBanRibKind = self.ribs[ns][rib_index].kind {
- if let Some(span) = record_used {
+ if record_used {
resolve_error(self, span,
ResolutionError::ForwardDeclaredTyParam);
}
match def {
Def::Upvar(..) => {
- span_bug!(record_used.unwrap_or(DUMMY_SP), "unexpected {:?} in bindings", def)
+ span_bug!(span, "unexpected {:?} in bindings", def)
}
Def::Local(def_id) => {
for rib in ribs {
let depth = vec.len();
def = Def::Upvar(def_id, depth, function_id);
- if let Some(span) = record_used {
+ if record_used {
vec.push(Freevar {
def: prev_def,
span: span,
// This was an attempt to access an upvar inside a
// named function item. This is not allowed, so we
// report an error.
- if let Some(span) = record_used {
+ if record_used {
resolve_error(self, span,
ResolutionError::CannotCaptureDynamicEnvironmentInFnItem);
}
}
ConstantItemRibKind => {
// Still doesn't deal with upvars
- if let Some(span) = record_used {
+ if record_used {
resolve_error(self, span,
ResolutionError::AttemptToUseNonConstantValueInConstant);
}
for rib in ribs {
match rib.kind {
NormalRibKind | MethodRibKind(_) | ClosureRibKind(..) |
- ModuleRibKind(..) | MacroDefinition(..) | ForwardTyParamBanRibKind => {
+ ModuleRibKind(..) | MacroDefinition(..) | ForwardTyParamBanRibKind |
+ ConstantItemRibKind => {
// Nothing to do. Continue.
}
ItemRibKind => {
// This was an attempt to use a type parameter outside
// its scope.
- if let Some(span) = record_used {
+ if record_used {
resolve_error(self, span,
ResolutionError::TypeParametersFromOuterFunction);
}
return Def::Err;
}
- ConstantItemRibKind => {
- // see #9186
- if let Some(span) = record_used {
- resolve_error(self, span,
- ResolutionError::OuterTypeParameterContext);
- }
- return Def::Err;
- }
}
}
}
fn lookup_typo_candidate<FilterFn>(&mut self,
path: &[Ident],
ns: Namespace,
- filter_fn: FilterFn)
+ filter_fn: FilterFn,
+ span: Span)
-> Option<Symbol>
where FilterFn: Fn(Def) -> bool
{
} else {
// Search in module.
let mod_path = &path[..path.len() - 1];
- if let PathResult::Module(module) = self.resolve_path(mod_path, Some(TypeNS), None) {
+ if let PathResult::Module(module) = self.resolve_path(mod_path, Some(TypeNS),
+ false, span) {
add_module_candidates(module, &mut names);
}
}
continue
}
let ident = attr.path.segments[0].identifier;
- let result = self.resolve_lexical_macro_path_segment(ident, MacroNS, None);
+ let result = self.resolve_lexical_macro_path_segment(ident,
+ MacroNS,
+ false,
+ attr.path.span);
if let Ok(binding) = result {
if let SyntaxExtension::AttrProcMacro(..) = *binding.binding().get_macro(self) {
attr::mark_known(attr);
/// When an entity with a given name is not available in scope, we search for
/// entities with that name in all crates. This method allows outputting the
/// results of this search in a programmer-friendly way
-fn show_candidates(session: &mut DiagnosticBuilder,
+fn show_candidates(err: &mut DiagnosticBuilder,
+ span: Span,
candidates: &[ImportSuggestion],
better: bool) {
- // don't show more than MAX_CANDIDATES results, so
- // we're consistent with the trait suggestions
- const MAX_CANDIDATES: usize = 4;
// we want consistent results across executions, but candidates are produced
// by iterating through a hash map, so make sure they are ordered:
1 => " is found in another module, you can import it",
_ => "s are found in other modules, you can import them",
};
+ let msg = format!("possible {}candidate{} into scope", better, msg_diff);
+
+ for candidate in &mut path_strings {
+ *candidate = format!("use {};\n", candidate);
+ }
- let end = cmp::min(MAX_CANDIDATES, path_strings.len());
- session.help(&format!("possible {}candidate{} into scope:{}{}",
- better,
- msg_diff,
- &path_strings[0..end].iter().map(|candidate| {
- format!("\n `use {};`", candidate)
- }).collect::<String>(),
- if path_strings.len() > MAX_CANDIDATES {
- format!("\nand {} other candidates",
- path_strings.len() - MAX_CANDIDATES)
- } else {
- "".to_owned()
- }
- ));
+ err.span_suggestions(span, &msg, path_strings);
}
/// A somewhat inefficient routine to obtain the name of a module.
use rustc::hir::def_id::{DefId, BUILTIN_MACROS_CRATE, CRATE_DEF_INDEX, DefIndex};
use rustc::hir::def::{Def, Export};
use rustc::hir::map::{self, DefCollector};
-use rustc::ty;
+use rustc::{ty, lint};
use syntax::ast::{self, Name, Ident};
use syntax::attr::{self, HasAttrs};
use syntax::errors::DiagnosticBuilder;
}
fn eliminate_crate_var(&mut self, item: P<ast::Item>) -> P<ast::Item> {
- struct EliminateCrateVar<'b, 'a: 'b>(&'b mut Resolver<'a>);
+ struct EliminateCrateVar<'b, 'a: 'b>(&'b mut Resolver<'a>, Span);
impl<'a, 'b> Folder for EliminateCrateVar<'a, 'b> {
fn fold_path(&mut self, mut path: ast::Path) -> ast::Path {
let ident = path.segments[0].identifier;
if ident.name == "$crate" {
path.segments[0].identifier.name = keywords::CrateRoot.name();
- let module = self.0.resolve_crate_var(ident.ctxt);
+ let module = self.0.resolve_crate_var(ident.ctxt, self.1);
if !module.is_local() {
let span = path.segments[0].span;
path.segments.insert(1, match module.kind {
}
}
- EliminateCrateVar(self).fold_item(item).expect_one("")
+ EliminateCrateVar(self, item.span).fold_item(item).expect_one("")
}
fn is_whitelisted_legacy_custom_derive(&self, name: Name) -> bool {
},
};
self.macro_defs.insert(invoc.expansion_data.mark, def.def_id());
+ self.unused_macros.remove(&def.def_id());
Ok(Some(self.get_macro(def)))
}
fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
-> Result<Rc<SyntaxExtension>, Determinacy> {
- self.resolve_macro_to_def(scope, path, kind, force).map(|def| self.get_macro(def))
+ self.resolve_macro_to_def(scope, path, kind, force).map(|def| {
+ self.unused_macros.remove(&def.def_id());
+ self.get_macro(def)
+ })
+ }
+
+ fn check_unused_macros(&self) {
+ for did in self.unused_macros.iter() {
+ let id_span = match *self.macro_map[did] {
+ SyntaxExtension::NormalTT(_, isp, _) => isp,
+ _ => None,
+ };
+ if let Some((id, span)) = id_span {
+ let lint = lint::builtin::UNUSED_MACROS;
+ let msg = "unused macro definition".to_string();
+ self.session.add_lint(lint, id, span, msg);
+ } else {
+ bug!("attempted to create unused macro error, but span not available");
+ }
+ }
}
}
return Err(Determinacy::Determined);
}
- let def = match self.resolve_path(&path, Some(MacroNS), None) {
+ let def = match self.resolve_path(&path, Some(MacroNS), false, span) {
PathResult::NonModule(path_res) => match path_res.base_def() {
Def::Err => Err(Determinacy::Determined),
def @ _ => Ok(def),
let result = if let Some(MacroBinding::Legacy(binding)) = legacy_resolution {
Ok(Def::Macro(binding.def_id, MacroKind::Bang))
} else {
- match self.resolve_lexical_macro_path_segment(path[0], MacroNS, None) {
+ match self.resolve_lexical_macro_path_segment(path[0], MacroNS, false, span) {
Ok(binding) => Ok(binding.binding().def_ignoring_ambiguity()),
Err(Determinacy::Undetermined) if !force => return Err(Determinacy::Undetermined),
Err(_) => {
pub fn resolve_lexical_macro_path_segment(&mut self,
ident: Ident,
ns: Namespace,
- record_used: Option<Span>)
+ record_used: bool,
+ path_span: Span)
-> Result<MacroBinding<'a>, Determinacy> {
let mut module = Some(self.current_module);
let mut potential_illegal_shadower = Err(Determinacy::Determined);
let determinacy =
- if record_used.is_some() { Determinacy::Determined } else { Determinacy::Undetermined };
+ if record_used { Determinacy::Determined } else { Determinacy::Undetermined };
loop {
let result = if let Some(module) = module {
// Since expanded macros may not shadow the lexical scope and
// globs may not shadow global macros (both enforced below),
// we resolve with restricted shadowing (indicated by the penultimate argument).
- self.resolve_ident_in_module(module, ident, ns, true, record_used)
+ self.resolve_ident_in_module(module, ident, ns, true, record_used, path_span)
.map(MacroBinding::Modern)
} else {
self.global_macros.get(&ident.name).cloned().ok_or(determinacy)
match result.map(MacroBinding::binding) {
Ok(binding) => {
- let span = match record_used {
- Some(span) => span,
- None => return result,
- };
+ if !record_used {
+ return result;
+ }
if let Ok(MacroBinding::Modern(shadower)) = potential_illegal_shadower {
if shadower.def() != binding.def() {
let name = ident.name;
self.ambiguity_errors.push(AmbiguityError {
- span: span, name: name, b1: shadower, b2: binding, lexical: true,
+ span: path_span,
+ name: name,
+ b1: shadower,
+ b2: binding,
+ lexical: true,
legacy: false,
});
return potential_illegal_shadower;
pub fn finalize_current_module_macro_resolutions(&mut self) {
let module = self.current_module;
for &(ref path, span) in module.macro_resolutions.borrow().iter() {
- match self.resolve_path(path, Some(MacroNS), Some(span)) {
+ match self.resolve_path(path, Some(MacroNS), true, span) {
PathResult::NonModule(_) => {},
PathResult::Failed(msg, _) => {
resolve_error(self, span, ResolutionError::FailedToResolve(&msg));
for &(mark, ident, span, kind) in module.legacy_macro_resolutions.borrow().iter() {
let legacy_scope = &self.invocations[&mark].legacy_scope;
let legacy_resolution = self.resolve_legacy_scope(legacy_scope, ident.name, true);
- let resolution = self.resolve_lexical_macro_path_segment(ident, MacroNS, Some(span));
+ let resolution = self.resolve_lexical_macro_path_segment(ident, MacroNS, true, span);
match (legacy_resolution, resolution) {
(Some(MacroBinding::Legacy(legacy_binding)), Ok(MacroBinding::Modern(binding))) => {
let msg1 = format!("`{}` could refer to the macro defined here", ident);
format!("cannot find derive macro `{}` in this scope", ident),
};
let mut err = self.session.struct_span_err(span, &msg);
- self.suggest_macro_name(&ident.name.as_str(), kind, &mut err);
+ self.suggest_macro_name(&ident.name.as_str(), kind, &mut err, span);
err.emit();
},
_ => {},
}
fn suggest_macro_name(&mut self, name: &str, kind: MacroKind,
- err: &mut DiagnosticBuilder<'a>) {
+ err: &mut DiagnosticBuilder<'a>, span: Span) {
// First check if this is a locally-defined bang macro.
let suggestion = if let MacroKind::Bang = kind {
find_best_match_for_name(self.macro_names.iter(), name, None)
}
};
let ident = Ident::from_str(name);
- self.lookup_typo_candidate(&vec![ident], MacroNS, is_macro)
+ self.lookup_typo_candidate(&vec![ident], MacroNS, is_macro, span)
});
if let Some(suggestion) = suggestion {
if attr::contains_name(&item.attrs, "macro_export") {
let def = Def::Macro(def_id, MacroKind::Bang);
self.macro_exports.push(Export { name: ident.name, def: def, span: item.span });
+ } else {
+ self.unused_macros.insert(def_id);
}
}
ident: Ident,
ns: Namespace,
restricted_shadowing: bool,
- record_used: Option<Span>)
+ record_used: bool,
+ path_span: Span)
-> Result<&'a NameBinding<'a>, Determinacy> {
self.populate_module_if_necessary(module);
.try_borrow_mut()
.map_err(|_| Determined)?; // This happens when there is a cycle of imports
- if let Some(span) = record_used {
+ if record_used {
if let Some(binding) = resolution.binding {
if let Some(shadowed_glob) = resolution.shadows_glob {
let name = ident.name;
ns != MacroNS && // In MacroNS, `try_define` always forbids this shadowing
binding.def() != shadowed_glob.def() {
self.ambiguity_errors.push(AmbiguityError {
- span: span, name: name, lexical: false, b1: binding, b2: shadowed_glob,
+ span: path_span,
+ name: name,
+ lexical: false,
+ b1: binding,
+ b2: shadowed_glob,
legacy: false,
});
}
}
- if self.record_use(ident, ns, binding, span) {
+ if self.record_use(ident, ns, binding, path_span) {
return Ok(self.dummy_binding);
}
if !self.is_accessible(binding.vis) {
- self.privacy_errors.push(PrivacyError(span, ident.name, binding));
+ self.privacy_errors.push(PrivacyError(path_span, ident.name, binding));
}
}
SingleImport { source, .. } => source,
_ => unreachable!(),
};
- match self.resolve_ident_in_module(module, ident, ns, false, None) {
+ match self.resolve_ident_in_module(module, ident, ns, false, false, path_span) {
Err(Determined) => {}
_ => return Err(Undetermined),
}
for directive in module.globs.borrow().iter() {
if self.is_accessible(directive.vis.get()) {
if let Some(module) = directive.imported_module.get() {
- let result = self.resolve_ident_in_module(module, ident, ns, false, None);
+ let result = self.resolve_ident_in_module(module,
+ ident,
+ ns,
+ false,
+ false,
+ path_span);
if let Err(Undetermined) = result {
return Err(Undetermined);
}
// For better failure detection, pretend that the import will not define any names
// while resolving its module path.
directive.vis.set(ty::Visibility::Invisible);
- let result = self.resolve_path(&directive.module_path, None, None);
+ let result = self.resolve_path(&directive.module_path, None, false, directive.span);
directive.vis.set(vis);
match result {
let mut indeterminate = false;
self.per_ns(|this, ns| if !type_ns_only || ns == TypeNS {
if let Err(Undetermined) = result[ns].get() {
- result[ns].set(this.resolve_ident_in_module(module, source, ns, false, None));
+ result[ns].set(this.resolve_ident_in_module(module,
+ source,
+ ns,
+ false,
+ false,
+ directive.span));
} else {
return
};
self.current_module = directive.parent;
let ImportDirective { ref module_path, span, .. } = *directive;
- let module_result = self.resolve_path(&module_path, None, Some(span));
+ let module_result = self.resolve_path(&module_path, None, true, span);
let module = match module_result {
PathResult::Module(module) => module,
PathResult::Failed(msg, _) => {
let (mut self_path, mut self_result) = (module_path.clone(), None);
if !self_path.is_empty() && !token::Ident(self_path[0]).is_path_segment_keyword() {
self_path[0].name = keywords::SelfValue.name();
- self_result = Some(self.resolve_path(&self_path, None, None));
+ self_result = Some(self.resolve_path(&self_path, None, false, span));
}
return if let Some(PathResult::Module(..)) = self_result {
Some(format!("Did you mean `{}`?", names_to_string(&self_path)))
Some(this.dummy_binding);
}
}
- } else if let Ok(binding) = this.resolve_ident_in_module(module, ident, ns, false, None) {
+ } else if let Ok(binding) = this.resolve_ident_in_module(module,
+ ident,
+ ns,
+ false,
+ false,
+ directive.span) {
legacy_self_import = Some(directive);
let binding = this.arenas.alloc_name_binding(NameBinding {
kind: NameBindingKind::Import {
}
let mut all_ns_failed = true;
self.per_ns(|this, ns| if !type_ns_only || ns == TypeNS {
- match this.resolve_ident_in_module(module, ident, ns, false, Some(span)) {
+ match this.resolve_ident_in_module(module, ident, ns, false, true, span) {
Ok(_) => all_ns_failed = false,
_ => {}
}
pub items: Vec<NodeId>,
pub visibility: Visibility,
pub docs: String,
- pub sig: Signature,
+ pub sig: Option<Signature>,
pub attributes: Vec<Attribute>,
}
}
impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'l> for DumpVisitor<'l, 'tcx, 'll, D> {
+ fn visit_mod(&mut self, m: &'l ast::Mod, span: Span, attrs: &[ast::Attribute], id: NodeId) {
+ // Since we handle explicit modules ourselves in visit_item, this should
+ // only get called for the root module of a crate.
+ assert_eq!(id, ast::CRATE_NODE_ID);
+
+ let qualname = format!("::{}", self.tcx.node_path_str(id));
+
+ let cm = self.tcx.sess.codemap();
+ let filename = cm.span_to_filename(span);
+ self.dumper.mod_data(ModData {
+ id: id,
+ name: String::new(),
+ qualname: qualname,
+ span: span,
+ scope: id,
+ filename: filename,
+ items: m.items.iter().map(|i| i.id).collect(),
+ visibility: Visibility::Public,
+ docs: docs_for_attrs(attrs),
+ sig: None,
+ attributes: attrs.to_owned(),
+ }.lower(self.tcx));
+ self.nest_scope(id, |v| visit::walk_mod(v, m));
+ }
+
fn visit_item(&mut self, item: &'l ast::Item) {
use syntax::ast::ItemKind::*;
self.process_macro_use(item.span, item.id);
pub items: Vec<DefId>,
pub visibility: Visibility,
pub docs: String,
- pub sig: Signature,
+ pub sig: Option<Signature>,
pub attributes: Vec<Attribute>,
}
items: self.items.into_iter().map(|id| make_def_id(id, &tcx.hir)).collect(),
visibility: self.visibility,
docs: self.docs,
- sig: self.sig.lower(tcx),
+ sig: self.sig.map(|s| s.lower(tcx)),
attributes: self.attributes.lower(tcx),
}
}
parent: None,
decl_id: None,
docs: self.docs,
- sig: Some(self.sig.into()),
+ sig: self.sig.map(|s| s.into()),
attributes: vec![],
}),
_ => None,
children: data.items.into_iter().map(|id| id_from_def_id(id)).collect(),
decl_id: None,
docs: data.docs,
- sig: Some(data.sig.into()),
+ sig: data.sig.map(|s| s.into()),
attributes: data.attributes.into_iter().map(|a| a.into()).collect(),
};
if def.span.file_name.to_str().unwrap() != def.value {
// except according to those terms.
#![crate_name = "rustc_save_analysis"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(custom_attribute)]
#![allow(unused_attributes)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
#[macro_use] extern crate rustc;
items: m.items.iter().map(|i| i.id).collect(),
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
- sig: self.sig_base(item),
+ sig: Some(self.sig_base(item)),
attributes: item.attrs.clone(),
}))
}
[dependencies]
flate = { path = "../libflate" }
log = "0.3"
+owning_ref = "0.3.3"
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
use libc;
use llvm::archive_ro::{ArchiveRO, Child};
use llvm::{self, ArchiveKind};
+use metadata::METADATA_FILENAME;
use rustc::session::Session;
pub struct ArchiveConfig<'a> {
// Ignoring all bytecode files, no matter of
// name
let bc_ext = ".bytecode.deflate";
- let metadata_filename =
- self.config.sess.cstore.metadata_filename().to_owned();
self.add_archive(rlib, move |fname: &str| {
- if fname.ends_with(bc_ext) || fname == metadata_filename {
+ if fname.ends_with(bc_ext) || fname == METADATA_FILENAME {
return true
}
use super::rpath::RPathConfig;
use super::rpath;
use super::msvc;
+use metadata::METADATA_FILENAME;
use session::config;
use session::config::NoDebugInfo;
use session::config::{OutputFilenames, Input, OutputType};
// contain the metadata in a separate file. We use a temp directory
// here so concurrent builds in the same directory don't try to use
// the same filename for metadata (stomping over one another)
- let metadata = tmpdir.join(sess.cstore.metadata_filename());
+ let metadata = tmpdir.join(METADATA_FILENAME);
emit_metadata(sess, trans, &metadata);
ab.add_file(&metadata);
archive.update_symbols();
for f in archive.src_files() {
- if f.ends_with("bytecode.deflate") ||
- f == sess.cstore.metadata_filename() {
+ if f.ends_with("bytecode.deflate") || f == METADATA_FILENAME {
archive.remove_file(&f);
continue
}
let mut any_objects = false;
for f in archive.src_files() {
- if f.ends_with("bytecode.deflate") ||
- f == sess.cstore.metadata_filename() {
+ if f.ends_with("bytecode.deflate") || f == METADATA_FILENAME {
archive.remove_file(&f);
continue
}
//! comments can also be found below leading through the various code paths.
// A simple macro to make this option mess easier to read
+#[cfg(windows)]
macro_rules! otry {
($expr:expr) => (match $expr {
Some(val) => val,
_ => false
}
} else {
- tcx.sess.cstore.is_foreign_item(def_id)
+ tcx.is_foreign_item(def_id)
};
if let Some(name) = weak_lang_items::link_name(&attrs) {
use back::symbol_export::{self, ExportedSymbols};
use llvm::{ContextRef, Linkage, ModuleRef, ValueRef, Vector, get_param};
use llvm;
+use metadata;
use rustc::hir::def_id::LOCAL_CRATE;
use middle::lang_items::StartFnLangItem;
use middle::cstore::EncodedMetadata;
};
unsafe {
llvm::LLVMSetInitializer(llglobal, llconst);
- let section_name =
- tcx.sess.cstore.metadata_section_name(&tcx.sess.target.target);
+ let section_name = metadata::metadata_section_name(&tcx.sess.target.target);
let name = CString::new(section_name).unwrap();
llvm::LLVMSetSection(llglobal, name.as_ptr());
}
Some(_) => true,
None => {
- if tcx.sess.cstore.is_exported_symbol(def_id) ||
- tcx.sess.cstore.is_foreign_item(def_id)
+ if tcx.is_exported_symbol(def_id) ||
+ tcx.is_foreign_item(def_id)
{
// We can link to the item in question, no instance needed
// in this crate
if is_inline_instance(tcx, instance) {
return true
}
+ if let ty::InstanceDef::DropGlue(..) = instance.def {
+ // Drop glue wants to be instantiated at every translation
+ // unit, but without an #[inline] hint. We should make this
+ // available to normal end-users.
+ return true
+ }
attr::requests_inline(&instance.def.attrs(tcx)[..])
}
llvm::set_thread_local(g, true);
}
}
- if ccx.use_dll_storage_attrs() && !ccx.sess().cstore.is_foreign_item(def_id) {
+ if ccx.use_dll_storage_attrs() && !ccx.tcx().is_foreign_item(def_id) {
// This item is external but not foreign, i.e. it originates from an external Rust
// crate. Since we don't know whether this crate will be linked dynamically or
// statically in the final application, we always mark such symbols as 'dllimport'.
//! This API is completely unstable and subject to change.
#![crate_name = "rustc_trans"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(libc)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
#![feature(slice_patterns)]
-#![feature(staged_api)]
#![feature(unicode)]
#![feature(conservative_impl_trait)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
use rustc::dep_graph::WorkProduct;
use syntax_pos::symbol::Symbol;
extern crate flate;
extern crate libc;
+extern crate owning_ref;
#[macro_use] extern crate rustc;
extern crate rustc_back;
extern crate rustc_data_structures;
pub use base::trans_crate;
pub use back::symbol_names::provide;
+pub use metadata::LlvmMetadataLoader;
+pub use llvm_util::{init, target_features, print_version, print_passes, print, enable_llvm_debug};
+
pub mod back {
pub use rustc::hir::svh;
pub mod diagnostics;
-#[macro_use]
-mod macros;
-
mod abi;
mod adt;
mod asm;
mod declare;
mod glue;
mod intrinsic;
+mod llvm_util;
mod machine;
+mod metadata;
mod meth;
mod mir;
mod monomorphize;
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use syntax_pos::symbol::Symbol;
+use back::write::create_target_machine;
+use llvm;
+use rustc::session::Session;
+use rustc::session::config::PrintRequest;
+use libc::{c_int, c_char};
+use std::ffi::CString;
+
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Once;
+
+pub fn init(sess: &Session) {
+ unsafe {
+ // Before we touch LLVM, make sure that multithreading is enabled.
+ static POISONED: AtomicBool = AtomicBool::new(false);
+ static INIT: Once = Once::new();
+ INIT.call_once(|| {
+ if llvm::LLVMStartMultithreaded() != 1 {
+ // use an extra bool to record the failure: the Once will never
+ // run again, so all future usage of LLVM must be able to see it.
+ POISONED.store(true, Ordering::SeqCst);
+ }
+
+ configure_llvm(sess);
+ });
+
+ if POISONED.load(Ordering::SeqCst) {
+ bug!("couldn't enable multi-threaded LLVM");
+ }
+ }
+}
+
+unsafe fn configure_llvm(sess: &Session) {
+ let mut llvm_c_strs = Vec::new();
+ let mut llvm_args = Vec::new();
+
+ {
+ let mut add = |arg: &str| {
+ let s = CString::new(arg).unwrap();
+ llvm_args.push(s.as_ptr());
+ llvm_c_strs.push(s);
+ };
+ add("rustc"); // fake program name
+ if sess.time_llvm_passes() { add("-time-passes"); }
+ if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
+
+ for arg in &sess.opts.cg.llvm_args {
+ add(&(*arg));
+ }
+ }
+
+ llvm::LLVMInitializePasses();
+
+ llvm::initialize_available_targets();
+
+ llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,
+ llvm_args.as_ptr());
+}
+
+// WARNING: the features must be known to LLVM or the feature
+// detection code will walk past the end of the feature array,
+// leading to crashes.
+
+const ARM_WHITELIST: &'static [&'static str] = &["neon\0", "vfp2\0", "vfp3\0", "vfp4\0"];
+
+const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
+ "sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
+ "ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
+ "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
+
+const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx\0", "hvx-double\0"];
+
+pub fn target_features(sess: &Session) -> Vec<Symbol> {
+ let target_machine = create_target_machine(sess);
+
+ let whitelist = match &*sess.target.target.arch {
+ "arm" => ARM_WHITELIST,
+ "x86" | "x86_64" => X86_WHITELIST,
+ "hexagon" => HEXAGON_WHITELIST,
+ _ => &[],
+ };
+
+ let mut features = Vec::new();
+ for feat in whitelist {
+ assert_eq!(feat.chars().last(), Some('\0'));
+ if unsafe { llvm::LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {
+ features.push(Symbol::intern(&feat[..feat.len() - 1]));
+ }
+ }
+ features
+}
+
+pub fn print_version() {
+ unsafe {
+ println!("LLVM version: {}.{}",
+ llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());
+ }
+}
+
+pub fn print_passes() {
+ unsafe { llvm::LLVMRustPrintPasses(); }
+}
+
+pub fn print(req: PrintRequest, sess: &Session) {
+ let tm = create_target_machine(sess);
+ unsafe {
+ match req {
+ PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),
+ PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),
+ _ => bug!("rustc_trans can't handle print request: {:?}", req),
+ }
+ }
+}
+
+pub fn enable_llvm_debug() {
+ unsafe { llvm::LLVMRustSetDebug(1); }
+}
+++ /dev/null
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-macro_rules! unpack_datum {
- ($bcx: ident, $inp: expr) => (
- {
- let db = $inp;
- $bcx = db.bcx;
- db.datum
- }
- )
-}
-
-macro_rules! unpack_result {
- ($bcx: ident, $inp: expr) => (
- {
- let db = $inp;
- $bcx = db.bcx;
- db.val
- }
- )
-}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::util::common;
+use rustc::middle::cstore::MetadataLoader;
+use rustc_back::target::Target;
+use llvm;
+use llvm::{False, ObjectFile, mk_section_iter};
+use llvm::archive_ro::ArchiveRO;
+
+use owning_ref::{ErasedBoxRef, OwningRef};
+use std::path::Path;
+use std::ptr;
+use std::slice;
+
+pub const METADATA_FILENAME: &str = "rust.metadata.bin";
+
+pub struct LlvmMetadataLoader;
+
+impl MetadataLoader for LlvmMetadataLoader {
+ fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result<ErasedBoxRef<[u8]>, String> {
+ // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
+ // internally to read the file. We also avoid even using a memcpy by
+ // just keeping the archive along while the metadata is in use.
+ let archive = ArchiveRO::open(filename)
+ .map(|ar| OwningRef::new(box ar))
+ .ok_or_else(|| {
+ debug!("llvm didn't like `{}`", filename.display());
+ format!("failed to read rlib metadata: '{}'", filename.display())
+ })?;
+ let buf: OwningRef<_, [u8]> = archive
+ .try_map(|ar| {
+ ar.iter()
+ .filter_map(|s| s.ok())
+ .find(|sect| sect.name() == Some(METADATA_FILENAME))
+ .map(|s| s.data())
+ .ok_or_else(|| {
+ debug!("didn't find '{}' in the archive", METADATA_FILENAME);
+ format!("failed to read rlib metadata: '{}'",
+ filename.display())
+ })
+ })?;
+ Ok(buf.erase_owner())
+ }
+
+ fn get_dylib_metadata(&self,
+ target: &Target,
+ filename: &Path)
+ -> Result<ErasedBoxRef<[u8]>, String> {
+ unsafe {
+ let buf = common::path2cstr(filename);
+ let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr());
+ if mb as isize == 0 {
+ return Err(format!("error reading library: '{}'", filename.display()));
+ }
+ let of = ObjectFile::new(mb)
+ .map(|of| OwningRef::new(box of))
+ .ok_or_else(|| format!("provided path not an object file: '{}'",
+ filename.display()))?;
+ let buf = of.try_map(|of| search_meta_section(of, target, filename))?;
+ Ok(buf.erase_owner())
+ }
+ }
+}
+
+fn search_meta_section<'a>(of: &'a ObjectFile,
+ target: &Target,
+ filename: &Path)
+ -> Result<&'a [u8], String> {
+ unsafe {
+ let si = mk_section_iter(of.llof);
+ while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
+ let mut name_buf = ptr::null();
+ let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
+ let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec();
+ let name = String::from_utf8(name).unwrap();
+ debug!("get_metadata_section: name {}", name);
+ if read_metadata_section_name(target) == name {
+ let cbuf = llvm::LLVMGetSectionContents(si.llsi);
+ let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
+ // The buffer is valid while the object file is around
+ let buf: &'a [u8] = slice::from_raw_parts(cbuf as *const u8, csz);
+ return Ok(buf);
+ }
+ llvm::LLVMMoveToNextSection(si.llsi);
+ }
+ }
+ Err(format!("metadata not found: '{}'", filename.display()))
+}
+
+pub fn metadata_section_name(target: &Target) -> &'static str {
+ // Historical note:
+ //
+ // When using link.exe it was seen that the section name `.note.rustc`
+ // was getting shortened to `.note.ru`, and according to the PE and COFF
+ // specification:
+ //
+ // > Executable images do not use a string table and do not support
+ // > section names longer than 8 characters
+ //
+ // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx
+ //
+ // As a result, we choose a slightly shorter name! As to why
+ // `.note.rustc` works on MinGW, that's another good question...
+
+ if target.options.is_like_osx {
+ "__DATA,.rustc"
+ } else {
+ ".rustc"
+ }
+}
+
+fn read_metadata_section_name(_target: &Target) -> &'static str {
+ ".rustc"
+}
location: Location) {
match *kind {
mir::TerminatorKind::Call {
- func: mir::Operand::Constant(mir::Constant {
+ func: mir::Operand::Constant(box mir::Constant {
literal: Literal::Value {
value: ConstVal::Function(def_id, _), ..
}, ..
Value(base));
}
if projected_ty.is_bool() {
- unsafe {
- val = llvm::LLVMConstTrunc(val, Type::i1(self.ccx).to_ref());
+ let i1_type = Type::i1(self.ccx);
+ if val_ty(val) != i1_type {
+ unsafe {
+ val = llvm::LLVMConstTrunc(val, i1_type.to_ref());
+ }
}
}
(Base::Value(val), extra)
}
failure?;
- match *kind {
+ match **kind {
mir::AggregateKind::Array(_) => {
self.const_array(dest_ty, &fields)
}
}
mir::Rvalue::Aggregate(ref kind, ref operands) => {
- match *kind {
+ match **kind {
mir::AggregateKind::Adt(adt_def, variant_index, substs, active_field_index) => {
let discr = adt_def.discriminant_for_variant(bcx.tcx(), variant_index)
.to_u128_unchecked() as u64;
fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
-> ty::GenericPredicates<'tcx>;
- /// Return an (optional) substitution to convert bound type parameters that
- /// are in scope into free ones. This function should only return Some
- /// within a fn body.
- /// See ParameterEnvironment::free_substs for more information.
- fn get_free_substs(&self) -> Option<&Substs<'tcx>>;
-
/// What lifetime should we use when a lifetime is omitted (and not elided)?
fn re_infer(&self, span: Span, _def: Option<&ty::RegionParameterDef>)
-> Option<ty::Region<'tcx>>;
Some(&rl::Region::EarlyBound(index, id)) => {
let name = tcx.hir.name(id);
tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
+ def_id: tcx.hir.local_def_id(id),
index: index,
name: name
}))
Some(&rl::Region::Free(scope, id)) => {
let name = tcx.hir.name(id);
tcx.mk_region(ty::ReFree(ty::FreeRegion {
- scope: Some(scope.to_code_extent(tcx)),
+ scope,
bound_region: ty::BrNamed(tcx.hir.local_def_id(id), name)
}))
}
};
- let trait_ref = if let Some(free_substs) = self.get_free_substs() {
- trait_ref.subst(tcx, free_substs)
- } else {
- trait_ref
- };
-
let candidates =
traits::supertraits(tcx, ty::Binder(trait_ref))
.filter(|r| self.trait_defines_associated_type_named(r.def_id(),
assert_eq!(opt_self_ty, None);
self.prohibit_type_params(&path.segments);
- let ty = tcx.at(span).type_of(def_id);
- if let Some(free_substs) = self.get_free_substs() {
- ty.subst(tcx, free_substs)
- } else {
- ty
- }
+ tcx.at(span).type_of(def_id)
}
Def::SelfTy(Some(_), None) => {
// Self in trait.
use astconv::AstConv;
use rustc::infer::type_variable::TypeVariableOrigin;
use rustc::ty::{self, ToPolyTraitRef, Ty};
+use rustc::ty::subst::Substs;
use std::cmp;
use std::iter;
use syntax::abi::Abi;
decl,
Abi::RustCall,
expected_sig);
+ // `deduce_expectations_from_expected_type` introduces late-bound
+ // lifetimes defined elsewhere, which we need to anonymize away.
+ let sig = self.tcx.anonymize_late_bound_regions(&sig);
// Create type variables (for now) to represent the transformed
// types of upvars. These will be unified during the upvar
// inference phase (`upvar.rs`).
+ let base_substs = Substs::identity_for_item(self.tcx,
+ self.tcx.closure_base_def_id(expr_def_id));
let closure_type = self.tcx.mk_closure(expr_def_id,
- self.parameter_environment.free_substs.extend_to(self.tcx, expr_def_id,
+ base_substs.extend_to(self.tcx, expr_def_id,
|_, _| span_bug!(expr.span, "closure has region param"),
|_, _| self.infcx.next_ty_var(TypeVariableOrigin::TransformedUpvar(expr.span))
)
debug!("check_closure: expr.id={:?} closure_type={:?}", expr.id, closure_type);
- let extent = self.tcx.call_site_extent(expr.id, body.value.id);
- let fn_sig = self.tcx.liberate_late_bound_regions(Some(extent), &sig);
+ let fn_sig = self.liberate_late_bound_regions(expr_def_id, &sig);
let fn_sig = self.inh.normalize_associated_types_in(body.value.span,
body.value.id, &fn_sig);
// Create a parameter environment that represents the implementation's
// method.
- let impl_param_env = ty::ParameterEnvironment::for_item(tcx, impl_m_node_id);
+ let impl_param_env = tcx.parameter_environment(impl_m.def_id);
// Create mapping from impl to skolemized.
- let impl_to_skol_substs = &impl_param_env.free_substs;
+ let impl_to_skol_substs = Substs::identity_for_item(tcx, impl_m.def_id);
// Create mapping from trait to skolemized.
let trait_to_skol_substs = impl_to_skol_substs.rebase_onto(tcx,
impl_m.container.id(),
- trait_to_impl_substs.subst(tcx,
- impl_to_skol_substs));
+ trait_to_impl_substs);
debug!("compare_impl_method: trait_to_skol_substs={:?}",
trait_to_skol_substs);
impl_m,
&trait_m_generics,
&impl_m_generics,
- trait_to_skol_substs,
- impl_to_skol_substs)?;
+ trait_to_skol_substs)?;
// Create obligations for each predicate declared by the impl
// definition in the context of the trait's parameter
// however, because we want to replace all late-bound regions with
// region variables.
let impl_predicates = tcx.predicates_of(impl_m_predicates.parent.unwrap());
- let mut hybrid_preds = impl_predicates.instantiate(tcx, impl_to_skol_substs);
+ let mut hybrid_preds = impl_predicates.instantiate_identity(tcx);
debug!("compare_impl_method: impl_bounds={:?}", hybrid_preds);
normalize_cause.clone());
tcx.infer_ctxt(trait_param_env, Reveal::UserFacing).enter(|infcx| {
- let inh = Inherited::new(infcx);
+ let inh = Inherited::new(infcx, impl_m.def_id);
let infcx = &inh.infcx;
debug!("compare_impl_method: caller_bounds={:?}",
infcx.replace_late_bound_regions_with_fresh_var(impl_m_span,
infer::HigherRankedType,
&m_sig(impl_m));
- let impl_sig =
- impl_sig.subst(tcx, impl_to_skol_substs);
let impl_sig =
inh.normalize_associated_types_in(impl_m_span,
impl_m_node_id,
let impl_fty = tcx.mk_fn_ptr(ty::Binder(impl_sig));
debug!("compare_impl_method: impl_fty={:?}", impl_fty);
- let trait_sig = tcx.liberate_late_bound_regions(
- infcx.parameter_environment.free_id_outlive,
+ let trait_sig = inh.liberate_late_bound_regions(
+ impl_m.def_id,
&m_sig(trait_m));
let trait_sig =
trait_sig.subst(tcx, trait_to_skol_substs);
impl_m: &ty::AssociatedItem,
trait_generics: &ty::Generics,
impl_generics: &ty::Generics,
- trait_to_skol_substs: &Substs<'tcx>,
- impl_to_skol_substs: &Substs<'tcx>)
+ trait_to_skol_substs: &Substs<'tcx>)
-> Result<(), ErrorReported> {
let trait_params = &trait_generics.regions[..];
let impl_params = &impl_generics.regions[..];
debug!("check_region_bounds_on_impl_method: \
trait_generics={:?} \
impl_generics={:?} \
- trait_to_skol_substs={:?} \
- impl_to_skol_substs={:?}",
+ trait_to_skol_substs={:?}",
trait_generics,
impl_generics,
- trait_to_skol_substs,
- impl_to_skol_substs);
+ trait_to_skol_substs);
// Must have same number of early-bound lifetime parameters.
// Unfortunately, if the user screws up the bounds, then this
debug!("compare_const_impl(impl_trait_ref={:?})", impl_trait_ref);
tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
- let inh = Inherited::new(infcx);
+ let inh = Inherited::new(infcx, impl_c.def_id);
let infcx = &inh.infcx;
// The below is for the most part highly similar to the procedure
// Create a parameter environment that represents the implementation's
// method.
let impl_c_node_id = tcx.hir.as_local_node_id(impl_c.def_id).unwrap();
- let impl_param_env = ty::ParameterEnvironment::for_item(tcx, impl_c_node_id);
-
- // Create mapping from impl to skolemized.
- let impl_to_skol_substs = &impl_param_env.free_substs;
-
- // Create mapping from trait to skolemized.
- let trait_to_skol_substs = impl_to_skol_substs.rebase_onto(tcx,
- impl_c.container.id(),
- trait_to_impl_substs.subst(tcx,
- impl_to_skol_substs));
- debug!("compare_const_impl: trait_to_skol_substs={:?}",
- trait_to_skol_substs);
// Compute skolemized form of impl and trait const tys.
- let impl_ty = tcx.type_of(impl_c.def_id).subst(tcx, impl_to_skol_substs);
- let trait_ty = tcx.type_of(trait_c.def_id).subst(tcx, trait_to_skol_substs);
+ let impl_ty = tcx.type_of(impl_c.def_id);
+ let trait_ty = tcx.type_of(trait_c.def_id).subst(tcx, trait_to_impl_substs);
let mut cause = ObligationCause::misc(impl_c_span, impl_c_node_id);
// There is no "body" here, so just pass dummy id.
-> Result<(), ErrorReported>
{
let drop_impl_node_id = tcx.hir.as_local_node_id(drop_impl_did).unwrap();
- let self_type_node_id = tcx.hir.as_local_node_id(self_type_did).unwrap();
// check that the impl type can be made to match the trait type.
- let impl_param_env = ty::ParameterEnvironment::for_item(tcx, self_type_node_id);
+ let impl_param_env = tcx.parameter_environment(self_type_did);
tcx.infer_ctxt(impl_param_env, Reveal::UserFacing).enter(|ref infcx| {
let tcx = infcx.tcx;
let mut fulfillment_cx = traits::FulfillmentContext::new();
let named_type = tcx.type_of(self_type_did);
- let named_type = named_type.subst(tcx, &infcx.parameter_environment.free_substs);
let drop_impl_span = tcx.def_span(drop_impl_did);
let fresh_impl_substs =
fulfillment_cx.register_predicate_obligations(infcx, obligations);
}
Err(_) => {
- let item_span = tcx.hir.span(self_type_node_id);
+ let item_span = tcx.def_span(self_type_did);
struct_span_err!(tcx.sess, drop_impl_span, E0366,
"Implementations of Drop cannot be specialized")
.span_note(item_span,
rcx: &mut RegionCtxt<'a, 'gcx, 'tcx>,
ty: ty::Ty<'tcx>,
span: Span,
- scope: region::CodeExtent<'tcx>)
+ scope: region::CodeExtent)
-> Result<(), ErrorReported>
{
debug!("check_safety_of_destructor_if_necessary typ: {:?} scope: {:?}",
let bound_list = unsatisfied_predicates.iter()
.map(|p| format!("`{} : {}`", p.self_ty(), p))
.collect::<Vec<_>>()
- .join(", ");
+ .join("\n");
err.note(&format!("the method `{}` exists but the following trait bounds \
- were not satisfied: {}",
+ were not satisfied:\n{}",
item_name,
bound_list));
}
use rustc_back::slice::ref_slice;
use rustc::infer::{self, InferCtxt, InferOk, RegionVariableOrigin};
use rustc::infer::type_variable::{TypeVariableOrigin};
+use rustc::middle::region::CodeExtent;
use rustc::ty::subst::{Kind, Subst, Substs};
use rustc::traits::{self, FulfillmentContext, ObligationCause, ObligationCauseCode, Reveal};
-use rustc::ty::{ParamTy, ParameterEnvironment};
-use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
+use rustc::ty::{ParamTy, LvaluePreference, NoPreference, PreferMutLvalue};
use rustc::ty::{self, Ty, TyCtxt, Visibility};
use rustc::ty::{MethodCall, MethodCallee};
use rustc::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
// variables to get the concrete type, which can be used to
// deanonymize TyAnon, after typeck is done with all functions.
anon_types: RefCell<NodeMap<Ty<'tcx>>>,
+
+ /// Each type parameter has an implicit region bound that
+ /// indicates it must outlive at least the function body (the user
+ /// may specify stronger requirements). This field indicates the
+ /// region of the callee. If it is `None`, then the parameter
+ /// environment is for an item or something where the "callee" is
+ /// not clear.
+ implicit_region_bound: Option<ty::Region<'tcx>>,
}
impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> {
/// Necessary because we can't write the following bound:
/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>).
pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>
+ infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>,
+ def_id: DefId,
}
impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
- pub fn build(tcx: TyCtxt<'a, 'gcx, 'gcx>, id: ast::NodeId)
+ pub fn build(tcx: TyCtxt<'a, 'gcx, 'gcx>, def_id: DefId)
-> InheritedBuilder<'a, 'gcx, 'tcx> {
let tables = ty::TypeckTables::empty();
- let param_env = ParameterEnvironment::for_item(tcx, id);
+ let param_env = tcx.parameter_environment(def_id);
InheritedBuilder {
- infcx: tcx.infer_ctxt((tables, param_env), Reveal::UserFacing)
+ infcx: tcx.infer_ctxt((tables, param_env), Reveal::UserFacing),
+ def_id,
}
}
}
fn enter<F, R>(&'tcx mut self, f: F) -> R
where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R
{
- self.infcx.enter(|infcx| f(Inherited::new(infcx)))
+ let def_id = self.def_id;
+ self.infcx.enter(|infcx| f(Inherited::new(infcx, def_id)))
}
}
impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
- fn new(infcx: InferCtxt<'a, 'gcx, 'tcx>) -> Self {
+ fn new(infcx: InferCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> Self {
+ let tcx = infcx.tcx;
+ let item_id = tcx.hir.as_local_node_id(def_id);
+ let body_id = item_id.and_then(|id| tcx.hir.maybe_body_owned_by(id));
+ let implicit_region_bound = body_id.map(|body| {
+ tcx.mk_region(ty::ReScope(CodeExtent::CallSiteScope(body)))
+ });
+
Inherited {
infcx: infcx,
fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
deferred_call_resolutions: RefCell::new(DefIdMap()),
deferred_cast_checks: RefCell::new(Vec::new()),
anon_types: RefCell::new(NodeMap()),
+ implicit_region_bound,
}
}
obligations);
InferOk { value, obligations }
}
+
+ /// Replace any late-bound regions bound in `value` with
+ /// free variants attached to `all_outlive_scope`.
+ fn liberate_late_bound_regions<T>(&self,
+ all_outlive_scope: DefId,
+ value: &ty::Binder<T>)
+ -> T
+ where T: TypeFoldable<'tcx>
+ {
+ self.tcx.replace_late_bound_regions(value, |br| {
+ self.tcx.mk_region(ty::ReFree(ty::FreeRegion {
+ scope: all_outlive_scope,
+ bound_region: br
+ }))
+ }).0
+ }
}
struct CheckItemTypesVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> }
});
let body = tcx.hir.body(body_id);
- Inherited::build(tcx, id).enter(|inh| {
+ Inherited::build(tcx, def_id).enter(|inh| {
let fcx = if let Some(decl) = fn_decl {
let fn_sig = tcx.type_of(def_id).fn_sig();
check_abi(tcx, span, fn_sig.abi());
// Compute the fty from point of view of inside fn.
- let fn_scope = inh.tcx.call_site_extent(id, body_id.node_id);
- let fn_sig =
- fn_sig.subst(inh.tcx, &inh.parameter_environment.free_substs);
let fn_sig =
- inh.tcx.liberate_late_bound_regions(Some(fn_scope), &fn_sig);
+ inh.liberate_late_bound_regions(def_id, &fn_sig);
let fn_sig =
inh.normalize_associated_types_in(body.value.span, body_id.node_id, &fn_sig);
impl_id: DefId,
impl_item: &hir::ImplItem)
{
- let ancestors = trait_def.ancestors(impl_id);
+ let ancestors = trait_def.ancestors(tcx, impl_id);
let kind = match impl_item.node {
hir::ImplItemKind::Const(..) => ty::AssociatedKind::Const,
let mut invalidated_items = Vec::new();
let associated_type_overridden = overridden_associated_type.is_some();
for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
- let is_implemented = trait_def.ancestors(impl_id)
+ let is_implemented = trait_def.ancestors(tcx, impl_id)
.defs(tcx, trait_item.name, trait_item.kind)
.next()
.map(|node_item| !node_item.node.is_from_trait())
impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
- fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
- Some(&self.parameter_environment.free_substs)
- }
-
fn get_type_parameter_bounds(&self, _: Span, def_id: DefId)
-> ty::GenericPredicates<'tcx>
{
use middle::free_region::FreeRegionMap;
use middle::mem_categorization as mc;
use middle::mem_categorization::Categorization;
-use middle::region::{self, CodeExtent, RegionMaps};
+use middle::region::{CodeExtent, RegionMaps};
use rustc::hir::def_id::DefId;
use rustc::ty::subst::Substs;
use rustc::traits;
region_bound_pairs: Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>,
- pub region_maps: Rc<RegionMaps<'tcx>>,
+ pub region_maps: Rc<RegionMaps>,
free_region_map: FreeRegionMap<'tcx>,
body_id: ast::NodeId,
// call_site scope of innermost fn
- call_site_scope: Option<CodeExtent<'tcx>>,
+ call_site_scope: Option<CodeExtent>,
// id of innermost fn or loop
repeating_scope: ast::NodeId,
}
}
- fn set_call_site_scope(&mut self, call_site_scope: Option<CodeExtent<'tcx>>)
- -> Option<CodeExtent<'tcx>> {
+ fn set_call_site_scope(&mut self, call_site_scope: Option<CodeExtent>)
+ -> Option<CodeExtent> {
mem::replace(&mut self.call_site_scope, call_site_scope)
}
let body_id = body.id();
- let call_site = self.tcx.intern_code_extent(
- region::CodeExtentData::CallSiteScope { fn_id: id, body_id: body_id.node_id });
+ let call_site = CodeExtent::CallSiteScope(body_id);
let old_call_site_scope = self.set_call_site_scope(Some(call_site));
let fn_sig = {
let old_body_id = self.set_body_id(body_id.node_id);
self.relate_free_regions(&fn_sig_tys[..], body_id.node_id, span);
- self.link_fn_args(self.tcx.node_extent(body_id.node_id), &body.arguments);
+ self.link_fn_args(CodeExtent::Misc(body_id.node_id), &body.arguments);
self.visit_body(body);
self.visit_region_obligations(body_id.node_id);
for implication in implied_bounds {
debug!("implication: {:?}", implication);
match implication {
- ImpliedBound::RegionSubRegion(&ty::ReFree(free_a),
+ ImpliedBound::RegionSubRegion(r_a @ &ty::ReEarlyBound(_),
+ &ty::ReVar(vid_b)) |
+ ImpliedBound::RegionSubRegion(r_a @ &ty::ReFree(_),
&ty::ReVar(vid_b)) => {
- self.add_given(free_a, vid_b);
+ self.add_given(r_a, vid_b);
}
ImpliedBound::RegionSubParam(r_a, param_b) => {
self.region_bound_pairs.push((r_a, GenericKind::Param(param_b)));
// call occurs.
//
// FIXME(#6268) to support nested method calls, should be callee_id
- let callee_scope = self.tcx.node_extent(call_expr.id);
+ let callee_scope = CodeExtent::Misc(call_expr.id);
let callee_region = self.tcx.mk_region(ty::ReScope(callee_scope));
debug!("callee_region={:?}", callee_region);
debug!("constrain_index(index_expr=?, indexed_ty={}",
self.ty_to_string(indexed_ty));
- let r_index_expr = ty::ReScope(self.tcx.node_extent(index_expr.id));
+ let r_index_expr = ty::ReScope(CodeExtent::Misc(index_expr.id));
if let ty::TyRef(r_ptr, mt) = indexed_ty.sty {
match mt.ty.sty {
ty::TySlice(_) | ty::TyStr => {
/// Computes the guarantors for any ref bindings in a match and
/// then ensures that the lifetime of the resulting pointer is
/// linked to the lifetime of its guarantor (if any).
- fn link_fn_args(&self, body_scope: CodeExtent<'tcx>, args: &[hir::Arg]) {
+ fn link_fn_args(&self, body_scope: CodeExtent, args: &[hir::Arg]) {
debug!("regionck::link_fn_args(body_scope={:?})", body_scope);
let mc = &mc::MemCategorizationContext::new(self, &self.region_maps);
for arg in args {
/// must outlive `callee_scope`.
fn link_by_ref(&self,
expr: &hir::Expr,
- callee_scope: CodeExtent<'tcx>) {
+ callee_scope: CodeExtent) {
debug!("link_by_ref(expr={:?}, callee_scope={:?})",
expr, callee_scope);
let mc = mc::MemCategorizationContext::new(self, &self.region_maps);
}
fn param_bound(&self, param_ty: ty::ParamTy) -> VerifyBound<'tcx> {
- let param_env = &self.parameter_environment;
-
debug!("param_bound(param_ty={:?})",
param_ty);
// Add in the default bound of fn body that applies to all in
// scope type parameters:
- param_bounds.extend(param_env.implicit_region_bound);
+ param_bounds.extend(self.implicit_region_bound);
VerifyBound::AnyRegion(param_bounds)
}
}
let mut regions = ty.regions();
- regions.retain(|r| !r.is_bound()); // ignore late-bound regions
+ regions.retain(|r| !r.is_late_bound()); // ignore late-bound regions
bounds.push(VerifyBound::AllRegions(regions));
// remove bounds that must hold, since they are not interesting
use constrained_type_params::{identify_constrained_type_params, Parameter};
use hir::def_id::DefId;
-use middle::region::{CodeExtent};
use rustc::traits::{self, ObligationCauseCode};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::util::nodemap::{FxHashSet, FxHashMap};
error_192(tcx, item.span);
}
}
- hir::ItemFn(.., body_id) => {
- self.check_item_fn(item, body_id);
+ hir::ItemFn(..) => {
+ self.check_item_fn(item);
}
hir::ItemStatic(..) => {
self.check_item_type(item);
sig_if_method: Option<&hir::MethodSig>) {
let code = self.code.clone();
self.for_id(item_id, span).with_fcx(|fcx, this| {
- let free_substs = &fcx.parameter_environment.free_substs;
- let free_id_outlive = fcx.parameter_environment.free_id_outlive;
-
let item = fcx.tcx.associated_item(fcx.tcx.hir.local_def_id(item_id));
let (mut implied_bounds, self_ty) = match item.container {
match item.kind {
ty::AssociatedKind::Const => {
let ty = fcx.tcx.type_of(item.def_id);
- let ty = fcx.instantiate_type_scheme(span, free_substs, &ty);
+ let ty = fcx.normalize_associated_types_in(span, &ty);
fcx.register_wf_obligation(ty, span, code.clone());
}
ty::AssociatedKind::Method => {
reject_shadowing_type_parameters(fcx.tcx, item.def_id);
let method_ty = fcx.tcx.type_of(item.def_id);
- let method_ty = fcx.instantiate_type_scheme(span, free_substs, &method_ty);
- let predicates = fcx.instantiate_bounds(span, item.def_id, free_substs);
+ let method_ty = fcx.normalize_associated_types_in(span, &method_ty);
+ let predicates = fcx.tcx.predicates_of(item.def_id)
+ .instantiate_identity(fcx.tcx);
+ let predicates = fcx.normalize_associated_types_in(span, &predicates);
let sig = method_ty.fn_sig();
this.check_fn_or_method(fcx, span, sig, &predicates,
- free_id_outlive, &mut implied_bounds);
+ item.def_id, &mut implied_bounds);
let sig_if_method = sig_if_method.expect("bad signature for method");
- this.check_method_receiver(fcx, sig_if_method, &item,
- free_id_outlive, self_ty);
+ this.check_method_receiver(fcx, sig_if_method, &item, self_ty);
}
ty::AssociatedKind::Type => {
if item.defaultness.has_value() {
let ty = fcx.tcx.type_of(item.def_id);
- let ty = fcx.instantiate_type_scheme(span, free_substs, &ty);
+ let ty = fcx.normalize_associated_types_in(span, &ty);
fcx.register_wf_obligation(ty, span, code.clone());
}
}
fn for_id<'tcx>(&self, id: ast::NodeId, span: Span)
-> CheckWfFcxBuilder<'a, 'gcx, 'tcx> {
CheckWfFcxBuilder {
- inherited: Inherited::build(self.tcx, id),
+ inherited: Inherited::build(self.tcx, self.tcx.hir.local_def_id(id)),
code: self.code.clone(),
id: id,
span: span
}
}
- let free_substs = &fcx.parameter_environment.free_substs;
let def_id = fcx.tcx.hir.local_def_id(item.id);
- let predicates = fcx.instantiate_bounds(item.span, def_id, free_substs);
+ let predicates = fcx.tcx.predicates_of(def_id).instantiate_identity(fcx.tcx);
+ let predicates = fcx.normalize_associated_types_in(item.span, &predicates);
this.check_where_clauses(fcx, item.span, &predicates);
vec![] // no implied bounds in a struct def'n
}
self.for_item(item).with_fcx(|fcx, this| {
- let free_substs = &fcx.parameter_environment.free_substs;
- let predicates = fcx.instantiate_bounds(item.span, trait_def_id, free_substs);
+ let predicates = fcx.tcx.predicates_of(trait_def_id).instantiate_identity(fcx.tcx);
+ let predicates = fcx.normalize_associated_types_in(item.span, &predicates);
this.check_where_clauses(fcx, item.span, &predicates);
vec![]
});
}
- fn check_item_fn(&mut self,
- item: &hir::Item,
- body_id: hir::BodyId)
- {
+ fn check_item_fn(&mut self, item: &hir::Item) {
self.for_item(item).with_fcx(|fcx, this| {
- let free_substs = &fcx.parameter_environment.free_substs;
let def_id = fcx.tcx.hir.local_def_id(item.id);
let ty = fcx.tcx.type_of(def_id);
- let item_ty = fcx.instantiate_type_scheme(item.span, free_substs, &ty);
+ let item_ty = fcx.normalize_associated_types_in(item.span, &ty);
let sig = item_ty.fn_sig();
- let predicates = fcx.instantiate_bounds(item.span, def_id, free_substs);
+ let predicates = fcx.tcx.predicates_of(def_id).instantiate_identity(fcx.tcx);
+ let predicates = fcx.normalize_associated_types_in(item.span, &predicates);
let mut implied_bounds = vec![];
- let free_id_outlive = fcx.tcx.call_site_extent(item.id, body_id.node_id);
this.check_fn_or_method(fcx, item.span, sig, &predicates,
- Some(free_id_outlive), &mut implied_bounds);
+ def_id, &mut implied_bounds);
implied_bounds
})
}
self.for_item(item).with_fcx(|fcx, this| {
let ty = fcx.tcx.type_of(fcx.tcx.hir.local_def_id(item.id));
- let item_ty = fcx.instantiate_type_scheme(item.span,
- &fcx.parameter_environment
- .free_substs,
- &ty);
+ let item_ty = fcx.normalize_associated_types_in(item.span, &ty);
fcx.register_wf_obligation(item_ty, item.span, this.code.clone());
debug!("check_impl: {:?}", item);
self.for_item(item).with_fcx(|fcx, this| {
- let free_substs = &fcx.parameter_environment.free_substs;
let item_def_id = fcx.tcx.hir.local_def_id(item.id);
match *ast_trait_ref {
Some(ref ast_trait_ref) => {
let trait_ref = fcx.tcx.impl_trait_ref(item_def_id).unwrap();
let trait_ref =
- fcx.instantiate_type_scheme(
- ast_trait_ref.path.span, free_substs, &trait_ref);
+ fcx.normalize_associated_types_in(
+ ast_trait_ref.path.span, &trait_ref);
let obligations =
ty::wf::trait_obligations(fcx,
fcx.body_id,
}
None => {
let self_ty = fcx.tcx.type_of(item_def_id);
- let self_ty = fcx.instantiate_type_scheme(item.span, free_substs, &self_ty);
+ let self_ty = fcx.normalize_associated_types_in(item.span, &self_ty);
fcx.register_wf_obligation(self_ty, ast_self_ty.span, this.code.clone());
}
}
- let predicates = fcx.instantiate_bounds(item.span, item_def_id, free_substs);
+ let predicates = fcx.tcx.predicates_of(item_def_id).instantiate_identity(fcx.tcx);
+ let predicates = fcx.normalize_associated_types_in(item.span, &predicates);
this.check_where_clauses(fcx, item.span, &predicates);
fcx.impl_implied_bounds(item_def_id, item.span)
span: Span,
sig: ty::PolyFnSig<'tcx>,
predicates: &ty::InstantiatedPredicates<'tcx>,
- free_id_outlive: Option<CodeExtent<'tcx>>,
+ def_id: DefId,
implied_bounds: &mut Vec<Ty<'tcx>>)
{
- let free_substs = &fcx.parameter_environment.free_substs;
- let sig = fcx.instantiate_type_scheme(span, free_substs, &sig);
- let sig = fcx.tcx.liberate_late_bound_regions(free_id_outlive, &sig);
+ let sig = fcx.normalize_associated_types_in(span, &sig);
+ let sig = fcx.liberate_late_bound_regions(def_id, &sig);
for input_ty in sig.inputs() {
fcx.register_wf_obligation(&input_ty, span, self.code.clone());
fcx: &FnCtxt<'fcx, 'gcx, 'tcx>,
method_sig: &hir::MethodSig,
method: &ty::AssociatedItem,
- free_id_outlive: Option<CodeExtent<'tcx>>,
self_ty: ty::Ty<'tcx>)
{
// check that the type of the method's receiver matches the
let span = method_sig.decl.inputs[0].span;
- let free_substs = &fcx.parameter_environment.free_substs;
let method_ty = fcx.tcx.type_of(method.def_id);
- let fty = fcx.instantiate_type_scheme(span, free_substs, &method_ty);
- let sig = fcx.tcx.liberate_late_bound_regions(free_id_outlive, &fty.fn_sig());
+ let fty = fcx.normalize_associated_types_in(span, &method_ty);
+ let sig = fcx.liberate_late_bound_regions(method.def_id, &fty.fn_sig());
debug!("check_method_receiver: sig={:?}", sig);
}
ExplicitSelf::ByBox => fcx.tcx.mk_box(self_ty)
};
- let rcvr_ty = fcx.instantiate_type_scheme(span, free_substs, &rcvr_ty);
- let rcvr_ty = fcx.tcx.liberate_late_bound_regions(free_id_outlive,
- &ty::Binder(rcvr_ty));
+ let rcvr_ty = fcx.normalize_associated_types_in(span, &rcvr_ty);
+ let rcvr_ty = fcx.liberate_late_bound_regions(method.def_id,
+ &ty::Binder(rcvr_ty));
debug!("check_method_receiver: receiver ty = {:?}", rcvr_ty);
struct_def.fields().iter()
.map(|field| {
let field_ty = self.tcx.type_of(self.tcx.hir.local_def_id(field.id));
- let field_ty = self.instantiate_type_scheme(field.span,
- &self.parameter_environment
- .free_substs,
- &field_ty);
+ let field_ty = self.normalize_associated_types_in(field.span,
+ &field_ty);
AdtField { ty: field_ty, span: field.span }
})
.collect();
}
fn impl_implied_bounds(&self, impl_def_id: DefId, span: Span) -> Vec<Ty<'tcx>> {
- let free_substs = &self.parameter_environment.free_substs;
match self.tcx.impl_trait_ref(impl_def_id) {
Some(ref trait_ref) => {
// Trait impl: take implied bounds from all types that
// appear in the trait reference.
- let trait_ref = self.instantiate_type_scheme(span, free_substs, trait_ref);
+ let trait_ref = self.normalize_associated_types_in(span, trait_ref);
trait_ref.substs.types().collect()
}
None => {
// Inherent impl: take implied bounds from the self type.
let self_ty = self.tcx.type_of(impl_def_id);
- let self_ty = self.instantiate_type_scheme(span, free_substs, &self_ty);
+ let self_ty = self.normalize_associated_types_in(span, &self_ty);
vec![self_ty]
}
}
use rustc::ty::{self, Ty, TyCtxt, MethodCall, MethodCallee};
use rustc::ty::adjustment;
use rustc::ty::fold::{TypeFolder,TypeFoldable};
-use rustc::util::nodemap::{DefIdMap, DefIdSet};
+use rustc::util::nodemap::DefIdSet;
use syntax::ast;
use syntax_pos::Span;
use std::mem;
tables: ty::TypeckTables<'gcx>,
- // Mapping from free regions of the function to the
- // early-bound versions of them, visible from the
- // outside of the function. This is needed by, and
- // only populated if there are any `impl Trait`.
- free_to_bound_regions: DefIdMap<ty::Region<'gcx>>,
-
body: &'gcx hir::Body,
}
impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
fn new(fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>, body: &'gcx hir::Body)
-> WritebackCx<'cx, 'gcx, 'tcx> {
- let mut wbcx = WritebackCx {
+ WritebackCx {
fcx: fcx,
tables: ty::TypeckTables::empty(),
- free_to_bound_regions: DefIdMap(),
body: body
- };
-
- // Only build the reverse mapping if `impl Trait` is used.
- if fcx.anon_types.borrow().is_empty() {
- return wbcx;
}
-
- let gcx = fcx.tcx.global_tcx();
- let free_substs = fcx.parameter_environment.free_substs;
- for (i, k) in free_substs.iter().enumerate() {
- let r = if let Some(r) = k.as_region() {
- r
- } else {
- continue;
- };
- match *r {
- ty::ReFree(ty::FreeRegion {
- bound_region: ty::BoundRegion::BrNamed(def_id, name), ..
- }) => {
- let bound_region = gcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
- index: i as u32,
- name: name,
- }));
- wbcx.free_to_bound_regions.insert(def_id, bound_region);
- }
- _ => {
- bug!("{:?} is not a free region for an early-bound lifetime", r);
- }
- }
- }
-
- wbcx
}
fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
let inside_ty = self.resolve(&concrete_ty, &node_id);
// Convert the type from the function into a type valid outside
- // the function, by replacing free regions with early-bound ones.
+ // the function, by replacing invalid regions with 'static,
+ // after producing an error for each of them.
let outside_ty = gcx.fold_regions(&inside_ty, &mut false, |r, _| {
match *r {
- // 'static is valid everywhere.
- ty::ReStatic => gcx.types.re_static,
- ty::ReEmpty => gcx.types.re_empty,
-
- // Free regions that come from early-bound regions are valid.
- ty::ReFree(ty::FreeRegion {
- bound_region: ty::BoundRegion::BrNamed(def_id, ..), ..
- }) if self.free_to_bound_regions.contains_key(&def_id) => {
- self.free_to_bound_regions[&def_id]
- }
+ // 'static and early-bound regions are valid.
+ ty::ReStatic |
+ ty::ReEarlyBound(_) |
+ ty::ReEmpty => r,
ty::ReFree(_) |
- ty::ReEarlyBound(_) |
ty::ReLateBound(..) |
ty::ReScope(_) |
ty::ReSkolemized(..) => {
use rustc::traits::{self, ObligationCause, Reveal};
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::ty::ParameterEnvironment;
use rustc::ty::TypeFoldable;
use rustc::ty::adjustment::CoerceUnsizedInfo;
-use rustc::ty::subst::Subst;
use rustc::ty::util::CopyImplementationError;
use rustc::infer;
self_type);
let span = tcx.hir.span(impl_node_id);
- let param_env = ParameterEnvironment::for_item(tcx, impl_node_id);
- let self_type = self_type.subst(tcx, ¶m_env.free_substs);
+ let param_env = tcx.parameter_environment(impl_did);
assert!(!self_type.has_escaping_regions());
debug!("visit_implementation_of_copy: self_type={:?} (free)",
target);
let span = tcx.hir.span(impl_node_id);
- let param_env = ParameterEnvironment::for_item(tcx, impl_node_id);
- let source = source.subst(tcx, ¶m_env.free_substs);
- let target = target.subst(tcx, ¶m_env.free_substs);
+ let param_env = tcx.parameter_environment(impl_did);
assert!(!source.has_escaping_regions());
let err_info = CoerceUnsizedInfo { custom_kind: None };
return err_info;
}
+ // Here we are considering a case of converting
+ // `S<P0...Pn>` to S<Q0...Qn>`. As an example, let's imagine a struct `Foo<T, U>`,
+ // which acts like a pointer to `U`, but carries along some extra data of type `T`:
+ //
+ // struct Foo<T, U> {
+ // extra: T,
+ // ptr: *mut U,
+ // }
+ //
+ // We might have an impl that allows (e.g.) `Foo<T, [i32; 3]>` to be unsized
+ // to `Foo<T, [i32]>`. That impl would look like:
+ //
+ // impl<T, U: Unsize<V>, V> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}
+ //
+ // Here `U = [i32; 3]` and `V = [i32]`. At runtime,
+ // when this coercion occurs, we would be changing the
+ // field `ptr` from a thin pointer of type `*mut [i32;
+ // 3]` to a fat pointer of type `*mut [i32]` (with
+ // extra data `3`). **The purpose of this check is to
+ // make sure that we know how to do this conversion.**
+ //
+ // To check if this impl is legal, we would walk down
+ // the fields of `Foo` and consider their types with
+ // both substitutes. We are looking to find that
+ // exactly one (non-phantom) field has changed its
+ // type, which we will expect to be the pointer that
+ // is becoming fat (we could probably generalize this
+ // to mutiple thin pointers of the same type becoming
+ // fat, but we don't). In this case:
+ //
+ // - `extra` has type `T` before and type `T` after
+ // - `ptr` has type `*mut U` before and type `*mut V` after
+ //
+ // Since just one field changed, we would then check
+ // that `*mut U: CoerceUnsized<*mut V>` is implemented
+ // (in other words, that we know how to do this
+ // conversion). This will work out because `U:
+ // Unsize<V>`, and we have a builtin rule that `*mut
+ // U` can be coerced to `*mut V` if `U: Unsize<V>`.
let fields = &def_a.struct_variant().fields;
let diff_fields = fields.iter()
.enumerate()
return None;
}
- // Ignore fields that aren't significantly changed
- if let Ok(ok) = infcx.sub_types(false, &cause, b, a) {
+ // Ignore fields that aren't changed; it may
+ // be that we could get away with subtyping or
+ // something more accepting, but we use
+ // equality because we want to be able to
+ // perform this check without computing
+ // variance where possible. (This is because
+ // we may have to evaluate constraint
+ // expressions in the course of execution.)
+ // See e.g. #41936.
+ if let Ok(ok) = infcx.eq_types(false, &cause, b, a) {
if ok.obligations.is_empty() {
return None;
}
}
enforce_trait_manually_implementable(tcx, impl_def_id, trait_ref.def_id);
- let trait_def = tcx.trait_def(trait_ref.def_id);
- trait_def.record_local_impl(tcx, impl_def_id, trait_ref);
}
}
fn coherent_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
(_, def_id): (CrateNum, DefId)) {
- tcx.populate_implementations_for_trait_if_necessary(def_id);
-
let impls = tcx.hir.trait_impls(def_id);
for &impl_id in impls {
check_impl(tcx, impl_id);
let _task =
tcx.dep_graph.in_task(DepNode::CoherenceOverlapCheck(trait_def_id));
- let def = tcx.trait_def(trait_def_id);
-
- // attempt to insert into the specialization graph
- let insert_result = def.add_impl_for_specialization(tcx, impl_def_id);
-
- // insertion failed due to overlap
- if let Err(overlap) = insert_result {
- let mut err = struct_span_err!(tcx.sess,
- tcx.span_of_impl(impl_def_id).unwrap(),
- E0119,
- "conflicting implementations of trait `{}`{}:",
- overlap.trait_desc,
- overlap.self_desc.clone().map_or(String::new(),
- |ty| {
- format!(" for type `{}`", ty)
- }));
-
- match tcx.span_of_impl(overlap.with_impl) {
- Ok(span) => {
- err.span_label(span, "first implementation here");
- err.span_label(tcx.span_of_impl(impl_def_id).unwrap(),
- format!("conflicting implementation{}",
- overlap.self_desc
- .map_or(String::new(),
- |ty| format!(" for `{}`", ty))));
- }
- Err(cname) => {
- err.note(&format!("conflicting implementation in crate `{}`", cname));
- }
- }
+ // Trigger building the specialization graph for the trait of this impl.
+ // This will detect any overlap errors.
+ tcx.specialization_graph_of(trait_def_id);
- err.emit();
- }
// check for overlap with the automatic `impl Trait for Trait`
if let ty::TyDynamic(ref data, ..) = trait_ref.self_ty().sty {
self.tcx.at(span).type_param_predicates((self.item_def_id, def_id))
}
- fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
- None
- }
-
fn re_infer(&self, _span: Span, _def: Option<&ty::RegionParameterDef>)
-> Option<ty::Region<'tcx>> {
None
}
let def_path_hash = tcx.def_path_hash(def_id);
- let def = ty::TraitDef::new(def_id, unsafety, paren_sugar, def_path_hash);
-
- if tcx.hir.trait_is_auto(def_id) {
- def.record_has_default_impl();
- }
-
+ let has_default_impl = tcx.hir.trait_is_auto(def_id);
+ let def = ty::TraitDef::new(def_id,
+ unsafety,
+ paren_sugar,
+ has_default_impl,
+ def_path_hash);
tcx.alloc_trait_def(def)
}
let mut index = parent_count + has_own_self as u32;
for param in early_bound_lifetimes_from_generics(tcx, ast_generics) {
let region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
+ def_id: tcx.hir.local_def_id(param.lifetime.id),
index: index,
name: param.lifetime.name
}));
```
"##,
-E0119: r##"
-There are conflicting trait implementations for the same type.
-Example of erroneous code:
-
-```compile_fail,E0119
-trait MyTrait {
- fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
- fn get(&self) -> usize { 0 }
-}
-
-struct Foo {
- value: usize
-}
-
-impl MyTrait for Foo { // error: conflicting implementations of trait
- // `MyTrait` for type `Foo`
- fn get(&self) -> usize { self.value }
-}
-```
-
-When looking for the implementation for the trait, the compiler finds
-both the `impl<T> MyTrait for T` where T is all types and the `impl
-MyTrait for Foo`. Since a trait cannot be implemented multiple times,
-this is an error. So, when you write:
-
-```
-trait MyTrait {
- fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
- fn get(&self) -> usize { 0 }
-}
-```
-
-This makes the trait implemented on all types in the scope. So if you
-try to implement it on another one after that, the implementations will
-conflict. Example:
-
-```
-trait MyTrait {
- fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
- fn get(&self) -> usize { 0 }
-}
-
-struct Foo;
-
-fn main() {
- let f = Foo;
-
- f.get(); // the trait is implemented so we can use it
-}
-```
-"##,
-
E0120: r##"
An attempt was made to implement Drop on a trait, which is not allowed: only
structs and enums can implement Drop. An example causing this error:
*/
#![crate_name = "rustc_typeck"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(never_type)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
use super::terms::*;
use super::terms::VarianceTerm::*;
-use super::xform::*;
pub struct ConstraintContext<'a, 'tcx: 'a> {
pub terms_cx: TermsContext<'a, 'tcx>,
use rustc::ty;
-pub trait Xform {
- fn xform(self, v: Self) -> Self;
-}
-
-impl Xform for ty::Variance {
- fn xform(self, v: ty::Variance) -> ty::Variance {
- // "Variance transformation", Figure 1 of The Paper
- match (self, v) {
- // Figure 1, column 1.
- (ty::Covariant, ty::Covariant) => ty::Covariant,
- (ty::Covariant, ty::Contravariant) => ty::Contravariant,
- (ty::Covariant, ty::Invariant) => ty::Invariant,
- (ty::Covariant, ty::Bivariant) => ty::Bivariant,
-
- // Figure 1, column 2.
- (ty::Contravariant, ty::Covariant) => ty::Contravariant,
- (ty::Contravariant, ty::Contravariant) => ty::Covariant,
- (ty::Contravariant, ty::Invariant) => ty::Invariant,
- (ty::Contravariant, ty::Bivariant) => ty::Bivariant,
-
- // Figure 1, column 3.
- (ty::Invariant, _) => ty::Invariant,
-
- // Figure 1, column 4.
- (ty::Bivariant, _) => ty::Bivariant,
- }
- }
-}
-
pub fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance {
// Greatest lower bound of the variance lattice as
// defined in The Paper:
}
}
-impl<'tcx> Clean<Option<Lifetime>> for ty::RegionKind<'tcx> {
+impl Clean<Option<Lifetime>> for ty::RegionKind {
fn clean(&self, cx: &DocContext) -> Option<Lifetime> {
match *self {
ty::ReStatic => Some(Lifetime::statik()),
let mut names = if cx.tcx.hir.as_local_node_id(did).is_some() {
vec![].into_iter()
} else {
- cx.tcx.sess.cstore.fn_arg_names(did).into_iter()
+ cx.tcx.fn_arg_names(did).into_iter()
}.peekable();
FnDecl {
output: Return(sig.skip_binder().output().clean(cx)),
use rustc::hir::map as hir_map;
use rustc::lint;
use rustc::util::nodemap::FxHashMap;
+use rustc_trans;
use rustc_trans::back::link;
use rustc_resolve as resolve;
use rustc_metadata::cstore::CStore;
let dep_graph = DepGraph::new(false);
let _ignore = dep_graph.in_ignore();
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let mut sess = session::build_session_(
sessopts, &dep_graph, cpath, diagnostic_handler, codemap, cstore.clone()
);
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs));
use std::io;
use std::path::Path;
use std::str;
+use html::markdown::{Markdown, RenderType};
#[derive(Clone)]
pub struct ExternalHtml{
}
impl ExternalHtml {
- pub fn load(in_header: &[String], before_content: &[String], after_content: &[String])
+ pub fn load(in_header: &[String], before_content: &[String], after_content: &[String],
+ md_before_content: &[String], md_after_content: &[String], render: RenderType)
-> Option<ExternalHtml> {
load_external_files(in_header)
.and_then(|ih|
load_external_files(before_content)
.map(|bc| (ih, bc))
)
+ .and_then(|(ih, bc)|
+ load_external_files(md_before_content)
+ .map(|m_bc| (ih, format!("{}{}", bc, Markdown(&m_bc, render))))
+ )
.and_then(|(ih, bc)|
load_external_files(after_content)
.map(|ac| (ih, bc, ac))
)
+ .and_then(|(ih, bc, ac)|
+ load_external_files(md_after_content)
+ .map(|m_ac| (ih, bc, format!("{}{}", ac, Markdown(&m_ac, render))))
+ )
.map(|(ih, bc, ac)|
ExternalHtml {
in_header: ih,
let quot = if f.alternate() { "\"" } else { """ };
match self.0 {
Abi::Rust => Ok(()),
- Abi::C => write!(f, "extern "),
abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()),
}
}
if (hasClass(main, 'content')) {
removeClass(main, 'hidden');
}
- var search = document.getElementById('search');
- if (hasClass(main, 'content')) {
- addClass(main, 'hidden');
+ var search_c = document.getElementById('search');
+ if (hasClass(search_c, 'content')) {
+ addClass(search_c, 'hidden');
}
}
// Revert to the previous title manually since the History
// perform the search. This will empty the bar if there's
// nothing there, which lets you really go back to a
// previous state with nothing in the bar.
- document.getElementsByClassName('search-input')[0].value = params.search;
+ if (params.search) {
+ document.getElementsByClassName('search-input')[0].value = params.search;
+ } else {
+ document.getElementsByClassName('search-input')[0].value = '';
+ }
// Some browsers fire 'onpopstate' for every page load
// (Chrome), while others fire the event only when actually
// popping a state (Firefox), which is why search() is
font-size: 17px;
margin: 30px 0 20px 0;
text-align: center;
+ word-wrap: break-word;
}
.location:empty {
overflow: auto;
padding-left: 0;
}
+#search {
+ margin-left: 230px;
+}
.content pre.line-numbers {
float: left;
border: none;
top: 0;
}
+h3 > .collapse-toggle, h4 > .collapse-toggle {
+ font-size: 0.8em;
+ top: 5px;
+}
+
.toggle-wrapper > .collapse-toggle {
left: -24px;
margin-top: 0px;
// except according to those terms.
#![crate_name = "rustdoc"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(libc)]
-#![feature(rustc_private)]
#![feature(set_stdio)]
#![feature(slice_patterns)]
-#![feature(staged_api)]
#![feature(test)]
#![feature(unicode)]
#![feature(vec_remove_item)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
extern crate arena;
extern crate getopts;
extern crate env_logger;
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES")),
+ unstable(optmulti("", "markdown-before-content",
+ "files to include inline between <body> and the content of a rendered \
+ Markdown file or generated documentation",
+ "FILES")),
+ unstable(optmulti("", "markdown-after-content",
+ "files to include inline between the content and </body> of a rendered \
+ Markdown file or generated documentation",
+ "FILES")),
stable(optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL")),
stable(optflag("", "markdown-no-toc", "don't include table of contents")),
let external_html = match ExternalHtml::load(
&matches.opt_strs("html-in-header"),
&matches.opt_strs("html-before-content"),
- &matches.opt_strs("html-after-content")) {
+ &matches.opt_strs("html-after-content"),
+ &matches.opt_strs("markdown-before-content"),
+ &matches.opt_strs("markdown-after-content"),
+ render_type) {
Some(eh) => eh,
None => return 3,
};
use rustc_driver::driver::phase_2_configure_and_expand;
use rustc_metadata::cstore::CStore;
use rustc_resolve::MakeGlobMap;
+use rustc_trans;
use rustc_trans::back::link;
use syntax::ast;
use syntax::codemap::CodeMap;
let dep_graph = DepGraph::new(false);
let _ignore = dep_graph.in_ignore();
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let mut sess = session::build_session_(
sessopts, &dep_graph, Some(input_path.clone()), handler, codemap.clone(), cstore.clone(),
);
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
sess.parse_sess.config =
config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
let diagnostic_handler = errors::Handler::with_emitter(true, false, box emitter);
let dep_graph = DepGraph::new(false);
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let mut sess = session::build_session_(
sessopts, &dep_graph, None, diagnostic_handler, codemap, cstore.clone(),
);
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let outdir = Mutex::new(TempDir::new("rustdoctest").ok().expect("rustdoc needs a tempdir"));
*/
#![crate_name = "serialize"]
-#![unstable(feature = "rustc_private",
+#![cfg_attr(stage0, unstable(feature = "rustc_private",
reason = "deprecated in favor of rustc-serialize on crates.io",
- issue = "27812")]
+ issue = "27812"))]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(core_intrinsics)]
#![feature(i128_type)]
#![feature(specialization)]
-#![feature(staged_api)]
+#![cfg_attr(stage0, feature(staged_api))]
#![cfg_attr(test, feature(test))]
extern crate collections;
/// type is a static guarantee that the underlying bytes contain no interior 0
/// bytes and the final byte is 0.
///
-/// A `CString` is created from either a byte slice or a byte vector. After
-/// being created, a `CString` predominately inherits all of its methods from
-/// the `Deref` implementation to `[c_char]`. Note that the underlying array
-/// is represented as an array of `c_char` as opposed to `u8`. A `u8` slice
-/// can be obtained with the `as_bytes` method. Slices produced from a `CString`
-/// do *not* contain the trailing nul terminator unless otherwise specified.
+/// A `CString` is created from either a byte slice or a byte vector. A `u8`
+/// slice can be obtained with the `as_bytes` method. Slices produced from a
+/// `CString` do *not* contain the trailing nul terminator unless otherwise
+/// specified.
///
/// # Examples
///
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::util::{copy, sink, Sink, empty, Empty, repeat, Repeat};
#[stable(feature = "rust1", since = "1.0.0")]
-pub use self::stdio::{stdin, stdout, stderr, _print, Stdin, Stdout, Stderr};
+pub use self::stdio::{stdin, stdout, stderr, Stdin, Stdout, Stderr};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::stdio::{StdoutLock, StderrLock, StdinLock};
+#[unstable(feature = "print_internals", issue = "0")]
+pub use self::stdio::{_print, _eprint};
#[unstable(feature = "libstd_io_internals", issue = "0")]
#[doc(no_inline, hidden)]
pub use self::stdio::{set_panic, set_print};
use sync::{Arc, Mutex, MutexGuard};
use sys::stdio;
use sys_common::remutex::{ReentrantMutex, ReentrantMutexGuard};
-use thread::LocalKeyState;
+use thread::{LocalKey, LocalKeyState};
/// Stdout used by print! and println! macros
thread_local! {
})
}
-#[unstable(feature = "print",
- reason = "implementation detail which may disappear or be replaced at any time",
- issue = "0")]
-#[doc(hidden)]
-pub fn _print(args: fmt::Arguments) {
- // As an implementation of the `println!` macro, we want to try our best to
- // not panic wherever possible and get the output somewhere. There are
- // currently two possible vectors for panics we take care of here:
- //
- // 1. If the TLS key for the local stdout has been destroyed, accessing it
- // would cause a panic. Note that we just lump in the uninitialized case
- // here for convenience, we're not trying to avoid a panic.
- // 2. If the local stdout is currently in use (e.g. we're in the middle of
- // already printing) then accessing again would cause a panic.
- //
- // If, however, the actual I/O causes an error, we do indeed panic.
- let result = match LOCAL_STDOUT.state() {
+/// Write `args` to output stream `local_s` if possible, `global_s`
+/// otherwise. `label` identifies the stream in a panic message.
+///
+/// This function is used to print error messages, so it takes extra
+/// care to avoid causing a panic when `local_stream` is unusable.
+/// For instance, if the TLS key for the local stream is uninitialized
+/// or already destroyed, or if the local stream is locked by another
+/// thread, it will just fall back to the global stream.
+///
+/// However, if the actual I/O causes an error, this function does panic.
+fn print_to<T>(args: fmt::Arguments,
+ local_s: &'static LocalKey<RefCell<Option<Box<Write+Send>>>>,
+ global_s: fn() -> T,
+ label: &str) where T: Write {
+ let result = match local_s.state() {
LocalKeyState::Uninitialized |
- LocalKeyState::Destroyed => stdout().write_fmt(args),
+ LocalKeyState::Destroyed => global_s().write_fmt(args),
LocalKeyState::Valid => {
- LOCAL_STDOUT.with(|s| {
+ local_s.with(|s| {
if let Ok(mut borrowed) = s.try_borrow_mut() {
if let Some(w) = borrowed.as_mut() {
return w.write_fmt(args);
}
}
- stdout().write_fmt(args)
+ global_s().write_fmt(args)
})
}
};
if let Err(e) = result {
- panic!("failed printing to stdout: {}", e);
+ panic!("failed printing to {}: {}", label, e);
}
}
+#[unstable(feature = "print_internals",
+ reason = "implementation detail which may disappear or be replaced at any time",
+ issue = "0")]
+#[doc(hidden)]
+pub fn _print(args: fmt::Arguments) {
+ print_to(args, &LOCAL_STDOUT, stdout, "stdout");
+}
+
+#[unstable(feature = "print_internals",
+ reason = "implementation detail which may disappear or be replaced at any time",
+ issue = "0")]
+#[doc(hidden)]
+pub fn _eprint(args: fmt::Arguments) {
+ use panicking::LOCAL_STDERR;
+ print_to(args, &LOCAL_STDERR, stderr, "stderr");
+}
+
#[cfg(test)]
mod tests {
use thread;
/// necessary to use `io::stdout().flush()` to ensure the output is emitted
/// immediately.
///
+/// Use `print!` only for the primary output of your program. Use
+/// `eprint!` instead to print error and progress messages.
+///
/// # Panics
///
/// Panics if writing to `io::stdout()` fails.
/// Use the `format!` syntax to write data to the standard output.
/// See `std::fmt` for more information.
///
+/// Use `println!` only for the primary output of your program. Use
+/// `eprintln!` instead to print error and progress messages.
+///
/// # Panics
///
-/// Panics if writing to `io::stdout()` fails.
+/// Panics if writing to `io::stdout` fails.
///
/// # Examples
///
($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n"), $($arg)*));
}
+/// Macro for printing to the standard error.
+///
+/// Equivalent to the `print!` macro, except that output goes to
+/// `io::stderr` instead of `io::stdout`. See `print!` for
+/// example usage.
+///
+/// Use `eprint!` only for error and progress messages. Use `print!`
+/// instead for the primary output of your program.
+///
+/// # Panics
+///
+/// Panics if writing to `io::stderr` fails.
+#[macro_export]
+#[stable(feature = "eprint", since="1.18.0")]
+#[allow_internal_unstable]
+macro_rules! eprint {
+ ($($arg:tt)*) => ($crate::io::_eprint(format_args!($($arg)*)));
+}
+
+/// Macro for printing to the standard error, with a newline.
+///
+/// Equivalent to the `println!` macro, except that output goes to
+/// `io::stderr` instead of `io::stdout`. See `println!` for
+/// example usage.
+///
+/// Use `eprintln!` only for error and progress messages. Use `println!`
+/// instead for the primary output of your program.
+///
+/// # Panics
+///
+/// Panics if writing to `io::stderr` fails.
+#[macro_export]
+#[stable(feature = "eprint", since="1.18.0")]
+macro_rules! eprintln {
+ () => (eprint!("\n"));
+ ($fmt:expr) => (eprint!(concat!($fmt, "\n")));
+ ($fmt:expr, $($arg:tt)*) => (eprint!(concat!($fmt, "\n"), $($arg)*));
+}
+
/// A macro to select an event from a number of receivers.
///
/// This macro is used to wait for the first event to occur on a number of
pub const INVALID_HANDLE_VALUE: HANDLE = !0 as HANDLE;
+pub const FACILITY_NT_BIT: DWORD = 0x1000_0000;
+
pub const FORMAT_MESSAGE_FROM_SYSTEM: DWORD = 0x00001000;
+pub const FORMAT_MESSAGE_FROM_HMODULE: DWORD = 0x00000800;
pub const FORMAT_MESSAGE_IGNORE_INSERTS: DWORD = 0x00000200;
pub const TLS_OUT_OF_INDEXES: DWORD = 0xFFFFFFFF;
/// Creates an `OsString` from a potentially ill-formed UTF-16 slice of
/// 16-bit code units.
///
- /// This is lossless: calling `.encode_wide()` on the resulting string
+ /// This is lossless: calling [`encode_wide`] on the resulting string
/// will always return the original code units.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ffi::OsString;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// // UTF-16 encoding for "Unicode".
+ /// let source = [0x0055, 0x006E, 0x0069, 0x0063, 0x006F, 0x0064, 0x0065];
+ ///
+ /// let string = OsString::from_wide(&source[..]);
+ /// ```
+ ///
+ /// [`encode_wide`]: ./trait.OsStrExt.html#tymethod.encode_wide
#[stable(feature = "rust1", since = "1.0.0")]
fn from_wide(wide: &[u16]) -> Self;
}
/// Windows-specific extensions to `OsStr`.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait OsStrExt {
- /// Re-encodes an `OsStr` as a wide character sequence,
- /// i.e. potentially ill-formed UTF-16.
+ /// Re-encodes an `OsStr` as a wide character sequence, i.e. potentially
+ /// ill-formed UTF-16.
+ ///
+ /// This is lossless: calling [`OsString::from_wide`] and then
+ /// `encode_wide` on the result will yield the original code units.
+ /// Note that the encoding does not add a final null terminator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ffi::OsString;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// // UTF-16 encoding for "Unicode".
+ /// let source = [0x0055, 0x006E, 0x0069, 0x0063, 0x006F, 0x0064, 0x0065];
+ ///
+ /// let string = OsString::from_wide(&source[..]);
+ ///
+ /// let result: Vec<u16> = string.encode_wide().collect();
+ /// assert_eq!(&source[..], &result[..]);
+ /// ```
///
- /// This is lossless. Note that the encoding does not include a final
- /// null.
+ /// [`OsString::from_wide`]: ./trait.OsStringExt.html#tymethod.from_wide
#[stable(feature = "rust1", since = "1.0.0")]
fn encode_wide(&self) -> EncodeWide;
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Windows-specific extensions for the primitives in `std::fs`
+//! Windows-specific extensions for the primitives in the `std::fs` module.
#![stable(feature = "rust1", since = "1.0.0")]
use sys;
use sys_common::{AsInnerMut, AsInner};
-/// Windows-specific extensions to `File`
+/// Windows-specific extensions to [`File`].
+///
+/// [`File`]: ../../../fs/struct.File.html
#[stable(feature = "file_offset", since = "1.15.0")]
pub trait FileExt {
/// Seeks to a given position and reads a number of bytes.
/// Note that similar to `File::read`, it is not an error to return with a
/// short read. When returning from such a short read, the file pointer is
/// still updated.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs::File;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let mut file = File::open("foo.txt")?;
+ /// let mut buffer = [0; 10];
+ ///
+ /// // Read 10 bytes, starting 72 bytes from the
+ /// // start of the file.
+ /// file.seek_read(&mut buffer[..], 72)?;
+ /// # Ok(())
+ /// # }
+ /// ```
#[stable(feature = "file_offset", since = "1.15.0")]
fn seek_read(&self, buf: &mut [u8], offset: u64) -> io::Result<usize>;
/// Note that similar to `File::write`, it is not an error to return a
/// short write. When returning from such a short write, the file pointer
/// is still updated.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::fs::File;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> std::io::Result<()> {
+ /// let mut buffer = File::create("foo.txt")?;
+ ///
+ /// // Write a byte string starting 72 bytes from
+ /// // the start of the file.
+ /// buffer.seek_write(b"some bytes", 72)?;
+ /// # Ok(())
+ /// # }
+ /// ```
#[stable(feature = "file_offset", since = "1.15.0")]
fn seek_write(&self, buf: &[u8], offset: u64) -> io::Result<usize>;
}
}
}
-/// Windows-specific extensions to `OpenOptions`
+/// Windows-specific extensions to [`OpenOptions`].
+///
+/// [`OpenOptions`]: ../../../fs/struct.OpenOptions.html
#[stable(feature = "open_options_ext", since = "1.10.0")]
pub trait OpenOptionsExt {
- /// Overrides the `dwDesiredAccess` argument to the call to `CreateFile`
+ /// Overrides the `dwDesiredAccess` argument to the call to [`CreateFile`]
/// with the specified value.
///
/// This will override the `read`, `write`, and `append` flags on the
/// `OpenOptions` structure. This method provides fine-grained control over
/// the permissions to read, write and append data, attributes (like hidden
- /// and system) and extended attributes.
+ /// and system), and extended attributes.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
- /// use std::os::windows::fs::OpenOptionsExt;
+ /// use std::os::windows::prelude::*;
///
/// // Open without read and write permission, for example if you only need
- /// // to call `stat()` on the file
+ /// // to call `stat` on the file
/// let file = OpenOptions::new().access_mode(0).open("foo.txt");
/// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn access_mode(&mut self, access: u32) -> &mut Self;
- /// Overrides the `dwShareMode` argument to the call to `CreateFile` with
+ /// Overrides the `dwShareMode` argument to the call to [`CreateFile`] with
/// the specified value.
///
/// By default `share_mode` is set to
- /// `FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE`. Specifying
- /// less permissions denies others to read from, write to and/or delete the
- /// file while it is open.
+ /// `FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE`. This allows
+ /// other processes to to read, write, and delete/rename the same file
+ /// while it is open. Removing any of the flags will prevent other
+ /// processes from performing the corresponding operation until the file
+ /// handle is closed.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
- /// use std::os::windows::fs::OpenOptionsExt;
+ /// use std::os::windows::prelude::*;
///
/// // Do not allow others to read or modify this file while we have it open
- /// // for writing
- /// let file = OpenOptions::new().write(true)
- /// .share_mode(0)
- /// .open("foo.txt");
+ /// // for writing.
+ /// let file = OpenOptions::new()
+ /// .write(true)
+ /// .share_mode(0)
+ /// .open("foo.txt");
/// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn share_mode(&mut self, val: u32) -> &mut Self;
/// Sets extra flags for the `dwFileFlags` argument to the call to
- /// `CreateFile2` (or combines it with `attributes` and `security_qos_flags`
- /// to set the `dwFlagsAndAttributes` for `CreateFile`).
+ /// [`CreateFile2`] to the specified value (or combines it with
+ /// `attributes` and `security_qos_flags` to set the `dwFlagsAndAttributes`
+ /// for [`CreateFile`]).
///
- /// Custom flags can only set flags, not remove flags set by Rusts options.
- /// This options overwrites any previously set custom flags.
+ /// Custom flags can only set flags, not remove flags set by Rust's options.
+ /// This option overwrites any previously set custom flags.
///
/// # Examples
///
- /// ```rust,ignore
+ /// ```ignore
/// extern crate winapi;
+ ///
/// use std::fs::OpenOptions;
- /// use std::os::windows::fs::OpenOptionsExt;
- ///
- /// let mut options = OpenOptions::new();
- /// options.create(true).write(true);
- /// if cfg!(windows) {
- /// options.custom_flags(winapi::FILE_FLAG_DELETE_ON_CLOSE);
- /// }
- /// let file = options.open("foo.txt");
+ /// use std::os::windows::prelude::*;
+ ///
+ /// let file = OpenOptions::new()
+ /// .create(true)
+ /// .write(true)
+ /// .custom_flags(winapi::FILE_FLAG_DELETE_ON_CLOSE)
+ /// .open("foo.txt");
/// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+ /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn custom_flags(&mut self, flags: u32) -> &mut Self;
- /// Sets the `dwFileAttributes` argument to the call to `CreateFile2` to
+ /// Sets the `dwFileAttributes` argument to the call to [`CreateFile2`] to
/// the specified value (or combines it with `custom_flags` and
/// `security_qos_flags` to set the `dwFlagsAndAttributes` for
- /// `CreateFile`).
+ /// [`CreateFile`]).
///
/// If a _new_ file is created because it does not yet exist and
/// `.create(true)` or `.create_new(true)` are specified, the new file is
///
/// # Examples
///
- /// ```rust,ignore
+ /// ```ignore
/// extern crate winapi;
+ ///
/// use std::fs::OpenOptions;
- /// use std::os::windows::fs::OpenOptionsExt;
+ /// use std::os::windows::prelude::*;
///
- /// let file = OpenOptions::new().write(true).create(true)
- /// .attributes(winapi::FILE_ATTRIBUTE_HIDDEN)
- /// .open("foo.txt");
+ /// let file = OpenOptions::new()
+ /// .write(true)
+ /// .create(true)
+ /// .attributes(winapi::FILE_ATTRIBUTE_HIDDEN)
+ /// .open("foo.txt");
/// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+ /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn attributes(&mut self, val: u32) -> &mut Self;
- /// Sets the `dwSecurityQosFlags` argument to the call to `CreateFile2` to
+ /// Sets the `dwSecurityQosFlags` argument to the call to [`CreateFile2`] to
/// the specified value (or combines it with `custom_flags` and `attributes`
- /// to set the `dwFlagsAndAttributes` for `CreateFile`).
+ /// to set the `dwFlagsAndAttributes` for [`CreateFile`]).
+ ///
+ /// By default, `security_qos_flags` is set to `SECURITY_ANONYMOUS`. For
+ /// information about possible values, see [Impersonation Levels] on the
+ /// Windows Dev Center site.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::fs::OpenOptions;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// let file = OpenOptions::new()
+ /// .write(true)
+ /// .create(true)
+ ///
+ /// // Sets the flag value to `SecurityIdentification`.
+ /// .security_qos_flags(1)
+ ///
+ /// .open("foo.txt");
+ /// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+ /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
+ /// [Impersonation Levels]:
+ /// https://msdn.microsoft.com/en-us/library/windows/desktop/aa379572.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn security_qos_flags(&mut self, flags: u32) -> &mut OpenOptions;
}
}
}
-/// Extension methods for `fs::Metadata` to access the raw fields contained
+/// Extension methods for [`fs::Metadata`] to access the raw fields contained
/// within.
+///
+/// The data members that this trait exposes correspond to the members
+/// of the [`BY_HANDLE_FILE_INFORMATION`] structure.
+///
+/// [`fs::Metadata`]: ../../../fs/struct.Metadata.html
+/// [`BY_HANDLE_FILE_INFORMATION`]:
+/// https://msdn.microsoft.com/en-us/library/windows/desktop/aa363788.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
pub trait MetadataExt {
/// Returns the value of the `dwFileAttributes` field of this metadata.
///
/// This field contains the file system attribute information for a file
- /// or directory.
+ /// or directory. For possible values and their descriptions, see
+ /// [File Attribute Constants] in the Windows Dev Center.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let attributes = metadata.file_attributes();
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// [File Attribute Constants]:
+ /// https://msdn.microsoft.com/en-us/library/windows/desktop/gg258117.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn file_attributes(&self) -> u32;
/// Returns the value of the `ftCreationTime` field of this metadata.
///
- /// The returned 64-bit value represents the number of 100-nanosecond
- /// intervals since January 1, 1601 (UTC).
+ /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+ /// which represents the number of 100-nanosecond intervals since
+ /// January 1, 1601 (UTC). The struct is automatically
+ /// converted to a `u64` value, as that is the recommended way
+ /// to use it.
+ ///
+ /// If the underlying filesystem does not support creation time, the
+ /// returned value is 0.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let creation_time = metadata.creation_time();
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn creation_time(&self) -> u64;
/// Returns the value of the `ftLastAccessTime` field of this metadata.
///
- /// The returned 64-bit value represents the number of 100-nanosecond
- /// intervals since January 1, 1601 (UTC).
+ /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+ /// which represents the number of 100-nanosecond intervals since
+ /// January 1, 1601 (UTC). The struct is automatically
+ /// converted to a `u64` value, as that is the recommended way
+ /// to use it.
+ ///
+ /// For a file, the value specifies the last time that a file was read
+ /// from or written to. For a directory, the value specifies when
+ /// the directory was created. For both files and directories, the
+ /// specified date is correct, but the time of day is always set to
+ /// midnight.
+ ///
+ /// If the underlying filesystem does not support last access time, the
+ /// returned value is 0.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let last_access_time = metadata.last_access_time();
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn last_access_time(&self) -> u64;
/// Returns the value of the `ftLastWriteTime` field of this metadata.
///
- /// The returned 64-bit value represents the number of 100-nanosecond
- /// intervals since January 1, 1601 (UTC).
+ /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+ /// which represents the number of 100-nanosecond intervals since
+ /// January 1, 1601 (UTC). The struct is automatically
+ /// converted to a `u64` value, as that is the recommended way
+ /// to use it.
+ ///
+ /// For a file, the value specifies the last time that a file was written
+ /// to. For a directory, the structure specifies when the directory was
+ /// created.
+ ///
+ /// If the underlying filesystem does not support the last write
+ /// time, the returned value is 0.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let last_write_time = metadata.last_write_time();
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn last_write_time(&self) -> u64;
/// metadata.
///
/// The returned value does not have meaning for directories.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let file_size = metadata.file_size();
+ /// # Ok(())
+ /// # }
+ /// ```
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn file_size(&self) -> u64;
}
///
/// # Examples
///
-/// ```ignore
+/// ```no_run
/// use std::os::windows::fs;
///
/// # fn foo() -> std::io::Result<()> {
///
/// # Examples
///
-/// ```ignore
+/// ```no_run
/// use std::os::windows::fs;
///
/// # fn foo() -> std::io::Result<()> {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Experimental extensions to `std` for Windows.
+//! Platform-specific extensions to `std` for Windows.
//!
-//! For now, this module is limited to extracting handles, file
-//! descriptors, and sockets, but its functionality will grow over
-//! time.
+//! Provides access to platform-level information for Windows, and exposes
+//! Windows-specific idioms that would otherwise be inappropriate as part
+//! of the core `std` library. These extensions allow developers to use
+//! `std` types and idioms with Windows in a way that the normal
+//! platform-agnostic idioms would not normally support.
#![stable(feature = "rust1", since = "1.0.0")]
}
/// Gets a detailed string description for the given error number.
-pub fn error_string(errnum: i32) -> String {
+pub fn error_string(mut errnum: i32) -> String {
// This value is calculated from the macro
// MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)
let langId = 0x0800 as c::DWORD;
let mut buf = [0 as c::WCHAR; 2048];
unsafe {
- let res = c::FormatMessageW(c::FORMAT_MESSAGE_FROM_SYSTEM |
+ let mut module = ptr::null_mut();
+ let mut flags = 0;
+
+ // NTSTATUS errors may be encoded as HRESULT, which may be returned from
+ // GetLastError. For more information about Windows error codes, see
+ // `[MS-ERREF]`: https://msdn.microsoft.com/en-us/library/cc231198.aspx
+ if (errnum & c::FACILITY_NT_BIT as i32) != 0 {
+ // format according to https://support.microsoft.com/en-us/help/259693
+ const NTDLL_DLL: &'static [u16] = &['N' as _, 'T' as _, 'D' as _, 'L' as _, 'L' as _,
+ '.' as _, 'D' as _, 'L' as _, 'L' as _, 0];
+ module = c::GetModuleHandleW(NTDLL_DLL.as_ptr());
+
+ if module != ptr::null_mut() {
+ errnum ^= c::FACILITY_NT_BIT as i32;
+ flags = c::FORMAT_MESSAGE_FROM_HMODULE;
+ }
+ }
+
+ let res = c::FormatMessageW(flags | c::FORMAT_MESSAGE_FROM_SYSTEM |
c::FORMAT_MESSAGE_IGNORE_INSERTS,
- ptr::null_mut(),
+ module,
errnum as c::DWORD,
langId,
buf.as_mut_ptr(),
pub fn exit(code: i32) -> ! {
unsafe { c::ExitProcess(code as c::UINT) }
}
+
+#[cfg(test)]
+mod tests {
+ use io::Error;
+ use sys::c;
+
+ // tests `error_string` above
+ #[test]
+ fn ntstatus_error() {
+ const STATUS_UNSUCCESSFUL: u32 = 0xc000_0001;
+ assert!(!Error::from_raw_os_error((STATUS_UNSUCCESSFUL | c::FACILITY_NT_BIT) as _)
+ .to_string().contains("FormatMessageW() returned error"));
+ }
+}
}
}
+/// Generates a wide character sequence for potentially ill-formed UTF-16.
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct EncodeWide<'a> {
/// A thread local storage key which owns its contents.
///
/// This key uses the fastest possible implementation available to it for the
-/// target platform. It is instantiated with the `thread_local!` macro and the
-/// primary method is the `with` method.
+/// target platform. It is instantiated with the [`thread_local!`] macro and the
+/// primary method is the [`with`] method.
///
-/// The `with` method yields a reference to the contained value which cannot be
+/// The [`with`] method yields a reference to the contained value which cannot be
/// sent across threads or escape the given closure.
///
/// # Initialization and Destruction
///
-/// Initialization is dynamically performed on the first call to `with()`
-/// within a thread, and values that implement `Drop` get destructed when a
+/// Initialization is dynamically performed on the first call to [`with`]
+/// within a thread, and values that implement [`Drop`] get destructed when a
/// thread exits. Some caveats apply, which are explained below.
///
/// # Examples
/// 3. On macOS, initializing TLS during destruction of other TLS slots can
/// sometimes cancel *all* destructors for the current thread, whether or not
/// the slots have already had their destructors run or not.
+///
+/// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+/// [`thread_local!`]: ../../std/macro.thread_local.html
+/// [`Drop`]: ../../std/ops/trait.Drop.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LocalKey<T: 'static> {
// This outer `LocalKey<T>` type is what's going to be stored in statics,
}
}
-/// Declare a new thread local storage key of type `std::thread::LocalKey`.
+/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
///
/// # Syntax
///
/// # fn main() {}
/// ```
///
-/// See [LocalKey documentation](thread/struct.LocalKey.html) for more
+/// See [LocalKey documentation][`std::thread::LocalKey`] for more
/// information.
+///
+/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable]
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum LocalKeyState {
/// All keys are in this state whenever a thread starts. Keys will
- /// transition to the `Valid` state once the first call to `with` happens
+ /// transition to the `Valid` state once the first call to [`with`] happens
/// and the initialization expression succeeds.
///
/// Keys in the `Uninitialized` state will yield a reference to the closure
- /// passed to `with` so long as the initialization routine does not panic.
+ /// passed to [`with`] so long as the initialization routine does not panic.
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Uninitialized,
/// Once a key has been accessed successfully, it will enter the `Valid`
/// `Destroyed` state.
///
/// Keys in the `Valid` state will be guaranteed to yield a reference to the
- /// closure passed to `with`.
+ /// closure passed to [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Valid,
/// When a thread exits, the destructors for keys will be run (if
/// destructor has run, a key is in the `Destroyed` state.
///
/// Keys in the `Destroyed` states will trigger a panic when accessed via
- /// `with`.
+ /// [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Destroyed,
}
/// Query the current state of this key.
///
/// A key is initially in the `Uninitialized` state whenever a thread
- /// starts. It will remain in this state up until the first call to `with`
+ /// starts. It will remain in this state up until the first call to [`with`]
/// within a thread has run the initialization expression successfully.
///
/// Once the initialization expression succeeds, the key transitions to the
- /// `Valid` state which will guarantee that future calls to `with` will
+ /// `Valid` state which will guarantee that future calls to [`with`] will
/// succeed within the thread.
///
/// When a thread exits, each key will be destroyed in turn, and as keys are
/// destroyed they will enter the `Destroyed` state just before the
/// destructor starts to run. Keys may remain in the `Destroyed` state after
/// destruction has completed. Keys without destructors (e.g. with types
- /// that are `Copy`), may never enter the `Destroyed` state.
+ /// that are [`Copy`]), may never enter the `Destroyed` state.
///
/// Keys in the `Uninitialized` state can be accessed so long as the
/// initialization does not panic. Keys in the `Valid` state are guaranteed
/// to be able to be accessed. Keys in the `Destroyed` state will panic on
- /// any call to `with`.
+ /// any call to [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+ /// [`Copy`]: ../../std/marker/trait.Copy.html
#[unstable(feature = "thread_local_state",
reason = "state querying was recently added",
issue = "27716")]
// Builder
////////////////////////////////////////////////////////////////////////////////
-/// Thread configuration. Provides detailed control over the properties
-/// and behavior of new threads.
+/// Thread factory, which can be used in order to configure the properties of
+/// a new thread.
+///
+/// Methods can be chained on it in order to configure it.
+///
+/// The two configurations available are:
+///
+/// - [`name`]: allows giving a name to the thread which is currently
+/// only used in `panic` messages.
+/// - [`stack_size`]: specifies the desired stack size. Note that this can
+/// be overridden by the OS.
+///
+/// If the [`stack_size`] field is not specified, the stack size
+/// will be the `RUST_MIN_STACK` environment variable. If it is
+/// not specified either, a sensible default will be set.
+///
+/// If the [`name`] field is not specified, the thread will not be named.
+///
+/// The [`spawn`] method will take ownership of the builder and create an
+/// [`io::Result`] to the thread handle with the given configuration.
+///
+/// The [`thread::spawn`] free function uses a `Builder` with default
+/// configuration and [`unwrap`]s its return value.
+///
+/// You may want to use [`spawn`] instead of [`thread::spawn`], when you want
+/// to recover from a failure to launch a thread; indeed, the free function will
+/// panic where the `Builder` method will return an [`io::Result`].
///
/// # Examples
///
///
/// handler.join().unwrap();
/// ```
+///
+/// [`thread::spawn`]: ../../std/thread/fn.spawn.html
+/// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
+/// [`name`]: ../../std/thread/struct.Builder.html#method.name
+/// [`spawn`]: ../../std/thread/struct.Builder.html#method.spawn
+/// [`io::Result`]: ../../std/io/type.Result.html
+/// [`unwrap`]: ../../std/result/enum.Result.html#method.unwrap
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct Builder {
/// Generates the base configuration for spawning a thread, from which
/// configuration methods can be chained.
///
- /// If the [`stack_size`] field is not specified, the stack size
- /// will be the `RUST_MIN_STACK` environment variable, if it is
- /// not specified either, a sensible default size will be set (2MB as
- /// of the writting of this doc).
- ///
/// # Examples
///
/// ```
///
/// handler.join().unwrap();
/// ```
- ///
- /// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> Builder {
Builder {
self
}
- /// Spawns a new thread, and returns a join handle for it.
+ /// Spawns a new thread by taking ownership of the `Builder`, and returns an
+ /// [`io::Result`] to its [`JoinHandle`].
///
- /// The child thread may outlive the parent (unless the parent thread
+ /// The spawned thread may outlive the caller (unless the caller thread
/// is the main thread; the whole process is terminated when the main
/// thread finishes). The join handle can be used to block on
/// termination of the child thread, including recovering its panics.
///
/// [`spawn`]: ../../std/thread/fn.spawn.html
/// [`io::Result`]: ../../std/io/type.Result.html
+ /// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
///
/// # Examples
///
/// Cooperatively gives up a timeslice to the OS scheduler.
///
+/// This is used when the programmer knows that the thread will have nothing
+/// to do for some time, and thus avoids wasting computing time.
+///
+/// For example when polling on a resource, it is common to check that it is
+/// available, and if not to yield in order to avoid busy waiting.
+///
+/// Thus the pattern of `yield`ing after a failed poll is rather common when
+/// implementing low-level shared resources or synchronization primitives.
+///
+/// However, programmers will usually prefer to use [`channel`]s, [`Condvar`]s,
+/// [`Mutex`]es or [`join`] for their synchronization routines, as they avoid
+/// thinking about thread scheduling.
+///
+/// Note that [`channel`]s for example are implemented using this primitive.
+/// Indeed when you call `send` or `recv`, which are blocking, they will yield
+/// if the channel is not available.
+///
/// # Examples
///
/// ```
///
/// thread::yield_now();
/// ```
+///
+/// [`channel`]: ../../std/sync/mpsc/index.html
+/// [`spawn`]: ../../std/thread/fn.spawn.html
+/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
+/// [`Mutex`]: ../../std/sync/struct.Mutex.html
+/// [`Condvar`]: ../../std/sync/struct.Condvar.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn yield_now() {
imp::Thread::yield_now()
StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| {
(mac, MacStmtStyle::Semicolon, attrs)
})),
- node @ _ => node,
+ node => node,
};
self
}
pub fn is_unsuffixed(&self) -> bool {
match *self {
// unsuffixed variants
- LitKind::Str(..) => true,
- LitKind::ByteStr(..) => true,
- LitKind::Byte(..) => true,
- LitKind::Char(..) => true,
- LitKind::Int(_, LitIntType::Unsuffixed) => true,
- LitKind::FloatUnsuffixed(..) => true,
+ LitKind::Str(..) |
+ LitKind::ByteStr(..) |
+ LitKind::Byte(..) |
+ LitKind::Char(..) |
+ LitKind::Int(_, LitIntType::Unsuffixed) |
+ LitKind::FloatUnsuffixed(..) |
LitKind::Bool(..) => true,
// suffixed variants
- LitKind::Int(_, LitIntType::Signed(..)) => false,
- LitKind::Int(_, LitIntType::Unsigned(..)) => false,
+ LitKind::Int(_, LitIntType::Signed(..)) |
+ LitKind::Int(_, LitIntType::Unsigned(..)) |
LitKind::Float(..) => false,
}
}
/// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem.
pub fn meta_item(&self) -> Option<&MetaItem> {
match self.node {
- NestedMetaItemKind::MetaItem(ref item) => Some(&item),
+ NestedMetaItemKind::MetaItem(ref item) => Some(item),
_ => None
}
}
/// Returns the Lit if self is a NestedMetaItemKind::Literal.
pub fn literal(&self) -> Option<&Lit> {
match self.node {
- NestedMetaItemKind::Literal(ref lit) => Some(&lit),
+ NestedMetaItemKind::Literal(ref lit) => Some(lit),
_ => None
}
}
match self.node {
MetaItemKind::NameValue(ref v) => {
match v.node {
- LitKind::Str(ref s, _) => Some((*s).clone()),
+ LitKind::Str(ref s, _) => Some(*s),
_ => None,
}
},
Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
}
LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
- LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value {
- true => "true",
- false => "false",
+ LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
+ "true"
+ } else {
+ "false"
}))),
}
}
impl HasAttrs for Vec<Attribute> {
fn attrs(&self) -> &[Attribute] {
- &self
+ self
}
fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
f(self)
impl HasAttrs for ThinVec<Attribute> {
fn attrs(&self) -> &[Attribute] {
- &self
+ self
}
fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
f(self.into()).into()
match self.span_to_snippet(sp) {
Ok(snippet) => {
let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
- if snippet.len() > 0 && !snippet.contains('\n') {
+ if !snippet.is_empty() && !snippet.contains('\n') {
Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp }
} else {
sp
pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
for fm in self.files.borrow().iter() {
if filename == fm.name {
- (self.dep_tracking_callback.borrow())(&fm);
+ (self.dep_tracking_callback.borrow())(fm);
return Some(fm.clone());
}
}
return false;
}
- let mis = if !is_cfg(&attr) {
+ let mis = if !is_cfg(attr) {
return true;
} else if let Some(mis) = attr.meta_item_list() {
mis
// flag the offending attributes
for attr in attrs.iter() {
if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
- let mut err = feature_err(&self.sess,
+ let mut err = feature_err(self.sess,
"stmt_expr_attributes",
attr.span,
GateIssue::Language,
pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option<ast::Field> {
if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
if !field.attrs.is_empty() {
- let mut err = feature_err(&self.sess,
+ let mut err = feature_err(self.sess,
"struct_field_attributes",
field.span,
GateIssue::Language,
for attr in attrs.iter() {
if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
let mut err = feature_err(
- &self.sess,
+ self.sess,
"struct_field_attributes",
attr.span,
GateIssue::Language,
// URLs can be unavoidably longer than the line limit, so we allow them.
// Allowed format is: `[name]: https://www.rust-lang.org/`
- let is_url = |l: &str| l.starts_with('[') && l.contains("]:") && l.contains("http");
+ let is_url = |l: &str| l.starts_with("[") && l.contains("]:") && l.contains("http");
if msg.lines().any(|line| line.len() > MAX_DESCRIPTION_WIDTH && !is_url(line)) {
ecx.span_err(span, &format!(
if let Err(e) = output_metadata(ecx,
&target_triple,
&crate_name.name.as_str(),
- &diagnostics) {
+ diagnostics) {
ecx.span_bug(span, &format!(
"error writing metadata for triple `{}` and crate `{}`, error: {}, \
cause: {:?}",
MacEager::items(SmallVector::many(vec![
P(ast::Item {
- ident: name.clone(),
+ ident: *name,
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Const(
///
/// The `bool` dictates whether the contents of the macro can
/// directly use `#[unstable]` things (true == yes).
- NormalTT(Box<TTMacroExpander>, Option<Span>, bool),
+ NormalTT(Box<TTMacroExpander>, Option<(ast::NodeId, Span)>, bool),
/// A function-like syntax extension that has an extra ident before
/// the block.
-> Result<Option<Rc<SyntaxExtension>>, Determinacy>;
fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
-> Result<Rc<SyntaxExtension>, Determinacy>;
+ fn check_unused_macros(&self);
}
#[derive(Copy, Clone, Debug)]
_force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
Err(Determinacy::Determined)
}
+ fn check_unused_macros(&self) {}
}
#[derive(Clone)]
}
/// One of these is made during expansion and incrementally updated as we go;
-/// when a macro expansion occurs, the resulting nodes have the backtrace()
-/// -> expn_info of their expansion context stored into their span.
+/// when a macro expansion occurs, the resulting nodes have the `backtrace()
+/// -> expn_info` of their expansion context stored into their span.
pub struct ExtCtxt<'a> {
pub parse_sess: &'a parse::ParseSess,
pub ecfg: expand::ExpansionConfig<'a>,
}
ctxt = info.call_site.ctxt;
last_macro = Some(info.call_site);
- return Some(());
+ Some(())
}).is_none() {
break
}
}
pub fn trace_macros_diag(&self) {
for (sp, notes) in self.expansions.iter() {
- let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, &"trace_macro");
+ let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
for note in notes {
- db.note(¬e);
+ db.note(note);
}
db.emit();
}
v.push(self.ident_of(s));
}
v.extend(components.iter().map(|s| self.ident_of(s)));
- return v
+ v
}
pub fn name_of(&self, st: &str) -> ast::Name {
Symbol::intern(st)
}
+
+ pub fn check_unused_macros(&self) {
+ self.resolver.check_unused_macros();
+ }
}
/// Extract a string literal from the macro expanded version of `expr`,
match *ext {
MultiModifier(ref mac) => {
- let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
let item = mac.expand(self.cx, attr.span, &meta, item);
kind.expect_from_annotatables(item)
}
MultiDecorator(ref mac) => {
let mut items = Vec::new();
- let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
items.push(item);
kind.expect_from_annotatables(items)
}
SyntaxExtension::AttrProcMacro(ref mac) => {
- let item_toks = stream_for_item(&item, &self.cx.parse_sess);
+ let item_toks = stream_for_item(&item, self.cx.parse_sess);
let span = Span { ctxt: self.cx.backtrace(), ..attr.span };
let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks);
}
_ => {
let msg = &format!("macro `{}` may not be used in attributes", attr.path);
- self.cx.span_err(attr.span, &msg);
+ self.cx.span_err(attr.span, msg);
kind.dummy(attr.span)
}
}
};
let path = &mac.node.path;
- let ident = ident.unwrap_or(keywords::Invalid.ident());
+ let ident = ident.unwrap_or_else(|| keywords::Invalid.ident());
let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
let opt_expanded = match *ext {
NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
call_site: span,
callee: NameAndSpan {
format: MacroBang(Symbol::intern(&format!("{}", path))),
- span: exp_span,
+ span: exp_span.map(|(_, s)| s),
allow_internal_unstable: allow_internal_unstable,
},
});
}
_ => {
let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
- self.cx.span_err(span, &msg);
+ self.cx.span_err(span, msg);
kind.dummy(span)
}
}
fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
let features = self.cx.ecfg.features.unwrap();
for attr in attrs.iter() {
- feature_gate::check_attribute(&attr, &self.cx.parse_sess, features);
+ feature_gate::check_attribute(attr, self.cx.parse_sess, features);
}
}
}
pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
- for i in 0 .. attrs.len() {
- if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
- return Some(attrs.remove(i));
- }
- }
-
- None
+ attrs.iter()
+ .position(|a| !attr::is_known(a) && !is_builtin_attr(a))
+ .map(|i| attrs.remove(i))
}
// These are pretty nasty. Ideally, we would keep the tokens around, linked from
let result = noop_fold_item(item, self);
self.cx.current_expansion.module = orig_module;
self.cx.current_expansion.directory_ownership = orig_directory_ownership;
- return result;
+ result
}
// Ensure that test functions are accessible from the test harness.
ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => {
///
/// This is registered as a set of expression syntax extension called quote!
/// that lifts its argument token-tree to an AST representing the
-/// construction of the same token tree, with token::SubstNt interpreted
+/// construction of the same token tree, with `token::SubstNt` interpreted
/// as antiquotes (splices).
pub mod rt {
result = results.pop().unwrap();
result.push(tree);
}
- tree @ _ => result.push(tree),
+ tree => result.push(tree),
}
}
result
#[allow(non_upper_case_globals)]
fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
macro_rules! mk_lit {
- ($name: expr, $suffix: expr, $($args: expr),*) => {{
- let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
+ ($name: expr, $suffix: expr, $content: expr $(, $count: expr)*) => {{
+ let name = mk_name(cx, sp, ast::Ident::with_empty_ctxt($content));
+ let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![
+ name $(, cx.expr_usize(sp, $count))*
+ ]);
let suffix = match $suffix {
Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))),
None => cx.expr_none(sp)
cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix])
}}
}
- match *tok {
+
+ let name = match *tok {
token::BinOp(binop) => {
return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec![mk_binop(cx, sp, binop)]);
}
vec![mk_delim(cx, sp, delim)]);
}
- token::Literal(token::Byte(i), suf) => {
- let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
- return mk_lit!("Byte", suf, e_byte);
- }
-
- token::Literal(token::Char(i), suf) => {
- let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
- return mk_lit!("Char", suf, e_char);
- }
-
- token::Literal(token::Integer(i), suf) => {
- let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
- return mk_lit!("Integer", suf, e_int);
- }
-
- token::Literal(token::Float(fident), suf) => {
- let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident));
- return mk_lit!("Float", suf, e_fident);
- }
-
- token::Literal(token::Str_(ident), suf) => {
- return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))
- }
-
- token::Literal(token::StrRaw(ident, n), suf) => {
- return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)),
- cx.expr_usize(sp, n))
- }
+ token::Literal(token::Byte(i), suf) => return mk_lit!("Byte", suf, i),
+ token::Literal(token::Char(i), suf) => return mk_lit!("Char", suf, i),
+ token::Literal(token::Integer(i), suf) => return mk_lit!("Integer", suf, i),
+ token::Literal(token::Float(i), suf) => return mk_lit!("Float", suf, i),
+ token::Literal(token::Str_(i), suf) => return mk_lit!("Str_", suf, i),
+ token::Literal(token::StrRaw(i, n), suf) => return mk_lit!("StrRaw", suf, i, n),
+ token::Literal(token::ByteStr(i), suf) => return mk_lit!("ByteStr", suf, i),
+ token::Literal(token::ByteStrRaw(i, n), suf) => return mk_lit!("ByteStrRaw", suf, i, n),
token::Ident(ident) => {
return cx.expr_call(sp,
token::Interpolated(_) => panic!("quote! with interpolated token"),
- _ => ()
- }
-
- let name = match *tok {
token::Eq => "Eq",
token::Lt => "Lt",
token::Le => "Le",
token::At => "At",
token::Dot => "Dot",
token::DotDot => "DotDot",
+ token::DotDotDot => "DotDotDot",
token::Comma => "Comma",
token::Semi => "Semi",
token::Colon => "Colon",
token::Question => "Question",
token::Underscore => "Underscore",
token::Eof => "Eof",
- _ => panic!("unhandled token in quote!"),
+
+ token::Whitespace | token::SubstNt(_) | token::Comment | token::Shebang(_) => {
+ panic!("unhandled token in quote!");
+ }
};
mk_token_path(cx, sp, name)
}
cx.span_err(sp,
&format!("{} wasn't a utf-8 file",
file.display()));
- return DummyResult::expr(sp);
+ DummyResult::expr(sp)
}
}
}
Err(e) => {
cx.span_err(sp,
&format!("couldn't read {}: {}", file.display(), e));
- return DummyResult::expr(sp);
+ DummyResult::expr(sp)
}
Ok(..) => {
// Add this input file to the code map to make it available as
//! repetitions indicated by Kleene stars. It only advances or calls out to the
//! real Rust parser when no `cur_eis` items remain
//!
-//! Example: Start parsing `a a a a b` against [· a $( a )* a b].
+//! Example:
//!
-//! Remaining input: `a a a a b`
+//! ```text, ignore
+//! Start parsing a a a a b against [· a $( a )* a b].
+//!
+//! Remaining input: a a a a b
//! next_eis: [· a $( a )* a b]
//!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
//!
-//! Remaining input: `a a a b`
+//! Remaining input: a a a b
//! cur: [a · $( a )* a b]
//! Descend/Skip (first item).
//! next: [a $( · a )* a b] [a $( a )* · a b].
//!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
//!
-//! Remaining input: `a a b`
+//! Remaining input: a a b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
-//! Remaining input: `a b`
+//! Remaining input: a b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
-//! Remaining input: `b`
+//! Remaining input: b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b]
//!
-//! - - - Advance over a `b`. - - -
+//! - - - Advance over a b. - - -
//!
-//! Remaining input: ``
+//! Remaining input: ''
//! eof: [a $( a )* a b ·]
+//! ```
pub use self::NamedMatch::*;
pub use self::ParseResult::*;
})
}
-/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
+/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
/// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
-/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
-/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
+/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
+/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
///
-/// The in-memory structure of a particular NamedMatch represents the match
+/// The in-memory structure of a particular `NamedMatch` represents the match
/// that occurred when a particular subset of a matcher was applied to a
/// particular token tree.
///
-/// The width of each MatchedSeq in the NamedMatch, and the identity of the
-/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
-/// each MatchedSeq corresponds to a single TTSeq in the originating
-/// token tree. The depth of the NamedMatch structure will therefore depend
+/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
+/// the `MatchedNonterminal`s, will depend on the token tree it was applied
+/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
+/// token tree. The depth of the `NamedMatch` structure will therefore depend
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
- match (t1,t2) {
- (&token::Ident(id1),&token::Ident(id2))
- | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
- id1.name == id2.name,
- _ => *t1 == *t2
+ if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) {
+ id1.name == id2.name
+ } else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
+ id1.name == id2.name
+ } else {
+ *t1 == *t2
}
}
// Check if we need a separator
if idx == len && ei.sep.is_some() {
// We have a separator, and it is the current token.
- if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) {
+ if ei.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) {
ei.idx += 1;
next_eis.push(ei);
}
cur_eis.push(ei);
}
TokenTree::Token(_, ref t) => {
- if token_name_eq(t, &token) {
+ if token_name_eq(t, token) {
ei.idx += 1;
next_eis.push(ei);
}
}
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
- match name {
- "tt" => {
- return token::NtTT(p.parse_token_tree());
- }
- _ => {}
+ if name == "tt" {
+ return token::NtTT(p.parse_token_tree());
}
// check at the beginning and the parser checks after each bump
p.process_potential_macro_variable();
-> Box<MacResult+'cx> {
if cx.trace_macros() {
let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
- let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert(vec![]);
+ let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
values.push(format!("expands to `{}! {{ {} }}`", name, arg));
}
let mut valid = true;
// Extract the arguments:
- let lhses = match **argument_map.get(&lhs_nm).unwrap() {
+ let lhses = match *argument_map[&lhs_nm] {
MatchedSeq(ref s, _) => {
s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m {
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
};
- let rhses = match **argument_map.get(&rhs_nm).unwrap() {
+ let rhses = match *argument_map[&rhs_nm] {
MatchedSeq(ref s, _) => {
s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m {
valid: valid,
});
- NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable"))
+ NormalTT(exp,
+ Some((def.id, def.span)),
+ attr::contains_name(&def.attrs, "allow_internal_unstable"))
}
fn check_lhs_nt_follows(sess: &ParseSess,
lhs: &quoted::TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
- match lhs {
- &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts),
- _ => {
- let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
- sess.span_diagnostic.span_err(lhs.span(), msg);
- false
- }
+ if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+ check_matcher(sess, features, &tts.tts)
+ } else {
+ let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
+ sess.span_diagnostic.span_err(lhs.span(), msg);
+ false
}
// we don't abort on errors on rejection, the driver will do that for us
// after parsing/expansion. we can report every error in every macro this way.
return false;
},
TokenTree::Sequence(span, ref seq) => {
- if seq.separator.is_none() {
- if seq.tts.iter().all(|seq_tt| {
- match *seq_tt {
- TokenTree::Sequence(_, ref sub_seq) =>
- sub_seq.op == quoted::KleeneOp::ZeroOrMore,
- _ => false,
- }
- }) {
- sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
- return false;
+ if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
+ match *seq_tt {
+ TokenTree::Sequence(_, ref sub_seq) =>
+ sub_seq.op == quoted::KleeneOp::ZeroOrMore,
+ _ => false,
}
+ }) {
+ sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+ return false;
}
if !check_lhs_no_empty_seq(sess, &seq.tts) {
return false;
}
}
- return first;
+ first
}
}
// we only exit the loop if `tts` was empty or if every
// element of `tts` matches the empty sequence.
assert!(first.maybe_empty);
- return first;
+ first
}
}
let build_suffix_first = || {
let mut s = first_sets.first(suffix);
if s.maybe_empty { s.add_all(follow); }
- return s;
+ s
};
// (we build `suffix_first` on demand below; you can tell
match *tt {
quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
- _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
+ _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+ in follow set checker"),
}
}
}
}
+ pub fn is_empty(&self) -> bool {
+ match *self {
+ TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+ token::NoDelim => delimed.tts.is_empty(),
+ _ => false,
+ },
+ TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
+ _ => true,
+ }
+ }
+
pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
}
_ => end_sp,
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
};
sess.missing_fragment_specifiers.borrow_mut().insert(span);
result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
Some(op) => return (Some(tok), op),
None => span,
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
}
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
};
sess.span_diagnostic.span_err(span, "expected `*` or `+`");
&repeats) {
LockstepIterSize::Unconstrained => {
panic!(sp_diag.span_fatal(
- sp.clone(), /* blame macro writer */
+ sp, /* blame macro writer */
"attempted to repeat an expression \
containing no syntax \
variables matched as repeating at this depth"));
}
LockstepIterSize::Contradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
- panic!(sp_diag.span_fatal(sp.clone(), &msg[..]));
+ panic!(sp_diag.span_fatal(sp, &msg[..]));
}
LockstepIterSize::Constraint(len, _) => {
if len == 0 {
if seq.op == quoted::KleeneOp::OneOrMore {
// FIXME #2887 blame invoker
- panic!(sp_diag.span_fatal(sp.clone(),
+ panic!(sp_diag.span_fatal(sp,
"this must repeat at least once"));
}
} else {
impl ::std::fmt::Debug for AttributeGate {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match *self {
- Gated(ref stab, ref name, ref expl, _) =>
+ Gated(ref stab, name, expl, _) =>
write!(fmt, "Gated({:?}, {}, {})", stab, name, expl),
Ungated => write!(fmt, "Ungated")
}
];
// cfg(...)'s that are feature gated
-const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] = &[
+const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
// (name in cfg, feature, function to check if the feature is enabled)
("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
let name = unwrap_or!(attr.name(), return).as_str();
for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
if name == n {
- if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
+ if let Gated(_, name, desc, ref has_feature) = *gateage {
gate_feature_fn!(self, has_feature, attr.span, name, desc);
}
debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
NameValue(ref lit) => !lit.node.is_str(),
List(ref list) => list.iter().any(|li| {
match li.node {
- MetaItem(ref mi) => contains_novel_literal(&mi),
+ MetaItem(ref mi) => contains_novel_literal(mi),
Literal(_) => true,
}
}),
return
}
- let meta = panictry!(attr.parse_meta(&self.context.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.context.parse_sess));
if contains_novel_literal(&meta) {
gate_feature_post!(&self, attr_literals, attr.span,
"non-string literals in attributes, or string \
}
ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
- match polarity {
- ast::ImplPolarity::Negative => {
- gate_feature_post!(&self, optin_builtin_traits,
- i.span,
- "negative trait bounds are not yet fully implemented; \
- use marker types for now");
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ gate_feature_post!(&self, optin_builtin_traits,
+ i.span,
+ "negative trait bounds are not yet fully implemented; \
+ use marker types for now");
}
if let ast::Defaultness::Default = defaultness {
fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) {
if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty {
- match output_ty.node {
- ast::TyKind::Never => return,
- _ => (),
- };
- self.visit_ty(output_ty)
+ if output_ty.node != ast::TyKind::Never {
+ self.visit_ty(output_ty)
+ }
}
}
span: Span,
_node_id: NodeId) {
// check for const fn declarations
- match fn_kind {
- FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) => {
- gate_feature_post!(&self, const_fn, span, "const fn is unstable");
- }
- _ => {
- // stability of const fn methods are covered in
- // visit_trait_item and visit_impl_item below; this is
- // because default methods don't pass through this
- // point.
- }
+ if let FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) =
+ fn_kind {
+ gate_feature_post!(&self, const_fn, span, "const fn is unstable");
}
+ // stability of const fn methods are covered in
+ // visit_trait_item and visit_impl_item below; this is
+ // because default methods don't pass through this
+ // point.
match fn_kind {
FnKind::ItemFn(_, _, _, _, abi, _, _) |
use codemap::{CodeMap, FilePathMapping};
use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
use errors::registry::Registry;
-use errors::{Level, DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion, CodeMapper};
+use errors::{DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion, CodeMapper};
use errors::emitter::Emitter;
-use errors::snippet::Style;
use std::rc::Rc;
use std::io::{self, Write};
fn from_diagnostic_builder(db: &DiagnosticBuilder,
je: &JsonEmitter)
-> Diagnostic {
- let sugg = db.suggestion.as_ref().map(|sugg| {
- SubDiagnostic {
- level: Level::Help,
- message: vec![(sugg.msg.clone(), Style::NoStyle)],
- span: MultiSpan::new(),
- render_span: Some(RenderSpan::Suggestion(sugg.clone())),
- }
+ let sugg = db.suggestions.iter().flat_map(|sugg| {
+ je.render(sugg).into_iter().map(move |rendered| {
+ Diagnostic {
+ message: sugg.msg.clone(),
+ code: None,
+ level: "help",
+ spans: DiagnosticSpan::from_suggestion(sugg, je),
+ children: vec![],
+ rendered: Some(rendered),
+ }
+ })
});
- let sugg = sugg.as_ref();
Diagnostic {
message: db.message(),
code: DiagnosticCode::map_opt_string(db.code.clone(), je),
level: db.level.to_str(),
spans: DiagnosticSpan::from_multispan(&db.span, je),
- children: db.children.iter().chain(sugg).map(|c| {
+ children: db.children.iter().map(|c| {
Diagnostic::from_sub_diagnostic(c, je)
- }).collect(),
+ }).chain(sugg).collect(),
rendered: None,
}
}
.map(|sp| DiagnosticSpan::from_render_span(sp, je))
.unwrap_or_else(|| DiagnosticSpan::from_multispan(&db.span, je)),
children: vec![],
- rendered: db.render_span.as_ref()
- .and_then(|rsp| je.render(rsp)),
+ rendered: None,
}
}
}
fn from_suggestion(suggestion: &CodeSuggestion, je: &JsonEmitter)
-> Vec<DiagnosticSpan> {
- assert_eq!(suggestion.msp.span_labels().len(), suggestion.substitutes.len());
- suggestion.msp.span_labels()
- .into_iter()
- .zip(&suggestion.substitutes)
- .map(|(span_label, suggestion)| {
- DiagnosticSpan::from_span_label(span_label,
- Some(suggestion),
- je)
+ suggestion.substitution_parts
+ .iter()
+ .flat_map(|substitution| {
+ substitution.substitutions.iter().map(move |suggestion| {
+ let span_label = SpanLabel {
+ span: substitution.span,
+ is_primary: true,
+ label: None,
+ };
+ DiagnosticSpan::from_span_label(span_label,
+ Some(suggestion),
+ je)
+ })
})
.collect()
}
match *rsp {
RenderSpan::FullSpan(ref msp) =>
DiagnosticSpan::from_multispan(msp, je),
- RenderSpan::Suggestion(ref suggestion) =>
- DiagnosticSpan::from_suggestion(suggestion, je),
+ // regular diagnostics don't produce this anymore
+ // FIXME(oli_obk): remove it entirely
+ RenderSpan::Suggestion(_) => unreachable!(),
}
}
}
})
.collect()
})
- .unwrap_or(vec![])
+ .unwrap_or_else(|_| vec![])
}
}
}
impl JsonEmitter {
- fn render(&self, render_span: &RenderSpan) -> Option<String> {
- use std::borrow::Borrow;
-
- match *render_span {
- RenderSpan::FullSpan(_) => {
- None
- }
- RenderSpan::Suggestion(ref suggestion) => {
- Some(suggestion.splice_lines(self.cm.borrow()))
- }
- }
+ fn render(&self, suggestion: &CodeSuggestion) -> Vec<String> {
+ suggestion.splice_lines(&*self.cm)
}
}
//! This API is completely unstable and subject to change.
#![crate_name = "syntax"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
test(attr(deny(warnings))))]
#![deny(warnings)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(unicode)]
#![feature(rustc_diagnostic_macros)]
#![feature(i128_type)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
extern crate serialize;
#[macro_use] extern crate log;
#[macro_use] extern crate bitflags;
_ => break,
}
}
- return Ok(attrs);
+ Ok(attrs)
}
/// Matches `attribute = # ! [ meta_item ]`
}
let attr = self.parse_attribute(true)?;
- assert!(attr.style == ast::AttrStyle::Inner);
+ assert_eq!(attr.style, ast::AttrStyle::Inner);
attrs.push(attr);
}
token::DocComment(s) => {
}
/// this statement requires a semicolon after it.
-/// note that in one case (stmt_semi), we've already
+/// note that in one case (`stmt_semi`), we've already
/// seen the semicolon, and thus don't need another.
pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
match *stmt {
ast::StmtKind::Local(_) => true,
- ast::StmtKind::Item(_) => false,
ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
- ast::StmtKind::Semi(..) => false,
+ ast::StmtKind::Item(_) |
+ ast::StmtKind::Semi(..) |
ast::StmtKind::Mac(..) => false,
}
}
use parse::token;
-/// SeqSep : a sequence separator (token)
+/// `SeqSep` : a sequence separator (token)
/// and whether a trailing separator is allowed.
pub struct SeqSep {
pub sep: Option<token::Token>,
while j > i && lines[j - 1].trim().is_empty() {
j -= 1;
}
- lines[i..j].iter().cloned().collect()
+ lines[i..j].to_vec()
}
/// remove a "[ \t]*\*" block from each line, if possible
impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into next_pos and ch
- pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+ pub fn new_raw(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
let mut sr = StringReader::new_raw_internal(sess, filemap);
sr.bump();
sr
pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
let mut sr = StringReader::new_raw(sess, filemap);
- if let Err(_) = sr.advance_token() {
+ if sr.advance_token().is_err() {
sr.emit_fatal_errors();
panic!(FatalError);
}
sr.bump();
- if let Err(_) = sr.advance_token() {
+ if sr.advance_token().is_err() {
sr.emit_fatal_errors();
panic!(FatalError);
}
self.bump();
}
- return if doc_comment {
+ if doc_comment {
self.with_str_from(start_bpos, |string| {
// comments with only more "/"s are not doc comments
let tok = if is_doc_comment(string) {
tok: token::Comment,
sp: mk_sp(start_bpos, self.pos),
})
- };
+ }
}
Some('*') => {
self.bump();
// integer literal followed by field/method access or a range pattern
// (`0..2` and `12.foo()`)
if self.ch_is('.') && !self.nextch_is('.') &&
- !self.nextch()
- .unwrap_or('\0')
- .is_xid_start() {
+ !ident_start(self.nextch()) {
// might have stuff after the ., and if it does, it needs to start
// with a number
self.bump();
}
let pos = self.pos;
self.check_float_base(start_bpos, pos, base);
- return token::Float(self.name_from(start_bpos));
+ token::Float(self.name_from(start_bpos))
} else {
// it might be a float if it has an exponent
if self.ch_is('e') || self.ch_is('E') {
return token::Float(self.name_from(start_bpos));
}
// but we certainly have an integer!
- return token::Integer(self.name_from(start_bpos));
+ token::Integer(self.name_from(start_bpos))
}
}
self.bump();
if self.ch_is('=') {
self.bump();
- return token::BinOpEq(op);
+ token::BinOpEq(op)
} else {
- return token::BinOp(op);
+ token::BinOp(op)
}
}
// One-byte tokens.
';' => {
self.bump();
- return Ok(token::Semi);
+ Ok(token::Semi)
}
',' => {
self.bump();
- return Ok(token::Comma);
+ Ok(token::Comma)
}
'.' => {
self.bump();
- return if self.ch_is('.') {
+ if self.ch_is('.') {
self.bump();
if self.ch_is('.') {
self.bump();
}
} else {
Ok(token::Dot)
- };
+ }
}
'(' => {
self.bump();
- return Ok(token::OpenDelim(token::Paren));
+ Ok(token::OpenDelim(token::Paren))
}
')' => {
self.bump();
- return Ok(token::CloseDelim(token::Paren));
+ Ok(token::CloseDelim(token::Paren))
}
'{' => {
self.bump();
- return Ok(token::OpenDelim(token::Brace));
+ Ok(token::OpenDelim(token::Brace))
}
'}' => {
self.bump();
- return Ok(token::CloseDelim(token::Brace));
+ Ok(token::CloseDelim(token::Brace))
}
'[' => {
self.bump();
- return Ok(token::OpenDelim(token::Bracket));
+ Ok(token::OpenDelim(token::Bracket))
}
']' => {
self.bump();
- return Ok(token::CloseDelim(token::Bracket));
+ Ok(token::CloseDelim(token::Bracket))
}
'@' => {
self.bump();
- return Ok(token::At);
+ Ok(token::At)
}
'#' => {
self.bump();
- return Ok(token::Pound);
+ Ok(token::Pound)
}
'~' => {
self.bump();
- return Ok(token::Tilde);
+ Ok(token::Tilde)
}
'?' => {
self.bump();
- return Ok(token::Question);
+ Ok(token::Question)
}
':' => {
self.bump();
if self.ch_is(':') {
self.bump();
- return Ok(token::ModSep);
+ Ok(token::ModSep)
} else {
- return Ok(token::Colon);
+ Ok(token::Colon)
}
}
'$' => {
self.bump();
- return Ok(token::Dollar);
+ Ok(token::Dollar)
}
// Multi-byte tokens.
self.bump();
if self.ch_is('=') {
self.bump();
- return Ok(token::EqEq);
+ Ok(token::EqEq)
} else if self.ch_is('>') {
self.bump();
- return Ok(token::FatArrow);
+ Ok(token::FatArrow)
} else {
- return Ok(token::Eq);
+ Ok(token::Eq)
}
}
'!' => {
self.bump();
if self.ch_is('=') {
self.bump();
- return Ok(token::Ne);
+ Ok(token::Ne)
} else {
- return Ok(token::Not);
+ Ok(token::Not)
}
}
'<' => {
match self.ch.unwrap_or('\x00') {
'=' => {
self.bump();
- return Ok(token::Le);
+ Ok(token::Le)
}
'<' => {
- return Ok(self.binop(token::Shl));
+ Ok(self.binop(token::Shl))
}
'-' => {
self.bump();
match self.ch.unwrap_or('\x00') {
_ => {
- return Ok(token::LArrow);
+ Ok(token::LArrow)
}
}
}
_ => {
- return Ok(token::Lt);
+ Ok(token::Lt)
}
}
}
match self.ch.unwrap_or('\x00') {
'=' => {
self.bump();
- return Ok(token::Ge);
+ Ok(token::Ge)
}
'>' => {
- return Ok(self.binop(token::Shr));
+ Ok(self.binop(token::Shr))
}
_ => {
- return Ok(token::Gt);
+ Ok(token::Gt)
}
}
}
};
self.bump(); // advance ch past token
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::Char(id), suffix));
+ Ok(token::Literal(token::Char(id), suffix))
}
'b' => {
self.bump();
_ => unreachable!(), // Should have been a token::Ident above.
};
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(lit, suffix));
+ Ok(token::Literal(lit, suffix))
}
'"' => {
let start_bpos = self.pos;
};
self.bump();
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::Str_(id), suffix));
+ Ok(token::Literal(token::Str_(id), suffix))
}
'r' => {
let start_bpos = self.pos;
Symbol::intern("??")
};
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
+ Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
}
'-' => {
if self.nextch_is('>') {
self.bump();
self.bump();
- return Ok(token::RArrow);
+ Ok(token::RArrow)
} else {
- return Ok(self.binop(token::Minus));
+ Ok(self.binop(token::Minus))
}
}
'&' => {
if self.nextch_is('&') {
self.bump();
self.bump();
- return Ok(token::AndAnd);
+ Ok(token::AndAnd)
} else {
- return Ok(self.binop(token::And));
+ Ok(self.binop(token::And))
}
}
'|' => {
Some('|') => {
self.bump();
self.bump();
- return Ok(token::OrOr);
+ Ok(token::OrOr)
}
_ => {
- return Ok(self.binop(token::Or));
+ Ok(self.binop(token::Or))
}
}
}
'+' => {
- return Ok(self.binop(token::Plus));
+ Ok(self.binop(token::Plus))
}
'*' => {
- return Ok(self.binop(token::Star));
+ Ok(self.binop(token::Star))
}
'/' => {
- return Ok(self.binop(token::Slash));
+ Ok(self.binop(token::Slash))
}
'^' => {
- return Ok(self.binop(token::Caret));
+ Ok(self.binop(token::Caret))
}
'%' => {
- return Ok(self.binop(token::Percent));
+ Ok(self.binop(token::Percent))
}
c => {
let last_bpos = self.pos;
bpos,
"unknown start of token",
c);
- unicode_chars::check_for_substitution(&self, c, &mut err);
+ unicode_chars::check_for_substitution(self, c, &mut err);
self.fatal_errs.push(err);
Err(())
}
if self.ch_is('\n') {
self.bump();
}
- return val;
+ val
}
fn read_one_line_comment(&mut self) -> String {
let val = self.read_to_eol();
assert!((val.as_bytes()[0] == b'/' && val.as_bytes()[1] == b'/') ||
(val.as_bytes()[0] == b'#' && val.as_bytes()[1] == b'!'));
- return val;
+ val
}
fn consume_non_eol_whitespace(&mut self) {
Symbol::intern("?")
};
self.bump(); // advance ch past token
- return token::Byte(id);
+ token::Byte(id)
}
fn scan_byte_escape(&mut self, delim: char, below_0x7f_only: bool) -> bool {
Symbol::intern("??")
};
self.bump();
- return token::ByteStr(id);
+ token::ByteStr(id)
}
fn scan_raw_byte_string(&mut self) -> token::Lit {
self.bump();
}
self.bump();
- return token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
- hash_count);
+ token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
+ hash_count)
}
}
}
fn is_dec_digit(c: Option<char>) -> bool {
- return in_range(c, '0', '9');
+ in_range(c, '0', '9')
}
pub fn is_doc_comment(s: &str) -> bool {
parser.parse_inner_attributes()
}
-pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, ast::Crate> {
+pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<ast::Crate> {
new_parser_from_source_str(sess, name, source).parse_crate_mod()
}
-pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Vec<ast::Attribute>> {
+pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Vec<ast::Attribute>> {
new_parser_from_source_str(sess, name, source).parse_inner_attributes()
}
-pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, P<ast::Expr>> {
+pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<P<ast::Expr>> {
new_parser_from_source_str(sess, name, source).parse_expr()
}
///
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and`Err`
/// when a syntax error occurred.
-pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Option<P<ast::Item>>> {
+pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Option<P<ast::Item>>> {
new_parser_from_source_str(sess, name, source).parse_item()
}
-pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, ast::MetaItem> {
+pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<ast::MetaItem> {
new_parser_from_source_str(sess, name, source).parse_meta_item()
}
-pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Option<ast::Stmt>> {
+pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Option<ast::Stmt>> {
new_parser_from_source_str(sess, name, source).parse_stmt()
}
-pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
-> TokenStream {
filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
}
// Create a new parser from a source string
-pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
- -> Parser<'a> {
+pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String)
+ -> Parser {
filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
}
}
/// Given a filemap and config, return a parser
-pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
+pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
let end_pos = filemap.end_pos;
let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
stream_to_parser(sess, tts.into_iter().collect())
}
panictry!(srdr.parse_all_token_trees())
}
-/// Given stream and the ParseSess, produce a parser
-pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+/// Given stream and the `ParseSess`, produce a parser
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
Parser::new(sess, stream, None, false)
}
(c, 4)
}
'u' => {
- assert!(lit.as_bytes()[2] == b'{');
+ assert_eq!(lit.as_bytes()[2], b'{');
let idx = lit.find('}').unwrap();
let v = u32::from_str_radix(&lit[3..idx], 16).unwrap();
let c = char::from_u32(v).unwrap();
}
let mut chars = lit.char_indices().peekable();
- loop {
- match chars.next() {
- Some((i, c)) => {
- match c {
- '\\' => {
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
-
- if ch == '\n' {
- eat(&mut chars);
- } else if ch == '\r' {
- chars.next();
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
-
- if ch != '\n' {
- panic!("lexer accepted bare CR");
- }
- eat(&mut chars);
- } else {
- // otherwise, a normal escape
- let (c, n) = char_lit(&lit[i..]);
- for _ in 0..n - 1 { // we don't need to move past the first \
- chars.next();
- }
- res.push(c);
- }
- },
- '\r' => {
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
+ while let Some((i, c)) = chars.next() {
+ match c {
+ '\\' => {
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
+
+ if ch == '\n' {
+ eat(&mut chars);
+ } else if ch == '\r' {
+ chars.next();
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
- if ch != '\n' {
- panic!("lexer accepted bare CR");
- }
+ if ch != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ eat(&mut chars);
+ } else {
+ // otherwise, a normal escape
+ let (c, n) = char_lit(&lit[i..]);
+ for _ in 0..n - 1 { // we don't need to move past the first \
chars.next();
- res.push('\n');
}
- c => res.push(c),
+ res.push(c);
}
},
- None => break
+ '\r' => {
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
+
+ if ch != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ chars.next();
+ res.push('\n');
+ }
+ c => res.push(c),
}
}
debug!("raw_str_lit: given {}", escape_default(lit));
let mut res = String::with_capacity(lit.len());
- // FIXME #8372: This could be a for-loop if it didn't borrow the iterator
let mut chars = lit.chars().peekable();
- loop {
- match chars.next() {
- Some(c) => {
- if c == '\r' {
- if *chars.peek().unwrap() != '\n' {
- panic!("lexer accepted bare CR");
- }
- chars.next();
- res.push('\n');
- } else {
- res.push(c);
- }
- },
- None => break
+ while let Some(c) = chars.next() {
+ if c == '\r' {
+ if *chars.peek().unwrap() != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ chars.next();
+ res.push('\n');
+ } else {
+ res.push(c);
}
}
if lit.len() == 1 {
(lit.as_bytes()[0], 1)
} else {
- assert!(lit.as_bytes()[0] == b'\\', err(0));
+ assert_eq!(lit.as_bytes()[0], b'\\', "{}", err(0));
let b = match lit.as_bytes()[1] {
b'"' => b'"',
b'n' => b'\n',
}
}
};
- return (b, 2);
+ (b, 2)
}
}
let error = |i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
+ fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
loop {
match it.peek().map(|x| x.1) {
Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
if let Some(err) = err {
err!(diag, |span, diag| diag.span_err(span, err));
}
- return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
+ return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
}
}
if !self.obsolete_set.contains(&kind) &&
(error || self.sess.span_diagnostic.can_emit_warnings) {
- err.note(&format!("{}", desc));
+ err.note(desc);
self.obsolete_set.insert(kind);
}
err.emit();
fn next_desugared(&mut self) -> TokenAndSpan {
let (sp, name) = match self.next() {
TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
- tok @ _ => return tok,
+ tok => return tok,
};
let stripped = strip_doc_comment_decoration(&name.as_str());
}
impl Error {
- pub fn span_err<'a>(self, sp: Span, handler: &'a errors::Handler) -> DiagnosticBuilder<'a> {
+ pub fn span_err(self, sp: Span, handler: &errors::Handler) -> DiagnosticBuilder {
match self {
Error::FileNotFoundForModule { ref mod_name,
ref default_path,
}
fn next_tok(&mut self) -> TokenAndSpan {
- let mut next = match self.desugar_doc_comments {
- true => self.token_cursor.next_desugared(),
- false => self.token_cursor.next(),
+ let mut next = if self.desugar_doc_comments {
+ self.token_cursor.next_desugared()
+ } else {
+ self.token_cursor.next()
};
if next.sp == syntax_pos::DUMMY_SP {
next.sp = self.prev_span;
// This might be a sign we need a connect method on Iterator.
let b = i.next()
.map_or("".to_string(), |t| t.to_string());
- i.enumerate().fold(b, |mut b, (i, ref a)| {
+ i.enumerate().fold(b, |mut b, (i, a)| {
if tokens.len() > 2 && i == tokens.len() - 2 {
b.push_str(", or ");
} else if tokens.len() == 2 && i == tokens.len() - 2 {
token::CloseDelim(..) | token::Eof => break,
_ => {}
};
- match sep.sep {
- Some(ref t) => {
- if first {
- first = false;
- } else {
- if let Err(e) = self.expect(t) {
- fe(e);
- break;
- }
+ if let Some(ref t) = sep.sep {
+ if first {
+ first = false;
+ } else {
+ if let Err(e) = self.expect(t) {
+ fe(e);
+ break;
}
}
- _ => ()
}
if sep.trailing_sep_allowed && kets.iter().any(|k| self.check(k)) {
break;
let sum_span = ty.span.to(self.prev_span);
let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
- "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty));
+ "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));
match ty.node {
TyKind::Rptr(ref lifetime, ref mut_ty) => {
pub fn is_named_argument(&mut self) -> bool {
let offset = match self.token {
- token::BinOp(token::And) => 1,
+ token::BinOp(token::And) |
token::AndAnd => 1,
_ if self.token.is_keyword(keywords::Mut) => 1,
_ => 0
let attrs = self.parse_outer_attributes()?;
let pats = self.parse_pats()?;
- let mut guard = None;
- if self.eat_keyword(keywords::If) {
- guard = Some(self.parse_expr()?);
- }
+ let guard = if self.eat_keyword(keywords::If) {
+ Some(self.parse_expr()?)
+ } else {
+ None
+ };
self.expect(&token::FatArrow)?;
let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR, None)?;
let lo = self.span;
let pat = self.parse_pat()?;
- let mut ty = None;
- if self.eat(&token::Colon) {
- ty = Some(self.parse_ty()?);
- }
+ let ty = if self.eat(&token::Colon) {
+ Some(self.parse_ty()?)
+ } else {
+ None
+ };
let init = self.parse_initializer()?;
Ok(P(ast::Local {
ty: ty,
},
None => {
let unused_attrs = |attrs: &[_], s: &mut Self| {
- if attrs.len() > 0 {
+ if !attrs.is_empty() {
if s.prev_token_kind == PrevTokenKind::DocComment {
s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
} else {
self.expect(&token::Not)?;
}
- self.complain_if_pub_macro(&vis, prev_span);
+ self.complain_if_pub_macro(vis, prev_span);
// eat a matched-delimiter token tree:
*at_end = true;
}
}
} else {
- match polarity {
- ast::ImplPolarity::Negative => {
- // This is a negated type implementation
- // `impl !MyType {}`, which is not allowed.
- self.span_err(neg_span, "inherent implementation can't be negated");
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ // This is a negated type implementation
+ // `impl !MyType {}`, which is not allowed.
+ self.span_err(neg_span, "inherent implementation can't be negated");
}
None
};
let path_span = self.prev_span;
let help_msg = format!("make this visible only to module `{}` with `in`:", path);
self.expect(&token::CloseDelim(token::Paren))?; // `)`
- let mut err = self.span_fatal_help(path_span, &msg, &suggestion);
+ let mut err = self.span_fatal_help(path_span, msg, suggestion);
err.span_suggestion(path_span, &help_msg, format!("in {}", path));
err.emit(); // emit diagnostic, but continue with public visibility
}
pub fn len(self) -> usize {
if self == NoDelim { 0 } else { 1 }
}
+
+ pub fn is_empty(self) -> bool {
+ self == NoDelim
+ }
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub fn can_begin_expr(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_expr(ident), // value name or keyword
- OpenDelim(..) => true, // tuple, array or block
- Literal(..) => true, // literal
- Not => true, // operator not
- BinOp(Minus) => true, // unary minus
- BinOp(Star) => true, // dereference
- BinOp(Or) | OrOr => true, // closure
- BinOp(And) => true, // reference
- AndAnd => true, // double reference
- DotDot | DotDotDot => true, // range notation
- Lt | BinOp(Shl) => true, // associated path
- ModSep => true, // global path
+ OpenDelim(..) | // tuple, array or block
+ Literal(..) | // literal
+ Not | // operator not
+ BinOp(Minus) | // unary minus
+ BinOp(Star) | // dereference
+ BinOp(Or) | OrOr | // closure
+ BinOp(And) | // reference
+ AndAnd | // double reference
+ DotDot | DotDotDot | // range notation
+ Lt | BinOp(Shl) | // associated path
+ ModSep | // global path
Pound => true, // expression attributes
Interpolated(ref nt) => match **nt {
NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
pub fn can_begin_type(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_type(ident), // type name or keyword
- OpenDelim(Paren) => true, // tuple
- OpenDelim(Bracket) => true, // array
- Underscore => true, // placeholder
- Not => true, // never
- BinOp(Star) => true, // raw pointer
- BinOp(And) => true, // reference
- AndAnd => true, // double reference
- Question => true, // maybe bound in trait object
- Lifetime(..) => true, // lifetime bound in trait object
- Lt | BinOp(Shl) => true, // associated path
+ OpenDelim(Paren) | // tuple
+ OpenDelim(Bracket) | // array
+ Underscore | // placeholder
+ Not | // never
+ BinOp(Star) | // raw pointer
+ BinOp(And) | // reference
+ AndAnd | // double reference
+ Question | // maybe bound in trait object
+ Lifetime(..) | // lifetime bound in trait object
+ Lt | BinOp(Shl) | // associated path
ModSep => true, // global path
Interpolated(ref nt) => match **nt {
NtIdent(..) | NtTy(..) | NtPath(..) => true,
//! between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
//! and point-in-infinite-stream senses freely.
//!
-//! There is a parallel ring buffer, 'size', that holds the calculated size of
+//! There is a parallel ring buffer, `size`, that holds the calculated size of
//! each token. Why calculated? Because for Begin/End pairs, the "size"
//! includes everything between the pair. That is, the "size" of Begin is
//! actually the sum of the sizes of everything between Begin and the paired
-//! End that follows. Since that is arbitrarily far in the future, 'size' is
+//! End that follows. Since that is arbitrarily far in the future, `size` is
//! being rewritten regularly while the printer runs; in fact most of the
-//! machinery is here to work out 'size' entries on the fly (and give up when
+//! machinery is here to work out `size` entries on the fly (and give up when
//! they're so obviously over-long that "infinity" is a good enough
//! approximation for purposes of line breaking).
//!
//! The "input side" of the printer is managed as an abstract process called
-//! SCAN, which uses 'scan_stack', to manage calculating 'size'. SCAN is, in
+//! SCAN, which uses `scan_stack`, to manage calculating `size`. SCAN is, in
//! other words, the process of calculating 'size' entries.
//!
//! The "output side" of the printer is managed by an abstract process called
-//! PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
+//! PRINT, which uses `print_stack`, `margin` and `space` to figure out what to
//! do with each token/size pair it consumes as it goes. It's trying to consume
//! the entire buffered window, but can't output anything until the size is >=
//! 0 (sizes are set to negative while they're pending calculation).
pub fn advance_right(&mut self) {
self.right += 1;
self.right %= self.buf_len;
- assert!(self.right != self.left);
+ assert_ne!(self.right, self.left);
}
pub fn advance_left(&mut self) -> io::Result<()> {
debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right,
token::CloseDelim(token::Bracket) => "]".to_string(),
token::OpenDelim(token::Brace) => "{".to_string(),
token::CloseDelim(token::Brace) => "}".to_string(),
- token::OpenDelim(token::NoDelim) => " ".to_string(),
+ token::OpenDelim(token::NoDelim) |
token::CloseDelim(token::NoDelim) => " ".to_string(),
token::Pound => "#".to_string(),
token::Dollar => "$".to_string(),
let mut out = match lit {
token::Byte(b) => format!("b'{}'", b),
token::Char(c) => format!("'{}'", c),
- token::Float(c) => c.to_string(),
+ token::Float(c) |
token::Integer(c) => c.to_string(),
token::Str_(s) => format!("\"{}\"", s),
token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Interpolated(ref nt) => match **nt {
- token::NtExpr(ref e) => expr_to_string(&e),
- token::NtMeta(ref e) => meta_item_to_string(&e),
- token::NtTy(ref e) => ty_to_string(&e),
- token::NtPath(ref e) => path_to_string(&e),
- token::NtItem(ref e) => item_to_string(&e),
- token::NtBlock(ref e) => block_to_string(&e),
- token::NtStmt(ref e) => stmt_to_string(&e),
- token::NtPat(ref e) => pat_to_string(&e),
+ token::NtExpr(ref e) => expr_to_string(e),
+ token::NtMeta(ref e) => meta_item_to_string(e),
+ token::NtTy(ref e) => ty_to_string(e),
+ token::NtPath(ref e) => path_to_string(e),
+ token::NtItem(ref e) => item_to_string(e),
+ token::NtBlock(ref e) => block_to_string(e),
+ token::NtStmt(ref e) => stmt_to_string(e),
+ token::NtPat(ref e) => pat_to_string(e),
token::NtIdent(ref e) => ident_to_string(e.node),
token::NtTT(ref tree) => tt_to_string(tree.clone()),
- token::NtArm(ref e) => arm_to_string(&e),
- token::NtImplItem(ref e) => impl_item_to_string(&e),
- token::NtTraitItem(ref e) => trait_item_to_string(&e),
- token::NtGenerics(ref e) => generics_to_string(&e),
- token::NtWhereClause(ref e) => where_clause_to_string(&e),
- token::NtArg(ref e) => arg_to_string(&e),
- token::NtVis(ref e) => vis_to_string(&e),
+ token::NtArm(ref e) => arm_to_string(e),
+ token::NtImplItem(ref e) => impl_item_to_string(e),
+ token::NtTraitItem(ref e) => trait_item_to_string(e),
+ token::NtGenerics(ref e) => generics_to_string(e),
+ token::NtWhereClause(ref e) => where_clause_to_string(e),
+ token::NtArg(ref e) => arg_to_string(e),
+ token::NtVis(ref e) => vis_to_string(e),
}
}
}
let mut result = None;
- if let &Some(ref lits) = self.literals()
- {
+ if let Some(ref lits) = *self.literals() {
while cur_lit < lits.len() {
let ltrl = (*lits)[cur_lit].clone();
if ltrl.pos > pos { break; }
fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo)?;
- match self.next_lit(lit.span.lo) {
- Some(ref ltrl) => {
- return word(self.writer(), &(*ltrl).lit);
- }
- _ => ()
+ if let Some(ref ltrl) = self.next_lit(lit.span.lo) {
+ return word(self.writer(), &(*ltrl).lit);
}
match lit.node {
ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
self.popen()?;
self.commasep(Consistent,
&items[..],
- |s, i| s.print_meta_list_item(&i))?;
+ |s, i| s.print_meta_list_item(i))?;
self.pclose()?;
}
}
pub fn commasep_exprs(&mut self, b: Breaks,
exprs: &[P<ast::Expr>]) -> io::Result<()> {
- self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span)
+ self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
}
pub fn print_mod(&mut self, _mod: &ast::Mod,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_inner_attributes(attrs)?;
for item in &_mod.items {
- self.print_item(&item)?;
+ self.print_item(item)?;
}
Ok(())
}
match ty.node {
ast::TyKind::Slice(ref ty) => {
word(&mut self.s, "[")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, "]")?;
}
ast::TyKind::Ptr(ref mt) => {
ast::TyKind::Tup(ref elts) => {
self.popen()?;
self.commasep(Inconsistent, &elts[..],
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
if elts.len() == 1 {
word(&mut self.s, ",")?;
}
}
ast::TyKind::Paren(ref typ) => {
self.popen()?;
- self.print_type(&typ)?;
+ self.print_type(typ)?;
self.pclose()?;
}
ast::TyKind::BareFn(ref f) => {
}
ast::TyKind::Array(ref ty, ref v) => {
word(&mut self.s, "[")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, "; ")?;
- self.print_expr(&v)?;
+ self.print_expr(v)?;
word(&mut self.s, "]")?;
}
ast::TyKind::Typeof(ref e) => {
word(&mut self.s, "typeof(")?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
word(&mut self.s, ")")?;
}
ast::TyKind::Infer => {
}
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&t)?;
+ self.print_type(t)?;
word(&mut self.s, ";")?;
self.end()?; // end the head-ibox
self.end() // end the outer cbox
self.head(&visibility_qualified(&item.vis, "extern crate"))?;
if let Some(p) = *optional_path {
let val = p.as_str();
- if val.contains("-") {
+ if val.contains('-') {
self.print_string(&val, ast::StrStyle::Cooked)?;
} else {
self.print_name(p)?;
}
ast::ItemKind::Use(ref vp) => {
self.head(&visibility_qualified(&item.vis, "use"))?;
- self.print_view_path(&vp)?;
+ self.print_view_path(vp)?;
word(&mut self.s, ";")?;
self.end()?; // end inner head-block
self.end()?; // end outer head-block
}
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
space(&mut self.s)?;
self.end()?; // end the head-ibox
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer cbox
}
self.head(&visibility_qualified(&item.vis, "const"))?;
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
space(&mut self.s)?;
self.end()?; // end the head-ibox
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer cbox
}
&item.vis
)?;
word(&mut self.s, " ")?;
- self.print_block_with_attrs(&body, &item.attrs)?;
+ self.print_block_with_attrs(body, &item.attrs)?;
}
ast::ItemKind::Mod(ref _mod) => {
self.head(&visibility_qualified(&item.vis, "mod"))?;
self.print_where_clause(¶ms.where_clause)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer ibox
}
}
ast::ItemKind::Struct(ref struct_def, ref generics) => {
self.head(&visibility_qualified(&item.vis, "struct"))?;
- self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+ self.print_struct(struct_def, generics, item.ident, item.span, true)?;
}
ast::ItemKind::Union(ref struct_def, ref generics) => {
self.head(&visibility_qualified(&item.vis, "union"))?;
- self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+ self.print_struct(struct_def, generics, item.ident, item.span, true)?;
}
ast::ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
self.head("")?;
space(&mut self.s)?;
}
- match polarity {
- ast::ImplPolarity::Negative => {
- word(&mut self.s, "!")?;
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ word(&mut self.s, "!")?;
}
if let Some(ref t) = *opt_trait {
self.word_space("for")?;
}
- self.print_type(&ty)?;
+ self.print_type(ty)?;
self.print_where_clause(&generics.where_clause)?;
space(&mut self.s)?;
Some(ref d) => {
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&d)
+ self.print_expr(d)
}
_ => Ok(())
}
self.print_outer_attributes(&ti.attrs)?;
match ti.node {
ast::TraitItemKind::Const(ref ty, ref default) => {
- self.print_associated_const(ti.ident, &ty,
+ self.print_associated_const(ti.ident, ty,
default.as_ref().map(|expr| &**expr),
&ast::Visibility::Inherited)?;
}
self.print_defaultness(ii.defaultness)?;
match ii.node {
ast::ImplItemKind::Const(ref ty, ref expr) => {
- self.print_associated_const(ii.ident, &ty, Some(&expr), &ii.vis)?;
+ self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?;
}
ast::ImplItemKind::Method(ref sig, ref body) => {
self.head("")?;
self.word_nbsp("let")?;
self.ibox(INDENT_UNIT)?;
- self.print_local_decl(&loc)?;
+ self.print_local_decl(loc)?;
self.end()?;
if let Some(ref init) = loc.init {
self.nbsp()?;
self.word_space("=")?;
- self.print_expr(&init)?;
+ self.print_expr(init)?;
}
word(&mut self.s, ";")?;
self.end()?;
}
- ast::StmtKind::Item(ref item) => self.print_item(&item)?,
+ ast::StmtKind::Item(ref item) => self.print_item(item)?,
ast::StmtKind::Expr(ref expr) => {
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
if parse::classify::expr_requires_semi_to_be_stmt(expr) {
word(&mut self.s, ";")?;
}
}
ast::StmtKind::Semi(ref expr) => {
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
word(&mut self.s, ";")?;
}
ast::StmtKind::Mac(ref mac) => {
let (ref mac, style, ref attrs) = **mac;
self.space_if_not_bol()?;
- self.print_outer_attributes(&attrs)?;
+ self.print_outer_attributes(attrs)?;
let delim = match style {
ast::MacStmtStyle::Braces => token::Brace,
_ => token::Paren
};
- self.print_mac(&mac, delim)?;
+ self.print_mac(mac, delim)?;
if style == ast::MacStmtStyle::Semicolon {
word(&mut self.s, ";")?;
}
ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
self.maybe_print_comment(st.span.lo)?;
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
}
_ => self.print_stmt(st)?,
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else if ")?;
- self.print_expr(&i)?;
+ self.print_expr(i)?;
space(&mut self.s)?;
- self.print_block(&then)?;
+ self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "another else-if-let"
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else if let ")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
- self.print_block(&then)?;
+ self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "final else"
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else ")?;
- self.print_block(&b)
+ self.print_block(b)
}
// BLEAH, constraints would be great here
_ => {
binop: ast::BinOp) -> bool {
match sub_expr.node {
ast::ExprKind::Binary(ref sub_op, _, _) => {
- if AssocOp::from_ast_binop(sub_op.node).precedence() <
- AssocOp::from_ast_binop(binop.node).precedence() {
- true
- } else {
- false
- }
+ AssocOp::from_ast_binop(sub_op.node).precedence() <
+ AssocOp::from_ast_binop(binop.node).precedence()
}
_ => true
}
space(&mut self.s)?;
}
word(&mut self.s, "..")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
self.end()?;
}
_ => if !fields.is_empty() {
if !tys.is_empty() {
word(&mut self.s, "::<")?;
self.commasep(Inconsistent, tys,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
word(&mut self.s, ">")?;
}
self.print_call_post(base_args)
self.print_expr_vec(&exprs[..], attrs)?;
}
ast::ExprKind::Repeat(ref element, ref count) => {
- self.print_expr_repeat(&element, &count, attrs)?;
+ self.print_expr_repeat(element, count, attrs)?;
}
ast::ExprKind::Struct(ref path, ref fields, ref wth) => {
self.print_expr_struct(path, &fields[..], wth, attrs)?;
self.print_expr_tup(&exprs[..], attrs)?;
}
ast::ExprKind::Call(ref func, ref args) => {
- self.print_expr_call(&func, &args[..])?;
+ self.print_expr_call(func, &args[..])?;
}
ast::ExprKind::MethodCall(ident, ref tys, ref args) => {
self.print_expr_method_call(ident, &tys[..], &args[..])?;
}
ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
- self.print_expr_binary(op, &lhs, &rhs)?;
+ self.print_expr_binary(op, lhs, rhs)?;
}
ast::ExprKind::Unary(op, ref expr) => {
- self.print_expr_unary(op, &expr)?;
+ self.print_expr_unary(op, expr)?;
}
ast::ExprKind::AddrOf(m, ref expr) => {
- self.print_expr_addr_of(m, &expr)?;
+ self.print_expr_addr_of(m, expr)?;
}
ast::ExprKind::Lit(ref lit) => {
- self.print_literal(&lit)?;
+ self.print_literal(lit)?;
}
ast::ExprKind::Cast(ref expr, ref ty) => {
if let ast::ExprKind::Cast(..) = expr.node {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
} else {
- self.print_expr_maybe_paren(&expr)?;
+ self.print_expr_maybe_paren(expr)?;
}
space(&mut self.s)?;
self.word_space("as")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
ast::ExprKind::Type(ref expr, ref ty) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
- self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
+ self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?;
}
ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
- self.print_if_let(&pat, &expr, &blk, elseopt.as_ref().map(|e| &**e))?;
+ self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
}
ast::ExprKind::While(ref test, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("while")?;
- self.print_expr(&test)?;
+ self.print_expr(test)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("while let")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("for")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("in")?;
- self.print_expr(&iter)?;
+ self.print_expr(iter)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Loop(ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
}
self.head("loop")?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Match(ref expr, ref arms) => {
self.cbox(INDENT_UNIT)?;
self.ibox(4)?;
self.word_nbsp("match")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
self.bopen()?;
self.print_inner_attributes_no_trailing_hardbreak(attrs)?;
ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => {
self.print_capture_clause(capture_clause)?;
- self.print_fn_block_args(&decl)?;
+ self.print_fn_block_args(decl)?;
space(&mut self.s)?;
self.print_expr(body)?;
self.end()?; // need to close a box
self.cbox(INDENT_UNIT)?;
// head-box, will be closed by print-block after {
self.ibox(0)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Assign(ref lhs, ref rhs) => {
- self.print_expr(&lhs)?;
+ self.print_expr(lhs)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&rhs)?;
+ self.print_expr(rhs)?;
}
ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
- self.print_expr(&lhs)?;
+ self.print_expr(lhs)?;
space(&mut self.s)?;
word(&mut self.s, op.node.to_string())?;
self.word_space("=")?;
- self.print_expr(&rhs)?;
+ self.print_expr(rhs)?;
}
ast::ExprKind::Field(ref expr, id) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ".")?;
self.print_ident(id.node)?;
}
ast::ExprKind::TupField(ref expr, id) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ".")?;
self.print_usize(id.node)?;
}
ast::ExprKind::Index(ref expr, ref index) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, "[")?;
- self.print_expr(&index)?;
+ self.print_expr(index)?;
word(&mut self.s, "]")?;
}
ast::ExprKind::Range(ref start, ref end, limits) => {
- if let &Some(ref e) = start {
- self.print_expr(&e)?;
+ if let Some(ref e) = *start {
+ self.print_expr(e)?;
}
if limits == ast::RangeLimits::HalfOpen {
word(&mut self.s, "..")?;
} else {
word(&mut self.s, "...")?;
}
- if let &Some(ref e) = end {
- self.print_expr(&e)?;
+ if let Some(ref e) = *end {
+ self.print_expr(e)?;
}
}
ast::ExprKind::Path(None, ref path) => {
}
ast::ExprKind::Ret(ref result) => {
word(&mut self.s, "return")?;
- match *result {
- Some(ref expr) => {
- word(&mut self.s, " ")?;
- self.print_expr(&expr)?;
- }
- _ => ()
+ if let Some(ref expr) = *result {
+ word(&mut self.s, " ")?;
+ self.print_expr(expr)?;
}
}
ast::ExprKind::InlineAsm(ref a) => {
self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
s.popen()?;
- s.print_expr(&o)?;
+ s.print_expr(o)?;
s.pclose()?;
Ok(())
})?;
ast::ExprKind::Paren(ref e) => {
self.popen()?;
self.print_inner_attributes_inline(attrs)?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
self.pclose()?;
},
ast::ExprKind::Try(ref e) => {
ast::ExprKind::Catch(ref blk) => {
self.head("do catch")?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?
+ self.print_block_with_attrs(blk, attrs)?
}
}
self.ann.post(self, NodeExpr(expr))?;
self.print_pat(&loc.pat)?;
if let Some(ref ty) = loc.ty {
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
Ok(())
}
space(&mut self.s)?;
self.word_space("as")?;
let depth = path.segments.len() - qself.position;
- self.print_path(&path, false, depth, false)?;
+ self.print_path(path, false, depth, false)?;
}
word(&mut self.s, ">")?;
word(&mut self.s, "::")?;
self.commasep(
Inconsistent,
&data.types,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
comma = true;
}
self.commasep(
Inconsistent,
&data.inputs,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
word(&mut self.s, ")")?;
if let Some(ref ty) = data.output {
self.space_if_not_bol()?;
self.word_space("->")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
}
}
self.print_ident(path1.node)?;
if let Some(ref p) = *sub {
word(&mut self.s, "@")?;
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
}
PatKind::TupleStruct(ref path, ref elts, ddpos) => {
self.print_path(path, true, 0, false)?;
self.popen()?;
if let Some(ddpos) = ddpos {
- self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
if ddpos != 0 {
self.word_space(",")?;
}
word(&mut self.s, "..")?;
if ddpos != elts.len() {
word(&mut self.s, ",")?;
- self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
}
} else {
- self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
}
self.pclose()?;
}
PatKind::Tuple(ref elts, ddpos) => {
self.popen()?;
if let Some(ddpos) = ddpos {
- self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
if ddpos != 0 {
self.word_space(",")?;
}
word(&mut self.s, "..")?;
if ddpos != elts.len() {
word(&mut self.s, ",")?;
- self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
}
} else {
- self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
if elts.len() == 1 {
word(&mut self.s, ",")?;
}
}
PatKind::Box(ref inner) => {
word(&mut self.s, "box ")?;
- self.print_pat(&inner)?;
+ self.print_pat(inner)?;
}
PatKind::Ref(ref inner, mutbl) => {
word(&mut self.s, "&")?;
if mutbl == ast::Mutability::Mutable {
word(&mut self.s, "mut ")?;
}
- self.print_pat(&inner)?;
+ self.print_pat(inner)?;
}
PatKind::Lit(ref e) => self.print_expr(&**e)?,
PatKind::Range(ref begin, ref end, ref end_kind) => {
- self.print_expr(&begin)?;
+ self.print_expr(begin)?;
space(&mut self.s)?;
match *end_kind {
RangeEnd::Included => word(&mut self.s, "...")?,
RangeEnd::Excluded => word(&mut self.s, "..")?,
}
- self.print_expr(&end)?;
+ self.print_expr(end)?;
}
PatKind::Slice(ref before, ref slice, ref after) => {
word(&mut self.s, "[")?;
self.commasep(Inconsistent,
&before[..],
- |s, p| s.print_pat(&p))?;
+ |s, p| s.print_pat(p))?;
if let Some(ref p) = *slice {
if !before.is_empty() { self.word_space(",")?; }
if p.node != PatKind::Wild {
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
word(&mut self.s, "..")?;
if !after.is_empty() { self.word_space(",")?; }
}
self.commasep(Inconsistent,
&after[..],
- |s, p| s.print_pat(&p))?;
+ |s, p| s.print_pat(p))?;
word(&mut self.s, "]")?;
}
PatKind::Mac(ref m) => self.print_mac(m, token::Paren)?,
space(&mut self.s)?;
self.word_space("|")?;
}
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
space(&mut self.s)?;
if let Some(ref e) = arm.guard {
self.word_space("if")?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
space(&mut self.s)?;
}
self.word_space("=>")?;
match arm.body.node {
ast::ExprKind::Block(ref blk) => {
// the block will close the pattern's ibox
- self.print_block_unclosed_indent(&blk, INDENT_UNIT)?;
+ self.print_block_unclosed_indent(blk, INDENT_UNIT)?;
// If it is a user-provided unsafe block, print a comma after it
if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules {
self.print_mutability(m)?;
word(&mut self.s, "self")?;
self.word_space(":")?;
- self.print_type(&typ)
+ self.print_type(typ)
}
}
}
self.word_space("->")?;
match decl.output {
ast::FunctionRetTy::Ty(ref ty) => {
- self.print_type(&ty)?;
+ self.print_type(ty)?;
self.maybe_print_comment(ty.span.lo)
}
ast::FunctionRetTy::Default(..) => unreachable!(),
Some(ref default) => {
space(&mut self.s)?;
self.word_space("=")?;
- self.print_type(&default)
+ self.print_type(default)
}
_ => Ok(())
}
ref bounds,
..}) => {
self.print_formal_lifetime_list(bound_lifetimes)?;
- self.print_type(&bounded_ty)?;
+ self.print_type(bounded_ty)?;
self.print_bounds(":", bounds)?;
}
ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime,
match decl.output {
ast::FunctionRetTy::Default(..) => unreachable!(),
ast::FunctionRetTy::Ty(ref ty) =>
- self.print_type(&ty)?
+ self.print_type(ty)?
}
self.end()?;
if self.next_comment().is_none() {
hardbreak(&mut self.s)?;
}
- loop {
- match self.next_comment() {
- Some(ref cmnt) => {
- self.print_comment(cmnt)?;
- self.cur_cmnt_and_lit.cur_cmnt += 1;
- }
- _ => break
- }
+ while let Some(ref cmnt) = self.next_comment() {
+ self.print_comment(cmnt)?;
+ self.cur_cmnt_and_lit.cur_cmnt += 1;
}
Ok(())
}
use tokenstream::TokenStream;
/// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
/// The expanded code uses the unstable `#[prelude_import]` attribute.
fn ignored_span(sp: Span) -> Span {
let mark = Mark::fresh();
None => return krate,
};
- let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
+ let crate_name = Symbol::intern(&alt_std_name.unwrap_or_else(|| name.to_string()));
krate.module.items.insert(0, P(ast::Item {
attrs: vec![attr::mk_attr_outer(DUMMY_SP,
// Add a special __test module to the crate that will contain code
// generated for the test harness
let (mod_, reexport) = mk_test_module(&mut self.cx);
- match reexport {
- Some(re) => folded.module.items.push(re),
- None => {}
+ if let Some(re) = reexport {
+ folded.module.items.push(re)
}
folded.module.items.push(mod_);
folded
let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
- ident: sym.clone(),
+ ident: sym,
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Mod(reexport_mod),
}
/// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
/// The expanded code calls some unstable functions in the test crate.
fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
Span { ctxt: cx.ctxt, ..sp }
}
}
- return has_test_attr && has_test_signature(i) == Yes;
+ has_test_attr && has_test_signature(i) == Yes
}
fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
`fn(&mut Bencher) -> ()`");
}
- return has_bench_attr && has_test_signature(i);
+ has_bench_attr && has_test_signature(i)
}
fn is_ignored(i: &ast::Item) -> bool {
ast::Unsafety::Normal,
dummy_spanned(ast::Constness::NotConst),
::abi::Abi::Rust, ast::Generics::default(), main_body);
- let main = P(ast::Item {
+ P(ast::Item {
ident: Ident::from_str("main"),
attrs: vec![main_attr],
id: ast::DUMMY_NODE_ID,
node: main,
vis: ast::Visibility::Public,
span: sp
- });
-
- return main;
+ })
}
fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
//! # Token Streams
//!
-//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! `TokenStream`s represent syntactic objects before they are converted into ASTs.
//! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
//! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
//!
//! ## Ownership
-//! TokenStreams are persistent data structures constructed as ropes with reference
-//! counted-children. In general, this means that calling an operation on a TokenStream
-//! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to
-//! the original. This essentially coerces TokenStreams into 'views' of their subparts,
-//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
+//! `TokenStreams` are persistent data structures constructed as ropes with reference
+//! counted-children. In general, this means that calling an operation on a `TokenStream`
+//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
+//! the original. This essentially coerces `TokenStream`s into 'views' of their subparts,
+//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
//! ownership of the original.
use syntax_pos::{BytePos, Span, DUMMY_SP};
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS token tree against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
+/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
///
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
iter_names
.filter_map(|&name| {
let dist = lev_distance(lookup, &name.as_str());
- match dist <= max_dist { // filter the unwanted cases
- true => Some((name, dist)),
- false => None,
+ if dist <= max_dist { // filter the unwanted cases
+ Some((name, dist))
+ } else {
+ None
}
})
.min_by_key(|&(_, val)| val) // extract the tuple containing the minimum edit distance
// move the read_i'th item out of the vector and map it
// to an iterator
let e = ptr::read(self.get_unchecked(read_i));
- let mut iter = f(e).into_iter();
+ let iter = f(e).into_iter();
read_i += 1;
- while let Some(e) = iter.next() {
+ for e in iter {
if write_i < read_i {
ptr::write(self.get_unchecked_mut(write_i), e);
write_i += 1;
// move the read_i'th item out of the vector and map it
// to an iterator
let e = ptr::read(self.get_unchecked(read_i));
- let mut iter = f(e).into_iter();
+ let iter = f(e).into_iter();
read_i += 1;
- while let Some(e) = iter.next() {
+ for e in iter {
if write_i < read_i {
ptr::write(self.get_unchecked_mut(write_i), e);
write_i += 1;
self.count += 1;
walk_ident(self, span, ident);
}
- fn visit_mod(&mut self, m: &Mod, _s: Span, _n: NodeId) {
+ fn visit_mod(&mut self, m: &Mod, _s: Span, _a: &[Attribute], _n: NodeId) {
self.count += 1;
walk_mod(self, m)
}
use ast::*;
use syntax_pos::Span;
use codemap::Spanned;
+use tokenstream::ThinTokenStream;
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum FnKind<'a> {
fn visit_ident(&mut self, span: Span, ident: Ident) {
walk_ident(self, span, ident);
}
- fn visit_mod(&mut self, m: &'ast Mod, _s: Span, _n: NodeId) { walk_mod(self, m) }
+ fn visit_mod(&mut self, m: &'ast Mod, _s: Span, _attrs: &[Attribute], _n: NodeId) {
+ walk_mod(self, m);
+ }
fn visit_foreign_item(&mut self, i: &'ast ForeignItem) { walk_foreign_item(self, i) }
fn visit_global_asm(&mut self, ga: &'ast GlobalAsm) { walk_global_asm(self, ga) }
fn visit_item(&mut self, i: &'ast Item) { walk_item(self, i) }
// definition in your trait impl:
// visit::walk_mac(self, _mac)
}
+ fn visit_mac_def(&mut self, _mac: &'ast ThinTokenStream, _id: NodeId) {
+ // Nothing to do
+ }
fn visit_path(&mut self, path: &'ast Path, _id: NodeId) {
walk_path(self, path)
}
}
pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) {
- visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
+ visitor.visit_mod(&krate.module, krate.span, &krate.attrs, CRATE_NODE_ID);
walk_list!(visitor, visit_attribute, &krate.attrs);
}
item.id)
}
ItemKind::Mod(ref module) => {
- visitor.visit_mod(module, item.span, item.id)
+ visitor.visit_mod(module, item.span, &item.attrs, item.id)
}
ItemKind::ForeignMod(ref foreign_module) => {
walk_list!(visitor, visit_foreign_item, &foreign_module.items);
walk_list!(visitor, visit_trait_item, methods);
}
ItemKind::Mac(ref mac) => visitor.visit_mac(mac),
- ItemKind::MacroDef(..) => {},
+ ItemKind::MacroDef(ref ts) => visitor.visit_mac_def(ts, item.id),
}
walk_list!(visitor, visit_attribute, &item.attrs);
}
visitor.visit_ty(ty);
visitor.visit_expr(expression)
}
- TyKind::TraitObject(ref bounds) => {
- walk_list!(visitor, visit_ty_param_bound, bounds);
- }
+ TyKind::TraitObject(ref bounds) |
TyKind::ImplTrait(ref bounds) => {
walk_list!(visitor, visit_ty_param_bound, bounds);
}
walk_fn_decl(visitor, declaration);
visitor.visit_block(body);
}
- FnKind::Method(_, ref sig, _, body) => {
+ FnKind::Method(_, sig, _, body) => {
visitor.visit_generics(&sig.generics);
walk_fn_decl(visitor, declaration);
visitor.visit_block(body);
}
ExprKind::InlineAsm(ref ia) => {
for &(_, ref input) in &ia.inputs {
- visitor.visit_expr(&input)
+ visitor.visit_expr(input)
}
for output in &ia.outputs {
visitor.visit_expr(&output.expr)
)
}
-macro_rules! path {
- ($($x:tt)*) => (
- ::ext::deriving::generic::ty::Path::new( pathvec![ $($x)* ] )
- )
-}
-
macro_rules! path_local {
($x:ident) => (
::deriving::generic::ty::Path::new_local(stringify!($x))
//! Syntax extensions in the Rust compiler.
#![crate_name = "syntax_ext"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![deny(warnings)]
#![feature(proc_macro_internals)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
+
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
extern crate fmt_macros;
extern crate log;
visit::walk_item(self, item);
}
- fn visit_mod(&mut self, m: &'a ast::Mod, _s: Span, id: NodeId) {
+ fn visit_mod(&mut self, m: &'a ast::Mod, _s: Span, _a: &[ast::Attribute], id: NodeId) {
let mut prev_in_root = self.in_root;
if id != ast::CRATE_NODE_ID {
prev_in_root = mem::replace(&mut self.in_root, false);
//! This API is completely unstable and subject to change.
#![crate_name = "syntax_pos"]
-#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![feature(custom_attribute)]
#![feature(optin_builtin_traits)]
#![allow(unused_attributes)]
-#![feature(rustc_private)]
-#![feature(staged_api)]
#![feature(specialization)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(rustc_private))]
+#![cfg_attr(stage0, feature(staged_api))]
+
use std::cell::{Cell, RefCell};
use std::ops::{Add, Sub};
use std::rc::Rc;
//! [ti]: https://en.wikipedia.org/wiki/Terminfo
#![crate_name = "term"]
-#![unstable(feature = "rustc_private",
- reason = "use the crates.io `term` library instead",
- issue = "27812")]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
#![deny(missing_docs)]
#![deny(warnings)]
-#![feature(box_syntax)]
#![feature(staged_api)]
#![cfg_attr(windows, feature(libc))]
// Handle rustfmt skips
#![feature(custom_attribute)]
#![allow(unused_attributes)]
+#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
+#![cfg_attr(stage0, feature(staged_api))]
+
use std::io::prelude::*;
pub use terminfo::TerminfoTerminal;
-Subproject commit cf85b5a8da7853c4de5cc57766da8b7988c06461
+Subproject commit 1ef3b9128e1baaed61b42d5b0de79dee100acf17
+++ /dev/null
-Subproject commit 2e6417f6af5218a29a8ee72ed17af085560b9b9c
# If this file is modified, then llvm will be (optionally) cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2017-05-06
+2017-05-13
// aux-build:bang_proc_macro.rs
#![feature(proc_macro)]
+#![allow(unused_macros)]
#[macro_use]
extern crate derive_foo;
fn main () {
let foo = 42u32;
- const FOO : u32 = foo; //~ ERROR E0435
+ let _: [u8; foo]; //~ ERROR E0435
//~| NOTE non-constant used with constant
}
pub fn test<A: Foo, B: Foo>() {
let _array = [4; <A as Foo>::Y];
- //~^ ERROR cannot use an outer type parameter in this context [E0402]
+ //~^ ERROR the trait bound `A: Foo` is not satisfied [E0277]
}
fn main() {
pub fn test<A: Foo, B: Foo>() {
let _array: [u32; <A as Foo>::Y];
- //~^ ERROR cannot use an outer type parameter in this context [E0402]
+ //~^ ERROR the trait bound `A: Foo` is not satisfied [E0277]
}
fn main() {
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Formerly this ICEd with the following message:
+// Tried to project an inherited associated type during coherence checking,
+// which is currently not supported.
+//
+// Now we expect to run into a more user-friendly cycle error instead.
+
+#![feature(specialization)]
+
+trait Trait<T> { type Assoc; }
+//~^ unsupported cyclic reference between types/traits detected [E0391]
+
+impl<T> Trait<T> for Vec<T> {
+ type Assoc = ();
+}
+
+impl Trait<u8> for Vec<u8> {}
+
+impl<T> Trait<T> for String {
+ type Assoc = ();
+}
+
+impl Trait<<Vec<u8> as Trait<u8>>::Assoc> for String {}
+
+fn main() {}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-C target-feature=+crt-static
-// error-pattern: specifying the `crt-static` target feature is only allowed
-
-fn main() {}
// gate-test-allow_internal_unstable
+#![allow(unused_macros)]
+
macro_rules! bar {
() => {
// more layers don't help:
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
#[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
macro_rules! foo {
() => {}
fn foo<T>() {
static a: Bar<T> = Bar::What;
- //~^ ERROR cannot use an outer type parameter in this context
+//~^ ERROR can't use type parameters from outer function; try using a local type parameter instead
}
fn main() {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! invalid {
_ => (); //~ ERROR invalid macro matcher
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! test { ($wrong:t_ty ..) => () }
//~^ ERROR: invalid fragment specifier `t_ty`
fn main() {
let foo = 42u32;
const FOO : u32 = foo;
- //~^ ERROR attempt to use a non-constant value in a constant
+ //~^ ERROR can't capture dynamic environment
}
pub trait Resources<'a> {}
pub trait Buffer<'a, R: Resources<'a>> {
+ //~^ NOTE the lifetime 'a as defined on the trait at 13:0...
+ //~| NOTE ...does not necessarily outlive the lifetime 'a as defined on the trait
+
fn select(&self) -> BufferViewHandle<R>;
//~^ ERROR mismatched types
//~| lifetime mismatch
//~| NOTE expected type `Resources<'_>`
- //~| NOTE the lifetime 'a as defined on the method body at 14:4...
//~| NOTE ...does not necessarily outlive the anonymous lifetime #1 defined on the method body
//~| ERROR mismatched types
//~| lifetime mismatch
//~| NOTE expected type `Resources<'_>`
- //~| NOTE the anonymous lifetime #1 defined on the method body at 14:4...
- //~| NOTE ...does not necessarily outlive the lifetime 'a as defined on the method body
+ //~| NOTE the anonymous lifetime #1 defined on the method body at 17:4...
}
pub struct BufferViewHandle<'a, R: 'a+Resources<'a>>(&'a R);
let foo = 100;
static y: isize = foo + 1;
- //~^ ERROR attempt to use a non-constant value in a constant
+ //~^ ERROR can't capture dynamic environment
println!("{}", y);
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-enum Fruit {
+// these two HELPs are actually in a new line between this line and the `enum Fruit` line
+enum Fruit { //~ HELP possible candidate is found in another module, you can import it into scope
+ //~^ HELP possible candidate is found in another module, you can import it into scope
Apple(i64),
//~^ HELP there is an enum variant `Fruit::Apple`, did you mean to use `Fruit`?
//~| HELP there is an enum variant `Fruit::Apple`, did you mean to use `Fruit`?
Apple(5)
//~^ ERROR cannot find function `Apple` in this scope
//~| NOTE not found in this scope
- //~| HELP possible candidate is found in another module, you can import it into scope
}
fn should_return_fruit_too() -> Fruit::Apple {
Apple(5)
//~^ ERROR cannot find function `Apple` in this scope
//~| NOTE not found in this scope
- //~| HELP possible candidate is found in another module, you can import it into scope
}
fn foo() -> Ok {
fn f(x:isize) {
static child: isize = x + 1;
- //~^ ERROR attempt to use a non-constant value in a constant
+ //~^ ERROR can't capture dynamic environment
}
fn main() {}
impl PTrait for P {
fn getChildOption(&self) -> Option<Box<P>> {
static childVal: Box<P> = self.child.get();
- //~^ ERROR attempt to use a non-constant value in a constant
+ //~^ ERROR can't capture dynamic environment
panic!();
}
}
struct RepeatMut<'a, T>(T, &'a ());
impl<'a, T: 'a> Iterator for RepeatMut<'a, T> {
+ //~^ NOTE ...does not necessarily outlive the lifetime 'a as defined on the impl
+
type Item = &'a mut T;
fn next(&'a mut self) -> Option<Self::Item>
//~^ ERROR method not compatible with trait
//~| lifetime mismatch
//~| NOTE expected type `fn(&mut RepeatMut<'a, T>) -> std::option::Option<&mut T>`
+ //~| NOTE the anonymous lifetime #1 defined on the method body
{
- //~^ NOTE the anonymous lifetime #1 defined on the body
- //~| NOTE ...does not necessarily outlive the lifetime 'a as defined on the body
Some(&mut self.0)
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! assign {
(($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected `*` or `+`
$($a)* = $($b)*
// except according to those terms.
#![deny(missing_fragment_specifier)] //~ NOTE lint level defined here
+#![allow(unused_macros)]
macro_rules! m { ($i) => {} }
//~^ ERROR missing fragment specifier
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Dim {
+ fn dim() -> usize;
+}
+
+enum Dim3 {}
+
+impl Dim for Dim3 {
+ fn dim() -> usize {
+ 3
+ }
+}
+
+fn main() {
+ let array: [usize; Dim3::dim()]
+ //~^ ERROR calls in constants are limited to constant functions
+ = [0; Dim3::dim()];
+ //~^ ERROR calls in constants are limited to constant functions
+}
pub struct Vector<T, D: Dim> {
entries: [T; D::dim()]
- //~^ ERROR cannot use an outer type parameter in this context
+ //~^ ERROR no associated item named `dim` found for type `D` in the current scope
}
-fn main() {
- let array: [usize; Dim3::dim()]
- //~^ ERROR calls in constants are limited to constant functions
- = [0; Dim3::dim()];
- //~^ ERROR calls in constants are limited to constant functions
-}
+fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! foo {
( $()* ) => {};
//~^ ERROR repetition matches empty token tree
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
mod macros_cant_escape_fns {
fn f() {
macro_rules! m { () => { 3 + 4 } }
//
// Check the macro follow sets (see corresponding rpass test).
+#![allow(unused_macros)]
+
// FOLLOW(pat) = {FatArrow, Comma, Eq, Or, Ident(if), Ident(in)}
macro_rules! follow_pat {
($p:pat ()) => {}; //~ERROR `$p:pat` is followed by `(`
// Regression test for issue #25436: check that things which can be
// followed by any token also permit X* to come afterwards.
+#![allow(unused_macros)]
+
macro_rules! foo {
( $a:expr $($b:tt)* ) => { }; //~ ERROR not allowed for `expr` fragments
( $a:ty $($b:tt)* ) => { }; //~ ERROR not allowed for `ty` fragments
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! errors_everywhere {
($ty:ty <) => (); //~ ERROR `$ty:ty` is followed by `<`, which is not allowed for `ty`
($ty:ty < foo ,) => (); //~ ERROR `$ty:ty` is followed by `<`, which is not allowed for `ty`
// aux-build:two_macros.rs
+#![allow(unused_macros)]
+
macro_rules! foo { () => {} }
macro_rules! macro_one { () => {} }
#[macro_use(macro_two)] extern crate two_macros;
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-struct Foo;
-
-fn main() {
- let a: Result<(), Foo> = Ok(());
- a.unwrap();
- //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
- //~| NOTE the following trait bounds were not satisfied: `Foo : std::fmt::Debug`
-}
// over time, but this test used to exhibit some pretty bogus messages
// that were not remotely helpful.
-// error-pattern:cannot infer
-// error-pattern:cannot outlive the lifetime 'a
-// error-pattern:must be valid for the static lifetime
+// error-pattern:the lifetime 'a
+// error-pattern:the static lifetime
struct Invariant<'a>(Option<&'a mut &'a mut ()>);
fn foo3<'a,'b>(x: &'a mut Dummy) -> &'b mut Dummy {
// Without knowing 'a:'b, we can't coerce
- x //~ ERROR cannot infer an appropriate lifetime
+ x //~ ERROR lifetime bound not satisfied
//~^ ERROR cannot infer an appropriate lifetime
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
// Issue #21370
macro_rules! test {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! test {
($e:expr +) => () //~ ERROR not allowed for `expr` fragments
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(unused_macros)]
+
+// Most simple case
+macro_rules! unused { //~ ERROR: unused macro definition
+ () => {};
+}
+
+// Test macros created by macros
+macro_rules! create_macro {
+ () => {
+ macro_rules! m { //~ ERROR: unused macro definition
+ () => {};
+ }
+ };
+}
+create_macro!();
+
+#[allow(unused_macros)]
+mod bar {
+ // Test that putting the #[deny] close to the macro's definition
+ // works.
+
+ #[deny(unused_macros)]
+ macro_rules! unused { //~ ERROR: unused macro definition
+ () => {};
+ }
+}
+
+fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! macro_rules { () => {} } //~ ERROR user-defined macros may not be named `macro_rules`
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
where 'max : 'min
{
// Previously OK:
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn main() { }
where 'max : 'min
{
// Previously OK, now an error as traits are invariant.
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn main() { }
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn main() { }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
pub struct Point {
pub x: f32,
pub y: f32,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
#![crate_type="rlib"]
#[cfg(rpass1)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
pub struct Point {
pub x: f32,
pub y: f32,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
pub struct Point {
pub x: f32,
pub y: f32,
// ignore-tidy-linelength
-// aux-build:extern_crate.rs
//[rpass1] compile-flags: -g
//[rpass2] compile-flags: -g
//[rpass3] compile-flags: -g -Zremap-path-prefix-from={{src-base}} -Zremap-path-prefix-to=/the/src
// except according to those terms.
// revisions:rpass1 rpass2 rpass3
-// compile-flags: -Z query-dep-graph -g
+// compile-flags: -Z query-dep-graph -g -Zincremental-cc
// aux-build:extern_crate.rs
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
#![allow(warnings)]
#![crate_name = "a"]
#![crate_type = "rlib"]
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub fn print_hello() {
+ println!("hello");
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test case makes sure that the compiler doesn't crash due to a failing
+// table lookup when a source file is removed.
+
+// revisions:rpass1 rpass2
+
+// Note that we specify -g so that the FileMaps actually get referenced by the
+// incr. comp. cache:
+// compile-flags: -Z query-dep-graph -g
+
+#[cfg(rpass1)]
+mod auxiliary;
+
+#[cfg(rpass1)]
+fn main() {
+ auxiliary::print_hello();
+}
+
+#[cfg(rpass2)]
+fn main() {
+ println!("hello");
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
// no-prefer-dynamic
#![crate_type="rlib"]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
#![crate_type="rlib"]
#[cfg(rpass1)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
#![crate_type="rlib"]
#[cfg(rpass1)]
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41697. Using dump-mir was triggering
+// artificial cycles: during type-checking, we had to get the MIR for
+// the constant expressions in `[u8; 2]`, which in turn would trigger
+// an attempt to get the item-path, which in turn would request the
+// types of the impl, which would trigger a cycle. We suppressed this
+// cycle now by forcing mir-dump to avoid asking for types of an impl.
+
+#![feature(rustc_attrs)]
+
+use std::sync::Arc;
+
+trait Foo {
+ fn get(&self) -> [u8; 2];
+}
+
+impl Foo for [u8; 2] {
+ fn get(&self) -> [u8; 2] {
+ *self
+ }
+}
+
+struct Bar<T: ?Sized>(T);
+
+fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
+ x
+}
+
+fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
+ x
+}
+
+fn main() {
+ let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
+ assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
+
+ let x: Arc<Foo + Send> = Arc::new([3, 4]);
+ assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let a = 42._; //~ ERROR unexpected token: `_`
+}
extern crate rustc_lint;
extern crate rustc_metadata;
extern crate rustc_errors;
+extern crate rustc_trans;
extern crate syntax;
use rustc::dep_graph::DepGraph;
let descriptions = Registry::new(&rustc::DIAGNOSTICS);
let dep_graph = DepGraph::new(opts.build_dep_graph());
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, Box::new(rustc_trans::LlvmMetadataLoader)));
let sess = build_session(opts, &dep_graph, None, descriptions, cstore.clone());
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
(sess, cstore)
}
extern crate rustc;
extern crate rustc_plugin;
+extern crate rustc_trans;
#[link(name = "llvm-function-pass", kind = "static")]
#[link(name = "llvm-module-pass", kind = "static")]
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-stage1
+// ignore-cross-compile
+#![feature(quote, rustc_private)]
+
+extern crate syntax;
+
+use syntax::ext::base::{ExtCtxt, DummyResolver};
+use syntax::ext::expand::ExpansionConfig;
+use syntax::parse::ParseSess;
+use syntax::codemap::{FilePathMapping, dummy_spanned};
+use syntax::print::pprust::expr_to_string;
+use syntax::ast::{Expr, ExprKind, LitKind, StrStyle, RangeLimits};
+use syntax::symbol::Symbol;
+use syntax::ptr::P;
+
+use std::rc::Rc;
+
+fn main() {
+ let parse_sess = ParseSess::new(FilePathMapping::empty());
+ let exp_cfg = ExpansionConfig::default("issue_35829".to_owned());
+ let mut resolver = DummyResolver;
+ let cx = ExtCtxt::new(&parse_sess, exp_cfg, &mut resolver);
+
+ // check byte string
+ let byte_string = quote_expr!(&cx, b"one");
+ let byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"one".to_vec()));
+ assert_eq!(byte_string.node, ExprKind::Lit(P(dummy_spanned(byte_string_lit_kind))));
+
+ // check raw byte string
+ let raw_byte_string = quote_expr!(&cx, br###"#"two"#"###);
+ let raw_byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"#\"two\"#".to_vec()));
+ assert_eq!(raw_byte_string.node, ExprKind::Lit(P(dummy_spanned(raw_byte_string_lit_kind))));
+
+ // check dotdotdot
+ let closed_range = quote_expr!(&cx, 0 ... 1);
+ assert_eq!(closed_range.node, ExprKind::Range(
+ Some(quote_expr!(&cx, 0)),
+ Some(quote_expr!(&cx, 1)),
+ RangeLimits::Closed
+ ));
+
+ // test case from 35829
+ let expr_35829 = quote_expr!(&cx, std::io::stdout().write(b"one"));
+ assert_eq!(expr_to_string(&expr_35829), r#"std::io::stdout().write(b"one")"#);
+}
extern crate syntax;
extern crate syntax_pos;
-use syntax::ast::Ident;
-use syntax::parse::token;
+use syntax::ast::{Ident, Name};
+use syntax::parse::token::{self, Token, Lit};
use syntax::tokenstream::TokenTree;
fn main() {
let true_tok = token::Ident(Ident::from_str("true"));
assert!(quote!(true).eq_unspanned(&true_tok.into()));
+
+ // issue #35829, extended check to proc_macro.
+ let triple_dot_tok = Token::DotDotDot;
+ assert!(quote!(...).eq_unspanned(&triple_dot_tok.into()));
+
+ let byte_str_tok = Token::Literal(Lit::ByteStr(Name::intern("one")), None);
+ assert!(quote!(b"one").eq_unspanned(&byte_str_tok.into()));
+
+ let byte_str_raw_tok = Token::Literal(Lit::ByteStrRaw(Name::intern("#\"two\"#"), 3), None);
+ assert!(quote!(br###"#"two"#"###).eq_unspanned(&byte_str_raw_tok.into()));
+
+ let str_raw_tok = Token::Literal(Lit::StrRaw(Name::intern("#\"three\"#"), 2), None);
+ assert!(quote!(r##"#"three"#"##).eq_unspanned(&str_raw_tok.into()));
}
A::X - B::X
}
+trait Bar: Foo {
+ const Y: i32 = Self::X;
+}
+
fn main() {
assert_eq!(11, Abc::X);
assert_eq!(97, Def::X);
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41677. The local variable was winding up with
+// a type `Receiver<?T, H>` where `?T` was unconstrained, because we
+// failed to enforce the WF obligations and `?T` is a bivariant type
+// parameter position.
+
+#![allow(unused_variables, dead_code)]
+
+use std::marker::PhantomData;
+
+trait Handle {
+ type Inner;
+}
+
+struct ResizingHandle<H>(PhantomData<H>);
+impl<H> Handle for ResizingHandle<H> {
+ type Inner = H;
+}
+
+struct Receiver<T, H: Handle<Inner=T>>(PhantomData<H>);
+
+fn channel<T>(size: usize) -> Receiver<T, ResizingHandle<T>> {
+ let rx = Receiver(PhantomData);
+ rx
+}
+
+fn main() {
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// this used to cause exponential code-size blowup during LLVM passes.
+// min-llvm-version 3.9
+
+#![feature(test)]
+
+extern crate test;
+
+struct MayUnwind;
+
+impl Drop for MayUnwind {
+ fn drop(&mut self) {
+ if test::black_box(false) {
+ panic!()
+ }
+ }
+}
+
+struct DS<U> {
+ may_unwind: MayUnwind,
+ name: String,
+ next: U,
+}
+
+fn add<U>(ds: DS<U>, name: String) -> DS<DS<U>> {
+ DS {
+ may_unwind: MayUnwind,
+ name: "?".to_owned(),
+ next: ds,
+ }
+}
+
+fn main() {
+ let deserializers = DS { may_unwind: MayUnwind, name: "?".to_owned(), next: () };
+ let deserializers = add(deserializers, "?".to_owned());
+ let deserializers = add(deserializers, "?".to_owned());
+ let deserializers = add(deserializers, "?".to_owned());
+ let deserializers = add(deserializers, "?".to_owned());
+ let deserializers = add(deserializers, "?".to_owned());
+ let deserializers = add(deserializers, "?".to_owned());
+ let deserializers = add(deserializers, "?".to_owned()); // 0.7s
+ let deserializers = add(deserializers, "?".to_owned()); // 1.3s
+ let deserializers = add(deserializers, "?".to_owned()); // 2.4s
+ let deserializers = add(deserializers, "?".to_owned()); // 6.7s
+ let deserializers = add(deserializers, "?".to_owned()); // 26.0s
+ let deserializers = add(deserializers, "?".to_owned()); // 114.0s
+ let deserializers = add(deserializers, "?".to_owned()); // 228.0s
+ let deserializers = add(deserializers, "?".to_owned()); // 400.0s
+ let deserializers = add(deserializers, "?".to_owned()); // 800.0s
+ let deserializers = add(deserializers, "?".to_owned()); // 1600.0s
+ let deserializers = add(deserializers, "?".to_owned()); // 3200.0s
+}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-Zdump-mir=NEVER_MATCHED
-
-// Regression test for #41697. Using dump-mir was triggering
-// artificial cycles: during type-checking, we had to get the MIR for
-// the constant expressions in `[u8; 2]`, which in turn would trigger
-// an attempt to get the item-path, which in turn would request the
-// types of the impl, which would trigger a cycle. We supressed this
-// cycle now by forcing mir-dump to avoid asking for types of an impl.
-
-#![feature(rustc_attrs)]
-
-use std::sync::Arc;
-
-trait Foo {
- fn get(&self) -> [u8; 2];
-}
-
-impl Foo for [u8; 2] {
- fn get(&self) -> [u8; 2] {
- *self
- }
-}
-
-struct Bar<T: ?Sized>(T);
-
-fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
- x
-}
-
-fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
- x
-}
-
-fn main() {
- let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
- assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
-
- let x: Arc<Foo + Send> = Arc::new([3, 4]);
- assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
-}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Tc {}
+impl Tc for bool {}
+
+fn main() {
+ let _: &[&Tc] = &[&true];
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// A compile-time map from identifiers to arbitrary (heterogeneous) expressions
+macro_rules! ident_map {
+ ( $name:ident = { $($key:ident => $e:expr,)* } ) => {
+ macro_rules! $name {
+ $(
+ ( $key ) => { $e };
+ )*
+ // Empty invocation expands to nothing. Needed when the map is empty.
+ () => {};
+ }
+ };
+}
+
+ident_map!(my_map = {
+ main => 0,
+});
+
+fn main() {
+ my_map!(main);
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41849.
+
+use std::ops::Mul;
+
+const C: usize = 1;
+const CAPACITY: usize = 1 * C;
+
+struct A<X> {
+ f: [X; CAPACITY],
+}
+
+struct B<T> {
+ f: T,
+}
+
+impl<T> Mul for B<T> {
+ type Output = Self;
+ fn mul(self, _rhs: B<T>) -> Self::Output {
+ self
+ }
+}
+
+impl<T> Mul<usize> for B<T> {
+ type Output = Self;
+ fn mul(self, _rhs: usize) -> Self::Output {
+ self
+ }
+}
+
+fn main() {
+ let a = A { f: [1] };
+ let _ = B { f: a };
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41936. The coerce-unsized trait check in
+// coherence was using subtyping, which triggered variance
+// computation, which failed because it required type info for fields
+// that had not (yet) been computed.
+
+#![feature(unsize)]
+#![feature(coerce_unsized)]
+
+use std::{marker,ops};
+
+// Change the array to a non-array, and error disappears
+// Adding a new field to the end keeps the error
+struct LogDataBuf([u8;8]);
+
+struct Aref<T: ?Sized>
+{
+ // Inner structure triggers the error, removing the inner removes the message.
+ ptr: Box<ArefInner<T>>,
+}
+impl<T: ?Sized + marker::Unsize<U>, U: ?Sized> ops::CoerceUnsized<Aref<U>> for Aref<T> {}
+
+struct ArefInner<T: ?Sized>
+{
+ // Even with this field commented out, the error is raised.
+ data: T,
+}
+
+fn main(){}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-emscripten spawning processes is not supported
+
+use std::{env, process};
+
+fn child() {
+ print!("[stdout 0]");
+ print!("[stdout {}]", 1);
+ println!("[stdout {}]", 2);
+ println!();
+ eprint!("[stderr 0]");
+ eprint!("[stderr {}]", 1);
+ eprintln!("[stderr {}]", 2);
+ eprintln!();
+}
+
+fn parent() {
+ let this = env::args().next().unwrap();
+ let output = process::Command::new(this).arg("-").output().unwrap();
+ assert!(output.status.success());
+
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ let stderr = String::from_utf8(output.stderr).unwrap();
+
+ assert_eq!(stdout, "[stdout 0][stdout 1][stdout 2]\n\n");
+ assert_eq!(stderr, "[stderr 0][stderr 1][stderr 2]\n\n");
+}
+
+fn main() {
+ if env::args().count() == 2 { child() } else { parent() }
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Make sure we don't crash with a cycle error during coherence.
+
+#![feature(specialization)]
+
+trait Trait<T> {
+ type Assoc;
+}
+
+impl<T> Trait<T> for Vec<T> {
+ default type Assoc = ();
+}
+
+impl Trait<u8> for Vec<u8> {
+ type Assoc = u8;
+}
+
+impl<T> Trait<T> for String {
+ type Assoc = ();
+}
+
+impl Trait<<Vec<u8> as Trait<u8>>::Assoc> for String {}
+
+fn main() {}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Tr : Sized {
+ fn _method_on_numbers(self) {}
+}
+
+impl Tr for i32 {}
+
+fn main() {
+ 42._method_on_numbers();
+}
pub fn rust0() {}
// @has - '//code' 'fn rust1()'
pub extern "Rust" fn rust1() {}
- // @has - '//code' 'extern fn c0()'
+ // @has - '//code' 'extern "C" fn c0()'
pub extern fn c0() {}
- // @has - '//code' 'extern fn c1()'
+ // @has - '//code' 'extern "C" fn c1()'
pub extern "C" fn c1() {}
// @has - '//code' 'extern "system" fn system0()'
pub extern "system" fn system0() {}
// @has - '//code' 'impl Bar for fn()'
impl Bar for fn() {}
-// @has - '//code' 'impl Bar for extern fn()'
+// @has - '//code' 'impl Bar for extern "C" fn()'
impl Bar for extern fn() {}
// @has - '//code' 'impl Bar for extern "system" fn()'
impl Bar for extern "system" fn() {}
extern crate rustdoc_ffi as lib;
-// @has ffi/fn.foreigner.html //pre 'pub unsafe extern fn foreigner(cold_as_ice: u32)'
+// @has ffi/fn.foreigner.html //pre 'pub unsafe extern "C" fn foreigner(cold_as_ice: u32)'
pub use lib::foreigner;
extern "C" {
- // @has ffi/fn.another.html //pre 'pub unsafe extern fn another(cold_as_ice: u32)'
+ // @has ffi/fn.another.html //pre 'pub unsafe extern "C" fn another(cold_as_ice: u32)'
pub fn another(cold_as_ice: u32);
}
extern {
// @has issue_22038/fn.foo1.html \
- // '//*[@class="rust fn"]' 'pub unsafe extern fn foo1()'
+ // '//*[@class="rust fn"]' 'pub unsafe extern "C" fn foo1()'
pub fn foo1();
}
}
// @has issue_22038/fn.bar.html \
-// '//*[@class="rust fn"]' 'pub extern fn bar()'
+// '//*[@class="rust fn"]' 'pub extern "C" fn bar()'
pub extern fn bar() {}
// @has issue_22038/fn.baz.html \
// except according to those terms.
extern "C" {
- // @has variadic/fn.foo.html //pre 'pub unsafe extern fn foo(x: i32, ...)'
+ // @has variadic/fn.foo.html //pre 'pub unsafe extern "C" fn foo(x: i32, ...)'
pub fn foo(x: i32, ...);
}
12 | if x > y { x } else { y }
| ^
|
-note: ...the reference is valid for the lifetime 'a as defined on the body at 11:43...
- --> $DIR/ex1-return-one-existing-name-if-else.rs:11:44
+note: ...the reference is valid for the lifetime 'a as defined on the function body at 11:0...
+ --> $DIR/ex1-return-one-existing-name-if-else.rs:11:1
|
-11 | fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 {
- | ____________________________________________^
+11 | / fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 {
12 | | if x > y { x } else { y }
13 | | }
| |_^
-note: ...but the borrowed content is only valid for the anonymous lifetime #1 defined on the body at 11:43
- --> $DIR/ex1-return-one-existing-name-if-else.rs:11:44
+note: ...but the borrowed content is only valid for the anonymous lifetime #1 defined on the function body at 11:0
+ --> $DIR/ex1-return-one-existing-name-if-else.rs:11:1
|
-11 | fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 {
- | ____________________________________________^
+11 | / fn foo<'a>(x: &'a i32, y: &i32) -> &'a i32 {
12 | | if x > y { x } else { y }
13 | | }
| |_^
|
= note: expected type `Ref<'a, _>`
found type `Ref<'_, _>`
-note: the anonymous lifetime #2 defined on the body at 15:51...
- --> $DIR/ex2a-push-one-existing-name.rs:15:52
+note: the anonymous lifetime #2 defined on the function body at 15:0...
+ --> $DIR/ex2a-push-one-existing-name.rs:15:1
|
-15 | fn foo<'a>(x: &mut Vec<Ref<'a, i32>>, y: Ref<i32>) {
- | ____________________________________________________^
+15 | / fn foo<'a>(x: &mut Vec<Ref<'a, i32>>, y: Ref<i32>) {
16 | | x.push(y);
17 | | }
| |_^
-note: ...does not necessarily outlive the lifetime 'a as defined on the body at 15:51
- --> $DIR/ex2a-push-one-existing-name.rs:15:52
+note: ...does not necessarily outlive the lifetime 'a as defined on the function body at 15:0
+ --> $DIR/ex2a-push-one-existing-name.rs:15:1
|
-15 | fn foo<'a>(x: &mut Vec<Ref<'a, i32>>, y: Ref<i32>) {
- | ____________________________________________________^
+15 | / fn foo<'a>(x: &mut Vec<Ref<'a, i32>>, y: Ref<i32>) {
16 | | x.push(y);
17 | | }
| |_^
|
= note: expected type `Ref<'_, _>`
found type `Ref<'_, _>`
-note: the anonymous lifetime #3 defined on the body at 15:43...
- --> $DIR/ex2b-push-no-existing-names.rs:15:44
+note: the anonymous lifetime #3 defined on the function body at 15:0...
+ --> $DIR/ex2b-push-no-existing-names.rs:15:1
|
-15 | fn foo(x: &mut Vec<Ref<i32>>, y: Ref<i32>) {
- | ____________________________________________^
+15 | / fn foo(x: &mut Vec<Ref<i32>>, y: Ref<i32>) {
16 | | x.push(y);
17 | | }
| |_^
-note: ...does not necessarily outlive the anonymous lifetime #2 defined on the body at 15:43
- --> $DIR/ex2b-push-no-existing-names.rs:15:44
+note: ...does not necessarily outlive the anonymous lifetime #2 defined on the function body at 15:0
+ --> $DIR/ex2b-push-no-existing-names.rs:15:1
|
-15 | fn foo(x: &mut Vec<Ref<i32>>, y: Ref<i32>) {
- | ____________________________________________^
+15 | / fn foo(x: &mut Vec<Ref<i32>>, y: Ref<i32>) {
16 | | x.push(y);
17 | | }
| |_^
16 | let z = Ref { data: y.data };
| ^^^
|
-note: first, the lifetime cannot outlive the lifetime 'c as defined on the body at 15:66...
- --> $DIR/ex2c-push-inference-variable.rs:15:67
+note: first, the lifetime cannot outlive the lifetime 'c as defined on the function body at 15:0...
+ --> $DIR/ex2c-push-inference-variable.rs:15:1
|
-15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
- | ___________________________________________________________________^
+15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
16 | | let z = Ref { data: y.data };
17 | | x.push(z);
18 | | }
|
16 | let z = Ref { data: y.data };
| ^^^^^^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the body at 15:66...
- --> $DIR/ex2c-push-inference-variable.rs:15:67
+note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 15:0...
+ --> $DIR/ex2c-push-inference-variable.rs:15:1
|
-15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
- | ___________________________________________________________________^
+15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
16 | | let z = Ref { data: y.data };
17 | | x.push(z);
18 | | }
17 | let b = Ref { data: y.data };
| ^^^
|
-note: first, the lifetime cannot outlive the lifetime 'c as defined on the body at 15:66...
- --> $DIR/ex2d-push-inference-variable-2.rs:15:67
+note: first, the lifetime cannot outlive the lifetime 'c as defined on the function body at 15:0...
+ --> $DIR/ex2d-push-inference-variable-2.rs:15:1
|
-15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
- | ___________________________________________________________________^
+15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
16 | | let a: &mut Vec<Ref<i32>> = x;
17 | | let b = Ref { data: y.data };
18 | | a.push(b);
|
17 | let b = Ref { data: y.data };
| ^^^^^^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the body at 15:66...
- --> $DIR/ex2d-push-inference-variable-2.rs:15:67
+note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 15:0...
+ --> $DIR/ex2d-push-inference-variable-2.rs:15:1
|
-15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
- | ___________________________________________________________________^
+15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
16 | | let a: &mut Vec<Ref<i32>> = x;
17 | | let b = Ref { data: y.data };
18 | | a.push(b);
17 | let b = Ref { data: y.data };
| ^^^
|
-note: first, the lifetime cannot outlive the lifetime 'c as defined on the body at 15:66...
- --> $DIR/ex2e-push-inference-variable-3.rs:15:67
+note: first, the lifetime cannot outlive the lifetime 'c as defined on the function body at 15:0...
+ --> $DIR/ex2e-push-inference-variable-3.rs:15:1
|
-15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
- | ___________________________________________________________________^
+15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
16 | | let a: &mut Vec<Ref<i32>> = x;
17 | | let b = Ref { data: y.data };
18 | | Vec::push(a, b);
|
17 | let b = Ref { data: y.data };
| ^^^^^^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the body at 15:66...
- --> $DIR/ex2e-push-inference-variable-3.rs:15:67
+note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 15:0...
+ --> $DIR/ex2e-push-inference-variable-3.rs:15:1
|
-15 | fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
- | ___________________________________________________________________^
+15 | / fn foo<'a, 'b, 'c>(x: &'a mut Vec<Ref<'b, i32>>, y: Ref<'c, i32>) {
16 | | let a: &mut Vec<Ref<i32>> = x;
17 | | let b = Ref { data: y.data };
18 | | Vec::push(a, b);
17 | once::<&str>("str").fuse().filter(|a: &str| true).count();
| ^^^^^
|
- = note: the method `count` exists but the following trait bounds were not satisfied: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`, `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
+ = note: the method `count` exists but the following trait bounds were not satisfied:
+ `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`
+ `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
error[E0281]: type mismatch: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53]` implements the trait `for<'r> std::ops::FnMut<(&'r str,)>`, but the trait `for<'r> std::ops::FnMut<(&'r &str,)>` is required
--> $DIR/issue-36053-2.rs:17:32
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Foo;
+
+fn main() {
+ let a: Result<(), Foo> = Ok(());
+ a.unwrap();
+ //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
+ //~| NOTE the method `unwrap` exists but the following trait bounds were not satisfied
+}
--- /dev/null
+error: no method named `unwrap` found for type `std::result::Result<(), Foo>` in the current scope
+ --> $DIR/method-help-unsatisfied-bound.rs:15:7
+ |
+15 | a.unwrap();
+ | ^^^^^^
+ |
+ = note: the method `unwrap` exists but the following trait bounds were not satisfied:
+ `Foo : std::fmt::Debug`
+
+error: aborting due to previous error
+
15 | let _ = namespaced_enums::A;
| ^ not found in `namespaced_enums`
|
- = help: possible candidate is found in another module, you can import it into scope:
- `use namespaced_enums::Foo::A;`
+help: possible candidate is found in another module, you can import it into scope
+ | use namespaced_enums::Foo::A;
error[E0425]: cannot find function `B` in module `namespaced_enums`
--> $DIR/enums-are-namespaced-xc.rs:18:31
18 | let _ = namespaced_enums::B(10);
| ^ not found in `namespaced_enums`
|
- = help: possible candidate is found in another module, you can import it into scope:
- `use namespaced_enums::Foo::B;`
+help: possible candidate is found in another module, you can import it into scope
+ | use namespaced_enums::Foo::B;
error[E0422]: cannot find struct, variant or union type `C` in module `namespaced_enums`
--> $DIR/enums-are-namespaced-xc.rs:21:31
21 | let _ = namespaced_enums::C { a: 10 };
| ^ not found in `namespaced_enums`
|
- = help: possible candidate is found in another module, you can import it into scope:
- `use namespaced_enums::Foo::C;`
+help: possible candidate is found in another module, you can import it into scope
+ | use namespaced_enums::Foo::C;
error: aborting due to 3 previous errors
19 | Result {
| ^^^^^^ not a struct, variant or union type
|
- = help: possible better candidates are found in other modules, you can import them into scope:
- `use std::fmt::Result;`
- `use std::io::Result;`
- `use std::thread::Result;`
+help: possible better candidates are found in other modules, you can import them into scope
+ | use std::fmt::Result;
+ | use std::io::Result;
+ | use std::thread::Result;
error: aborting due to previous error
16 | E { name: "foobar" }; //~ ERROR unresolved struct, variant or union type `E`
| ^ not found in this scope
|
- = help: possible candidate is found in another module, you can import it into scope:
- `use SomeEnum::E;`
+help: possible candidate is found in another module, you can import it into scope
+ | use SomeEnum::E;
error: aborting due to previous error
53 | impl Mul for Foo {
| ^^^ not found in this scope
|
- = help: possible candidates are found in other modules, you can import them into scope:
- `use mul1::Mul;`
- `use mul2::Mul;`
- `use std::ops::Mul;`
+help: possible candidates are found in other modules, you can import them into scope
+ | use mul1::Mul;
+ | use mul2::Mul;
+ | use std::ops::Mul;
error[E0412]: cannot find type `Mul` in this scope
--> $DIR/issue-21221-1.rs:72:16
72 | fn getMul() -> Mul {
| ^^^ not found in this scope
|
- = help: possible candidates are found in other modules, you can import them into scope:
- `use mul1::Mul;`
- `use mul2::Mul;`
- `use mul3::Mul;`
- `use mul4::Mul;`
- and 2 other candidates
+help: possible candidates are found in other modules, you can import them into scope
+ | use mul1::Mul;
+ | use mul2::Mul;
+ | use mul3::Mul;
+ | use mul4::Mul;
+and 2 other candidates
error[E0405]: cannot find trait `ThisTraitReallyDoesntExistInAnyModuleReally` in this scope
--> $DIR/issue-21221-1.rs:83:6
88 | impl Div for Foo {
| ^^^ not found in this scope
|
- = help: possible candidate is found in another module, you can import it into scope:
- `use std::ops::Div;`
+help: possible candidate is found in another module, you can import it into scope
+ | use std::ops::Div;
error: cannot continue compilation due to previous error
28 | impl T for Foo { }
| ^ not found in this scope
|
- = help: possible candidate is found in another module, you can import it into scope:
- `use foo::bar::T;`
+help: possible candidate is found in another module, you can import it into scope
+ | use foo::bar::T;
error: main function not found
25 | impl OuterTrait for Foo {}
| ^^^^^^^^^^ not found in this scope
|
- = help: possible candidate is found in another module, you can import it into scope:
- `use issue_21221_3::outer::OuterTrait;`
+help: possible candidate is found in another module, you can import it into scope
+ | use issue_21221_3::outer::OuterTrait;
error: cannot continue compilation due to previous error
20 | impl T for Foo {}
| ^ not found in this scope
|
- = help: possible candidate is found in another module, you can import it into scope:
- `use issue_21221_4::T;`
+help: possible candidate is found in another module, you can import it into scope
+ | use issue_21221_4::T;
error: cannot continue compilation due to previous error
20 | impl Foo for S { //~ ERROR expected trait, found type alias `Foo`
| ^^^ type aliases cannot be used for traits
|
- = help: possible better candidate is found in another module, you can import it into scope:
- `use issue_3907::Foo;`
+help: possible better candidate is found in another module, you can import it into scope
+ | use issue_3907::Foo;
error: cannot continue compilation due to previous error
| did you mean `S`?
| constructor is not visible here due to private fields
|
- = help: possible better candidate is found in another module, you can import it into scope:
- `use m::n::Z;`
+help: possible better candidate is found in another module, you can import it into scope
+ | use m::n::Z;
error[E0423]: expected value, found struct `S`
--> $DIR/privacy-struct-ctor.rs:36:5
| did you mean `S { /* fields */ }`?
| constructor is not visible here due to private fields
|
- = help: possible better candidate is found in another module, you can import it into scope:
- `use m::S;`
+help: possible better candidate is found in another module, you can import it into scope
+ | use m::S;
error[E0423]: expected value, found struct `xcrate::S`
--> $DIR/privacy-struct-ctor.rs:42:5
| did you mean `xcrate::S { /* fields */ }`?
| constructor is not visible here due to private fields
|
- = help: possible better candidate is found in another module, you can import it into scope:
- `use m::S;`
+help: possible better candidate is found in another module, you can import it into scope
+ | use m::S;
error: tuple struct `Z` is private
--> $DIR/privacy-struct-ctor.rs:25:9
15 | impl<T: Clone, Add> Add for Foo<T> {
| ^^^ not a trait
|
- = help: possible better candidate is found in another module, you can import it into scope:
- `use std::ops::Add;`
+help: possible better candidate is found in another module, you can import it into scope
+ | use std::ops::Add;
error: main function not found
-Subproject commit cf17c9f7118f544ec304ed6f50d92b3759487123
+Subproject commit 397359840ecad02d5fe69b2a0cf328e98235ffea
format!("-command={}", debugger_script.to_str().unwrap())];
let mut gdb_path = tool_path;
- gdb_path.push_str(&format!("/bin/{}-gdb", self.config.target));
+ gdb_path.push_str("/bin/gdb");
let procsrv::Result {
out,
err,
exe_file.to_str().unwrap()
.replace(r"\", r"\\")));
+ // Force GDB to print values in the Rust format.
+ if self.config.gdb_native_rust {
+ script_str.push_str("set language rust\n");
+ }
+
// Add line breakpoints
for line in &breakpoint_lines {
script_str.push_str(&format!("break '{}':{}\n",
}
fn main() {
- let docs = env::args().nth(1).unwrap();
+ let docs = env::args_os().nth(1).unwrap();
let docs = env::current_dir().unwrap().join(docs);
let mut errors = false;
walk(&mut HashMap::new(), &docs, &docs, &mut errors);
struct FileEntry {
source: String,
ids: HashSet<String>,
- names: HashSet<String>,
}
type Cache = HashMap<PathBuf, FileEntry>;
impl FileEntry {
fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {
if self.ids.is_empty() {
- with_attrs_in_source(contents, " id", |fragment, i| {
+ with_attrs_in_source(contents, " id", |fragment, i, _| {
let frag = fragment.trim_left_matches("#").to_owned();
if !self.ids.insert(frag) {
*errors = true;
});
}
}
-
- fn parse_names(&mut self, contents: &str) {
- if self.names.is_empty() {
- with_attrs_in_source(contents, " name", |fragment, _| {
- let frag = fragment.trim_left_matches("#").to_owned();
- self.names.insert(frag);
- });
- }
- }
}
fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {
file: &Path,
errors: &mut bool)
-> Option<PathBuf> {
- // ignore js files as they are not prone to errors as the rest of the
- // documentation is and they otherwise bring up false positives.
- if file.extension().and_then(|s| s.to_str()) == Some("js") {
- return None;
- }
-
- // ignore handlebars files as they use {{}} to build links, we only
- // want to test the generated files
- if file.extension().and_then(|s| s.to_str()) == Some("hbs") {
+    // Ignore non-HTML files.
+ if file.extension().and_then(|s| s.to_str()) != Some("html") {
return None;
}
return None;
}
- // mdbook uses the HTML <base> tag to handle links for subdirectories, which
- // linkchecker doesn't support
- if file.to_str().unwrap().contains("unstable-book") {
- return None;
- }
-
- let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);
+ let res = load_file(cache, root, file, SkipRedirect);
let (pretty_file, contents) = match res {
Ok(res) => res,
Err(_) => return None,
cache.get_mut(&pretty_file)
.unwrap()
.parse_ids(&pretty_file, &contents, errors);
- cache.get_mut(&pretty_file)
- .unwrap()
- .parse_names(&contents);
}
// Search for anything that's the regex 'href[ ]*=[ ]*".*?"'
- with_attrs_in_source(&contents, " href", |url, i| {
+ with_attrs_in_source(&contents, " href", |url, i, base| {
// Ignore external URLs
if url.starts_with("http:") || url.starts_with("https:") ||
url.starts_with("javascript:") || url.starts_with("ftp:") ||
// Once we've plucked out the URL, parse it using our base url and
// then try to extract a file path.
let mut path = file.to_path_buf();
- if !url.is_empty() {
+ if !base.is_empty() || !url.is_empty() {
path.pop();
- for part in Path::new(url).components() {
+ for part in Path::new(base).join(url).components() {
match part {
Component::Prefix(_) |
Component::RootDir => panic!(),
}
}
- if let Some(extension) = path.extension() {
- // don't check these files
- if extension == "png" {
- return;
- }
- }
-
// Alright, if we've found a file name then this file had better
// exist! If it doesn't then we register and print an error.
if path.exists() {
pretty_path.display());
return;
}
- let res = load_file(cache, root, path.clone(), FromRedirect(false));
+ if let Some(extension) = path.extension() {
+        // Ignore non-HTML files.
+ if extension != "html" {
+ return;
+ }
+ }
+ let res = load_file(cache, root, &path, FromRedirect(false));
let (pretty_path, contents) = match res {
Ok(res) => res,
Err(LoadError::IOError(err)) => {
- panic!(format!("error loading {}: {}", path.display(), err));
+ panic!("error loading {}: {}", path.display(), err);
}
Err(LoadError::BrokenRedirect(target, _)) => {
*errors = true;
let entry = &mut cache.get_mut(&pretty_path).unwrap();
entry.parse_ids(&pretty_path, &contents, errors);
- entry.parse_names(&contents);
- if !(entry.ids.contains(*fragment) || entry.names.contains(*fragment)) {
+ if !entry.ids.contains(*fragment) {
*errors = true;
- print!("{}:{}: broken link fragment ",
+ print!("{}:{}: broken link fragment ",
pretty_file.display(),
i + 1);
println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
fn load_file(cache: &mut Cache,
root: &Path,
- mut file: PathBuf,
+ file: &Path,
redirect: Redirect)
-> Result<(PathBuf, String), LoadError> {
let mut contents = String::new();
None
}
Entry::Vacant(entry) => {
- let mut fp = File::open(file.clone()).map_err(|err| {
+ let mut fp = File::open(file).map_err(|err| {
if let FromRedirect(true) = redirect {
- LoadError::BrokenRedirect(file.clone(), err)
+ LoadError::BrokenRedirect(file.to_path_buf(), err)
} else {
LoadError::IOError(err)
}
entry.insert(FileEntry {
source: contents.clone(),
ids: HashSet::new(),
- names: HashSet::new(),
});
}
maybe
}
};
- file.pop();
- match maybe_redirect.map(|url| file.join(url)) {
+ match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {
Some(redirect_file) => {
- let path = PathBuf::from(redirect_file);
- load_file(cache, root, path, FromRedirect(true))
+ load_file(cache, root, &redirect_file, FromRedirect(true))
}
None => Ok((pretty_file, contents)),
}
})
}
-fn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str, attr: &str, mut f: F) {
+fn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {
+ let mut base = "";
for (i, mut line) in contents.lines().enumerate() {
while let Some(j) = line.find(attr) {
let rest = &line[j + attr.len()..];
+ // The base tag should always be the first link in the document so
+ // we can get away with using one pass.
+ let is_base = line[..j].ends_with("<base");
line = rest;
let pos_equals = match rest.find("=") {
Some(i) => i,
Some(i) => &rest[..i],
None => continue,
};
- f(url, i)
+ if is_base {
+ base = url;
+ continue;
+ }
+ f(url, i, base)
}
}
}
-Subproject commit 67babd2d63710444a3071dfd9184648fd85a6a3d
+Subproject commit 38ca9b702b73c03959e447f5dae56eff7497c986
--- /dev/null
+Subproject commit daa2a05ebe7b8d07a309e8891ebc548652362954
"Unlicense/MIT",
];
-/// These MPL licensed projects are acceptable, but only these.
+// These are exceptions to Rust's permissive licensing policy, and
+// should be considered bugs. Exceptions are only allowed in Rust
+// tooling. It is _crucial_ that no exception crates be dependencies
+// of the Rust runtime (std / test).
static EXCEPTIONS: &'static [&'static str] = &[
- "mdbook",
- "openssl",
- "pest",
- "thread-id",
+ "mdbook", // MPL2, mdbook
+ "openssl", // BSD+advertising clause, cargo, mdbook
+ "pest", // MPL2, mdbook via handlebars
+ "thread-id", // Apache-2.0, mdbook
+ "strings", // this is actually MIT/Apache-2.0 but it's not in the manifest yet
];
pub fn check(path: &Path, bad: &mut bool) {
"src/libbacktrace",
"src/compiler-rt",
"src/rustllvm",
- "src/rust-installer",
"src/liblibc",
"src/vendor",
"src/rt/hoedown",
"src/tools/cargo",
"src/tools/rls",
+ "src/tools/rust-installer",
];
skip.iter().any(|p| path.ends_with(p))
}
contents.truncate(0);
t!(t!(File::open(file), file).read_to_string(&mut contents));
+
+ if contents.is_empty() {
+ tidy_error!(bad, "{}: empty file", file.display());
+ }
+
let skip_cr = contents.contains("ignore-tidy-cr");
let skip_tab = contents.contains("ignore-tidy-tab");
let skip_length = contents.contains("ignore-tidy-linelength");